Commit: "Fix cut and paste error in last change"
Repository: official-gcc.git — file gcc/calls.c
Blob: 3561f5987f40d62ade43527b51b62dee9d01353c
1 /* Convert function calls to rtl insns, for GNU C compiler.
2 Copyright (C) 1989, 1992, 1993, 1994, 1995, 1996, 1997, 1998
3 1999, 2000 Free Software Foundation, Inc.
5 This file is part of GNU CC.
7 GNU CC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 2, or (at your option)
10 any later version.
12 GNU CC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GNU CC; see the file COPYING. If not, write to
19 the Free Software Foundation, 59 Temple Place - Suite 330,
20 Boston, MA 02111-1307, USA. */
22 #include "config.h"
23 #include "system.h"
24 #include "rtl.h"
25 #include "tree.h"
26 #include "flags.h"
27 #include "expr.h"
28 #include "function.h"
29 #include "regs.h"
30 #include "insn-flags.h"
31 #include "toplev.h"
32 #include "output.h"
33 #include "tm_p.h"
34 #include "timevar.h"
36 #ifndef ACCUMULATE_OUTGOING_ARGS
37 #define ACCUMULATE_OUTGOING_ARGS 0
38 #endif
40 /* Supply a default definition for PUSH_ARGS. */
41 #ifndef PUSH_ARGS
42 #ifdef PUSH_ROUNDING
43 #define PUSH_ARGS !ACCUMULATE_OUTGOING_ARGS
44 #else
45 #define PUSH_ARGS 0
46 #endif
47 #endif
49 #if !defined FUNCTION_OK_FOR_SIBCALL
50 #define FUNCTION_OK_FOR_SIBCALL(DECL) 1
51 #endif
53 #if !defined PREFERRED_STACK_BOUNDARY && defined STACK_BOUNDARY
54 #define PREFERRED_STACK_BOUNDARY STACK_BOUNDARY
55 #endif
57 /* Decide whether a function's arguments should be processed
58 from first to last or from last to first.
60 They should if the stack and args grow in opposite directions, but
61 only if we have push insns. */
63 #ifdef PUSH_ROUNDING
65 #if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
66 #define PUSH_ARGS_REVERSED PUSH_ARGS
67 #endif
69 #endif
71 #ifndef PUSH_ARGS_REVERSED
72 #define PUSH_ARGS_REVERSED 0
73 #endif
75 /* Like PREFERRED_STACK_BOUNDARY but in units of bytes, not bits. */
76 #define STACK_BYTES (PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT)
78 /* Data structure and subroutines used within expand_call. */
80 struct arg_data
82 /* Tree node for this argument. */
83 tree tree_value;
84 /* Mode for value; TYPE_MODE unless promoted. */
85 enum machine_mode mode;
86 /* Current RTL value for argument, or 0 if it isn't precomputed. */
87 rtx value;
88 /* Initially-compute RTL value for argument; only for const functions. */
89 rtx initial_value;
90 /* Register to pass this argument in, 0 if passed on stack, or an
91 PARALLEL if the arg is to be copied into multiple non-contiguous
92 registers. */
93 rtx reg;
94 /* Register to pass this argument in when generating tail call sequence.
95 This is not the same register as for normal calls on machines with
96 register windows. */
97 rtx tail_call_reg;
98 /* If REG was promoted from the actual mode of the argument expression,
99 indicates whether the promotion is sign- or zero-extended. */
100 int unsignedp;
101 /* Number of registers to use. 0 means put the whole arg in registers.
102 Also 0 if not passed in registers. */
103 int partial;
104 /* Non-zero if argument must be passed on stack.
105 Note that some arguments may be passed on the stack
106 even though pass_on_stack is zero, just because FUNCTION_ARG says so.
107 pass_on_stack identifies arguments that *cannot* go in registers. */
108 int pass_on_stack;
109 /* Offset of this argument from beginning of stack-args. */
110 struct args_size offset;
111 /* Similar, but offset to the start of the stack slot. Different from
112 OFFSET if this arg pads downward. */
113 struct args_size slot_offset;
114 /* Size of this argument on the stack, rounded up for any padding it gets,
115 parts of the argument passed in registers do not count.
116 If REG_PARM_STACK_SPACE is defined, then register parms
117 are counted here as well. */
118 struct args_size size;
119 /* Location on the stack at which parameter should be stored. The store
120 has already been done if STACK == VALUE. */
121 rtx stack;
122 /* Location on the stack of the start of this argument slot. This can
123 differ from STACK if this arg pads downward. This location is known
124 to be aligned to FUNCTION_ARG_BOUNDARY. */
125 rtx stack_slot;
126 /* Place that this stack area has been saved, if needed. */
127 rtx save_area;
128 /* If an argument's alignment does not permit direct copying into registers,
129 copy in smaller-sized pieces into pseudos. These are stored in a
130 block pointed to by this field. The next field says how many
131 word-sized pseudos we made. */
132 rtx *aligned_regs;
133 int n_aligned_regs;
134 /* The amount that the stack pointer needs to be adjusted to
135 force alignment for the next argument. */
136 struct args_size alignment_pad;
139 /* A vector of one char per byte of stack space. A byte if non-zero if
140 the corresponding stack location has been used.
141 This vector is used to prevent a function call within an argument from
142 clobbering any stack already set up. */
143 static char *stack_usage_map;
145 /* Size of STACK_USAGE_MAP. */
146 static int highest_outgoing_arg_in_use;
148 /* stack_arg_under_construction is nonzero when an argument may be
149 initialized with a constructor call (including a C function that
150 returns a BLKmode struct) and expand_call must take special action
151 to make sure the object being constructed does not overlap the
152 argument list for the constructor call. */
153 int stack_arg_under_construction;
155 static int calls_function PARAMS ((tree, int));
156 static int calls_function_1 PARAMS ((tree, int));
158 /* Nonzero if this is a call to a `const' function. */
159 #define ECF_CONST 1
160 /* Nonzero if this is a call to a `volatile' function. */
161 #define ECF_NORETURN 2
162 /* Nonzero if this is a call to malloc or a related function. */
163 #define ECF_MALLOC 4
164 /* Nonzero if it is plausible that this is a call to alloca. */
165 #define ECF_MAY_BE_ALLOCA 8
166 /* Nonzero if this is a call to a function that won't throw an exception. */
167 #define ECF_NOTHROW 16
168 /* Nonzero if this is a call to setjmp or a related function. */
169 #define ECF_RETURNS_TWICE 32
170 /* Nonzero if this is a call to `longjmp'. */
171 #define ECF_LONGJMP 64
172 /* Nonzero if this is a syscall that makes a new process in the image of
173 the current one. */
174 #define ECF_FORK_OR_EXEC 128
175 #define ECF_SIBCALL 256
176 /* Nonzero if this is a call to "pure" function (like const function,
177 but may read memory. */
178 #define ECF_PURE 512
180 static void emit_call_1 PARAMS ((rtx, tree, tree, HOST_WIDE_INT,
181 HOST_WIDE_INT, HOST_WIDE_INT, rtx,
182 rtx, int, rtx, int));
183 static void precompute_register_parameters PARAMS ((int,
184 struct arg_data *,
185 int *));
186 static void store_one_arg PARAMS ((struct arg_data *, rtx, int, int,
187 int));
188 static void store_unaligned_arguments_into_pseudos PARAMS ((struct arg_data *,
189 int));
190 static int finalize_must_preallocate PARAMS ((int, int,
191 struct arg_data *,
192 struct args_size *));
193 static void precompute_arguments PARAMS ((int, int,
194 struct arg_data *));
195 static int compute_argument_block_size PARAMS ((int,
196 struct args_size *,
197 int));
198 static void initialize_argument_information PARAMS ((int,
199 struct arg_data *,
200 struct args_size *,
201 int, tree, tree,
202 CUMULATIVE_ARGS *,
203 int, rtx *, int *,
204 int *, int *));
205 static void compute_argument_addresses PARAMS ((struct arg_data *,
206 rtx, int));
207 static rtx rtx_for_function_call PARAMS ((tree, tree));
208 static void load_register_parameters PARAMS ((struct arg_data *,
209 int, rtx *, int));
210 static int libfunc_nothrow PARAMS ((rtx));
211 static rtx emit_library_call_value_1 PARAMS ((int, rtx, rtx, int,
212 enum machine_mode,
213 int, va_list));
214 static int special_function_p PARAMS ((tree, int));
215 static int flags_from_decl_or_type PARAMS ((tree));
216 static rtx try_to_integrate PARAMS ((tree, tree, rtx,
217 int, tree, rtx));
218 static int combine_pending_stack_adjustment_and_call
219 PARAMS ((int, struct args_size *, int));
221 #ifdef REG_PARM_STACK_SPACE
222 static rtx save_fixed_argument_area PARAMS ((int, rtx, int *, int *));
223 static void restore_fixed_argument_area PARAMS ((rtx, rtx, int, int));
224 #endif
226 /* If WHICH is 1, return 1 if EXP contains a call to the built-in function
227 `alloca'.
229 If WHICH is 0, return 1 if EXP contains a call to any function.
230 Actually, we only need return 1 if evaluating EXP would require pushing
231 arguments on the stack, but that is too difficult to compute, so we just
232 assume any function call might require the stack. */
234 static tree calls_function_save_exprs;
236 static int
237 calls_function (exp, which)
238 tree exp;
239 int which;
241 int val;
243 calls_function_save_exprs = 0;
244 val = calls_function_1 (exp, which);
245 calls_function_save_exprs = 0;
246 return val;
249 /* Recursive function to do the work of above function. */
251 static int
252 calls_function_1 (exp, which)
253 tree exp;
254 int which;
256 register int i;
257 enum tree_code code = TREE_CODE (exp);
258 int class = TREE_CODE_CLASS (code);
259 int length = first_rtl_op (code);
261 /* If this code is language-specific, we don't know what it will do. */
262 if ((int) code >= NUM_TREE_CODES)
263 return 1;
265 switch (code)
267 case CALL_EXPR:
268 if (which == 0)
269 return 1;
270 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
271 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
272 == FUNCTION_DECL)
273 && (special_function_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
275 & ECF_MAY_BE_ALLOCA))
276 return 1;
278 break;
280 case SAVE_EXPR:
281 if (SAVE_EXPR_RTL (exp) != 0)
282 return 0;
283 if (value_member (exp, calls_function_save_exprs))
284 return 0;
285 calls_function_save_exprs = tree_cons (NULL_TREE, exp,
286 calls_function_save_exprs);
287 return (TREE_OPERAND (exp, 0) != 0
288 && calls_function_1 (TREE_OPERAND (exp, 0), which));
290 case BLOCK:
292 register tree local;
293 register tree subblock;
295 for (local = BLOCK_VARS (exp); local; local = TREE_CHAIN (local))
296 if (DECL_INITIAL (local) != 0
297 && calls_function_1 (DECL_INITIAL (local), which))
298 return 1;
300 for (subblock = BLOCK_SUBBLOCKS (exp);
301 subblock;
302 subblock = TREE_CHAIN (subblock))
303 if (calls_function_1 (subblock, which))
304 return 1;
306 return 0;
308 case TREE_LIST:
309 for (; exp != 0; exp = TREE_CHAIN (exp))
310 if (calls_function_1 (TREE_VALUE (exp), which))
311 return 1;
312 return 0;
314 default:
315 break;
318 /* Only expressions, references, and blocks can contain calls. */
319 if (! IS_EXPR_CODE_CLASS (class) && class != 'r' && class != 'b')
320 return 0;
322 for (i = 0; i < length; i++)
323 if (TREE_OPERAND (exp, i) != 0
324 && calls_function_1 (TREE_OPERAND (exp, i), which))
325 return 1;
327 return 0;
330 /* Force FUNEXP into a form suitable for the address of a CALL,
331 and return that as an rtx. Also load the static chain register
332 if FNDECL is a nested function.
334 CALL_FUSAGE points to a variable holding the prospective
335 CALL_INSN_FUNCTION_USAGE information. */
338 prepare_call_address (funexp, fndecl, call_fusage, reg_parm_seen)
339 rtx funexp;
340 tree fndecl;
341 rtx *call_fusage;
342 int reg_parm_seen;
344 rtx static_chain_value = 0;
346 funexp = protect_from_queue (funexp, 0);
348 if (fndecl != 0)
349 /* Get possible static chain value for nested function in C. */
350 static_chain_value = lookup_static_chain (fndecl);
352 /* Make a valid memory address and copy constants thru pseudo-regs,
353 but not for a constant address if -fno-function-cse. */
354 if (GET_CODE (funexp) != SYMBOL_REF)
355 /* If we are using registers for parameters, force the
356 function address into a register now. */
357 funexp = ((SMALL_REGISTER_CLASSES && reg_parm_seen)
358 ? force_not_mem (memory_address (FUNCTION_MODE, funexp))
359 : memory_address (FUNCTION_MODE, funexp));
360 else
362 #ifndef NO_FUNCTION_CSE
363 if (optimize && ! flag_no_function_cse)
364 #ifdef NO_RECURSIVE_FUNCTION_CSE
365 if (fndecl != current_function_decl)
366 #endif
367 funexp = force_reg (Pmode, funexp);
368 #endif
371 if (static_chain_value != 0)
373 emit_move_insn (static_chain_rtx, static_chain_value);
375 if (GET_CODE (static_chain_rtx) == REG)
376 use_reg (call_fusage, static_chain_rtx);
379 return funexp;
382 /* Generate instructions to call function FUNEXP,
383 and optionally pop the results.
384 The CALL_INSN is the first insn generated.
386 FNDECL is the declaration node of the function. This is given to the
387 macro RETURN_POPS_ARGS to determine whether this function pops its own args.
389 FUNTYPE is the data type of the function. This is given to the macro
390 RETURN_POPS_ARGS to determine whether this function pops its own args.
391 We used to allow an identifier for library functions, but that doesn't
392 work when the return type is an aggregate type and the calling convention
393 says that the pointer to this aggregate is to be popped by the callee.
395 STACK_SIZE is the number of bytes of arguments on the stack,
396 ROUNDED_STACK_SIZE is that number rounded up to
397 PREFERRED_STACK_BOUNDARY; zero if the size is variable. This is
398 both to put into the call insn and to generate explicit popping
399 code if necessary.
401 STRUCT_VALUE_SIZE is the number of bytes wanted in a structure value.
402 It is zero if this call doesn't want a structure value.
404 NEXT_ARG_REG is the rtx that results from executing
405 FUNCTION_ARG (args_so_far, VOIDmode, void_type_node, 1)
406 just after all the args have had their registers assigned.
407 This could be whatever you like, but normally it is the first
408 arg-register beyond those used for args in this call,
409 or 0 if all the arg-registers are used in this call.
410 It is passed on to `gen_call' so you can put this info in the call insn.
412 VALREG is a hard register in which a value is returned,
413 or 0 if the call does not return a value.
415 OLD_INHIBIT_DEFER_POP is the value that `inhibit_defer_pop' had before
416 the args to this call were processed.
417 We restore `inhibit_defer_pop' to that value.
419 CALL_FUSAGE is either empty or an EXPR_LIST of USE expressions that
420 denote registers used by the called function. */
422 static void
423 emit_call_1 (funexp, fndecl, funtype, stack_size, rounded_stack_size,
424 struct_value_size, next_arg_reg, valreg, old_inhibit_defer_pop,
425 call_fusage, ecf_flags)
426 rtx funexp;
427 tree fndecl ATTRIBUTE_UNUSED;
428 tree funtype ATTRIBUTE_UNUSED;
429 HOST_WIDE_INT stack_size ATTRIBUTE_UNUSED;
430 HOST_WIDE_INT rounded_stack_size;
431 HOST_WIDE_INT struct_value_size ATTRIBUTE_UNUSED;
432 rtx next_arg_reg;
433 rtx valreg;
434 int old_inhibit_defer_pop;
435 rtx call_fusage;
436 int ecf_flags;
438 rtx rounded_stack_size_rtx = GEN_INT (rounded_stack_size);
439 rtx call_insn;
440 int already_popped = 0;
441 HOST_WIDE_INT n_popped = RETURN_POPS_ARGS (fndecl, funtype, stack_size);
442 #if defined (HAVE_call) && defined (HAVE_call_value)
443 rtx struct_value_size_rtx;
444 struct_value_size_rtx = GEN_INT (struct_value_size);
445 #endif
447 /* Ensure address is valid. SYMBOL_REF is already valid, so no need,
448 and we don't want to load it into a register as an optimization,
449 because prepare_call_address already did it if it should be done. */
450 if (GET_CODE (funexp) != SYMBOL_REF)
451 funexp = memory_address (FUNCTION_MODE, funexp);
453 #if defined (HAVE_sibcall_pop) && defined (HAVE_sibcall_value_pop)
454 if ((ecf_flags & ECF_SIBCALL)
455 && HAVE_sibcall_pop && HAVE_sibcall_value_pop
456 && (RETURN_POPS_ARGS (fndecl, funtype, stack_size) > 0
457 || stack_size == 0))
459 rtx n_pop = GEN_INT (RETURN_POPS_ARGS (fndecl, funtype, stack_size));
460 rtx pat;
462 /* If this subroutine pops its own args, record that in the call insn
463 if possible, for the sake of frame pointer elimination. */
465 if (valreg)
466 pat = GEN_SIBCALL_VALUE_POP (valreg,
467 gen_rtx_MEM (FUNCTION_MODE, funexp),
468 rounded_stack_size_rtx, next_arg_reg,
469 n_pop);
470 else
471 pat = GEN_SIBCALL_POP (gen_rtx_MEM (FUNCTION_MODE, funexp),
472 rounded_stack_size_rtx, next_arg_reg, n_pop);
474 emit_call_insn (pat);
475 already_popped = 1;
477 else
478 #endif
480 #if defined (HAVE_call_pop) && defined (HAVE_call_value_pop)
481 /* If the target has "call" or "call_value" insns, then prefer them
482 if no arguments are actually popped. If the target does not have
483 "call" or "call_value" insns, then we must use the popping versions
484 even if the call has no arguments to pop. */
485 #if defined (HAVE_call) && defined (HAVE_call_value)
486 if (HAVE_call && HAVE_call_value && HAVE_call_pop && HAVE_call_value_pop
487 && n_popped > 0)
488 #else
489 if (HAVE_call_pop && HAVE_call_value_pop)
490 #endif
492 rtx n_pop = GEN_INT (n_popped);
493 rtx pat;
495 /* If this subroutine pops its own args, record that in the call insn
496 if possible, for the sake of frame pointer elimination. */
498 if (valreg)
499 pat = GEN_CALL_VALUE_POP (valreg,
500 gen_rtx_MEM (FUNCTION_MODE, funexp),
501 rounded_stack_size_rtx, next_arg_reg, n_pop);
502 else
503 pat = GEN_CALL_POP (gen_rtx_MEM (FUNCTION_MODE, funexp),
504 rounded_stack_size_rtx, next_arg_reg, n_pop);
506 emit_call_insn (pat);
507 already_popped = 1;
509 else
510 #endif
512 #if defined (HAVE_sibcall) && defined (HAVE_sibcall_value)
513 if ((ecf_flags & ECF_SIBCALL)
514 && HAVE_sibcall && HAVE_sibcall_value)
516 if (valreg)
517 emit_call_insn (GEN_SIBCALL_VALUE (valreg,
518 gen_rtx_MEM (FUNCTION_MODE, funexp),
519 rounded_stack_size_rtx,
520 next_arg_reg, NULL_RTX));
521 else
522 emit_call_insn (GEN_SIBCALL (gen_rtx_MEM (FUNCTION_MODE, funexp),
523 rounded_stack_size_rtx, next_arg_reg,
524 struct_value_size_rtx));
526 else
527 #endif
529 #if defined (HAVE_call) && defined (HAVE_call_value)
530 if (HAVE_call && HAVE_call_value)
532 if (valreg)
533 emit_call_insn (GEN_CALL_VALUE (valreg,
534 gen_rtx_MEM (FUNCTION_MODE, funexp),
535 rounded_stack_size_rtx, next_arg_reg,
536 NULL_RTX));
537 else
538 emit_call_insn (GEN_CALL (gen_rtx_MEM (FUNCTION_MODE, funexp),
539 rounded_stack_size_rtx, next_arg_reg,
540 struct_value_size_rtx));
542 else
543 #endif
544 abort ();
546 /* Find the CALL insn we just emitted. */
547 for (call_insn = get_last_insn ();
548 call_insn && GET_CODE (call_insn) != CALL_INSN;
549 call_insn = PREV_INSN (call_insn))
552 if (! call_insn)
553 abort ();
555 /* Mark memory as used for "pure" function call. */
556 if (ecf_flags & ECF_PURE)
558 call_fusage = gen_rtx_EXPR_LIST (VOIDmode,
559 gen_rtx_USE (VOIDmode,
560 gen_rtx_MEM (BLKmode,
561 gen_rtx_SCRATCH (VOIDmode))), call_fusage);
564 /* Put the register usage information on the CALL. If there is already
565 some usage information, put ours at the end. */
566 if (CALL_INSN_FUNCTION_USAGE (call_insn))
568 rtx link;
570 for (link = CALL_INSN_FUNCTION_USAGE (call_insn); XEXP (link, 1) != 0;
571 link = XEXP (link, 1))
574 XEXP (link, 1) = call_fusage;
576 else
577 CALL_INSN_FUNCTION_USAGE (call_insn) = call_fusage;
579 /* If this is a const call, then set the insn's unchanging bit. */
580 if (ecf_flags & (ECF_CONST | ECF_PURE))
581 CONST_CALL_P (call_insn) = 1;
583 /* If this call can't throw, attach a REG_EH_REGION reg note to that
584 effect. */
585 if (ecf_flags & ECF_NOTHROW)
586 REG_NOTES (call_insn) = gen_rtx_EXPR_LIST (REG_EH_REGION, const0_rtx,
587 REG_NOTES (call_insn));
589 SIBLING_CALL_P (call_insn) = ((ecf_flags & ECF_SIBCALL) != 0);
591 /* Restore this now, so that we do defer pops for this call's args
592 if the context of the call as a whole permits. */
593 inhibit_defer_pop = old_inhibit_defer_pop;
595 if (n_popped > 0)
597 if (!already_popped)
598 CALL_INSN_FUNCTION_USAGE (call_insn)
599 = gen_rtx_EXPR_LIST (VOIDmode,
600 gen_rtx_CLOBBER (VOIDmode, stack_pointer_rtx),
601 CALL_INSN_FUNCTION_USAGE (call_insn));
602 rounded_stack_size -= n_popped;
603 rounded_stack_size_rtx = GEN_INT (rounded_stack_size);
604 stack_pointer_delta -= n_popped;
607 if (!ACCUMULATE_OUTGOING_ARGS)
609 /* If returning from the subroutine does not automatically pop the args,
610 we need an instruction to pop them sooner or later.
611 Perhaps do it now; perhaps just record how much space to pop later.
613 If returning from the subroutine does pop the args, indicate that the
614 stack pointer will be changed. */
616 if (rounded_stack_size != 0)
618 if (flag_defer_pop && inhibit_defer_pop == 0
619 && !(ecf_flags & (ECF_CONST | ECF_PURE)))
620 pending_stack_adjust += rounded_stack_size;
621 else
622 adjust_stack (rounded_stack_size_rtx);
625 /* When we accumulate outgoing args, we must avoid any stack manipulations.
626 Restore the stack pointer to its original value now. Usually
627 ACCUMULATE_OUTGOING_ARGS targets don't get here, but there are exceptions.
628 On i386 ACCUMULATE_OUTGOING_ARGS can be enabled on demand, and
629 popping variants of functions exist as well.
631 ??? We may optimize similar to defer_pop above, but it is
632 probably not worthwhile.
634 ??? It will be worthwhile to enable combine_stack_adjustments even for
635 such machines. */
636 else if (n_popped)
637 anti_adjust_stack (GEN_INT (n_popped));
640 /* Determine if the function identified by NAME and FNDECL is one with
641 special properties we wish to know about.
643 For example, if the function might return more than one time (setjmp), then
644 set RETURNS_TWICE to a nonzero value.
646 Similarly set LONGJMP for if the function is in the longjmp family.
648 Set MALLOC for any of the standard memory allocation functions which
649 allocate from the heap.
651 Set MAY_BE_ALLOCA for any memory allocation function that might allocate
652 space from the stack such as alloca. */
654 static int
655 special_function_p (fndecl, flags)
656 tree fndecl;
657 int flags;
659 if (! (flags & ECF_MALLOC)
660 && fndecl && DECL_NAME (fndecl)
661 && IDENTIFIER_LENGTH (DECL_NAME (fndecl)) <= 17
662 /* Exclude functions not at the file scope, or not `extern',
663 since they are not the magic functions we would otherwise
664 think they are. */
665 && DECL_CONTEXT (fndecl) == NULL_TREE && TREE_PUBLIC (fndecl))
667 char *name = IDENTIFIER_POINTER (DECL_NAME (fndecl));
668 char *tname = name;
670 /* We assume that alloca will always be called by name. It
671 makes no sense to pass it as a pointer-to-function to
672 anything that does not understand its behavior. */
673 if (((IDENTIFIER_LENGTH (DECL_NAME (fndecl)) == 6
674 && name[0] == 'a'
675 && ! strcmp (name, "alloca"))
676 || (IDENTIFIER_LENGTH (DECL_NAME (fndecl)) == 16
677 && name[0] == '_'
678 && ! strcmp (name, "__builtin_alloca"))))
679 flags |= ECF_MAY_BE_ALLOCA;
681 /* Disregard prefix _, __ or __x. */
682 if (name[0] == '_')
684 if (name[1] == '_' && name[2] == 'x')
685 tname += 3;
686 else if (name[1] == '_')
687 tname += 2;
688 else
689 tname += 1;
692 if (tname[0] == 's')
694 if ((tname[1] == 'e'
695 && (! strcmp (tname, "setjmp")
696 || ! strcmp (tname, "setjmp_syscall")))
697 || (tname[1] == 'i'
698 && ! strcmp (tname, "sigsetjmp"))
699 || (tname[1] == 'a'
700 && ! strcmp (tname, "savectx")))
701 flags |= ECF_RETURNS_TWICE;
703 if (tname[1] == 'i'
704 && ! strcmp (tname, "siglongjmp"))
705 flags |= ECF_LONGJMP;
707 else if ((tname[0] == 'q' && tname[1] == 's'
708 && ! strcmp (tname, "qsetjmp"))
709 || (tname[0] == 'v' && tname[1] == 'f'
710 && ! strcmp (tname, "vfork")))
711 flags |= ECF_RETURNS_TWICE;
713 else if (tname[0] == 'l' && tname[1] == 'o'
714 && ! strcmp (tname, "longjmp"))
715 flags |= ECF_LONGJMP;
717 else if ((tname[0] == 'f' && tname[1] == 'o'
718 && ! strcmp (tname, "fork"))
719 /* Linux specific: __clone. check NAME to insist on the
720 leading underscores, to avoid polluting the ISO / POSIX
721 namespace. */
722 || (name[0] == '_' && name[1] == '_'
723 && ! strcmp (tname, "clone"))
724 || (tname[0] == 'e' && tname[1] == 'x' && tname[2] == 'e'
725 && tname[3] == 'c' && (tname[4] == 'l' || tname[4] == 'v')
726 && (tname[5] == '\0'
727 || ((tname[5] == 'p' || tname[5] == 'e')
728 && tname[6] == '\0'))))
729 flags |= ECF_FORK_OR_EXEC;
731 /* Do not add any more malloc-like functions to this list,
732 instead mark them as malloc functions using the malloc attribute.
733 Note, realloc is not suitable for attribute malloc since
734 it may return the same address across multiple calls.
735 C++ operator new is not suitable because it is not required
736 to return a unique pointer; indeed, the standard placement new
737 just returns its argument. */
738 else if (TYPE_MODE (TREE_TYPE (TREE_TYPE (fndecl))) == Pmode
739 && (! strcmp (tname, "malloc")
740 || ! strcmp (tname, "calloc")
741 || ! strcmp (tname, "strdup")))
742 flags |= ECF_MALLOC;
744 return flags;
747 /* Return nonzero when tree represent call to longjmp. */
749 setjmp_call_p (fndecl)
750 tree fndecl;
752 return special_function_p (fndecl, 0) & ECF_RETURNS_TWICE;
755 /* Detect flags (function attributes) from the function type node. */
756 static int
757 flags_from_decl_or_type (exp)
758 tree exp;
760 int flags = 0;
761 /* ??? We can't set IS_MALLOC for function types? */
762 if (DECL_P (exp))
764 /* The function exp may have the `malloc' attribute. */
765 if (DECL_P (exp) && DECL_IS_MALLOC (exp))
766 flags |= ECF_MALLOC;
768 /* The function exp may have the `pure' attribute. */
769 if (DECL_P (exp) && DECL_IS_PURE (exp))
770 flags |= ECF_PURE;
772 if (TREE_NOTHROW (exp))
773 flags |= ECF_NOTHROW;
776 if (TREE_READONLY (exp) && !TREE_THIS_VOLATILE (exp))
777 flags |= ECF_CONST;
779 if (TREE_THIS_VOLATILE (exp))
780 flags |= ECF_NORETURN;
782 return flags;
786 /* Precompute all register parameters as described by ARGS, storing values
787 into fields within the ARGS array.
789 NUM_ACTUALS indicates the total number elements in the ARGS array.
791 Set REG_PARM_SEEN if we encounter a register parameter. */
793 static void
794 precompute_register_parameters (num_actuals, args, reg_parm_seen)
795 int num_actuals;
796 struct arg_data *args;
797 int *reg_parm_seen;
799 int i;
801 *reg_parm_seen = 0;
803 for (i = 0; i < num_actuals; i++)
804 if (args[i].reg != 0 && ! args[i].pass_on_stack)
806 *reg_parm_seen = 1;
808 if (args[i].value == 0)
810 push_temp_slots ();
811 args[i].value = expand_expr (args[i].tree_value, NULL_RTX,
812 VOIDmode, 0);
813 preserve_temp_slots (args[i].value);
814 pop_temp_slots ();
816 /* ANSI doesn't require a sequence point here,
817 but PCC has one, so this will avoid some problems. */
818 emit_queue ();
821 /* If we are to promote the function arg to a wider mode,
822 do it now. */
824 if (args[i].mode != TYPE_MODE (TREE_TYPE (args[i].tree_value)))
825 args[i].value
826 = convert_modes (args[i].mode,
827 TYPE_MODE (TREE_TYPE (args[i].tree_value)),
828 args[i].value, args[i].unsignedp);
830 /* If the value is expensive, and we are inside an appropriately
831 short loop, put the value into a pseudo and then put the pseudo
832 into the hard reg.
834 For small register classes, also do this if this call uses
835 register parameters. This is to avoid reload conflicts while
836 loading the parameters registers. */
838 if ((! (GET_CODE (args[i].value) == REG
839 || (GET_CODE (args[i].value) == SUBREG
840 && GET_CODE (SUBREG_REG (args[i].value)) == REG)))
841 && args[i].mode != BLKmode
842 && rtx_cost (args[i].value, SET) > 2
843 && ((SMALL_REGISTER_CLASSES && *reg_parm_seen)
844 || preserve_subexpressions_p ()))
845 args[i].value = copy_to_mode_reg (args[i].mode, args[i].value);
849 #ifdef REG_PARM_STACK_SPACE
851 /* The argument list is the property of the called routine and it
852 may clobber it. If the fixed area has been used for previous
853 parameters, we must save and restore it. */
855 static rtx
856 save_fixed_argument_area (reg_parm_stack_space, argblock,
857 low_to_save, high_to_save)
858 int reg_parm_stack_space;
859 rtx argblock;
860 int *low_to_save;
861 int *high_to_save;
863 int i;
864 rtx save_area = NULL_RTX;
866 /* Compute the boundary of the that needs to be saved, if any. */
867 #ifdef ARGS_GROW_DOWNWARD
868 for (i = 0; i < reg_parm_stack_space + 1; i++)
869 #else
870 for (i = 0; i < reg_parm_stack_space; i++)
871 #endif
873 if (i >= highest_outgoing_arg_in_use
874 || stack_usage_map[i] == 0)
875 continue;
877 if (*low_to_save == -1)
878 *low_to_save = i;
880 *high_to_save = i;
883 if (*low_to_save >= 0)
885 int num_to_save = *high_to_save - *low_to_save + 1;
886 enum machine_mode save_mode
887 = mode_for_size (num_to_save * BITS_PER_UNIT, MODE_INT, 1);
888 rtx stack_area;
890 /* If we don't have the required alignment, must do this in BLKmode. */
891 if ((*low_to_save & (MIN (GET_MODE_SIZE (save_mode),
892 BIGGEST_ALIGNMENT / UNITS_PER_WORD) - 1)))
893 save_mode = BLKmode;
895 #ifdef ARGS_GROW_DOWNWARD
896 stack_area
897 = gen_rtx_MEM (save_mode,
898 memory_address (save_mode,
899 plus_constant (argblock,
900 - *high_to_save)));
901 #else
902 stack_area = gen_rtx_MEM (save_mode,
903 memory_address (save_mode,
904 plus_constant (argblock,
905 *low_to_save)));
906 #endif
907 if (save_mode == BLKmode)
909 save_area = assign_stack_temp (BLKmode, num_to_save, 0);
910 /* Cannot use emit_block_move here because it can be done by a
911 library call which in turn gets into this place again and deadly
912 infinite recursion happens. */
913 move_by_pieces (validize_mem (save_area), stack_area, num_to_save,
914 PARM_BOUNDARY);
916 else
918 save_area = gen_reg_rtx (save_mode);
919 emit_move_insn (save_area, stack_area);
922 return save_area;
925 static void
926 restore_fixed_argument_area (save_area, argblock, high_to_save, low_to_save)
927 rtx save_area;
928 rtx argblock;
929 int high_to_save;
930 int low_to_save;
932 enum machine_mode save_mode = GET_MODE (save_area);
933 #ifdef ARGS_GROW_DOWNWARD
934 rtx stack_area
935 = gen_rtx_MEM (save_mode,
936 memory_address (save_mode,
937 plus_constant (argblock,
938 - high_to_save)));
939 #else
940 rtx stack_area
941 = gen_rtx_MEM (save_mode,
942 memory_address (save_mode,
943 plus_constant (argblock,
944 low_to_save)));
945 #endif
947 if (save_mode != BLKmode)
948 emit_move_insn (stack_area, save_area);
949 else
950 /* Cannot use emit_block_move here because it can be done by a library
951 call which in turn gets into this place again and deadly infinite
952 recursion happens. */
953 move_by_pieces (stack_area, validize_mem (save_area),
954 high_to_save - low_to_save + 1, PARM_BOUNDARY);
956 #endif
958 /* If any elements in ARGS refer to parameters that are to be passed in
959 registers, but not in memory, and whose alignment does not permit a
960 direct copy into registers. Copy the values into a group of pseudos
961 which we will later copy into the appropriate hard registers.
963 Pseudos for each unaligned argument will be stored into the array
964 args[argnum].aligned_regs. The caller is responsible for deallocating
965 the aligned_regs array if it is nonzero. */
967 static void
968 store_unaligned_arguments_into_pseudos (args, num_actuals)
969 struct arg_data *args;
970 int num_actuals;
972 int i, j;
974 for (i = 0; i < num_actuals; i++)
/* Only BLKmode register arguments whose type alignment is below
   word (or BIGGEST_ALIGNMENT) granularity need the word-at-a-time
   bit-field treatment below.  */
975 if (args[i].reg != 0 && ! args[i].pass_on_stack
976 && args[i].mode == BLKmode
977 && (TYPE_ALIGN (TREE_TYPE (args[i].tree_value))
978 < (unsigned int) MIN (BIGGEST_ALIGNMENT, BITS_PER_WORD)))
980 int bytes = int_size_in_bytes (TREE_TYPE (args[i].tree_value));
981 int big_endian_correction = 0;
/* One pseudo per word going into registers; args[i].partial caps
   this when only part of the argument is passed in registers.  */
983 args[i].n_aligned_regs
984 = args[i].partial ? args[i].partial
985 : (bytes + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
987 args[i].aligned_regs = (rtx *) xmalloc (sizeof (rtx)
988 * args[i].n_aligned_regs);
990 /* Structures smaller than a word are aligned to the least
991 significant byte (to the right). On a BYTES_BIG_ENDIAN machine,
992 this means we must skip the empty high order bytes when
993 calculating the bit offset. */
994 if (BYTES_BIG_ENDIAN && bytes < UNITS_PER_WORD)
995 big_endian_correction = (BITS_PER_WORD - (bytes * BITS_PER_UNIT));
997 for (j = 0; j < args[i].n_aligned_regs; j++)
999 rtx reg = gen_reg_rtx (word_mode);
1000 rtx word = operand_subword_force (args[i].value, j, BLKmode);
1001 int bitsize = MIN (bytes * BITS_PER_UNIT, BITS_PER_WORD);
1002 int bitalign = TYPE_ALIGN (TREE_TYPE (args[i].tree_value));
1004 args[i].aligned_regs[j] = reg;
1006 /* There is no need to restrict this code to loading items
1007 in TYPE_ALIGN sized hunks. The bitfield instructions can
1008 load up entire word sized registers efficiently.
1010 ??? This may not be needed anymore.
1011 We use to emit a clobber here but that doesn't let later
1012 passes optimize the instructions we emit. By storing 0 into
1013 the register later passes know the first AND to zero out the
1014 bitfield being set in the register is unnecessary. The store
1015 of 0 will be deleted as will at least the first AND. */
1017 emit_move_insn (reg, const0_rtx);
/* BYTES counts down across iterations so the final word copies
   only the remaining bytes instead of a full word.  */
1019 bytes -= bitsize / BITS_PER_UNIT;
1020 store_bit_field (reg, bitsize, big_endian_correction, word_mode,
1021 extract_bit_field (word, bitsize, 0, 1, NULL_RTX,
1022 word_mode, word_mode, bitalign,
1023 BITS_PER_WORD),
1024 bitalign, BITS_PER_WORD);
1029 /* Fill in ARGS_SIZE and ARGS array based on the parameters found in
1030 ACTPARMS.
1032 NUM_ACTUALS is the total number of parameters.
1034 N_NAMED_ARGS is the total number of named arguments.
1036 FNDECL is the tree code for the target of this call (if known)
1038 ARGS_SO_FAR holds state needed by the target to know where to place
1039 the next argument.
1041 REG_PARM_STACK_SPACE is the number of bytes of stack space reserved
1042 for arguments which are passed in registers.
1044 OLD_STACK_LEVEL is a pointer to an rtx which holds the old stack level
1045 and may be modified by this routine.
1047 OLD_PENDING_ADJ, MUST_PREALLOCATE and FLAGS are pointers to integer
1048 flags which may be modified by this routine. */
1050 static void
1051 initialize_argument_information (num_actuals, args, args_size, n_named_args,
1052 actparms, fndecl, args_so_far,
1053 reg_parm_stack_space, old_stack_level,
1054 old_pending_adj, must_preallocate,
1055 ecf_flags)
1056 int num_actuals ATTRIBUTE_UNUSED;
1057 struct arg_data *args;
1058 struct args_size *args_size;
1059 int n_named_args ATTRIBUTE_UNUSED;
1060 tree actparms;
1061 tree fndecl;
1062 CUMULATIVE_ARGS *args_so_far;
1063 int reg_parm_stack_space;
1064 rtx *old_stack_level;
1065 int *old_pending_adj;
1066 int *must_preallocate;
1067 int *ecf_flags;
1069 /* 1 if scanning parms front to back, -1 if scanning back to front. */
1070 int inc;
1072 /* Count arg position in order args appear. */
1073 int argpos;
1075 struct args_size alignment_pad;
1076 int i;
1077 tree p;
1079 args_size->constant = 0;
1080 args_size->var = 0;
1082 /* In this loop, we consider args in the order they are written.
1083 We fill up ARGS from the front or from the back if necessary
1084 so that in any case the first arg to be pushed ends up at the front. */
1086 if (PUSH_ARGS_REVERSED)
1088 i = num_actuals - 1, inc = -1;
1089 /* In this case, must reverse order of args
1090 so that we compute and push the last arg first. */
1092 else
1094 i = 0, inc = 1;
1097 /* I counts args in order (to be) pushed; ARGPOS counts in order written. */
1098 for (p = actparms, argpos = 0; p; p = TREE_CHAIN (p), i += inc, argpos++)
1100 tree type = TREE_TYPE (TREE_VALUE (p));
1101 int unsignedp;
1102 enum machine_mode mode;
1104 args[i].tree_value = TREE_VALUE (p);
1106 /* Replace erroneous argument with constant zero. */
1107 if (type == error_mark_node || !COMPLETE_TYPE_P (type))
1108 args[i].tree_value = integer_zero_node, type = integer_type_node;
1110 /* If TYPE is a transparent union, pass things the way we would
1111 pass the first field of the union. We have already verified that
1112 the modes are the same. */
1113 if (TREE_CODE (type) == UNION_TYPE && TYPE_TRANSPARENT_UNION (type))
1114 type = TREE_TYPE (TYPE_FIELDS (type));
1116 /* Decide where to pass this arg.
1118 args[i].reg is nonzero if all or part is passed in registers.
1120 args[i].partial is nonzero if part but not all is passed in registers,
1121 and the exact value says how many words are passed in registers.
1123 args[i].pass_on_stack is nonzero if the argument must at least be
1124 computed on the stack. It may then be loaded back into registers
1125 if args[i].reg is nonzero.
1127 These decisions are driven by the FUNCTION_... macros and must agree
1128 with those made by function.c. */
1130 /* See if this argument should be passed by invisible reference. */
1131 if ((TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST
1132 && contains_placeholder_p (TYPE_SIZE (type)))
1133 || TREE_ADDRESSABLE (type)
1134 #ifdef FUNCTION_ARG_PASS_BY_REFERENCE
1135 || FUNCTION_ARG_PASS_BY_REFERENCE (*args_so_far, TYPE_MODE (type),
1136 type, argpos < n_named_args)
1137 #endif
1140 /* If we're compiling a thunk, pass through invisible
1141 references instead of making a copy. */
1142 if (current_function_is_thunk
1143 #ifdef FUNCTION_ARG_CALLEE_COPIES
1144 || (FUNCTION_ARG_CALLEE_COPIES (*args_so_far, TYPE_MODE (type),
1145 type, argpos < n_named_args)
1146 /* If it's in a register, we must make a copy of it too. */
1147 /* ??? Is this a sufficient test? Is there a better one? */
1148 && !(TREE_CODE (args[i].tree_value) == VAR_DECL
1149 && REG_P (DECL_RTL (args[i].tree_value)))
1150 && ! TREE_ADDRESSABLE (type))
1151 #endif
1154 /* C++ uses a TARGET_EXPR to indicate that we want to make a
1155 new object from the argument. If we are passing by
1156 invisible reference, the callee will do that for us, so we
1157 can strip off the TARGET_EXPR. This is not always safe,
1158 but it is safe in the only case where this is a useful
1159 optimization; namely, when the argument is a plain object.
1160 In that case, the frontend is just asking the backend to
1161 make a bitwise copy of the argument. */
1163 if (TREE_CODE (args[i].tree_value) == TARGET_EXPR
1164 && (DECL_P (TREE_OPERAND (args[i].tree_value, 1)))
1165 && ! REG_P (DECL_RTL (TREE_OPERAND (args[i].tree_value, 1))))
1166 args[i].tree_value = TREE_OPERAND (args[i].tree_value, 1);
/* Pass the address of the original object; no copy is made.  */
1168 args[i].tree_value = build1 (ADDR_EXPR,
1169 build_pointer_type (type),
1170 args[i].tree_value);
1171 type = build_pointer_type (type);
1173 else
1175 /* We make a copy of the object and pass the address to the
1176 function being called. */
1177 rtx copy;
1179 if (!COMPLETE_TYPE_P (type)
1180 || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST
1181 || (flag_stack_check && ! STACK_CHECK_BUILTIN
1182 && (0 < compare_tree_int (TYPE_SIZE_UNIT (type),
1183 STACK_CHECK_MAX_VAR_SIZE))))
1185 /* This is a variable-sized object. Make space on the stack
1186 for it. */
1187 rtx size_rtx = expr_size (TREE_VALUE (p));
/* Save the stack level the first time we allocate dynamic
   space, so the caller can restore it after the call.  */
1189 if (*old_stack_level == 0)
1191 emit_stack_save (SAVE_BLOCK, old_stack_level, NULL_RTX);
1192 *old_pending_adj = pending_stack_adjust;
1193 pending_stack_adjust = 0;
1196 copy = gen_rtx_MEM (BLKmode,
1197 allocate_dynamic_stack_space
1198 (size_rtx, NULL_RTX, TYPE_ALIGN (type)));
1199 set_mem_attributes (copy, type, 1);
1201 else
1202 copy = assign_temp (type, 0, 1, 0);
1204 store_expr (args[i].tree_value, copy, 0);
/* The call now has a visible side effect (writing the copy),
   so it can no longer be treated as const or pure.  */
1205 *ecf_flags &= ~(ECF_CONST | ECF_PURE);
1207 args[i].tree_value = build1 (ADDR_EXPR,
1208 build_pointer_type (type),
1209 make_tree (type, copy));
1210 type = build_pointer_type (type);
/* Determine the machine mode the argument is passed in, applying
   argument promotion when the target requests it.  */
1214 mode = TYPE_MODE (type);
1215 unsignedp = TREE_UNSIGNED (type);
1217 #ifdef PROMOTE_FUNCTION_ARGS
1218 mode = promote_mode (type, mode, &unsignedp, 1);
1219 #endif
1221 args[i].unsignedp = unsignedp;
1222 args[i].mode = mode;
1224 args[i].reg = FUNCTION_ARG (*args_so_far, mode, type,
1225 argpos < n_named_args);
1226 #ifdef FUNCTION_INCOMING_ARG
1227 /* If this is a sibling call and the machine has register windows, the
1228 register window has to be unwinded before calling the routine, so
1229 arguments have to go into the incoming registers. */
1230 args[i].tail_call_reg = FUNCTION_INCOMING_ARG (*args_so_far, mode, type,
1231 argpos < n_named_args)
1232 #else
1233 args[i].tail_call_reg = args[i].reg;
1234 #endif
1236 #ifdef FUNCTION_ARG_PARTIAL_NREGS
1237 if (args[i].reg)
1238 args[i].partial
1239 = FUNCTION_ARG_PARTIAL_NREGS (*args_so_far, mode, type,
1240 argpos < n_named_args);
1241 #endif
1243 args[i].pass_on_stack = MUST_PASS_IN_STACK (mode, type);
1245 /* If FUNCTION_ARG returned a (parallel [(expr_list (nil) ...) ...]),
1246 it means that we are to pass this arg in the register(s) designated
1247 by the PARALLEL, but also to pass it in the stack. */
1248 if (args[i].reg && GET_CODE (args[i].reg) == PARALLEL
1249 && XEXP (XVECEXP (args[i].reg, 0, 0), 0) == 0)
1250 args[i].pass_on_stack = 1;
1252 /* If this is an addressable type, we must preallocate the stack
1253 since we must evaluate the object into its final location.
1255 If this is to be passed in both registers and the stack, it is simpler
1256 to preallocate. */
1257 if (TREE_ADDRESSABLE (type)
1258 || (args[i].pass_on_stack && args[i].reg != 0))
1259 *must_preallocate = 1;
1261 /* If this is an addressable type, we cannot pre-evaluate it. Thus,
1262 we cannot consider this function call constant. */
1263 if (TREE_ADDRESSABLE (type))
1264 *ecf_flags &= ~(ECF_CONST | ECF_PURE);
1266 /* Compute the stack-size of this argument. */
1267 if (args[i].reg == 0 || args[i].partial != 0
1268 || reg_parm_stack_space > 0
1269 || args[i].pass_on_stack)
1270 locate_and_pad_parm (mode, type,
1271 #ifdef STACK_PARMS_IN_REG_PARM_AREA
1273 #else
1274 args[i].reg != 0,
1275 #endif
1276 fndecl, args_size, &args[i].offset,
1277 &args[i].size, &alignment_pad);
/* When args grow upward the slot offset is recorded here, before
   ARGS_SIZE is incremented; the downward case is handled below.  */
1279 #ifndef ARGS_GROW_DOWNWARD
1280 args[i].slot_offset = *args_size;
1281 #endif
1283 args[i].alignment_pad = alignment_pad;
1285 /* If a part of the arg was put into registers,
1286 don't include that part in the amount pushed. */
1287 if (reg_parm_stack_space == 0 && ! args[i].pass_on_stack)
1288 args[i].size.constant -= ((args[i].partial * UNITS_PER_WORD)
1289 / (PARM_BOUNDARY / BITS_PER_UNIT)
1290 * (PARM_BOUNDARY / BITS_PER_UNIT));
1292 /* Update ARGS_SIZE, the total stack space for args so far. */
1294 args_size->constant += args[i].size.constant;
1295 if (args[i].size.var)
1297 ADD_PARM_SIZE (*args_size, args[i].size.var);
1300 /* Since the slot offset points to the bottom of the slot,
1301 we must record it after incrementing if the args grow down. */
1302 #ifdef ARGS_GROW_DOWNWARD
1303 args[i].slot_offset = *args_size;
1305 args[i].slot_offset.constant = -args_size->constant;
1306 if (args_size->var)
1307 SUB_PARM_SIZE (args[i].slot_offset, args_size->var);
1308 #endif
1310 /* Increment ARGS_SO_FAR, which has info about which arg-registers
1311 have been used, etc. */
1313 FUNCTION_ARG_ADVANCE (*args_so_far, TYPE_MODE (type), type,
1314 argpos < n_named_args);
1318 /* Update ARGS_SIZE to contain the total size for the argument block.
1319 Return the original constant component of the argument block's size.
1321 REG_PARM_STACK_SPACE holds the number of bytes of stack space reserved
1322 for arguments passed in registers. */
1324 static int
1325 compute_argument_block_size (reg_parm_stack_space, args_size,
1326 preferred_stack_boundary)
1327 int reg_parm_stack_space;
1328 struct args_size *args_size;
1329 int preferred_stack_boundary ATTRIBUTE_UNUSED;
1331 int unadjusted_args_size = args_size->constant;
1333 /* For accumulate outgoing args mode we don't need to align, since the frame
1334 will be already aligned. Align to STACK_BOUNDARY in order to prevent
1335 backends from generating missaligned frame sizes. */
1336 #ifdef STACK_BOUNDARY
1337 if (ACCUMULATE_OUTGOING_ARGS && preferred_stack_boundary > STACK_BOUNDARY)
1338 preferred_stack_boundary = STACK_BOUNDARY;
1339 #endif
1341 /* Compute the actual size of the argument block required. The variable
1342 and constant sizes must be combined, the size may have to be rounded,
1343 and there may be a minimum required size. */
1345 if (args_size->var)
/* Variable-sized argument block: fold the constant part into the
   size tree and round/clamp symbolically.  */
1347 args_size->var = ARGS_SIZE_TREE (*args_size);
1348 args_size->constant = 0;
1350 #ifdef PREFERRED_STACK_BOUNDARY
1351 preferred_stack_boundary /= BITS_PER_UNIT;
1352 if (preferred_stack_boundary > 1)
1354 /* We don't handle this case yet. To handle it correctly we have
1355 to add the delta, round and substract the delta.
1356 Currently no machine description requires this support. */
1357 if (stack_pointer_delta & (preferred_stack_boundary - 1))
1358 abort();
1359 args_size->var = round_up (args_size->var, preferred_stack_boundary);
1361 #endif
1363 if (reg_parm_stack_space > 0)
/* The block must at least cover the register-parameter area.  */
1365 args_size->var
1366 = size_binop (MAX_EXPR, args_size->var,
1367 ssize_int (reg_parm_stack_space));
1369 #ifndef OUTGOING_REG_PARM_STACK_SPACE
1370 /* The area corresponding to register parameters is not to count in
1371 the size of the block we need. So make the adjustment. */
1372 args_size->var
1373 = size_binop (MINUS_EXPR, args_size->var,
1374 ssize_int (reg_parm_stack_space));
1375 #endif
1378 else
/* Constant-sized argument block: round the constant so that
   SP + args stays on the preferred boundary.  */
1380 #ifdef PREFERRED_STACK_BOUNDARY
1381 preferred_stack_boundary /= BITS_PER_UNIT;
1382 if (preferred_stack_boundary < 1)
1383 preferred_stack_boundary = 1;
1384 args_size->constant = (((args_size->constant
1385 + stack_pointer_delta
1386 + preferred_stack_boundary - 1)
1387 / preferred_stack_boundary
1388 * preferred_stack_boundary)
1389 - stack_pointer_delta);
1390 #endif
1392 args_size->constant = MAX (args_size->constant,
1393 reg_parm_stack_space);
1395 #ifdef MAYBE_REG_PARM_STACK_SPACE
1396 if (reg_parm_stack_space == 0)
1397 args_size->constant = 0;
1398 #endif
1400 #ifndef OUTGOING_REG_PARM_STACK_SPACE
1401 args_size->constant -= reg_parm_stack_space;
1402 #endif
/* Return the pre-adjustment constant size; callers use it to decide
   how the stack pointer must move around the call.  */
1404 return unadjusted_args_size;
1407 /* Precompute parameters as needed for a function call.
1409 FLAGS is mask of ECF_* constants.
1411 NUM_ACTUALS is the number of arguments.
1413 ARGS is an array containing information for each argument; this routine
1414 fills in the INITIAL_VALUE and VALUE fields for each precomputed argument.
1417 static void
1418 precompute_arguments (flags, num_actuals, args)
1419 int flags;
1420 int num_actuals;
1421 struct arg_data *args;
1423 int i;
1425 /* If this function call is cse'able, precompute all the parameters.
1426 Note that if the parameter is constructed into a temporary, this will
1427 cause an additional copy because the parameter will be constructed
1428 into a temporary location and then copied into the outgoing arguments.
1429 If a parameter contains a call to alloca and this function uses the
1430 stack, precompute the parameter. */
1432 /* If we preallocated the stack space, and some arguments must be passed
1433 on the stack, then we must precompute any parameter which contains a
1434 function call which will store arguments on the stack.
1435 Otherwise, evaluating the parameter may clobber previous parameters
1436 which have already been stored into the stack. (we have code to avoid
1437 such case by saving the outgoing stack arguments, but it results in
1438 worse code) */
1440 for (i = 0; i < num_actuals; i++)
1441 if ((flags & (ECF_CONST | ECF_PURE))
1442 || calls_function (args[i].tree_value, !ACCUMULATE_OUTGOING_ARGS))
1444 /* If this is an addressable type, we cannot pre-evaluate it. */
1445 if (TREE_ADDRESSABLE (TREE_TYPE (args[i].tree_value)))
1446 abort ();
1448 push_temp_slots ();
1450 args[i].value
1451 = expand_expr (args[i].tree_value, NULL_RTX, VOIDmode, 0);
/* Keep the evaluated value alive past this temp-slot level.  */
1453 preserve_temp_slots (args[i].value);
1454 pop_temp_slots ();
1456 /* ANSI doesn't require a sequence point here,
1457 but PCC has one, so this will avoid some problems. */
1458 emit_queue ();
1460 args[i].initial_value = args[i].value
1461 = protect_from_queue (args[i].value, 0);
/* If promotion changed the mode, convert the value to the mode
   it is actually passed in.  */
1463 if (TYPE_MODE (TREE_TYPE (args[i].tree_value)) != args[i].mode)
1465 args[i].value
1466 = convert_modes (args[i].mode,
1467 TYPE_MODE (TREE_TYPE (args[i].tree_value)),
1468 args[i].value, args[i].unsignedp);
1469 #ifdef PROMOTE_FOR_CALL_ONLY
1470 /* CSE will replace this only if it contains args[i].value
1471 pseudo, so convert it down to the declared mode using
1472 a SUBREG. */
1473 if (GET_CODE (args[i].value) == REG
1474 && GET_MODE_CLASS (args[i].mode) == MODE_INT)
1476 args[i].initial_value
1477 = gen_rtx_SUBREG (TYPE_MODE (TREE_TYPE (args[i].tree_value)),
1478 args[i].value, 0);
1479 SUBREG_PROMOTED_VAR_P (args[i].initial_value) = 1;
1480 SUBREG_PROMOTED_UNSIGNED_P (args[i].initial_value)
1481 = args[i].unsignedp;
1483 #endif
1488 /* Given the current state of MUST_PREALLOCATE and information about
1489 arguments to a function call in NUM_ACTUALS, ARGS and ARGS_SIZE,
1490 compute and return the final value for MUST_PREALLOCATE. */
1492 static int
1493 finalize_must_preallocate (must_preallocate, num_actuals, args, args_size)
1494 int must_preallocate;
1495 int num_actuals;
1496 struct arg_data *args;
1497 struct args_size *args_size;
1499 /* See if we have or want to preallocate stack space.
1501 If we would have to push a partially-in-regs parm
1502 before other stack parms, preallocate stack space instead.
1504 If the size of some parm is not a multiple of the required stack
1505 alignment, we must preallocate.
1507 If the total size of arguments that would otherwise create a copy in
1508 a temporary (such as a CALL) is more than half the total argument list
1509 size, preallocation is faster.
1511 Another reason to preallocate is if we have a machine (like the m88k)
1512 where stack alignment is required to be maintained between every
1513 pair of insns, not just when the call is made. However, we assume here
1514 that such machines either do not have push insns (and hence preallocation
1515 would occur anyway) or the problem is taken care of with
1516 PUSH_ROUNDING. */
1518 if (! must_preallocate)
1520 int partial_seen = 0;
1521 int copy_to_evaluate_size = 0;
1522 int i;
1524 for (i = 0; i < num_actuals && ! must_preallocate; i++)
/* A partially-in-registers parm followed later by a pure stack
   parm forces preallocation (see the comment above).  */
1526 if (args[i].partial > 0 && ! args[i].pass_on_stack)
1527 partial_seen = 1;
1528 else if (partial_seen && args[i].reg == 0)
1529 must_preallocate = 1;
/* Total up the BLKmode arguments that would otherwise be built in
   a temporary and then copied to the outgoing-argument area.  */
1531 if (TYPE_MODE (TREE_TYPE (args[i].tree_value)) == BLKmode
1532 && (TREE_CODE (args[i].tree_value) == CALL_EXPR
1533 || TREE_CODE (args[i].tree_value) == TARGET_EXPR
1534 || TREE_CODE (args[i].tree_value) == COND_EXPR
1535 || TREE_ADDRESSABLE (TREE_TYPE (args[i].tree_value))))
1536 copy_to_evaluate_size
1537 += int_size_in_bytes (TREE_TYPE (args[i].tree_value));
1540 if (copy_to_evaluate_size * 2 >= args_size->constant
1541 && args_size->constant > 0)
1542 must_preallocate = 1;
1544 return must_preallocate;
1547 /* If we preallocated stack space, compute the address of each argument
1548 and store it into the ARGS array.
1550 We need not ensure it is a valid memory address here; it will be
1551 validized when it is used.
1553 ARGBLOCK is an rtx for the address of the outgoing arguments. */
1555 static void
1556 compute_argument_addresses (args, argblock, num_actuals)
1557 struct arg_data *args;
1558 rtx argblock;
1559 int num_actuals;
1561 if (argblock)
1563 rtx arg_reg = argblock;
1564 int i, arg_offset = 0;
/* Split a (plus reg const) ARGBLOCK into base register and
   constant displacement.  */
1566 if (GET_CODE (argblock) == PLUS)
1567 arg_reg = XEXP (argblock, 0), arg_offset = INTVAL (XEXP (argblock, 1));
1569 for (i = 0; i < num_actuals; i++)
1571 rtx offset = ARGS_SIZE_RTX (args[i].offset);
1572 rtx slot_offset = ARGS_SIZE_RTX (args[i].slot_offset);
1573 rtx addr;
1575 /* Skip this parm if it will not be passed on the stack. */
1576 if (! args[i].pass_on_stack && args[i].reg != 0)
1577 continue;
/* args[i].stack: MEM for where the value itself is stored.  */
1579 if (GET_CODE (offset) == CONST_INT)
1580 addr = plus_constant (arg_reg, INTVAL (offset));
1581 else
1582 addr = gen_rtx_PLUS (Pmode, arg_reg, offset);
1584 addr = plus_constant (addr, arg_offset);
1585 args[i].stack = gen_rtx_MEM (args[i].mode, addr);
1586 set_mem_attributes (args[i].stack,
1587 TREE_TYPE (args[i].tree_value), 1);
/* args[i].stack_slot: MEM for the bottom of the whole slot,
   which may differ from the value address due to padding.  */
1589 if (GET_CODE (slot_offset) == CONST_INT)
1590 addr = plus_constant (arg_reg, INTVAL (slot_offset))
1591 else
1592 addr = gen_rtx_PLUS (Pmode, arg_reg, slot_offset);
1594 addr = plus_constant (addr, arg_offset);
1595 args[i].stack_slot = gen_rtx_MEM (args[i].mode, addr);
1596 set_mem_attributes (args[i].stack_slot,
1597 TREE_TYPE (args[i].tree_value), 1);
1599 /* Function incoming arguments may overlap with sibling call
1600 outgoing arguments and we cannot allow reordering of reads
1601 from function arguments with stores to outgoing arguments
1602 of sibling calls. */
1603 MEM_ALIAS_SET (args[i].stack) = 0;
1604 MEM_ALIAS_SET (args[i].stack_slot) = 0;
1609 /* Given a FNDECL and EXP, return an rtx suitable for use as a target address
1610 in a call instruction.
1612 FNDECL is the tree node for the target function. For an indirect call
1613 FNDECL will be NULL_TREE.
1615 EXP is the CALL_EXPR for this call. */
1617 static rtx
1618 rtx_for_function_call (fndecl, exp)
1619 tree fndecl;
1620 tree exp;
1622 rtx funexp;
1624 /* Get the function to call, in the form of RTL. */
1625 if (fndecl)
1627 /* If this is the first use of the function, see if we need to
1628 make an external definition for it. */
1629 if (! TREE_USED (fndecl))
1631 assemble_external (fndecl);
1632 TREE_USED (fndecl) = 1;
1635 /* Get a SYMBOL_REF rtx for the function address. */
1636 funexp = XEXP (DECL_RTL (fndecl), 0);
1638 else
1639 /* Generate an rtx (probably a pseudo-register) for the address. */
1641 rtx funaddr;
1642 push_temp_slots ();
1643 funaddr = funexp =
1644 expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
1645 pop_temp_slots (); /* FUNEXP can't be BLKmode */
1647 /* Check the function is executable. */
1648 if (current_function_check_memory_usage)
1650 #ifdef POINTERS_EXTEND_UNSIGNED
1651 /* It might be OK to convert funexp in place, but there's
1652 a lot going on between here and when it happens naturally
1653 that this seems safer. */
1654 funaddr = convert_memory_address (Pmode, funexp);
1655 #endif
/* Emit a checker-library call validating the target address.  */
1656 emit_library_call (chkr_check_exec_libfunc, 1,
1657 VOIDmode, 1,
1658 funaddr, Pmode);
/* Flush any queued increments before the address is used.  */
1660 emit_queue ();
1662 return funexp;
1665 /* Do the register loads required for any wholly-register parms or any
1666 parms which are passed both on the stack and in a register. Their
1667 expressions were already evaluated.
1669 Mark all register-parms as living through the call, putting these USE
1670 insns in the CALL_INSN_FUNCTION_USAGE field. */
1672 static void
1673 load_register_parameters (args, num_actuals, call_fusage, flags)
1674 struct arg_data *args;
1675 int num_actuals;
1676 rtx *call_fusage;
1677 int flags;
1679 int i, j;
1681 #ifdef LOAD_ARGS_REVERSED
1682 for (i = num_actuals - 1; i >= 0; i--)
1683 #else
1684 for (i = 0; i < num_actuals; i++)
1685 #endif
/* For sibling calls the value must go into the incoming register
   computed earlier, not the normal outgoing one.  */
1687 rtx reg = ((flags & ECF_SIBCALL)
1688 ? args[i].tail_call_reg : args[i].reg);
1689 int partial = args[i].partial;
1690 int nregs;
1692 if (reg)
1694 /* Set to non-negative if must move a word at a time, even if just
1695 one word (e.g, partial == 1 && mode == DFmode). Set to -1 if
1696 we just use a normal move insn. This value can be zero if the
1697 argument is a zero size structure with no fields. */
1698 nregs = (partial ? partial
1699 : (TYPE_MODE (TREE_TYPE (args[i].tree_value)) == BLKmode
1700 ? ((int_size_in_bytes (TREE_TYPE (args[i].tree_value))
1701 + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)
1702 : -1));
1704 /* Handle calls that pass values in multiple non-contiguous
1705 locations. The Irix 6 ABI has examples of this. */
1707 if (GET_CODE (reg) == PARALLEL)
1708 emit_group_load (reg, args[i].value,
1709 int_size_in_bytes (TREE_TYPE (args[i].tree_value)),
1710 TYPE_ALIGN (TREE_TYPE (args[i].tree_value)));
1712 /* If simple case, just do move. If normal partial, store_one_arg
1713 has already loaded the register for us. In all other cases,
1714 load the register(s) from memory. */
1716 else if (nregs == -1)
1717 emit_move_insn (reg, args[i].value);
1719 /* If we have pre-computed the values to put in the registers in
1720 the case of non-aligned structures, copy them in now. */
1722 else if (args[i].n_aligned_regs != 0)
1723 for (j = 0; j < args[i].n_aligned_regs; j++)
1724 emit_move_insn (gen_rtx_REG (word_mode, REGNO (reg) + j),
1725 args[i].aligned_regs[j]);
1727 else if (partial == 0 || args[i].pass_on_stack)
1728 move_block_to_reg (REGNO (reg),
1729 validize_mem (args[i].value), nregs,
1730 args[i].mode);
1732 /* Handle calls that pass values in multiple non-contiguous
1733 locations. The Irix 6 ABI has examples of this. */
/* Record the registers as used across the call so they are kept
   live in CALL_INSN_FUNCTION_USAGE.  */
1734 if (GET_CODE (reg) == PARALLEL)
1735 use_group_regs (call_fusage, reg);
1736 else if (nregs == -1)
1737 use_reg (call_fusage, reg);
1738 else
1739 use_regs (call_fusage, REGNO (reg), nregs == 0 ? 1 : nregs);
1744 /* Try to integrate function. See expand_inline_function for documentation
1745 about the parameters. */
1747 static rtx
1748 try_to_integrate (fndecl, actparms, target, ignore, type, structure_value_addr)
1749 tree fndecl;
1750 tree actparms;
1751 rtx target;
1752 int ignore;
1753 tree type;
1754 rtx structure_value_addr;
1756 rtx temp;
1757 rtx before_call;
1758 int i;
1759 rtx old_stack_level = 0;
1760 int reg_parm_stack_space = 0;
1762 #ifdef REG_PARM_STACK_SPACE
1763 #ifdef MAYBE_REG_PARM_STACK_SPACE
1764 reg_parm_stack_space = MAYBE_REG_PARM_STACK_SPACE;
1765 #else
1766 reg_parm_stack_space = REG_PARM_STACK_SPACE (fndecl);
1767 #endif
1768 #endif
/* Remember the insn before inlining so we can later scan just the
   insns the inlined body produced.  */
1770 before_call = get_last_insn ();
1772 timevar_push (TV_INTEGRATION);
1774 temp = expand_inline_function (fndecl, actparms, target,
1775 ignore, type,
1776 structure_value_addr);
1778 timevar_pop (TV_INTEGRATION);
1780 /* If inlining succeeded, return. */
1781 if (temp != (rtx) (HOST_WIDE_INT) - 1)
1783 if (ACCUMULATE_OUTGOING_ARGS)
1785 /* If the outgoing argument list must be preserved, push
1786 the stack before executing the inlined function if it
1787 makes any calls. */
/* Find the highest in-use slot within the reg-parm area.  */
1789 for (i = reg_parm_stack_space - 1; i >= 0; i--)
1790 if (i < highest_outgoing_arg_in_use && stack_usage_map[i] != 0)
1791 break;
1793 if (stack_arg_under_construction || i >= 0)
1795 rtx first_insn
1796 = before_call ? NEXT_INSN (before_call) : get_insns ();
1797 rtx insn = NULL_RTX, seq;
1799 /* Look for a call in the inline function code.
1800 If DECL_SAVED_INSNS (fndecl)->outgoing_args_size is
1801 nonzero then there is a call and it is not necessary
1802 to scan the insns. */
1804 if (DECL_SAVED_INSNS (fndecl)->outgoing_args_size == 0)
1805 for (insn = first_insn; insn; insn = NEXT_INSN (insn))
1806 if (GET_CODE (insn) == CALL_INSN)
1807 break;
1809 if (insn)
1811 /* Reserve enough stack space so that the largest
1812 argument list of any function call in the inline
1813 function does not overlap the argument list being
1814 evaluated. This is usually an overestimate because
1815 allocate_dynamic_stack_space reserves space for an
1816 outgoing argument list in addition to the requested
1817 space, but there is no way to ask for stack space such
1818 that an argument list of a certain length can be
1819 safely constructed.
1821 Add the stack space reserved for register arguments, if
1822 any, in the inline function. What is really needed is the
1823 largest value of reg_parm_stack_space in the inline
1824 function, but that is not available. Using the current
1825 value of reg_parm_stack_space is wrong, but gives
1826 correct results on all supported machines. */
1828 int adjust = (DECL_SAVED_INSNS (fndecl)->outgoing_args_size
1829 + reg_parm_stack_space);
/* Emit the save/allocate sequence before the inlined body
   and restore the stack afterwards.  */
1831 start_sequence ();
1832 emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);
1833 allocate_dynamic_stack_space (GEN_INT (adjust),
1834 NULL_RTX, BITS_PER_UNIT);
1835 seq = get_insns ();
1836 end_sequence ();
1837 emit_insns_before (seq, first_insn);
1838 emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX);
1843 /* If the result is equivalent to TARGET, return TARGET to simplify
1844 checks in store_expr. They can be equivalent but not equal in the
1845 case of a function that returns BLKmode. */
1846 if (temp != target && rtx_equal_p (temp, target))
1847 return target;
1848 return temp;
1851 /* If inlining failed, mark FNDECL as needing to be compiled
1852 separately after all. If function was declared inline,
1853 give a warning. */
1854 if (DECL_INLINE (fndecl) && warn_inline && !flag_no_inline
1855 && optimize > 0 && !TREE_ADDRESSABLE (fndecl))
1857 warning_with_decl (fndecl, "inlining failed in call to `%s'");
1858 warning ("called from here");
1860 mark_addressable (fndecl);
/* (rtx) -1 is the sentinel for "inlining was not done".  */
1861 return (rtx) (HOST_WIDE_INT) - 1;
1864 /* We need to pop PENDING_STACK_ADJUST bytes. But, if the arguments
1865 wouldn't fill up an even multiple of PREFERRED_UNIT_STACK_BOUNDARY
1866 bytes, then we would need to push some additional bytes to pad the
1867 arguments. So, we compute an adjust to the stack pointer for an
1868 amount that will leave the stack under-aligned by UNADJUSTED_ARGS_SIZE
1869 bytes. Then, when the arguments are pushed the stack will be perfectly
1870 aligned. ARGS_SIZE->CONSTANT is set to the number of bytes that should
1871 be popped after the call. Returns the adjustment. */
1873 static int
1874 combine_pending_stack_adjustment_and_call (unadjusted_args_size,
1875 args_size,
1876 preferred_unit_stack_boundary)
1877 int unadjusted_args_size;
1878 struct args_size *args_size;
1879 int preferred_unit_stack_boundary;
1881 /* The number of bytes to pop so that the stack will be
1882 under-aligned by UNADJUSTED_ARGS_SIZE bytes. */
1883 HOST_WIDE_INT adjustment;
1884 /* The alignment of the stack after the arguments are pushed, if we
1885 just pushed the arguments without adjust the stack here. */
1886 HOST_WIDE_INT unadjusted_alignment;
1888 unadjusted_alignment
1889 = ((stack_pointer_delta + unadjusted_args_size)
1890 % preferred_unit_stack_boundary);
1892 /* We want to get rid of as many of the PENDING_STACK_ADJUST bytes
1893 as possible -- leaving just enough left to cancel out the
1894 UNADJUSTED_ALIGNMENT. In other words, we want to ensure that the
1895 PENDING_STACK_ADJUST is non-negative, and congruent to
1896 -UNADJUSTED_ALIGNMENT modulo the PREFERRED_UNIT_STACK_BOUNDARY. */
1898 /* Begin by trying to pop all the bytes. */
1899 unadjusted_alignment
1900 = (unadjusted_alignment
1901 - (pending_stack_adjust % preferred_unit_stack_boundary));
1902 adjustment = pending_stack_adjust;
1903 /* Push enough additional bytes that the stack will be aligned
1904 after the arguments are pushed. */
1905 if (unadjusted_alignment >= 0)
1906 adjustment -= preferred_unit_stack_boundary - unadjusted_alignment;
1907 else
1908 adjustment += unadjusted_alignment;
1910 /* Now, sets ARGS_SIZE->CONSTANT so that we pop the right number of
1911 bytes after the call. The right number is the entire
1912 PENDING_STACK_ADJUST less our ADJUSTMENT plus the amount required
1913 by the arguments in the first place. */
1914 args_size->constant
1915 = pending_stack_adjust - adjustment + unadjusted_args_size;
1917 return adjustment;
1920 /* Generate all the code for a function call
1921 and return an rtx for its value.
1922 Store the value in TARGET (specified as an rtx) if convenient.
1923 If the value is stored in TARGET then TARGET is returned.
1924 If IGNORE is nonzero, then we ignore the value of the function call. */
1927 expand_call (exp, target, ignore)
1928 tree exp;
1929 rtx target;
1930 int ignore;
1932 /* Nonzero if we are currently expanding a call. */
1933 static int currently_expanding_call = 0;
1935 /* List of actual parameters. */
1936 tree actparms = TREE_OPERAND (exp, 1);
1937 /* RTX for the function to be called. */
1938 rtx funexp;
1939 /* Sequence of insns to perform a tail recursive "call". */
1940 rtx tail_recursion_insns = NULL_RTX;
1941 /* Sequence of insns to perform a normal "call". */
1942 rtx normal_call_insns = NULL_RTX;
1943 /* Sequence of insns to perform a tail recursive "call". */
1944 rtx tail_call_insns = NULL_RTX;
1945 /* Data type of the function. */
1946 tree funtype;
1947 /* Declaration of the function being called,
1948 or 0 if the function is computed (not known by name). */
1949 tree fndecl = 0;
1950 char *name = 0;
1951 rtx insn;
1952 int try_tail_call = 1;
1953 int try_tail_recursion = 1;
1954 int pass;
1956 /* Register in which non-BLKmode value will be returned,
1957 or 0 if no value or if value is BLKmode. */
1958 rtx valreg;
1959 /* Address where we should return a BLKmode value;
1960 0 if value not BLKmode. */
1961 rtx structure_value_addr = 0;
1962 /* Nonzero if that address is being passed by treating it as
1963 an extra, implicit first parameter. Otherwise,
1964 it is passed by being copied directly into struct_value_rtx. */
1965 int structure_value_addr_parm = 0;
1966 /* Size of aggregate value wanted, or zero if none wanted
1967 or if we are using the non-reentrant PCC calling convention
1968 or expecting the value in registers. */
1969 HOST_WIDE_INT struct_value_size = 0;
1970 /* Nonzero if called function returns an aggregate in memory PCC style,
1971 by returning the address of where to find it. */
1972 int pcc_struct_value = 0;
1974 /* Number of actual parameters in this call, including struct value addr. */
1975 int num_actuals;
1976 /* Number of named args. Args after this are anonymous ones
1977 and they must all go on the stack. */
1978 int n_named_args;
1980 /* Vector of information about each argument.
1981 Arguments are numbered in the order they will be pushed,
1982 not the order they are written. */
1983 struct arg_data *args;
1985 /* Total size in bytes of all the stack-parms scanned so far. */
1986 struct args_size args_size;
1987 struct args_size adjusted_args_size;
1988 /* Size of arguments before any adjustments (such as rounding). */
1989 int unadjusted_args_size;
1990 /* Data on reg parms scanned so far. */
1991 CUMULATIVE_ARGS args_so_far;
1992 /* Nonzero if a reg parm has been scanned. */
1993 int reg_parm_seen;
1994 /* Nonzero if this is an indirect function call. */
1996 /* Nonzero if we must avoid push-insns in the args for this call.
1997 If stack space is allocated for register parameters, but not by the
1998 caller, then it is preallocated in the fixed part of the stack frame.
1999 So the entire argument block must then be preallocated (i.e., we
2000 ignore PUSH_ROUNDING in that case). */
2002 int must_preallocate = !PUSH_ARGS;
2004 /* Size of the stack reserved for parameter registers. */
2005 int reg_parm_stack_space = 0;
2007 /* Address of space preallocated for stack parms
2008 (on machines that lack push insns), or 0 if space not preallocated. */
2009 rtx argblock = 0;
2011 /* Mask of ECF_ flags. */
2012 int flags = 0;
2013 /* Nonzero if this is a call to an inline function. */
2014 int is_integrable = 0;
2015 #ifdef REG_PARM_STACK_SPACE
2016 /* Define the boundary of the register parm stack space that needs to be
2017      saved, if any.  */
2018 int low_to_save = -1, high_to_save;
2019 rtx save_area = 0; /* Place that it is saved */
2020 #endif
2022 int initial_highest_arg_in_use = highest_outgoing_arg_in_use;
2023 char *initial_stack_usage_map = stack_usage_map;
2024 int old_stack_arg_under_construction = 0;
2026 rtx old_stack_level = 0;
2027 int old_pending_adj = 0;
2028 int old_inhibit_defer_pop = inhibit_defer_pop;
2029 int old_stack_allocated;
2030 rtx call_fusage;
2031 register tree p;
2032 register int i;
2033 /* The alignment of the stack, in bits. */
2034 HOST_WIDE_INT preferred_stack_boundary;
2035 /* The alignment of the stack, in bytes. */
2036 HOST_WIDE_INT preferred_unit_stack_boundary;
2038 /* The value of the function call can be put in a hard register. But
2039 if -fcheck-memory-usage, code which invokes functions (and thus
2040 damages some hard registers) can be inserted before using the value.
2041 So, target is always a pseudo-register in that case. */
2042 if (current_function_check_memory_usage)
2043 target = 0;
2045 /* See if this is "nothrow" function call. */
2046 if (TREE_NOTHROW (exp))
2047 flags |= ECF_NOTHROW;
2049 /* See if we can find a DECL-node for the actual function.
2050 As a result, decide whether this is a call to an integrable function. */
2052 fndecl = get_callee_fndecl (exp);
2053 if (fndecl)
2055 if (!flag_no_inline
2056 && fndecl != current_function_decl
2057 && DECL_INLINE (fndecl)
2058 && DECL_SAVED_INSNS (fndecl)
2059 && DECL_SAVED_INSNS (fndecl)->inlinable)
2060 is_integrable = 1;
2061 else if (! TREE_ADDRESSABLE (fndecl))
2063 /* In case this function later becomes inlinable,
2064 record that there was already a non-inline call to it.
2066 Use abstraction instead of setting TREE_ADDRESSABLE
2067 directly. */
2068 if (DECL_INLINE (fndecl) && warn_inline && !flag_no_inline
2069 && optimize > 0)
2071 warning_with_decl (fndecl, "can't inline call to `%s'");
2072 warning ("called from here");
2074 mark_addressable (fndecl);
2077 flags |= flags_from_decl_or_type (fndecl);
2080 /* If we don't have specific function to call, see if we have a
2081 attributes set in the type. */
2082 else
2084 p = TREE_OPERAND (exp, 0);
2085 flags |= flags_from_decl_or_type (TREE_TYPE (TREE_TYPE (p)));
2088 #ifdef REG_PARM_STACK_SPACE
2089 #ifdef MAYBE_REG_PARM_STACK_SPACE
2090 reg_parm_stack_space = MAYBE_REG_PARM_STACK_SPACE;
2091 #else
2092 reg_parm_stack_space = REG_PARM_STACK_SPACE (fndecl);
2093 #endif
2094 #endif
2096 #ifndef OUTGOING_REG_PARM_STACK_SPACE
2097 if (reg_parm_stack_space > 0 && PUSH_ARGS)
2098 must_preallocate = 1;
2099 #endif
2101 /* Warn if this value is an aggregate type,
2102 regardless of which calling convention we are using for it. */
2103 if (warn_aggregate_return && AGGREGATE_TYPE_P (TREE_TYPE (exp)))
2104 warning ("function call has aggregate value");
2106 /* Set up a place to return a structure. */
2108 /* Cater to broken compilers. */
2109 if (aggregate_value_p (exp))
2111 /* This call returns a big structure. */
2112 flags &= ~(ECF_CONST | ECF_PURE);
2114 #ifdef PCC_STATIC_STRUCT_RETURN
2116 pcc_struct_value = 1;
2117 /* Easier than making that case work right. */
2118 if (is_integrable)
2120 /* In case this is a static function, note that it has been
2121 used. */
2122 if (! TREE_ADDRESSABLE (fndecl))
2123 mark_addressable (fndecl);
2124 is_integrable = 0;
2127 #else /* not PCC_STATIC_STRUCT_RETURN */
2129 struct_value_size = int_size_in_bytes (TREE_TYPE (exp));
2131 if (target && GET_CODE (target) == MEM)
2132 structure_value_addr = XEXP (target, 0);
2133 else
2135 /* Assign a temporary to hold the value. */
2136 tree d;
2138 /* For variable-sized objects, we must be called with a target
2139 specified. If we were to allocate space on the stack here,
2140 we would have no way of knowing when to free it. */
2142 if (struct_value_size < 0)
2143 abort ();
2145 /* This DECL is just something to feed to mark_addressable;
2146 it doesn't get pushed. */
2147 d = build_decl (VAR_DECL, NULL_TREE, TREE_TYPE (exp));
2148 DECL_RTL (d) = assign_temp (TREE_TYPE (exp), 1, 0, 1);
2149 mark_addressable (d);
2150 mark_temp_addr_taken (DECL_RTL (d));
2151 structure_value_addr = XEXP (DECL_RTL (d), 0);
2152 TREE_USED (d) = 1;
2153 target = 0;
2156 #endif /* not PCC_STATIC_STRUCT_RETURN */
2159 /* If called function is inline, try to integrate it. */
2161 if (is_integrable)
2163 rtx temp = try_to_integrate (fndecl, actparms, target,
2164 ignore, TREE_TYPE (exp),
2165 structure_value_addr);
2166 if (temp != (rtx) (HOST_WIDE_INT) - 1)
2167 return temp;
2170 if (fndecl && DECL_NAME (fndecl))
2171 name = IDENTIFIER_POINTER (DECL_NAME (fndecl));
2173 /* Figure out the amount to which the stack should be aligned. */
2174 #ifdef PREFERRED_STACK_BOUNDARY
2175 preferred_stack_boundary = PREFERRED_STACK_BOUNDARY;
2176 #else
2177 preferred_stack_boundary = STACK_BOUNDARY;
2178 #endif
2180 /* Operand 0 is a pointer-to-function; get the type of the function. */
2181 funtype = TREE_TYPE (TREE_OPERAND (exp, 0));
2182 if (! POINTER_TYPE_P (funtype))
2183 abort ();
2184 funtype = TREE_TYPE (funtype);
2186 /* See if this is a call to a function that can return more than once
2187 or a call to longjmp or malloc. */
2188 flags |= special_function_p (fndecl, flags);
2190 if (flags & ECF_MAY_BE_ALLOCA)
2191 current_function_calls_alloca = 1;
2193 /* If struct_value_rtx is 0, it means pass the address
2194 as if it were an extra parameter. */
2195 if (structure_value_addr && struct_value_rtx == 0)
2197 /* If structure_value_addr is a REG other than
2198         virtual_outgoing_args_rtx, we can always use it.  If it
2199 is not a REG, we must always copy it into a register.
2200 If it is virtual_outgoing_args_rtx, we must copy it to another
2201 register in some cases. */
2202 rtx temp = (GET_CODE (structure_value_addr) != REG
2203 || (ACCUMULATE_OUTGOING_ARGS
2204 && stack_arg_under_construction
2205 && structure_value_addr == virtual_outgoing_args_rtx)
2206 ? copy_addr_to_reg (structure_value_addr)
2207 : structure_value_addr);
2209 actparms
2210 = tree_cons (error_mark_node,
2211 make_tree (build_pointer_type (TREE_TYPE (funtype)),
2212 temp),
2213 actparms);
2214 structure_value_addr_parm = 1;
2217 /* Count the arguments and set NUM_ACTUALS. */
2218 for (p = actparms, num_actuals = 0; p; p = TREE_CHAIN (p))
2219 num_actuals++;
2221 /* Compute number of named args.
2222 Normally, don't include the last named arg if anonymous args follow.
2223 We do include the last named arg if STRICT_ARGUMENT_NAMING is nonzero.
2224 (If no anonymous args follow, the result of list_length is actually
2225 one too large. This is harmless.)
2227 If PRETEND_OUTGOING_VARARGS_NAMED is set and STRICT_ARGUMENT_NAMING is
2228 zero, this machine will be able to place unnamed args that were
2229 passed in registers into the stack. So treat all args as named.
2230 This allows the insns emitting for a specific argument list to be
2231 independent of the function declaration.
2233 If PRETEND_OUTGOING_VARARGS_NAMED is not set, we do not have any
2234 reliable way to pass unnamed args in registers, so we must force
2235 them into memory. */
2237 if ((STRICT_ARGUMENT_NAMING
2238 || ! PRETEND_OUTGOING_VARARGS_NAMED)
2239 && TYPE_ARG_TYPES (funtype) != 0)
2240 n_named_args
2241 = (list_length (TYPE_ARG_TYPES (funtype))
2242 /* Don't include the last named arg. */
2243 - (STRICT_ARGUMENT_NAMING ? 0 : 1)
2244 /* Count the struct value address, if it is passed as a parm. */
2245 + structure_value_addr_parm);
2246 else
2247 /* If we know nothing, treat all args as named. */
2248 n_named_args = num_actuals;
2250 /* Start updating where the next arg would go.
2252 On some machines (such as the PA) indirect calls have a different
2253 calling convention than normal calls. The last argument in
2254 INIT_CUMULATIVE_ARGS tells the backend if this is an indirect call
2255 or not. */
2256 INIT_CUMULATIVE_ARGS (args_so_far, funtype, NULL_RTX, (fndecl == 0));
2259 /* Make a vector to hold all the information about each arg. */
2260 args = (struct arg_data *) alloca (num_actuals
2261 * sizeof (struct arg_data));
2262 bzero ((char *) args, num_actuals * sizeof (struct arg_data));
2264   /* Build up entries in the ARGS array, compute the size of the arguments
2265 into ARGS_SIZE, etc. */
2266 initialize_argument_information (num_actuals, args, &args_size,
2267 n_named_args, actparms, fndecl,
2268 &args_so_far, reg_parm_stack_space,
2269 &old_stack_level, &old_pending_adj,
2270 &must_preallocate, &flags);
2272 if (args_size.var)
2274 /* If this function requires a variable-sized argument list, don't
2275 try to make a cse'able block for this call. We may be able to
2276 do this eventually, but it is too complicated to keep track of
2277 what insns go in the cse'able block and which don't. */
2279 flags &= ~(ECF_CONST | ECF_PURE);
2280 must_preallocate = 1;
2283 /* Now make final decision about preallocating stack space. */
2284 must_preallocate = finalize_must_preallocate (must_preallocate,
2285 num_actuals, args,
2286 &args_size);
2288 /* If the structure value address will reference the stack pointer, we
2289 must stabilize it. We don't need to do this if we know that we are
2290 not going to adjust the stack pointer in processing this call. */
2292 if (structure_value_addr
2293 && (reg_mentioned_p (virtual_stack_dynamic_rtx, structure_value_addr)
2294 || reg_mentioned_p (virtual_outgoing_args_rtx,
2295 structure_value_addr))
2296 && (args_size.var
2297 || (!ACCUMULATE_OUTGOING_ARGS && args_size.constant)))
2298 structure_value_addr = copy_to_reg (structure_value_addr);
2300   /* Tail calls can make things harder to debug, and we've traditionally
2301      pushed these optimizations into -O2.  Don't try if we're already
2302 expanding a call, as that means we're an argument. Similarly, if
2303 there's pending loops or cleanups we know there's code to follow
2304 the call.
2306 If rtx_equal_function_value_matters is false, that means we've
2307 finished with regular parsing. Which means that some of the
2308 machinery we use to generate tail-calls is no longer in place.
2309 This is most often true of sjlj-exceptions, which we couldn't
2310 tail-call to anyway. */
2312 if (currently_expanding_call++ != 0
2313 || !flag_optimize_sibling_calls
2314 || !rtx_equal_function_value_matters
2315 || !stmt_loop_nest_empty ()
2316 || any_pending_cleanups (1)
2317 || args_size.var)
2318 try_tail_call = try_tail_recursion = 0;
2320 /* Tail recursion fails, when we are not dealing with recursive calls. */
2321 if (!try_tail_recursion
2322 || TREE_CODE (TREE_OPERAND (exp, 0)) != ADDR_EXPR
2323 || TREE_OPERAND (TREE_OPERAND (exp, 0), 0) != current_function_decl)
2324 try_tail_recursion = 0;
2326 /* Rest of purposes for tail call optimizations to fail. */
2327 if (
2328 #ifdef HAVE_sibcall_epilogue
2329 !HAVE_sibcall_epilogue
2330 #else
2332 #endif
2333 || !try_tail_call
2334 /* Doing sibling call optimization needs some work, since
2335 structure_value_addr can be allocated on the stack.
2336 It does not seem worth the effort since few optimizable
2337 sibling calls will return a structure. */
2338 || structure_value_addr != NULL_RTX
2339 /* If the register holding the address is a callee saved
2340 register, then we lose. We have no way to prevent that,
2341 so we only allow calls to named functions. */
2342 /* ??? This could be done by having the insn constraints
2343 use a register class that is all call-clobbered. Any
2344 reload insns generated to fix things up would appear
2345 before the sibcall_epilogue. */
2346 || fndecl == NULL_TREE
2347 || (flags & (ECF_RETURNS_TWICE | ECF_LONGJMP))
2348 || !FUNCTION_OK_FOR_SIBCALL (fndecl)
2349 /* If this function requires more stack slots than the current
2350 function, we cannot change it into a sibling call. */
2351 || args_size.constant > current_function_args_size
2352 /* If the callee pops its own arguments, then it must pop exactly
2353 the same number of arguments as the current function. */
2354 || RETURN_POPS_ARGS (fndecl, funtype, args_size.constant)
2355 != RETURN_POPS_ARGS (current_function_decl,
2356 TREE_TYPE (current_function_decl),
2357 current_function_args_size))
2358 try_tail_call = 0;
2360 if (try_tail_call || try_tail_recursion)
2362 int end, inc;
2363 actparms = NULL_TREE;
2364 /* Ok, we're going to give the tail call the old college try.
2365 This means we're going to evaluate the function arguments
2366 up to three times. There are two degrees of badness we can
2367 encounter, those that can be unsaved and those that can't.
2368 (See unsafe_for_reeval commentary for details.)
2370 Generate a new argument list. Pass safe arguments through
2371 unchanged. For the easy badness wrap them in UNSAVE_EXPRs.
2372 For hard badness, evaluate them now and put their resulting
2373 rtx in a temporary VAR_DECL.
2375 initialize_argument_information has ordered the array for the
2376 order to be pushed, and we must remember this when reconstructing
2377         the original argument order.  */
2379 if (PUSH_ARGS_REVERSED)
2381 inc = 1;
2382 i = 0;
2383 end = num_actuals;
2385 else
2387 inc = -1;
2388 i = num_actuals - 1;
2389 end = -1;
2392 for (; i != end; i += inc)
2394 switch (unsafe_for_reeval (args[i].tree_value))
2396 case 0: /* Safe. */
2397 break;
2399 case 1: /* Mildly unsafe. */
2400 args[i].tree_value = unsave_expr (args[i].tree_value);
2401 break;
2403 case 2: /* Wildly unsafe. */
2405 tree var = build_decl (VAR_DECL, NULL_TREE,
2406 TREE_TYPE (args[i].tree_value));
2407 DECL_RTL (var) = expand_expr (args[i].tree_value, NULL_RTX,
2408 VOIDmode, EXPAND_NORMAL);
2409 args[i].tree_value = var;
2411 break;
2413 default:
2414 abort ();
2416 /* We need to build actparms for optimize_tail_recursion. We can
2417 safely trash away TREE_PURPOSE, since it is unused by this
2418 function. */
2419 if (try_tail_recursion)
2420 actparms = tree_cons (NULL_TREE, args[i].tree_value, actparms);
2422 /* Expanding one of those dangerous arguments could have added
2423 cleanups, but otherwise give it a whirl. */
2424 if (any_pending_cleanups (1))
2425 try_tail_call = try_tail_recursion = 0;
2428 /* Generate a tail recursion sequence when calling ourselves. */
2430 if (try_tail_recursion)
2432 /* We want to emit any pending stack adjustments before the tail
2433 recursion "call". That way we know any adjustment after the tail
2434 recursion call can be ignored if we indeed use the tail recursion
2435 call expansion. */
2436 int save_pending_stack_adjust = pending_stack_adjust;
2437 int save_stack_pointer_delta = stack_pointer_delta;
2439 /* Use a new sequence to hold any RTL we generate. We do not even
2440 know if we will use this RTL yet. The final decision can not be
2441 made until after RTL generation for the entire function is
2442 complete. */
2443 start_sequence ();
2444 /* If expanding any of the arguments creates cleanups, we can't
2445 do a tailcall. So, we'll need to pop the pending cleanups
2446 list. If, however, all goes well, and there are no cleanups
2447 then the call to expand_start_target_temps will have no
2448 effect. */
2449 expand_start_target_temps ();
2450 if (optimize_tail_recursion (actparms, get_last_insn ()))
2452 if (any_pending_cleanups (1))
2453 try_tail_call = try_tail_recursion = 0;
2454 else
2455 tail_recursion_insns = get_insns ();
2457 expand_end_target_temps ();
2458 end_sequence ();
2460 /* Restore the original pending stack adjustment for the sibling and
2461 normal call cases below. */
2462 pending_stack_adjust = save_pending_stack_adjust;
2463 stack_pointer_delta = save_stack_pointer_delta;
2466 if (profile_arc_flag && (flags & ECF_FORK_OR_EXEC))
2468 /* A fork duplicates the profile information, and an exec discards
2469 it. We can't rely on fork/exec to be paired. So write out the
2470 profile information we have gathered so far, and clear it. */
2471 /* ??? When Linux's __clone is called with CLONE_VM set, profiling
2472 is subject to race conditions, just as with multithreaded
2473 programs. */
2475 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "__bb_fork_func"), 0,
2476 VOIDmode, 0);
2479 /* Ensure current function's preferred stack boundary is at least
2480 what we need. We don't have to increase alignment for recursive
2481 functions. */
2482 if (cfun->preferred_stack_boundary < preferred_stack_boundary
2483 && fndecl != current_function_decl)
2484 cfun->preferred_stack_boundary = preferred_stack_boundary;
2486 preferred_unit_stack_boundary = preferred_stack_boundary / BITS_PER_UNIT;
2488 function_call_count++;
2490 /* We want to make two insn chains; one for a sibling call, the other
2491 for a normal call. We will select one of the two chains after
2492 initial RTL generation is complete. */
2493 for (pass = 0; pass < 2; pass++)
2495 int sibcall_failure = 0;
2496       /* We want to emit any pending stack adjustments before the tail
2497 recursion "call". That way we know any adjustment after the tail
2498 recursion call can be ignored if we indeed use the tail recursion
2499 call expansion. */
2500 int save_pending_stack_adjust = 0;
2501 int save_stack_pointer_delta = 0;
2502 rtx insns;
2503 rtx before_call, next_arg_reg;
2505 if (pass == 0)
2507 if (! try_tail_call)
2508 continue;
2510 /* Emit any queued insns now; otherwise they would end up in
2511 only one of the alternates. */
2512 emit_queue ();
2514 /* State variables we need to save and restore between
2515 iterations. */
2516 save_pending_stack_adjust = pending_stack_adjust;
2517 save_stack_pointer_delta = stack_pointer_delta;
2519 if (pass)
2520 flags &= ~ECF_SIBCALL;
2521 else
2522 flags |= ECF_SIBCALL;
2524 /* Other state variables that we must reinitialize each time
2525 through the loop (that are not initialized by the loop itself). */
2526 argblock = 0;
2527 call_fusage = 0;
2529 /* Start a new sequence for the normal call case.
2531 From this point on, if the sibling call fails, we want to set
2532 sibcall_failure instead of continuing the loop. */
2533 start_sequence ();
2535 if (pass == 0)
2537 /* We know at this point that there are not currently any
2538 pending cleanups. If, however, in the process of evaluating
2539 the arguments we were to create some, we'll need to be
2540 able to get rid of them. */
2541 expand_start_target_temps ();
2544 /* When calling a const function, we must pop the stack args right away,
2545 so that the pop is deleted or moved with the call. */
2546 if (flags & (ECF_CONST | ECF_PURE))
2547 NO_DEFER_POP;
2549 /* Don't let pending stack adjusts add up to too much.
2550 Also, do all pending adjustments now if there is any chance
2551 this might be a call to alloca or if we are expanding a sibling
2552 call sequence. */
2553 if (pending_stack_adjust >= 32
2554 || (pending_stack_adjust > 0 && (flags & ECF_MAY_BE_ALLOCA))
2555 || pass == 0)
2556 do_pending_stack_adjust ();
2558 /* Push the temporary stack slot level so that we can free any
2559 temporaries we make. */
2560 push_temp_slots ();
2563 #ifdef FINAL_REG_PARM_STACK_SPACE
2564 reg_parm_stack_space = FINAL_REG_PARM_STACK_SPACE (args_size.constant,
2565 args_size.var);
2566 #endif
2567 /* Precompute any arguments as needed. */
2568 if (pass)
2569 precompute_arguments (flags, num_actuals, args);
2571 /* Now we are about to start emitting insns that can be deleted
2572 if a libcall is deleted. */
2573 if (flags & (ECF_CONST | ECF_PURE | ECF_MALLOC))
2574 start_sequence ();
2576 adjusted_args_size = args_size;
2577 /* Compute the actual size of the argument block required. The variable
2578 and constant sizes must be combined, the size may have to be rounded,
2579 and there may be a minimum required size. When generating a sibcall
2580 pattern, do not round up, since we'll be re-using whatever space our
2581 caller provided. */
2582 unadjusted_args_size
2583 = compute_argument_block_size (reg_parm_stack_space, &adjusted_args_size,
2584 (pass == 0 ? 0
2585 : preferred_stack_boundary));
2587 old_stack_allocated = stack_pointer_delta - pending_stack_adjust;
2589 /* The argument block when performing a sibling call is the
2590 incoming argument block. */
2591 if (pass == 0)
2592 argblock = virtual_incoming_args_rtx;
2594 /* If we have no actual push instructions, or shouldn't use them,
2595 make space for all args right now. */
2596 else if (adjusted_args_size.var != 0)
2598 if (old_stack_level == 0)
2600 emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);
2601 old_pending_adj = pending_stack_adjust;
2602 pending_stack_adjust = 0;
2603 /* stack_arg_under_construction says whether a stack arg is
2604 being constructed at the old stack level. Pushing the stack
2605 gets a clean outgoing argument block. */
2606 old_stack_arg_under_construction = stack_arg_under_construction;
2607 stack_arg_under_construction = 0;
2609 argblock = push_block (ARGS_SIZE_RTX (adjusted_args_size), 0, 0);
2611 else
2613 /* Note that we must go through the motions of allocating an argument
2614 block even if the size is zero because we may be storing args
2615 in the area reserved for register arguments, which may be part of
2616 the stack frame. */
2618 int needed = adjusted_args_size.constant;
2620 /* Store the maximum argument space used. It will be pushed by
2621 the prologue (if ACCUMULATE_OUTGOING_ARGS, or stack overflow
2622 checking). */
2624 if (needed > current_function_outgoing_args_size)
2625 current_function_outgoing_args_size = needed;
2627 if (must_preallocate)
2629 if (ACCUMULATE_OUTGOING_ARGS)
2631 /* Since the stack pointer will never be pushed, it is
2632 possible for the evaluation of a parm to clobber
2633 something we have already written to the stack.
2634 Since most function calls on RISC machines do not use
2635 the stack, this is uncommon, but must work correctly.
2637 Therefore, we save any area of the stack that was already
2638 written and that we are using. Here we set up to do this
2639 by making a new stack usage map from the old one. The
2640 actual save will be done by store_one_arg.
2642 Another approach might be to try to reorder the argument
2643 evaluations to avoid this conflicting stack usage. */
2645 #ifndef OUTGOING_REG_PARM_STACK_SPACE
2646 /* Since we will be writing into the entire argument area,
2647 the map must be allocated for its entire size, not just
2648 the part that is the responsibility of the caller. */
2649 needed += reg_parm_stack_space;
2650 #endif
2652 #ifdef ARGS_GROW_DOWNWARD
2653 highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
2654 needed + 1);
2655 #else
2656 highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
2657 needed);
2658 #endif
2659 stack_usage_map
2660 = (char *) alloca (highest_outgoing_arg_in_use);
2662 if (initial_highest_arg_in_use)
2663 bcopy (initial_stack_usage_map, stack_usage_map,
2664 initial_highest_arg_in_use);
2666 if (initial_highest_arg_in_use != highest_outgoing_arg_in_use)
2667 bzero (&stack_usage_map[initial_highest_arg_in_use],
2668 (highest_outgoing_arg_in_use
2669 - initial_highest_arg_in_use));
2670 needed = 0;
2672 /* The address of the outgoing argument list must not be
2673 copied to a register here, because argblock would be left
2674 pointing to the wrong place after the call to
2675 allocate_dynamic_stack_space below. */
2677 argblock = virtual_outgoing_args_rtx;
2679 else
2681 if (inhibit_defer_pop == 0)
2683 /* Try to reuse some or all of the pending_stack_adjust
2684 to get this space. */
2685 needed
2686 = (combine_pending_stack_adjustment_and_call
2687 (unadjusted_args_size,
2688 &adjusted_args_size,
2689 preferred_unit_stack_boundary));
2691 /* combine_pending_stack_adjustment_and_call computes
2692 an adjustment before the arguments are allocated.
2693 Account for them and see whether or not the stack
2694 needs to go up or down. */
2695 needed = unadjusted_args_size - needed;
2697 if (needed < 0)
2699 /* We're releasing stack space. */
2700 /* ??? We can avoid any adjustment at all if we're
2701 already aligned. FIXME. */
2702 pending_stack_adjust = -needed;
2703 do_pending_stack_adjust ();
2704 needed = 0;
2706 else
2707 /* We need to allocate space. We'll do that in
2708 push_block below. */
2709 pending_stack_adjust = 0;
2712 /* Special case this because overhead of `push_block' in
2713 this case is non-trivial. */
2714 if (needed == 0)
2715 argblock = virtual_outgoing_args_rtx;
2716 else
2717 argblock = push_block (GEN_INT (needed), 0, 0);
2719 /* We only really need to call `copy_to_reg' in the case
2720 where push insns are going to be used to pass ARGBLOCK
2721 to a function call in ARGS. In that case, the stack
2722 pointer changes value from the allocation point to the
2723 call point, and hence the value of
2724 VIRTUAL_OUTGOING_ARGS_RTX changes as well. But might
2725 as well always do it. */
2726 argblock = copy_to_reg (argblock);
2728 /* The save/restore code in store_one_arg handles all
2729 cases except one: a constructor call (including a C
2730 function returning a BLKmode struct) to initialize
2731 an argument. */
2732 if (stack_arg_under_construction)
2734 #ifndef OUTGOING_REG_PARM_STACK_SPACE
2735 rtx push_size = GEN_INT (reg_parm_stack_space
2736 + adjusted_args_size.constant);
2737 #else
2738 rtx push_size = GEN_INT (adjusted_args_size.constant);
2739 #endif
2740 if (old_stack_level == 0)
2742 emit_stack_save (SAVE_BLOCK, &old_stack_level,
2743 NULL_RTX);
2744 old_pending_adj = pending_stack_adjust;
2745 pending_stack_adjust = 0;
2746 /* stack_arg_under_construction says whether a stack
2747 arg is being constructed at the old stack level.
2748 Pushing the stack gets a clean outgoing argument
2749 block. */
2750 old_stack_arg_under_construction
2751 = stack_arg_under_construction;
2752 stack_arg_under_construction = 0;
2753 /* Make a new map for the new argument list. */
2754 stack_usage_map = (char *)
2755 alloca (highest_outgoing_arg_in_use);
2756 bzero (stack_usage_map, highest_outgoing_arg_in_use);
2757 highest_outgoing_arg_in_use = 0;
2759 allocate_dynamic_stack_space (push_size, NULL_RTX,
2760 BITS_PER_UNIT);
2762 /* If argument evaluation might modify the stack pointer,
2763 copy the address of the argument list to a register. */
2764 for (i = 0; i < num_actuals; i++)
2765 if (args[i].pass_on_stack)
2767 argblock = copy_addr_to_reg (argblock);
2768 break;
2774 compute_argument_addresses (args, argblock, num_actuals);
2776 #ifdef PREFERRED_STACK_BOUNDARY
2777 /* If we push args individually in reverse order, perform stack alignment
2778 before the first push (the last arg). */
2779 if (PUSH_ARGS_REVERSED && argblock == 0
2780 && adjusted_args_size.constant != unadjusted_args_size)
2782 /* When the stack adjustment is pending, we get better code
2783 by combining the adjustments. */
2784 if (pending_stack_adjust
2785 && ! (flags & (ECF_CONST | ECF_PURE))
2786 && ! inhibit_defer_pop)
2788 pending_stack_adjust
2789 = (combine_pending_stack_adjustment_and_call
2790 (unadjusted_args_size,
2791 &adjusted_args_size,
2792 preferred_unit_stack_boundary));
2793 do_pending_stack_adjust ();
2795 else if (argblock == 0)
2796 anti_adjust_stack (GEN_INT (adjusted_args_size.constant
2797 - unadjusted_args_size));
2799 /* Now that the stack is properly aligned, pops can't safely
2800 be deferred during the evaluation of the arguments. */
2801 NO_DEFER_POP;
2802 #endif
2804 /* Don't try to defer pops if preallocating, not even from the first arg,
2805 since ARGBLOCK probably refers to the SP. */
2806 if (argblock)
2807 NO_DEFER_POP;
2809 funexp = rtx_for_function_call (fndecl, exp);
2811 /* Figure out the register where the value, if any, will come back. */
2812 valreg = 0;
2813 if (TYPE_MODE (TREE_TYPE (exp)) != VOIDmode
2814 && ! structure_value_addr)
2816 if (pcc_struct_value)
2817 valreg = hard_function_value (build_pointer_type (TREE_TYPE (exp)),
2818 fndecl, (pass == 0));
2819 else
2820 valreg = hard_function_value (TREE_TYPE (exp), fndecl, (pass == 0));
2823 /* Precompute all register parameters. It isn't safe to compute anything
2824 once we have started filling any specific hard regs. */
2825 precompute_register_parameters (num_actuals, args, &reg_parm_seen);
2827 #ifdef REG_PARM_STACK_SPACE
2828 /* Save the fixed argument area if it's part of the caller's frame and
2829 is clobbered by argument setup for this call. */
2830 if (ACCUMULATE_OUTGOING_ARGS && pass)
2831 save_area = save_fixed_argument_area (reg_parm_stack_space, argblock,
2832 &low_to_save, &high_to_save);
2833 #endif
2835 /* Now store (and compute if necessary) all non-register parms.
2836 These come before register parms, since they can require block-moves,
2837 which could clobber the registers used for register parms.
2838 Parms which have partial registers are not stored here,
2839 but we do preallocate space here if they want that. */
2841 for (i = 0; i < num_actuals; i++)
2842 if (args[i].reg == 0 || args[i].pass_on_stack)
2843 store_one_arg (&args[i], argblock, flags,
2844 adjusted_args_size.var != 0, reg_parm_stack_space);
2846 /* If we have a parm that is passed in registers but not in memory
2847 and whose alignment does not permit a direct copy into registers,
2848 make a group of pseudos that correspond to each register that we
2849 will later fill. */
2850 if (STRICT_ALIGNMENT)
2851 store_unaligned_arguments_into_pseudos (args, num_actuals);
2853 /* Now store any partially-in-registers parm.
2854 This is the last place a block-move can happen. */
2855 if (reg_parm_seen)
2856 for (i = 0; i < num_actuals; i++)
2857 if (args[i].partial != 0 && ! args[i].pass_on_stack)
2858 store_one_arg (&args[i], argblock, flags,
2859 adjusted_args_size.var != 0, reg_parm_stack_space);
2861 #ifdef PREFERRED_STACK_BOUNDARY
2862 /* If we pushed args in forward order, perform stack alignment
2863 after pushing the last arg. */
2864 if (!PUSH_ARGS_REVERSED && argblock == 0)
2865 anti_adjust_stack (GEN_INT (adjusted_args_size.constant
2866 - unadjusted_args_size));
2867 #endif
2869 /* If register arguments require space on the stack and stack space
2870 was not preallocated, allocate stack space here for arguments
2871 passed in registers. */
2872 #ifdef OUTGOING_REG_PARM_STACK_SPACE
2873 if (!ACCUMULATE_OUTGOING_ARGS
2874 && must_preallocate == 0 && reg_parm_stack_space > 0)
2875 anti_adjust_stack (GEN_INT (reg_parm_stack_space));
2876 #endif
2878 /* Pass the function the address in which to return a
2879 structure value. */
2880 if (pass != 0 && structure_value_addr && ! structure_value_addr_parm)
2882 emit_move_insn (struct_value_rtx,
2883 force_reg (Pmode,
2884 force_operand (structure_value_addr,
2885 NULL_RTX)));
2887 /* Mark the memory for the aggregate as write-only. */
2888 if (current_function_check_memory_usage)
2889 emit_library_call (chkr_set_right_libfunc, 1,
2890 VOIDmode, 3,
2891 structure_value_addr, ptr_mode,
2892 GEN_INT (struct_value_size),
2893 TYPE_MODE (sizetype),
2894 GEN_INT (MEMORY_USE_WO),
2895 TYPE_MODE (integer_type_node));
2897 if (GET_CODE (struct_value_rtx) == REG)
2898 use_reg (&call_fusage, struct_value_rtx);
2901 funexp = prepare_call_address (funexp, fndecl, &call_fusage,
2902 reg_parm_seen);
2904 load_register_parameters (args, num_actuals, &call_fusage, flags);
2906 /* Perform postincrements before actually calling the function. */
2907 emit_queue ();
2909 /* Save a pointer to the last insn before the call, so that we can
2910 later safely search backwards to find the CALL_INSN. */
2911 before_call = get_last_insn ();
2913 /* Set up next argument register. For sibling calls on machines
2914 with register windows this should be the incoming register. */
2915 #ifdef FUNCTION_INCOMING_ARG
2916 if (pass == 0)
2917 next_arg_reg = FUNCTION_INCOMING_ARG (args_so_far, VOIDmode,
2918 void_type_node, 1);
2919 else
2920 #endif
2921 next_arg_reg = FUNCTION_ARG (args_so_far, VOIDmode,
2922 void_type_node, 1);
2924 /* All arguments and registers used for the call must be set up by
2925 now! */
2927 #ifdef PREFERRED_STACK_BOUNDARY
2928 /* Stack must be properly aligned now. */
2929 if (pass && stack_pointer_delta % preferred_unit_stack_boundary)
2930 abort ();
2931 #endif
2933 /* Generate the actual call instruction. */
2934 emit_call_1 (funexp, fndecl, funtype, unadjusted_args_size,
2935 adjusted_args_size.constant, struct_value_size,
2936 next_arg_reg, valreg, old_inhibit_defer_pop, call_fusage,
2937 flags);
2939 /* Verify that we've deallocated all the stack we used. */
2940 if (pass
2941 && old_stack_allocated != stack_pointer_delta - pending_stack_adjust)
2942 abort();
2944 /* If call is cse'able, make appropriate pair of reg-notes around it.
2945 Test valreg so we don't crash; may safely ignore `const'
2946 if return type is void. Disable for PARALLEL return values, because
2947 we have no way to move such values into a pseudo register. */
2948 if (pass
2949 && (flags & (ECF_CONST | ECF_PURE))
2950 && valreg != 0 && GET_CODE (valreg) != PARALLEL)
2952 rtx note = 0;
2953 rtx temp = gen_reg_rtx (GET_MODE (valreg));
2954 rtx insns;
2956 /* Mark the return value as a pointer if needed. */
2957 if (TREE_CODE (TREE_TYPE (exp)) == POINTER_TYPE)
2958 mark_reg_pointer (temp, TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp))));
2960 /* Construct an "equal form" for the value which mentions all the
2961 arguments in order as well as the function name. */
2962 for (i = 0; i < num_actuals; i++)
2963 note = gen_rtx_EXPR_LIST (VOIDmode, args[i].initial_value, note);
2964 note = gen_rtx_EXPR_LIST (VOIDmode, funexp, note);
2966 insns = get_insns ();
2967 end_sequence ();
2969 if (flags & ECF_PURE)
2970 note = gen_rtx_EXPR_LIST (VOIDmode,
2971 gen_rtx_USE (VOIDmode,
2972 gen_rtx_MEM (BLKmode,
2973 gen_rtx_SCRATCH (VOIDmode))), note);
2975 emit_libcall_block (insns, temp, valreg, note);
2977 valreg = temp;
2979 else if (flags & (ECF_CONST | ECF_PURE))
2981 /* Otherwise, just write out the sequence without a note. */
2982 rtx insns = get_insns ();
2984 end_sequence ();
2985 emit_insns (insns);
2987 else if (flags & ECF_MALLOC)
2989 rtx temp = gen_reg_rtx (GET_MODE (valreg));
2990 rtx last, insns;
2992 /* The return value from a malloc-like function is a pointer. */
2993 if (TREE_CODE (TREE_TYPE (exp)) == POINTER_TYPE)
2994 mark_reg_pointer (temp, BIGGEST_ALIGNMENT);
2996 emit_move_insn (temp, valreg);
2998 /* The return value from a malloc-like function can not alias
2999 anything else. */
3000 last = get_last_insn ();
3001 REG_NOTES (last) =
3002 gen_rtx_EXPR_LIST (REG_NOALIAS, temp, REG_NOTES (last));
3004 /* Write out the sequence. */
3005 insns = get_insns ();
3006 end_sequence ();
3007 emit_insns (insns);
3008 valreg = temp;
3011 /* For calls to `setjmp', etc., inform flow.c it should complain
3012 if nonvolatile values are live. For functions that cannot return,
3013 inform flow that control does not fall through. */
3015 if ((flags & (ECF_RETURNS_TWICE | ECF_NORETURN | ECF_LONGJMP)) || pass == 0)
3017 /* The barrier or NOTE_INSN_SETJMP note must be emitted
3018 immediately after the CALL_INSN. Some ports emit more
3019 than just a CALL_INSN above, so we must search for it here. */
3021 rtx last = get_last_insn ();
3022 while (GET_CODE (last) != CALL_INSN)
3024 last = PREV_INSN (last);
3025 /* There was no CALL_INSN? */
3026 if (last == before_call)
3027 abort ();
3030 if (flags & ECF_RETURNS_TWICE)
3032 emit_note_after (NOTE_INSN_SETJMP, last);
3033 current_function_calls_setjmp = 1;
3035 else
3036 emit_barrier_after (last);
3039 if (flags & ECF_LONGJMP)
3040 current_function_calls_longjmp = 1;
3042 /* If this function is returning into a memory location marked as
3043 readonly, it means it is initializing that location. But we normally
3044 treat functions as not clobbering such locations, so we need to
3045 specify that this one does. */
3046 if (target != 0 && GET_CODE (target) == MEM
3047 && structure_value_addr != 0 && RTX_UNCHANGING_P (target))
3048 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
3050 /* If value type not void, return an rtx for the value. */
3052 /* If there are cleanups to be called, don't use a hard reg as target.
3053 We need to double check this and see if it matters anymore. */
3054 if (any_pending_cleanups (1))
3056 if (target && REG_P (target)
3057 && REGNO (target) < FIRST_PSEUDO_REGISTER)
3058 target = 0;
3059 sibcall_failure = 1;
3062 if (TYPE_MODE (TREE_TYPE (exp)) == VOIDmode
3063 || ignore)
3065 target = const0_rtx;
3067 else if (structure_value_addr)
3069 if (target == 0 || GET_CODE (target) != MEM)
3071 target
3072 = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (exp)),
3073 memory_address (TYPE_MODE (TREE_TYPE (exp)),
3074 structure_value_addr));
3075 set_mem_attributes (target, exp, 1);
3078 else if (pcc_struct_value)
3080 /* This is the special C++ case where we need to
3081 know what the true target was. We take care to
3082 never use this value more than once in one expression. */
3083 target = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (exp)),
3084 copy_to_reg (valreg));
3085 set_mem_attributes (target, exp, 1);
3087 /* Handle calls that return values in multiple non-contiguous locations.
3088 The Irix 6 ABI has examples of this. */
3089 else if (GET_CODE (valreg) == PARALLEL)
3091 int bytes = int_size_in_bytes (TREE_TYPE (exp));
3093 if (target == 0)
3095 target = assign_stack_temp (TYPE_MODE (TREE_TYPE (exp)),
3096 bytes, 0);
3097 MEM_SET_IN_STRUCT_P (target, AGGREGATE_TYPE_P (TREE_TYPE (exp)));
3098 preserve_temp_slots (target);
3101 if (! rtx_equal_p (target, valreg))
3102 emit_group_store (target, valreg, bytes,
3103 TYPE_ALIGN (TREE_TYPE (exp)));
3105 /* We can not support sibling calls for this case. */
3106 sibcall_failure = 1;
3108 else if (target
3109 && GET_MODE (target) == TYPE_MODE (TREE_TYPE (exp))
3110 && GET_MODE (target) == GET_MODE (valreg))
3112 /* TARGET and VALREG cannot be equal at this point because the
3113 latter would not have REG_FUNCTION_VALUE_P true, while the
3114 former would if it were referring to the same register.
3116 If they refer to the same register, this move will be a no-op,
3117 except when function inlining is being done. */
3118 emit_move_insn (target, valreg);
3120 else if (TYPE_MODE (TREE_TYPE (exp)) == BLKmode)
3121 target = copy_blkmode_from_reg (target, valreg, TREE_TYPE (exp));
3122 else
3123 target = copy_to_reg (valreg);
3125 #ifdef PROMOTE_FUNCTION_RETURN
3126 /* If we promoted this return value, make the proper SUBREG. TARGET
3127 might be const0_rtx here, so be careful. */
3128 if (GET_CODE (target) == REG
3129 && TYPE_MODE (TREE_TYPE (exp)) != BLKmode
3130 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
3132 tree type = TREE_TYPE (exp);
3133 int unsignedp = TREE_UNSIGNED (type);
3135 /* If we don't promote as expected, something is wrong. */
3136 if (GET_MODE (target)
3137 != promote_mode (type, TYPE_MODE (type), &unsignedp, 1))
3138 abort ();
3140 target = gen_rtx_SUBREG (TYPE_MODE (type), target, 0);
3141 SUBREG_PROMOTED_VAR_P (target) = 1;
3142 SUBREG_PROMOTED_UNSIGNED_P (target) = unsignedp;
3144 #endif
3146 /* If size of args is variable or this was a constructor call for a stack
3147 argument, restore saved stack-pointer value. */
3149 if (old_stack_level)
3151 emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX);
3152 pending_stack_adjust = old_pending_adj;
3153 stack_arg_under_construction = old_stack_arg_under_construction;
3154 highest_outgoing_arg_in_use = initial_highest_arg_in_use;
3155 stack_usage_map = initial_stack_usage_map;
3156 sibcall_failure = 1;
3158 else if (ACCUMULATE_OUTGOING_ARGS && pass)
3160 #ifdef REG_PARM_STACK_SPACE
3161 if (save_area)
3163 restore_fixed_argument_area (save_area, argblock,
3164 high_to_save, low_to_save);
3166 #endif
3168 /* If we saved any argument areas, restore them. */
3169 for (i = 0; i < num_actuals; i++)
3170 if (args[i].save_area)
3172 enum machine_mode save_mode = GET_MODE (args[i].save_area);
3173 rtx stack_area
3174 = gen_rtx_MEM (save_mode,
3175 memory_address (save_mode,
3176 XEXP (args[i].stack_slot, 0)));
3178 if (save_mode != BLKmode)
3179 emit_move_insn (stack_area, args[i].save_area);
3180 else
3181 emit_block_move (stack_area,
3182 validize_mem (args[i].save_area),
3183 GEN_INT (args[i].size.constant),
3184 PARM_BOUNDARY);
3187 highest_outgoing_arg_in_use = initial_highest_arg_in_use;
3188 stack_usage_map = initial_stack_usage_map;
3191 /* If this was alloca, record the new stack level for nonlocal gotos.
3192 Check for the handler slots since we might not have a save area
3193 for non-local gotos. */
3195 if ((flags & ECF_MAY_BE_ALLOCA) && nonlocal_goto_handler_slots != 0)
3196 emit_stack_save (SAVE_NONLOCAL, &nonlocal_goto_stack_level, NULL_RTX);
3198 pop_temp_slots ();
3200 /* Free up storage we no longer need. */
3201 for (i = 0; i < num_actuals; ++i)
3202 if (args[i].aligned_regs)
3203 free (args[i].aligned_regs);
3205 if (pass == 0)
3207 /* Undo the fake expand_start_target_temps we did earlier. If
3208 there had been any cleanups created, we've already set
3209 sibcall_failure. */
3210 expand_end_target_temps ();
3213 insns = get_insns ();
3214 end_sequence ();
3216 if (pass == 0)
3218 tail_call_insns = insns;
3220 /* If something prevents making this a sibling call,
3221 zero out the sequence. */
3222 if (sibcall_failure)
3223 tail_call_insns = NULL_RTX;
3224 /* Restore the pending stack adjustment now that we have
3225 finished generating the sibling call sequence. */
3227 pending_stack_adjust = save_pending_stack_adjust;
3228 stack_pointer_delta = save_stack_pointer_delta;
3230 /* Prepare arg structure for next iteration. */
3231 for (i = 0 ; i < num_actuals ; i++)
3233 args[i].value = 0;
3234 args[i].aligned_regs = 0;
3235 args[i].stack = 0;
3238 else
3239 normal_call_insns = insns;
3242 /* The function optimize_sibling_and_tail_recursive_calls doesn't
3243 handle CALL_PLACEHOLDERs inside other CALL_PLACEHOLDERs. This
3244 can happen if the arguments to this function call an inline
3245 function who's expansion contains another CALL_PLACEHOLDER.
3247 If there are any C_Ps in any of these sequences, replace them
3248 with their normal call. */
3250 for (insn = normal_call_insns; insn; insn = NEXT_INSN (insn))
3251 if (GET_CODE (insn) == CALL_INSN
3252 && GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
3253 replace_call_placeholder (insn, sibcall_use_normal);
3255 for (insn = tail_call_insns; insn; insn = NEXT_INSN (insn))
3256 if (GET_CODE (insn) == CALL_INSN
3257 && GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
3258 replace_call_placeholder (insn, sibcall_use_normal);
3260 for (insn = tail_recursion_insns; insn; insn = NEXT_INSN (insn))
3261 if (GET_CODE (insn) == CALL_INSN
3262 && GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
3263 replace_call_placeholder (insn, sibcall_use_normal);
3265 /* If this was a potential tail recursion site, then emit a
3266 CALL_PLACEHOLDER with the normal and the tail recursion streams.
3267 One of them will be selected later. */
3268 if (tail_recursion_insns || tail_call_insns)
3270 /* The tail recursion label must be kept around. We could expose
3271 its use in the CALL_PLACEHOLDER, but that creates unwanted edges
3272 and makes determining true tail recursion sites difficult.
3274 So we set LABEL_PRESERVE_P here, then clear it when we select
3275 one of the call sequences after rtl generation is complete. */
3276 if (tail_recursion_insns)
3277 LABEL_PRESERVE_P (tail_recursion_label) = 1;
3278 emit_call_insn (gen_rtx_CALL_PLACEHOLDER (VOIDmode, normal_call_insns,
3279 tail_call_insns,
3280 tail_recursion_insns,
3281 tail_recursion_label));
3283 else
3284 emit_insns (normal_call_insns);
3286 currently_expanding_call--;
3288 return target;
3291 /* Returns nonzero if FUN is the symbol for a library function which can
3292 not throw. */
3294 static int
3295 libfunc_nothrow (fun)
3296 rtx fun;
3298 if (fun == throw_libfunc
3299 || fun == rethrow_libfunc
3300 || fun == sjthrow_libfunc
3301 || fun == sjpopnthrow_libfunc)
3302 return 0;
3304 return 1;
3307 /* Output a library call to function FUN (a SYMBOL_REF rtx).
3308 The RETVAL parameter specifies whether return value needs to be saved, other
3309 parameters are documented in the emit_library_call function below. */
3310 static rtx
3311 emit_library_call_value_1 (retval, orgfun, value, fn_type, outmode, nargs, p)
3312 int retval;
3313 rtx orgfun;
3314 rtx value;
3315 int fn_type;
3316 enum machine_mode outmode;
3317 int nargs;
3318 va_list p;
3320 /* Total size in bytes of all the stack-parms scanned so far. */
3321 struct args_size args_size;
3322 /* Size of arguments before any adjustments (such as rounding). */
3323 struct args_size original_args_size;
3324 register int argnum;
3325 rtx fun;
3326 int inc;
3327 int count;
3328 struct args_size alignment_pad;
3329 rtx argblock = 0;
3330 CUMULATIVE_ARGS args_so_far;
3331 struct arg { rtx value; enum machine_mode mode; rtx reg; int partial;
3332 struct args_size offset; struct args_size size; rtx save_area; };
3333 struct arg *argvec;
3334 int old_inhibit_defer_pop = inhibit_defer_pop;
3335 rtx call_fusage = 0;
3336 rtx mem_value = 0;
3337 rtx valreg;
3338 int pcc_struct_value = 0;
3339 int struct_value_size = 0;
3340 int flags = 0;
3341 int reg_parm_stack_space = 0;
3342 int needed;
3344 #ifdef REG_PARM_STACK_SPACE
3345 /* Define the boundary of the register parm stack space that needs to be
3346 save, if any. */
3347 int low_to_save = -1, high_to_save = 0;
3348 rtx save_area = 0; /* Place that it is saved */
3349 #endif
3351 /* Size of the stack reserved for parameter registers. */
3352 int initial_highest_arg_in_use = highest_outgoing_arg_in_use;
3353 char *initial_stack_usage_map = stack_usage_map;
3355 #ifdef REG_PARM_STACK_SPACE
3356 #ifdef MAYBE_REG_PARM_STACK_SPACE
3357 reg_parm_stack_space = MAYBE_REG_PARM_STACK_SPACE;
3358 #else
3359 reg_parm_stack_space = REG_PARM_STACK_SPACE ((tree) 0);
3360 #endif
3361 #endif
3363 if (fn_type == 1)
3364 flags |= ECF_CONST;
3365 else if (fn_type == 2)
3366 flags |= ECF_PURE;
3367 fun = orgfun;
3369 if (libfunc_nothrow (fun))
3370 flags |= ECF_NOTHROW;
3372 #ifdef PREFERRED_STACK_BOUNDARY
3373 /* Ensure current function's preferred stack boundary is at least
3374 what we need. */
3375 if (cfun->preferred_stack_boundary < PREFERRED_STACK_BOUNDARY)
3376 cfun->preferred_stack_boundary = PREFERRED_STACK_BOUNDARY;
3377 #endif
3379 /* If this kind of value comes back in memory,
3380 decide where in memory it should come back. */
3381 if (outmode != VOIDmode && aggregate_value_p (type_for_mode (outmode, 0)))
3383 #ifdef PCC_STATIC_STRUCT_RETURN
3384 rtx pointer_reg
3385 = hard_function_value (build_pointer_type (type_for_mode (outmode, 0)),
3386 0, 0);
3387 mem_value = gen_rtx_MEM (outmode, pointer_reg);
3388 pcc_struct_value = 1;
3389 if (value == 0)
3390 value = gen_reg_rtx (outmode);
3391 #else /* not PCC_STATIC_STRUCT_RETURN */
3392 struct_value_size = GET_MODE_SIZE (outmode);
3393 if (value != 0 && GET_CODE (value) == MEM)
3394 mem_value = value;
3395 else
3396 mem_value = assign_stack_temp (outmode, GET_MODE_SIZE (outmode), 0);
3397 #endif
3399 /* This call returns a big structure. */
3400 flags &= ~(ECF_CONST | ECF_PURE);
3403 /* ??? Unfinished: must pass the memory address as an argument. */
3405 /* Copy all the libcall-arguments out of the varargs data
3406 and into a vector ARGVEC.
3408 Compute how to pass each argument. We only support a very small subset
3409 of the full argument passing conventions to limit complexity here since
3410 library functions shouldn't have many args. */
3412 argvec = (struct arg *) alloca ((nargs + 1) * sizeof (struct arg));
3413 bzero ((char *) argvec, (nargs + 1) * sizeof (struct arg));
3415 INIT_CUMULATIVE_ARGS (args_so_far, NULL_TREE, fun, 0);
3417 args_size.constant = 0;
3418 args_size.var = 0;
3420 count = 0;
3422 /* Now we are about to start emitting insns that can be deleted
3423 if a libcall is deleted. */
3424 if (flags & (ECF_CONST | ECF_PURE))
3425 start_sequence ();
3427 push_temp_slots ();
3429 /* If there's a structure value address to be passed,
3430 either pass it in the special place, or pass it as an extra argument. */
3431 if (mem_value && struct_value_rtx == 0 && ! pcc_struct_value)
3433 rtx addr = XEXP (mem_value, 0);
3434 nargs++;
3436 /* Make sure it is a reasonable operand for a move or push insn. */
3437 if (GET_CODE (addr) != REG && GET_CODE (addr) != MEM
3438 && ! (CONSTANT_P (addr) && LEGITIMATE_CONSTANT_P (addr)))
3439 addr = force_operand (addr, NULL_RTX);
3441 argvec[count].value = addr;
3442 argvec[count].mode = Pmode;
3443 argvec[count].partial = 0;
3445 argvec[count].reg = FUNCTION_ARG (args_so_far, Pmode, NULL_TREE, 1);
3446 #ifdef FUNCTION_ARG_PARTIAL_NREGS
3447 if (FUNCTION_ARG_PARTIAL_NREGS (args_so_far, Pmode, NULL_TREE, 1))
3448 abort ();
3449 #endif
3451 locate_and_pad_parm (Pmode, NULL_TREE,
3452 #ifdef STACK_PARMS_IN_REG_PARM_AREA
3454 #else
3455 argvec[count].reg != 0,
3456 #endif
3457 NULL_TREE, &args_size, &argvec[count].offset,
3458 &argvec[count].size, &alignment_pad);
3461 if (argvec[count].reg == 0 || argvec[count].partial != 0
3462 || reg_parm_stack_space > 0)
3463 args_size.constant += argvec[count].size.constant;
3465 FUNCTION_ARG_ADVANCE (args_so_far, Pmode, (tree) 0, 1);
3467 count++;
3470 for (; count < nargs; count++)
3472 rtx val = va_arg (p, rtx);
3473 enum machine_mode mode = va_arg (p, enum machine_mode);
3475 /* We cannot convert the arg value to the mode the library wants here;
3476 must do it earlier where we know the signedness of the arg. */
3477 if (mode == BLKmode
3478 || (GET_MODE (val) != mode && GET_MODE (val) != VOIDmode))
3479 abort ();
3481 /* On some machines, there's no way to pass a float to a library fcn.
3482 Pass it as a double instead. */
3483 #ifdef LIBGCC_NEEDS_DOUBLE
3484 if (LIBGCC_NEEDS_DOUBLE && mode == SFmode)
3485 val = convert_modes (DFmode, SFmode, val, 0), mode = DFmode;
3486 #endif
3488 /* There's no need to call protect_from_queue, because
3489 either emit_move_insn or emit_push_insn will do that. */
3491 /* Make sure it is a reasonable operand for a move or push insn. */
3492 if (GET_CODE (val) != REG && GET_CODE (val) != MEM
3493 && ! (CONSTANT_P (val) && LEGITIMATE_CONSTANT_P (val)))
3494 val = force_operand (val, NULL_RTX);
3496 #ifdef FUNCTION_ARG_PASS_BY_REFERENCE
3497 if (FUNCTION_ARG_PASS_BY_REFERENCE (args_so_far, mode, NULL_TREE, 1))
3499 /* We do not support FUNCTION_ARG_CALLEE_COPIES here since it can
3500 be viewed as just an efficiency improvement. */
3501 rtx slot = assign_stack_temp (mode, GET_MODE_SIZE (mode), 0);
3502 emit_move_insn (slot, val);
3503 val = force_operand (XEXP (slot, 0), NULL_RTX);
3504 mode = Pmode;
3506 #endif
3508 argvec[count].value = val;
3509 argvec[count].mode = mode;
3511 argvec[count].reg = FUNCTION_ARG (args_so_far, mode, NULL_TREE, 1);
3513 #ifdef FUNCTION_ARG_PARTIAL_NREGS
3514 argvec[count].partial
3515 = FUNCTION_ARG_PARTIAL_NREGS (args_so_far, mode, NULL_TREE, 1);
3516 #else
3517 argvec[count].partial = 0;
3518 #endif
3520 locate_and_pad_parm (mode, NULL_TREE,
3521 #ifdef STACK_PARMS_IN_REG_PARM_AREA
3523 #else
3524 argvec[count].reg != 0,
3525 #endif
3526 NULL_TREE, &args_size, &argvec[count].offset,
3527 &argvec[count].size, &alignment_pad);
3529 if (argvec[count].size.var)
3530 abort ();
3532 if (reg_parm_stack_space == 0 && argvec[count].partial)
3533 argvec[count].size.constant -= argvec[count].partial * UNITS_PER_WORD;
3535 if (argvec[count].reg == 0 || argvec[count].partial != 0
3536 || reg_parm_stack_space > 0)
3537 args_size.constant += argvec[count].size.constant;
3539 FUNCTION_ARG_ADVANCE (args_so_far, mode, (tree) 0, 1);
3542 #ifdef FINAL_REG_PARM_STACK_SPACE
3543 reg_parm_stack_space = FINAL_REG_PARM_STACK_SPACE (args_size.constant,
3544 args_size.var);
3545 #endif
3546 /* If this machine requires an external definition for library
3547 functions, write one out. */
3548 assemble_external_libcall (fun);
3550 original_args_size = args_size;
3551 #ifdef PREFERRED_STACK_BOUNDARY
3552 args_size.constant = (((args_size.constant
3553 + stack_pointer_delta
3554 + STACK_BYTES - 1)
3555 / STACK_BYTES
3556 * STACK_BYTES)
3557 - stack_pointer_delta);
3558 #endif
3560 args_size.constant = MAX (args_size.constant,
3561 reg_parm_stack_space);
3563 #ifndef OUTGOING_REG_PARM_STACK_SPACE
3564 args_size.constant -= reg_parm_stack_space;
3565 #endif
3567 if (args_size.constant > current_function_outgoing_args_size)
3568 current_function_outgoing_args_size = args_size.constant;
3570 if (ACCUMULATE_OUTGOING_ARGS)
3572 /* Since the stack pointer will never be pushed, it is possible for
3573 the evaluation of a parm to clobber something we have already
3574 written to the stack. Since most function calls on RISC machines
3575 do not use the stack, this is uncommon, but must work correctly.
3577 Therefore, we save any area of the stack that was already written
3578 and that we are using. Here we set up to do this by making a new
3579 stack usage map from the old one.
3581 Another approach might be to try to reorder the argument
3582 evaluations to avoid this conflicting stack usage. */
3584 needed = args_size.constant;
3586 #ifndef OUTGOING_REG_PARM_STACK_SPACE
3587 /* Since we will be writing into the entire argument area, the
3588 map must be allocated for its entire size, not just the part that
3589 is the responsibility of the caller. */
3590 needed += reg_parm_stack_space;
3591 #endif
3593 #ifdef ARGS_GROW_DOWNWARD
3594 highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
3595 needed + 1);
3596 #else
3597 highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
3598 needed);
3599 #endif
3600 stack_usage_map = (char *) alloca (highest_outgoing_arg_in_use);
3602 if (initial_highest_arg_in_use)
3603 bcopy (initial_stack_usage_map, stack_usage_map,
3604 initial_highest_arg_in_use);
3606 if (initial_highest_arg_in_use != highest_outgoing_arg_in_use)
3607 bzero (&stack_usage_map[initial_highest_arg_in_use],
3608 highest_outgoing_arg_in_use - initial_highest_arg_in_use);
3609 needed = 0;
3611 /* The address of the outgoing argument list must not be copied to a
3612 register here, because argblock would be left pointing to the
3613 wrong place after the call to allocate_dynamic_stack_space below.
3616 argblock = virtual_outgoing_args_rtx;
3618 else
3620 if (!PUSH_ARGS)
3621 argblock = push_block (GEN_INT (args_size.constant), 0, 0);
3624 #ifdef PREFERRED_STACK_BOUNDARY
3625 /* If we push args individually in reverse order, perform stack alignment
3626 before the first push (the last arg). */
3627 if (argblock == 0 && PUSH_ARGS_REVERSED)
3628 anti_adjust_stack (GEN_INT (args_size.constant
3629 - original_args_size.constant));
3630 #endif
3632 if (PUSH_ARGS_REVERSED)
3634 inc = -1;
3635 argnum = nargs - 1;
3637 else
3639 inc = 1;
3640 argnum = 0;
3643 #ifdef REG_PARM_STACK_SPACE
3644 if (ACCUMULATE_OUTGOING_ARGS)
3646 /* The argument list is the property of the called routine and it
3647 may clobber it. If the fixed area has been used for previous
3648 parameters, we must save and restore it.
3650 Here we compute the boundary of the area that needs to be saved, if any. */
3652 #ifdef ARGS_GROW_DOWNWARD
3653 for (count = 0; count < reg_parm_stack_space + 1; count++)
3654 #else
3655 for (count = 0; count < reg_parm_stack_space; count++)
3656 #endif
3658 if (count >= highest_outgoing_arg_in_use
3659 || stack_usage_map[count] == 0)
3660 continue;
3662 if (low_to_save == -1)
3663 low_to_save = count;
3665 high_to_save = count;
3668 if (low_to_save >= 0)
3670 int num_to_save = high_to_save - low_to_save + 1;
3671 enum machine_mode save_mode
3672 = mode_for_size (num_to_save * BITS_PER_UNIT, MODE_INT, 1);
3673 rtx stack_area;
3675 /* If we don't have the required alignment, must do this in BLKmode. */
3676 if ((low_to_save & (MIN (GET_MODE_SIZE (save_mode),
3677 BIGGEST_ALIGNMENT / UNITS_PER_WORD) - 1)))
3678 save_mode = BLKmode;
3680 #ifdef ARGS_GROW_DOWNWARD
3681 stack_area = gen_rtx_MEM (save_mode,
3682 memory_address (save_mode,
3683 plus_constant (argblock,
3684 - high_to_save)));
3685 #else
3686 stack_area = gen_rtx_MEM (save_mode,
3687 memory_address (save_mode,
3688 plus_constant (argblock,
3689 low_to_save)));
3690 #endif
3691 if (save_mode == BLKmode)
3693 save_area = assign_stack_temp (BLKmode, num_to_save, 0);
3694 emit_block_move (validize_mem (save_area), stack_area,
3695 GEN_INT (num_to_save), PARM_BOUNDARY);
3697 else
3699 save_area = gen_reg_rtx (save_mode);
3700 emit_move_insn (save_area, stack_area);
3704 #endif
3706 /* Push the args that need to be pushed. */
3708 /* ARGNUM indexes the ARGVEC array in the order in which the arguments
3709 are to be pushed. */
3710 for (count = 0; count < nargs; count++, argnum += inc)
3712 register enum machine_mode mode = argvec[argnum].mode;
3713 register rtx val = argvec[argnum].value;
3714 rtx reg = argvec[argnum].reg;
3715 int partial = argvec[argnum].partial;
3716 int lower_bound = 0, upper_bound = 0, i;
3718 if (! (reg != 0 && partial == 0))
3720 if (ACCUMULATE_OUTGOING_ARGS)
3722 /* If this is being stored into a pre-allocated, fixed-size,
3723 stack area, save any previous data at that location. */
3725 #ifdef ARGS_GROW_DOWNWARD
3726 /* stack_slot is negative, but we want to index stack_usage_map
3727 with positive values. */
3728 upper_bound = -argvec[argnum].offset.constant + 1;
3729 lower_bound = upper_bound - argvec[argnum].size.constant;
3730 #else
3731 lower_bound = argvec[argnum].offset.constant;
3732 upper_bound = lower_bound + argvec[argnum].size.constant;
3733 #endif
3735 for (i = lower_bound; i < upper_bound; i++)
3736 if (stack_usage_map[i]
3737 /* Don't store things in the fixed argument area at this
3738 point; it has already been saved. */
3739 && i > reg_parm_stack_space)
3740 break;
3742 if (i != upper_bound)
3744 /* We need to make a save area. See what mode we can make
3745 it. */
3746 enum machine_mode save_mode
3747 = mode_for_size (argvec[argnum].size.constant
3748 * BITS_PER_UNIT,
3749 MODE_INT, 1);
3750 rtx stack_area
3751 = gen_rtx_MEM
3752 (save_mode,
3753 memory_address
3754 (save_mode,
3755 plus_constant (argblock,
3756 argvec[argnum].offset.constant)));
3757 argvec[argnum].save_area = gen_reg_rtx (save_mode);
3759 emit_move_insn (argvec[argnum].save_area, stack_area);
3763 emit_push_insn (val, mode, NULL_TREE, NULL_RTX, 0, partial, reg, 0,
3764 argblock, GEN_INT (argvec[argnum].offset.constant),
3765 reg_parm_stack_space, ARGS_SIZE_RTX (alignment_pad));
3767 /* Now mark the segment we just used. */
3768 if (ACCUMULATE_OUTGOING_ARGS)
3769 for (i = lower_bound; i < upper_bound; i++)
3770 stack_usage_map[i] = 1;
3772 NO_DEFER_POP;
3776 #ifdef PREFERRED_STACK_BOUNDARY
3777 /* If we pushed args in forward order, perform stack alignment
3778 after pushing the last arg. */
3779 if (argblock == 0 && !PUSH_ARGS_REVERSED)
3780 anti_adjust_stack (GEN_INT (args_size.constant
3781 - original_args_size.constant));
3782 #endif
3784 if (PUSH_ARGS_REVERSED)
3785 argnum = nargs - 1;
3786 else
3787 argnum = 0;
3789 fun = prepare_call_address (fun, NULL_TREE, &call_fusage, 0);
3791 /* Now load any reg parms into their regs. */
3793 /* ARGNUM indexes the ARGVEC array in the order in which the arguments
3794 are to be pushed. */
3795 for (count = 0; count < nargs; count++, argnum += inc)
3797 register rtx val = argvec[argnum].value;
3798 rtx reg = argvec[argnum].reg;
3799 int partial = argvec[argnum].partial;
3801 /* Handle calls that pass values in multiple non-contiguous
3802 locations. The PA64 has examples of this for library calls. */
3803 if (reg != 0 && GET_CODE (reg) == PARALLEL)
3804 emit_group_load (reg, val,
3805 GET_MODE_SIZE (GET_MODE (val)),
3806 GET_MODE_ALIGNMENT (GET_MODE (val)));
3807 else if (reg != 0 && partial == 0)
3808 emit_move_insn (reg, val);
3810 NO_DEFER_POP;
3813 /* Any regs containing parms remain in use through the call. */
3814 for (count = 0; count < nargs; count++)
3816 rtx reg = argvec[count].reg;
3817 if (reg != 0 && GET_CODE (reg) == PARALLEL)
3818 use_group_regs (&call_fusage, reg);
3819 else if (reg != 0)
3820 use_reg (&call_fusage, reg);
3823 /* Pass the function the address in which to return a structure value. */
3824 if (mem_value != 0 && struct_value_rtx != 0 && ! pcc_struct_value)
3826 emit_move_insn (struct_value_rtx,
3827 force_reg (Pmode,
3828 force_operand (XEXP (mem_value, 0),
3829 NULL_RTX)));
3830 if (GET_CODE (struct_value_rtx) == REG)
3831 use_reg (&call_fusage, struct_value_rtx);
3834 /* Don't allow popping to be deferred, since then
3835 cse'ing of library calls could delete a call and leave the pop. */
3836 NO_DEFER_POP;
3837 valreg = (mem_value == 0 && outmode != VOIDmode
3838 ? hard_libcall_value (outmode) : NULL_RTX);
3840 #ifdef PREFERRED_STACK_BOUNDARY
3841 /* Stack must be properly aligned now. */
3842 if (stack_pointer_delta & (PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT - 1))
3843 abort();
3844 #endif
3846 /* We pass the old value of inhibit_defer_pop + 1 to emit_call_1, which
3847 will set inhibit_defer_pop to that value. */
3848 /* The return type is needed to decide how many bytes the function pops.
3849 Signedness plays no role in that, so for simplicity, we pretend it's
3850 always signed. We also assume that the list of arguments passed has
3851 no impact, so we pretend it is unknown. */
3853 emit_call_1 (fun,
3854 get_identifier (XSTR (orgfun, 0)),
3855 build_function_type (outmode == VOIDmode ? void_type_node
3856 : type_for_mode (outmode, 0), NULL_TREE),
3857 original_args_size.constant, args_size.constant,
3858 struct_value_size,
3859 FUNCTION_ARG (args_so_far, VOIDmode, void_type_node, 1),
3860 valreg,
3861 old_inhibit_defer_pop + 1, call_fusage, flags);
3863 /* Now restore inhibit_defer_pop to its actual original value. */
3864 OK_DEFER_POP;
3866 /* If call is cse'able, make appropriate pair of reg-notes around it.
3867 Test valreg so we don't crash; may safely ignore `const'
3868 if return type is void. Disable for PARALLEL return values, because
3869 we have no way to move such values into a pseudo register. */
3870 if ((flags & (ECF_CONST | ECF_PURE))
3871 && valreg != 0 && GET_CODE (valreg) != PARALLEL)
3873 rtx note = 0;
3874 rtx temp = gen_reg_rtx (GET_MODE (valreg));
3875 rtx insns;
3876 int i;
3878 /* Construct an "equal form" for the value which mentions all the
3879 arguments in order as well as the function name. */
3880 for (i = 0; i < nargs; i++)
3881 note = gen_rtx_EXPR_LIST (VOIDmode, argvec[i].value, note);
3882 note = gen_rtx_EXPR_LIST (VOIDmode, fun, note);
3884 insns = get_insns ();
3885 end_sequence ();
3887 if (flags & ECF_PURE)
3888 note = gen_rtx_EXPR_LIST (VOIDmode,
3889 gen_rtx_USE (VOIDmode,
3890 gen_rtx_MEM (BLKmode,
3891 gen_rtx_SCRATCH (VOIDmode))), note);
3893 emit_libcall_block (insns, temp, valreg, note);
3895 valreg = temp;
3897 else if (flags & (ECF_CONST | ECF_PURE))
3899 /* Otherwise, just write out the sequence without a note. */
3900 rtx insns = get_insns ();
3902 end_sequence ();
3903 emit_insns (insns);
3905 pop_temp_slots ();
3907 /* Copy the value to the right place. */
3908 if (outmode != VOIDmode && retval)
3910 if (mem_value)
3912 if (value == 0)
3913 value = mem_value;
3914 if (value != mem_value)
3915 emit_move_insn (value, mem_value);
3917 else if (value != 0)
3918 emit_move_insn (value, hard_libcall_value (outmode));
3919 else
3920 value = hard_libcall_value (outmode);
3923 if (ACCUMULATE_OUTGOING_ARGS)
3925 #ifdef REG_PARM_STACK_SPACE
3926 if (save_area)
3928 enum machine_mode save_mode = GET_MODE (save_area);
3929 #ifdef ARGS_GROW_DOWNWARD
3930 rtx stack_area
3931 = gen_rtx_MEM (save_mode,
3932 memory_address (save_mode,
3933 plus_constant (argblock,
3934 - high_to_save)));
3935 #else
3936 rtx stack_area
3937 = gen_rtx_MEM (save_mode,
3938 memory_address (save_mode,
3939 plus_constant (argblock, low_to_save)));
3940 #endif
3941 if (save_mode != BLKmode)
3942 emit_move_insn (stack_area, save_area);
3943 else
3944 emit_block_move (stack_area, validize_mem (save_area),
3945 GEN_INT (high_to_save - low_to_save + 1),
3946 PARM_BOUNDARY);
3948 #endif
3950 /* If we saved any argument areas, restore them. */
3951 for (count = 0; count < nargs; count++)
3952 if (argvec[count].save_area)
3954 enum machine_mode save_mode = GET_MODE (argvec[count].save_area);
3955 rtx stack_area
3956 = gen_rtx_MEM (save_mode,
3957 memory_address
3958 (save_mode,
3959 plus_constant (argblock,
3960 argvec[count].offset.constant)));
3962 emit_move_insn (stack_area, argvec[count].save_area);
3965 highest_outgoing_arg_in_use = initial_highest_arg_in_use;
3966 stack_usage_map = initial_stack_usage_map;
3969 return value;
3973 /* Output a library call to function FUN (a SYMBOL_REF rtx)
3974 (emitting the queue unless NO_QUEUE is nonzero),
3975 for a value of mode OUTMODE,
3976 with NARGS different arguments, passed as alternating rtx values
3977 and machine_modes to convert them to.
3978 The rtx values should have been passed through protect_from_queue already.
3980    FN_TYPE is zero for `normal' calls, one for `const' calls, which
3981    will be enclosed in REG_LIBCALL/REG_RETVAL notes, and two for `pure'
3982    calls, which are handled like `const' calls with extra
3983 (use (memory (scratch)). */
3985 void
3986 emit_library_call VPARAMS((rtx orgfun, int fn_type, enum machine_mode outmode,
3987                            int nargs, ...))
   /* Old-style (K&R) parameter declarations, used only when the compiler
      building GCC lacks ANSI prototype support.  */
3989 #ifndef ANSI_PROTOTYPES
3990   rtx orgfun;
3991   int fn_type;
3992   enum machine_mode outmode;
3993   int nargs;
3994 #endif
3995   va_list p;
3997   VA_START (p, nargs);
   /* Without ANSI prototypes even the named parameters arrive through the
      va_list, so fetch them before the per-argument value/mode pairs.  */
3999 #ifndef ANSI_PROTOTYPES
4000   orgfun = va_arg (p, rtx);
4001   fn_type = va_arg (p, int);
4002   outmode = va_arg (p, enum machine_mode);
4003   nargs = va_arg (p, int);
4004 #endif
   /* First argument 0: caller does not want the return value; NULL_RTX:
      no location supplied to store one.  */
4006   emit_library_call_value_1 (0, orgfun, NULL_RTX, fn_type, outmode, nargs, p);
4008   va_end (p);
4011 /* Like emit_library_call except that an extra argument, VALUE,
4012 comes second and says where to store the result.
4013    (If VALUE is zero, this function chooses a convenient way
4014    to return the value.)
4016 This function returns an rtx for where the value is to be found.
4017 If VALUE is nonzero, VALUE is returned. */
4020 emit_library_call_value VPARAMS((rtx orgfun, rtx value, int fn_type,
4021                                  enum machine_mode outmode, int nargs, ...))
   /* Old-style (K&R) parameter declarations, used only when the compiler
      building GCC lacks ANSI prototype support.  */
4023 #ifndef ANSI_PROTOTYPES
4024   rtx orgfun;
4025   rtx value;
4026   int fn_type;
4027   enum machine_mode outmode;
4028   int nargs;
4029 #endif
4030   va_list p;
4032   VA_START (p, nargs);
   /* Without ANSI prototypes even the named parameters arrive through the
      va_list, so fetch them before the per-argument value/mode pairs.  */
4034 #ifndef ANSI_PROTOTYPES
4035   orgfun = va_arg (p, rtx);
4036   value = va_arg (p, rtx);
4037   fn_type = va_arg (p, int);
4038   outmode = va_arg (p, enum machine_mode);
4039   nargs = va_arg (p, int);
4040 #endif
   /* First argument 1: the return value is wanted; VALUE (possibly 0) is
      where to put it.  The helper returns where it actually ended up.  */
4042   value = emit_library_call_value_1 (1, orgfun, value, fn_type, outmode, nargs, p);
4044   va_end (p);
4046   return value;
4049 #if 0
   /* NOTE: this function is compiled out (#if 0) and retained for
      reference only; nothing in the file calls it.  */
4050 /* Return an rtx which represents a suitable home on the stack
4051    given TYPE, the type of the argument looking for a home.
4052    This is called only for BLKmode arguments.
4054    SIZE is the size needed for this target.
4055    ARGS_ADDR is the address of the bottom of the argument block for this call.
4056    OFFSET describes this parameter's offset into ARGS_ADDR.  It is meaningless
4057    if this machine uses push insns.  */
4059 static rtx
4060 target_for_arg (type, size, args_addr, offset)
4061      tree type;
4062      rtx size;
4063      rtx args_addr;
4064      struct args_size offset;
4066   rtx target;
4067   rtx offset_rtx = ARGS_SIZE_RTX (offset);
4069   /* We avoid calling memory_address when possible,
4070      because we want to address as close to the stack
4071      as possible.  For non-variable sized arguments,
4072      this will be stack-pointer relative addressing.  */
4073   if (GET_CODE (offset_rtx) == CONST_INT)
4074     target = plus_constant (args_addr, INTVAL (offset_rtx));
4075   else
4077       /* I have no idea how to guarantee that this
4078          will work in the presence of register parameters.  */
4079       target = gen_rtx_PLUS (Pmode, args_addr, offset_rtx);
4080       target = memory_address (QImode, target);
4083   return gen_rtx_MEM (BLKmode, target);
4085 #endif
4087 /* Store a single argument for a function call
4088 into the register or memory area where it must be passed.
4089 *ARG describes the argument value and where to pass it.
4091 ARGBLOCK is the address of the stack-block for all the arguments,
4092 or 0 on a machine where arguments are pushed individually.
4094    FLAGS is a bitmask of ECF_* values; its ECF_MAY_BE_ALLOCA bit says this
4095    could be a call to `alloca', so we must be careful about how the stack is used.
4097    VARIABLE_SIZE nonzero says that this was a variable-sized outgoing
4098    argument stack.  This is used if ACCUMULATE_OUTGOING_ARGS to indicate
4099    that we need not worry about saving and restoring the stack.
4101    REG_PARM_STACK_SPACE is the size of the stack area reserved for
   arguments that are also passed in registers.  */
4103 static void
4104 store_one_arg (arg, argblock, flags, variable_size,
4105                reg_parm_stack_space)
4106      struct arg_data *arg;
4107      rtx argblock;
4108      int flags;
4109      int variable_size ATTRIBUTE_UNUSED;
4110      int reg_parm_stack_space;
   /* PVAL is the tree expression whose value is this argument.  */
4112   register tree pval = arg->tree_value;
4113   rtx reg = 0;
4114   int partial = 0;
4115   int used = 0;
4116   int i, lower_bound = 0, upper_bound = 0;
   /* Bail out silently on erroneous arguments; diagnostics were already
      emitted when the ERROR_MARK was created.  */
4118   if (TREE_CODE (pval) == ERROR_MARK)
4119     return;
4121   /* Push a new temporary level for any temporaries we make for
4122      this argument.  */
4123   push_temp_slots ();
   /* When outgoing args accumulate in a pre-allocated area, track which
      stack slots are live so overlapping uses can be saved and restored.
      Sibling calls skip this bookkeeping entirely.  */
4125   if (ACCUMULATE_OUTGOING_ARGS && !(flags & ECF_SIBCALL))
4127       /* If this is being stored into a pre-allocated, fixed-size, stack area,
4128          save any previous data at that location.  */
4129       if (argblock && ! variable_size && arg->stack)
4131 #ifdef ARGS_GROW_DOWNWARD
4132           /* stack_slot is negative, but we want to index stack_usage_map
4133              with positive values.  */
4134           if (GET_CODE (XEXP (arg->stack_slot, 0)) == PLUS)
4135             upper_bound = -INTVAL (XEXP (XEXP (arg->stack_slot, 0), 1)) + 1;
4136           else
4137             upper_bound = 0;
4139           lower_bound = upper_bound - arg->size.constant;
4140 #else
4141           if (GET_CODE (XEXP (arg->stack_slot, 0)) == PLUS)
4142             lower_bound = INTVAL (XEXP (XEXP (arg->stack_slot, 0), 1));
4143           else
4144             lower_bound = 0;
4146           upper_bound = lower_bound + arg->size.constant;
4147 #endif
   /* Scan the slot range for any byte already in use; if found we must
      make a save area before clobbering it.  */
4149           for (i = lower_bound; i < upper_bound; i++)
4150             if (stack_usage_map[i]
4151                 /* Don't store things in the fixed argument area at this point;
4152                    it has already been saved.  */
4153                 && i > reg_parm_stack_space)
4154               break;
4156           if (i != upper_bound)
4158               /* We need to make a save area.  See what mode we can make it.  */
4159               enum machine_mode save_mode
4160                 = mode_for_size (arg->size.constant * BITS_PER_UNIT, MODE_INT, 1);
4161               rtx stack_area
4162                 = gen_rtx_MEM (save_mode,
4163                                memory_address (save_mode,
4164                                                XEXP (arg->stack_slot, 0)));
   /* BLKmode (no integer mode fits): save into a stack temporary;
      otherwise a pseudo register suffices.  */
4166               if (save_mode == BLKmode)
4168                   arg->save_area = assign_stack_temp (BLKmode,
4169                                                       arg->size.constant, 0);
4170                   MEM_SET_IN_STRUCT_P (arg->save_area,
4171                                        AGGREGATE_TYPE_P (TREE_TYPE
4172                                                          (arg->tree_value)));
4173                   preserve_temp_slots (arg->save_area);
4174                   emit_block_move (validize_mem (arg->save_area), stack_area,
4175                                    GEN_INT (arg->size.constant),
4176                                    PARM_BOUNDARY);
4178               else
4180                   arg->save_area = gen_reg_rtx (save_mode);
4181                   emit_move_insn (arg->save_area, stack_area);
4185       /* Now that we have saved any slots that will be overwritten by this
4186          store, mark all slots this store will use.  We must do this before
4187          we actually expand the argument since the expansion itself may
4188          trigger library calls which might need to use the same stack slot.  */
4189       if (argblock && ! variable_size && arg->stack)
4190         for (i = lower_bound; i < upper_bound; i++)
4191           stack_usage_map[i] = 1;
4194   /* If this isn't going to be placed on both the stack and in registers,
4195      set up the register and number of words.  */
4196   if (! arg->pass_on_stack)
4197     reg = arg->reg, partial = arg->partial;
4199   if (reg != 0 && partial == 0)
4200     /* Being passed entirely in a register.  We shouldn't be called in
4201        this case.  */
4202     abort ();
4204   /* If this arg needs special alignment, don't load the registers
4205      here.  */
4206   if (arg->n_aligned_regs != 0)
4207     reg = 0;
4209   /* If this is being passed partially in a register, we can't evaluate
4210      it directly into its stack slot.  Otherwise, we can.  */
4211   if (arg->value == 0)
4213       /* stack_arg_under_construction is nonzero if a function argument is
4214          being evaluated directly into the outgoing argument list and
4215          expand_call must take special action to preserve the argument list
4216          if it is called recursively.
4218          For scalar function arguments stack_usage_map is sufficient to
4219          determine which stack slots must be saved and restored.  Scalar
4220          arguments in general have pass_on_stack == 0.
4222          If this argument is initialized by a function which takes the
4223          address of the argument (a C++ constructor or a C function
4224          returning a BLKmode structure), then stack_usage_map is
4225          insufficient and expand_call must push the stack around the
4226          function call.  Such arguments have pass_on_stack == 1.
4228          Note that it is always safe to set stack_arg_under_construction,
4229          but this generates suboptimal code if set when not needed.  */
4231       if (arg->pass_on_stack)
4232         stack_arg_under_construction++;
   /* Evaluate the argument, writing straight into its stack slot when
      that is safe: not partial-in-register and the modes agree.  */
4234       arg->value = expand_expr (pval,
4235                                 (partial
4236                                  || TYPE_MODE (TREE_TYPE (pval)) != arg->mode)
4237                                 ? NULL_RTX : arg->stack,
4238                                 VOIDmode, 0);
4240       /* If we are promoting the object (or for any other reason the mode
4241          doesn't agree), convert the mode.  */
4243       if (arg->mode != TYPE_MODE (TREE_TYPE (pval)))
4244         arg->value = convert_modes (arg->mode, TYPE_MODE (TREE_TYPE (pval)),
4245                                     arg->value, arg->unsignedp);
4247       if (arg->pass_on_stack)
4248         stack_arg_under_construction--;
4251   /* Don't allow anything left on stack from computation
4252      of argument to alloca.  */
4253   if (flags & ECF_MAY_BE_ALLOCA)
4254     do_pending_stack_adjust ();
4256   if (arg->value == arg->stack)
4258       /* If the value is already in the stack slot, we are done.  */
4259       if (current_function_check_memory_usage && GET_CODE (arg->stack) == MEM)
   /* -fcheck-memory-usage bookkeeping: mark the slot read/write.  */
4261           emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
4262                              XEXP (arg->stack, 0), Pmode,
4263                              ARGS_SIZE_RTX (arg->size),
4264                              TYPE_MODE (sizetype),
4265                              GEN_INT (MEMORY_USE_RW),
4266                              TYPE_MODE (integer_type_node));
4269   else if (arg->mode != BLKmode)
4271       register int size;
4273       /* Argument is a scalar, not entirely passed in registers.
4274          (If part is passed in registers, arg->partial says how much
4275          and emit_push_insn will take care of putting it there.)
4277          Push it, and if its size is less than the
4278          amount of space allocated to it,
4279          also bump stack pointer by the additional space.
4280          Note that in C the default argument promotions
4281          will prevent such mismatches.  */
4283       size = GET_MODE_SIZE (arg->mode);
4284       /* Compute how much space the push instruction will push.
4285          On many machines, pushing a byte will advance the stack
4286          pointer by a halfword.  */
4287 #ifdef PUSH_ROUNDING
4288       size = PUSH_ROUNDING (size);
4289 #endif
4290       used = size;
4292       /* Compute how much space the argument should get:
4293          round up to a multiple of the alignment for arguments.  */
4294       if (none != FUNCTION_ARG_PADDING (arg->mode, TREE_TYPE (pval)))
4295         used = (((size + PARM_BOUNDARY / BITS_PER_UNIT - 1)
4296                  / (PARM_BOUNDARY / BITS_PER_UNIT))
4297                 * (PARM_BOUNDARY / BITS_PER_UNIT));
4299       /* This isn't already where we want it on the stack, so put it there.
4300          This can either be done with push or copy insns.  */
4301       emit_push_insn (arg->value, arg->mode, TREE_TYPE (pval), NULL_RTX, 0,
4302                       partial, reg, used - size, argblock,
4303                       ARGS_SIZE_RTX (arg->offset), reg_parm_stack_space,
4304                       ARGS_SIZE_RTX (arg->alignment_pad));
4306   else
4308       /* BLKmode, at least partly to be pushed.  */
4310       register int excess;
4311       rtx size_rtx;
4313       /* Pushing a nonscalar.
4314          If part is passed in registers, PARTIAL says how much
4315          and emit_push_insn will take care of putting it there.  */
4317       /* Round its size up to a multiple
4318          of the allocation unit for arguments.  */
4320       if (arg->size.var != 0)
   /* Variable-sized argument: let the precomputed args_size drive it.  */
4322           excess = 0;
4323           size_rtx = ARGS_SIZE_RTX (arg->size);
4325       else
4327           /* PUSH_ROUNDING has no effect on us, because
4328              emit_push_insn for BLKmode is careful to avoid it.  */
4329           excess = (arg->size.constant - int_size_in_bytes (TREE_TYPE (pval))
4330                     + partial * UNITS_PER_WORD);
4331           size_rtx = expr_size (pval);
4334       emit_push_insn (arg->value, arg->mode, TREE_TYPE (pval), size_rtx,
4335                       TYPE_ALIGN (TREE_TYPE (pval)), partial, reg, excess,
4336                       argblock, ARGS_SIZE_RTX (arg->offset),
4337                       reg_parm_stack_space,
4338                       ARGS_SIZE_RTX (arg->alignment_pad));
4342   /* Unless this is a partially-in-register argument, the argument is now
4343      in the stack.
4345      ??? Note that this can change arg->value from arg->stack to
4346      arg->stack_slot and it matters when they are not the same.
4347      It isn't totally clear that this is correct in all cases.  */
4348   if (partial == 0)
4349     arg->value = arg->stack_slot;
4351   /* Once we have pushed something, pops can't safely
4352      be deferred during the rest of the arguments.  */
4353   NO_DEFER_POP;
4355   /* ANSI doesn't require a sequence point here,
4356      but PCC has one, so this will avoid some problems.  */
4357   emit_queue ();
4359   /* Free any temporary slots made in processing this argument.  Show
4360      that we might have taken the address of something and pushed that
4361      as an operand.  */
4362   preserve_temp_slots (NULL_RTX);
4363   free_temp_slots ();
4364   pop_temp_slots ();