Add partial support for IA-64 unwind sections.
[official-gcc.git] / gcc / calls.c
blob12dea75584a5182328a825b9cd10771ecdf2fa3e
1 /* Convert function calls to rtl insns, for GNU C compiler.
2 Copyright (C) 1989, 1992, 1993, 1994, 1995, 1996, 1997, 1998
3 1999, 2000 Free Software Foundation, Inc.
5 This file is part of GNU CC.
7 GNU CC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 2, or (at your option)
10 any later version.
12 GNU CC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GNU CC; see the file COPYING. If not, write to
19 the Free Software Foundation, 59 Temple Place - Suite 330,
20 Boston, MA 02111-1307, USA. */
22 #include "config.h"
23 #include "system.h"
24 #include "rtl.h"
25 #include "tree.h"
26 #include "flags.h"
27 #include "expr.h"
28 #include "function.h"
29 #include "regs.h"
30 #include "insn-flags.h"
31 #include "toplev.h"
32 #include "output.h"
33 #include "tm_p.h"
35 #ifndef ACCUMULATE_OUTGOING_ARGS
36 #define ACCUMULATE_OUTGOING_ARGS 0
37 #endif
39 /* Supply a default definition for PUSH_ARGS. */
40 #ifndef PUSH_ARGS
41 #ifdef PUSH_ROUNDING
42 #define PUSH_ARGS !ACCUMULATE_OUTGOING_ARGS
43 #else
44 #define PUSH_ARGS 0
45 #endif
46 #endif
48 #if !defined FUNCTION_OK_FOR_SIBCALL
49 #define FUNCTION_OK_FOR_SIBCALL(DECL) 1
50 #endif
52 #if !defined PREFERRED_STACK_BOUNDARY && defined STACK_BOUNDARY
53 #define PREFERRED_STACK_BOUNDARY STACK_BOUNDARY
54 #endif
56 /* Decide whether a function's arguments should be processed
57 from first to last or from last to first.
59 They should if the stack and args grow in opposite directions, but
60 only if we have push insns. */
62 #ifdef PUSH_ROUNDING
64 #if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
65 #define PUSH_ARGS_REVERSED PUSH_ARGS
66 #endif
68 #endif
70 #ifndef PUSH_ARGS_REVERSED
71 #define PUSH_ARGS_REVERSED 0
72 #endif
74 /* Like PREFERRED_STACK_BOUNDARY but in units of bytes, not bits. */
75 #define STACK_BYTES (PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT)
77 /* Data structure and subroutines used within expand_call. */
/* Per-argument bookkeeping used while expanding one call.  One of these
   is filled in for each actual parameter by
   initialize_argument_information and consumed by the rest of
   expand_call's helpers.  */
struct arg_data
{
  /* Tree node for this argument.  */
  tree tree_value;
  /* Mode for value; TYPE_MODE unless promoted.  */
  enum machine_mode mode;
  /* Current RTL value for argument, or 0 if it isn't precomputed.  */
  rtx value;
  /* Initially-computed RTL value for argument; only for const functions.  */
  rtx initial_value;
  /* Register to pass this argument in, 0 if passed on stack, or a
     PARALLEL if the arg is to be copied into multiple non-contiguous
     registers.  */
  rtx reg;
  /* If REG was promoted from the actual mode of the argument expression,
     indicates whether the promotion is sign- or zero-extended.  */
  int unsignedp;
  /* Number of registers to use.  0 means put the whole arg in registers.
     Also 0 if not passed in registers.  */
  int partial;
  /* Non-zero if argument must be passed on stack.
     Note that some arguments may be passed on the stack
     even though pass_on_stack is zero, just because FUNCTION_ARG says so.
     pass_on_stack identifies arguments that *cannot* go in registers.  */
  int pass_on_stack;
  /* Offset of this argument from beginning of stack-args.  */
  struct args_size offset;
  /* Similar, but offset to the start of the stack slot.  Different from
     OFFSET if this arg pads downward.  */
  struct args_size slot_offset;
  /* Size of this argument on the stack, rounded up for any padding it gets,
     parts of the argument passed in registers do not count.
     If REG_PARM_STACK_SPACE is defined, then register parms
     are counted here as well.  */
  struct args_size size;
  /* Location on the stack at which parameter should be stored.  The store
     has already been done if STACK == VALUE.  */
  rtx stack;
  /* Location on the stack of the start of this argument slot.  This can
     differ from STACK if this arg pads downward.  This location is known
     to be aligned to FUNCTION_ARG_BOUNDARY.  */
  rtx stack_slot;
  /* Place that this stack area has been saved, if needed.  */
  rtx save_area;
  /* If an argument's alignment does not permit direct copying into registers,
     copy in smaller-sized pieces into pseudos.  These are stored in a
     block pointed to by this field.  The next field says how many
     word-sized pseudos we made.  */
  rtx *aligned_regs;
  /* Number of entries in ALIGNED_REGS above.  */
  int n_aligned_regs;
  /* The amount that the stack pointer needs to be adjusted to
     force alignment for the next argument.  */
  struct args_size alignment_pad;
};
134 /* A vector of one char per byte of stack space.  A byte is non-zero if
135 the corresponding stack location has been used.
136 This vector is used to prevent a function call within an argument from
137 clobbering any stack already set up. */
138 static char *stack_usage_map;
140 /* Size of STACK_USAGE_MAP. */
141 static int highest_outgoing_arg_in_use;
143 /* stack_arg_under_construction is nonzero when an argument may be
144 initialized with a constructor call (including a C function that
145 returns a BLKmode struct) and expand_call must take special action
146 to make sure the object being constructed does not overlap the
147 argument list for the constructor call. */
148 int stack_arg_under_construction;
150 static int calls_function PARAMS ((tree, int));
151 static int calls_function_1 PARAMS ((tree, int));
153 /* Nonzero if this is a call to a `const' function. */
154 #define ECF_CONST 1
155 /* Nonzero if this is a call to a `volatile' function. */
156 #define ECF_NORETURN 2
157 /* Nonzero if this is a call to malloc or a related function. */
158 #define ECF_MALLOC 4
159 /* Nonzero if it is plausible that this is a call to alloca. */
160 #define ECF_MAY_BE_ALLOCA 8
161 /* Nonzero if this is a call to a function that won't throw an exception. */
162 #define ECF_NOTHROW 16
163 /* Nonzero if this is a call to setjmp or a related function. */
164 #define ECF_RETURNS_TWICE 32
165 /* Nonzero if this is a call to `longjmp'. */
166 #define ECF_LONGJMP 64
167 /* Nonzero if this is a syscall that makes a new process in the image of
168 the current one. */
169 #define ECF_FORK_OR_EXEC 128
170 #define ECF_SIBCALL 256
171 /* Nonzero if this is a call to a "pure" function (like a const function,
172 but one that may read memory).  */
173 #define ECF_PURE 512
175 static void emit_call_1 PARAMS ((rtx, tree, tree, HOST_WIDE_INT,
176 HOST_WIDE_INT, HOST_WIDE_INT, rtx,
177 rtx, int, rtx, int));
178 static void precompute_register_parameters PARAMS ((int,
179 struct arg_data *,
180 int *));
181 static void store_one_arg PARAMS ((struct arg_data *, rtx, int, int,
182 int));
183 static void store_unaligned_arguments_into_pseudos PARAMS ((struct arg_data *,
184 int));
185 static int finalize_must_preallocate PARAMS ((int, int,
186 struct arg_data *,
187 struct args_size *));
188 static void precompute_arguments PARAMS ((int, int,
189 struct arg_data *));
190 static int compute_argument_block_size PARAMS ((int,
191 struct args_size *,
192 int));
193 static void initialize_argument_information PARAMS ((int,
194 struct arg_data *,
195 struct args_size *,
196 int, tree, tree,
197 CUMULATIVE_ARGS *,
198 int, rtx *, int *,
199 int *, int *));
200 static void compute_argument_addresses PARAMS ((struct arg_data *,
201 rtx, int));
202 static rtx rtx_for_function_call PARAMS ((tree, tree));
203 static void load_register_parameters PARAMS ((struct arg_data *,
204 int, rtx *));
205 static int libfunc_nothrow PARAMS ((rtx));
206 static rtx emit_library_call_value_1 PARAMS ((int, rtx, rtx, int,
207 enum machine_mode,
208 int, va_list));
209 static int special_function_p PARAMS ((tree, int));
210 static int flags_from_decl_or_type PARAMS ((tree));
211 static rtx try_to_integrate PARAMS ((tree, tree, rtx,
212 int, tree, rtx));
214 #ifdef REG_PARM_STACK_SPACE
215 static rtx save_fixed_argument_area PARAMS ((int, rtx, int *, int *));
216 static void restore_fixed_argument_area PARAMS ((rtx, rtx, int, int));
217 #endif
219 /* If WHICH is 1, return 1 if EXP contains a call to the built-in function
220 `alloca'.
222 If WHICH is 0, return 1 if EXP contains a call to any function.
223 Actually, we only need return 1 if evaluating EXP would require pushing
224 arguments on the stack, but that is too difficult to compute, so we just
225 assume any function call might require the stack. */
227 static tree calls_function_save_exprs;
229 static int
230 calls_function (exp, which)
231 tree exp;
232 int which;
234 int val;
235 calls_function_save_exprs = 0;
236 val = calls_function_1 (exp, which);
237 calls_function_save_exprs = 0;
238 return val;
/* Recursive worker for calls_function.  Walk EXP; with WHICH == 0 return
   1 if EXP contains any function call, with WHICH == 1 return 1 only if
   it contains a call to the built-in `alloca' (or to a function whose
   saved insns are known to call alloca).  */
static int
calls_function_1 (exp, which)
     tree exp;
     int which;
{
  register int i;
  enum tree_code code = TREE_CODE (exp);
  int type = TREE_CODE_CLASS (code);
  int length = tree_code_length[(int) code];

  /* If this code is language-specific, we don't know what it will do.  */
  if ((int) code >= NUM_TREE_CODES)
    return 1;

  /* Only expressions and references can contain calls.  */
  if (type != 'e' && type != '<' && type != '1' && type != '2' && type != 'r'
      && type != 'b')
    return 0;

  switch (code)
    {
    case CALL_EXPR:
      if (which == 0)
        return 1;
      else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
               && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
                   == FUNCTION_DECL))
        {
          tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);

          /* A direct call to the alloca built-in, or to a function
             whose compiled body is known to call alloca.  */
          if ((DECL_BUILT_IN (fndecl)
               && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
               && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_ALLOCA)
              || (DECL_SAVED_INSNS (fndecl)
                  && DECL_SAVED_INSNS (fndecl)->calls_alloca))
            return 1;
        }

      /* Third operand is RTL.  */
      length = 2;
      break;

    case SAVE_EXPR:
      if (SAVE_EXPR_RTL (exp) != 0)
        return 0;
      /* Don't walk the same SAVE_EXPR twice; remember the ones seen.  */
      if (value_member (exp, calls_function_save_exprs))
        return 0;
      calls_function_save_exprs = tree_cons (NULL_TREE, exp,
                                             calls_function_save_exprs);
      return (TREE_OPERAND (exp, 0) != 0
              && calls_function_1 (TREE_OPERAND (exp, 0), which));

    case BLOCK:
      {
        register tree local;

        /* Initializers of block-local variables may contain calls.  */
        for (local = BLOCK_VARS (exp); local; local = TREE_CHAIN (local))
          if (DECL_INITIAL (local) != 0
              && calls_function_1 (DECL_INITIAL (local), which))
            return 1;
      }
      {
        register tree subblock;

        for (subblock = BLOCK_SUBBLOCKS (exp);
             subblock;
             subblock = TREE_CHAIN (subblock))
          if (calls_function_1 (subblock, which))
            return 1;
      }
      return 0;

    case METHOD_CALL_EXPR:
      length = 3;
      break;

    case WITH_CLEANUP_EXPR:
      length = 1;
      break;

    case RTL_EXPR:
      return 0;

    default:
      break;
    }

  /* Recurse on the (remaining) operands of EXP.  */
  for (i = 0; i < length; i++)
    if (TREE_OPERAND (exp, i) != 0
        && calls_function_1 (TREE_OPERAND (exp, i), which))
      return 1;

  return 0;
}
336 /* Force FUNEXP into a form suitable for the address of a CALL,
337 and return that as an rtx. Also load the static chain register
338 if FNDECL is a nested function.
340 CALL_FUSAGE points to a variable holding the prospective
341 CALL_INSN_FUNCTION_USAGE information. */
rtx
prepare_call_address (funexp, fndecl, call_fusage, reg_parm_seen)
     rtx funexp;
     tree fndecl;
     rtx *call_fusage;
     int reg_parm_seen;
{
  rtx static_chain_value = 0;

  funexp = protect_from_queue (funexp, 0);

  if (fndecl != 0)
    /* Get possible static chain value for nested function in C.  */
    static_chain_value = lookup_static_chain (fndecl);

  /* Make a valid memory address and copy constants thru pseudo-regs,
     but not for a constant address if -fno-function-cse.  */
  if (GET_CODE (funexp) != SYMBOL_REF)
    /* If we are using registers for parameters, force the
       function address into a register now.  */
    funexp = ((SMALL_REGISTER_CLASSES && reg_parm_seen)
              ? force_not_mem (memory_address (FUNCTION_MODE, funexp))
              : memory_address (FUNCTION_MODE, funexp));
  else
    {
#ifndef NO_FUNCTION_CSE
      if (optimize && ! flag_no_function_cse)
#ifdef NO_RECURSIVE_FUNCTION_CSE
        /* Don't CSE a call to the current function; some targets
           handle recursive calls specially.  */
        if (fndecl != current_function_decl)
#endif
          funexp = force_reg (Pmode, funexp);
#endif
    }

  if (static_chain_value != 0)
    {
      emit_move_insn (static_chain_rtx, static_chain_value);

      /* Record the chain register as used so it is kept live
         across the call.  */
      if (GET_CODE (static_chain_rtx) == REG)
        use_reg (call_fusage, static_chain_rtx);
    }

  return funexp;
}
388 /* Generate instructions to call function FUNEXP,
389 and optionally pop the results.
390 The CALL_INSN is the first insn generated.
392 FNDECL is the declaration node of the function. This is given to the
393 macro RETURN_POPS_ARGS to determine whether this function pops its own args.
395 FUNTYPE is the data type of the function. This is given to the macro
396 RETURN_POPS_ARGS to determine whether this function pops its own args.
397 We used to allow an identifier for library functions, but that doesn't
398 work when the return type is an aggregate type and the calling convention
399 says that the pointer to this aggregate is to be popped by the callee.
401 STACK_SIZE is the number of bytes of arguments on the stack,
402 ROUNDED_STACK_SIZE is that number rounded up to
403 PREFERRED_STACK_BOUNDARY; zero if the size is variable. This is
404 both to put into the call insn and to generate explicit popping
405 code if necessary.
407 STRUCT_VALUE_SIZE is the number of bytes wanted in a structure value.
408 It is zero if this call doesn't want a structure value.
410 NEXT_ARG_REG is the rtx that results from executing
411 FUNCTION_ARG (args_so_far, VOIDmode, void_type_node, 1)
412 just after all the args have had their registers assigned.
413 This could be whatever you like, but normally it is the first
414 arg-register beyond those used for args in this call,
415 or 0 if all the arg-registers are used in this call.
416 It is passed on to `gen_call' so you can put this info in the call insn.
418 VALREG is a hard register in which a value is returned,
419 or 0 if the call does not return a value.
421 OLD_INHIBIT_DEFER_POP is the value that `inhibit_defer_pop' had before
422 the args to this call were processed.
423 We restore `inhibit_defer_pop' to that value.
425 CALL_FUSAGE is either empty or an EXPR_LIST of USE expressions that
426 denote registers used by the called function. */
static void
emit_call_1 (funexp, fndecl, funtype, stack_size, rounded_stack_size,
             struct_value_size, next_arg_reg, valreg, old_inhibit_defer_pop,
             call_fusage, ecf_flags)
     rtx funexp;
     tree fndecl ATTRIBUTE_UNUSED;
     tree funtype ATTRIBUTE_UNUSED;
     HOST_WIDE_INT stack_size ATTRIBUTE_UNUSED;
     HOST_WIDE_INT rounded_stack_size;
     HOST_WIDE_INT struct_value_size ATTRIBUTE_UNUSED;
     rtx next_arg_reg;
     rtx valreg;
     int old_inhibit_defer_pop;
     rtx call_fusage;
     int ecf_flags;
{
  rtx rounded_stack_size_rtx = GEN_INT (rounded_stack_size);
#if defined (HAVE_call) && defined (HAVE_call_value)
  rtx struct_value_size_rtx = GEN_INT (struct_value_size);
#endif
  rtx call_insn;
  int already_popped = 0;
  HOST_WIDE_INT n_popped = RETURN_POPS_ARGS (fndecl, funtype, stack_size);

  /* Ensure address is valid.  SYMBOL_REF is already valid, so no need,
     and we don't want to load it into a register as an optimization,
     because prepare_call_address already did it if it should be done.  */
  if (GET_CODE (funexp) != SYMBOL_REF)
    funexp = memory_address (FUNCTION_MODE, funexp);

  /* The four alternatives below pick, in priority order, the most
     specific call pattern the target provides: sibcall-with-pop,
     call-with-pop, plain sibcall, plain call.  Exactly one is emitted.  */
#if defined (HAVE_sibcall_pop) && defined (HAVE_sibcall_value_pop)
  if ((ecf_flags & ECF_SIBCALL)
      && HAVE_sibcall_pop && HAVE_sibcall_value_pop
      && (RETURN_POPS_ARGS (fndecl, funtype, stack_size) > 0
          || stack_size == 0))
    {
      rtx n_pop = GEN_INT (RETURN_POPS_ARGS (fndecl, funtype, stack_size));
      rtx pat;

      /* If this subroutine pops its own args, record that in the call insn
         if possible, for the sake of frame pointer elimination.  */

      if (valreg)
        pat = gen_sibcall_value_pop (valreg,
                                     gen_rtx_MEM (FUNCTION_MODE, funexp),
                                     rounded_stack_size_rtx, next_arg_reg,
                                     n_pop);
      else
        pat = gen_sibcall_pop (gen_rtx_MEM (FUNCTION_MODE, funexp),
                               rounded_stack_size_rtx, next_arg_reg, n_pop);

      emit_call_insn (pat);
      already_popped = 1;
    }
  else
#endif

#if defined (HAVE_call_pop) && defined (HAVE_call_value_pop)
/* If the target has "call" or "call_value" insns, then prefer them
   if no arguments are actually popped.  If the target does not have
   "call" or "call_value" insns, then we must use the popping versions
   even if the call has no arguments to pop.  */
#if defined (HAVE_call) && defined (HAVE_call_value)
  if (HAVE_call && HAVE_call_value && HAVE_call_pop && HAVE_call_value_pop
      && n_popped > 0)
#else
  if (HAVE_call_pop && HAVE_call_value_pop)
#endif
    {
      rtx n_pop = GEN_INT (n_popped);
      rtx pat;

      /* If this subroutine pops its own args, record that in the call insn
         if possible, for the sake of frame pointer elimination.  */

      if (valreg)
        pat = gen_call_value_pop (valreg,
                                  gen_rtx_MEM (FUNCTION_MODE, funexp),
                                  rounded_stack_size_rtx, next_arg_reg, n_pop);
      else
        pat = gen_call_pop (gen_rtx_MEM (FUNCTION_MODE, funexp),
                            rounded_stack_size_rtx, next_arg_reg, n_pop);

      emit_call_insn (pat);
      already_popped = 1;
    }
  else
#endif

#if defined (HAVE_sibcall) && defined (HAVE_sibcall_value)
  if ((ecf_flags & ECF_SIBCALL)
      && HAVE_sibcall && HAVE_sibcall_value)
    {
      if (valreg)
        emit_call_insn (gen_sibcall_value (valreg,
                                           gen_rtx_MEM (FUNCTION_MODE, funexp),
                                           rounded_stack_size_rtx,
                                           next_arg_reg, NULL_RTX));
      else
        emit_call_insn (gen_sibcall (gen_rtx_MEM (FUNCTION_MODE, funexp),
                                     rounded_stack_size_rtx, next_arg_reg,
                                     struct_value_size_rtx));
    }
  else
#endif

#if defined (HAVE_call) && defined (HAVE_call_value)
  if (HAVE_call && HAVE_call_value)
    {
      if (valreg)
        emit_call_insn (gen_call_value (valreg,
                                        gen_rtx_MEM (FUNCTION_MODE, funexp),
                                        rounded_stack_size_rtx, next_arg_reg,
                                        NULL_RTX));
      else
        emit_call_insn (gen_call (gen_rtx_MEM (FUNCTION_MODE, funexp),
                                  rounded_stack_size_rtx, next_arg_reg,
                                  struct_value_size_rtx));
    }
  else
#endif
    abort ();

  /* Find the CALL insn we just emitted.  */
  for (call_insn = get_last_insn ();
       call_insn && GET_CODE (call_insn) != CALL_INSN;
       call_insn = PREV_INSN (call_insn))
    ;

  if (! call_insn)
    abort ();

  /* Mark memory as used for "pure" function call.  */
  if (ecf_flags & ECF_PURE)
    call_fusage = gen_rtx_EXPR_LIST (VOIDmode,
        gen_rtx_USE (VOIDmode,
                     gen_rtx_MEM (BLKmode,
                                  gen_rtx_SCRATCH (VOIDmode))), call_fusage);

  /* Put the register usage information on the CALL.  If there is already
     some usage information, put ours at the end.  */
  if (CALL_INSN_FUNCTION_USAGE (call_insn))
    {
      rtx link;

      /* Walk to the last link of the existing usage list.  */
      for (link = CALL_INSN_FUNCTION_USAGE (call_insn); XEXP (link, 1) != 0;
           link = XEXP (link, 1))
        ;

      XEXP (link, 1) = call_fusage;
    }
  else
    CALL_INSN_FUNCTION_USAGE (call_insn) = call_fusage;

  /* If this is a const call, then set the insn's unchanging bit.  */
  if (ecf_flags & (ECF_CONST | ECF_PURE))
    CONST_CALL_P (call_insn) = 1;

  /* If this call can't throw, attach a REG_EH_REGION reg note to that
     effect.  */
  if (ecf_flags & ECF_NOTHROW)
    REG_NOTES (call_insn) = gen_rtx_EXPR_LIST (REG_EH_REGION, const0_rtx,
                                               REG_NOTES (call_insn));

  SIBLING_CALL_P (call_insn) = ((ecf_flags & ECF_SIBCALL) != 0);

  /* Restore this now, so that we do defer pops for this call's args
     if the context of the call as a whole permits.  */
  inhibit_defer_pop = old_inhibit_defer_pop;

  if (n_popped > 0)
    {
      if (!already_popped)
        CALL_INSN_FUNCTION_USAGE (call_insn)
          = gen_rtx_EXPR_LIST (VOIDmode,
                               gen_rtx_CLOBBER (VOIDmode, stack_pointer_rtx),
                               CALL_INSN_FUNCTION_USAGE (call_insn));
      rounded_stack_size -= n_popped;
      rounded_stack_size_rtx = GEN_INT (rounded_stack_size);
      stack_pointer_delta -= n_popped;
    }

  if (!ACCUMULATE_OUTGOING_ARGS)
    {
      /* If returning from the subroutine does not automatically pop the args,
         we need an instruction to pop them sooner or later.
         Perhaps do it now; perhaps just record how much space to pop later.

         If returning from the subroutine does pop the args, indicate that the
         stack pointer will be changed.  */

      if (rounded_stack_size != 0)
        {
          if (flag_defer_pop && inhibit_defer_pop == 0
              && !(ecf_flags & (ECF_CONST | ECF_PURE)))
            pending_stack_adjust += rounded_stack_size;
          else
            adjust_stack (rounded_stack_size_rtx);
        }
    }
  /* When we accumulate outgoing args, we must avoid any stack manipulations.
     Restore the stack pointer to its original value now.  Usually
     ACCUMULATE_OUTGOING_ARGS targets don't get here, but there are exceptions.
     On i386 ACCUMULATE_OUTGOING_ARGS can be enabled on demand, and
     popping variants of functions exist as well.

     ??? We may optimize similar to defer_pop above, but it is
     probably not worthwhile.

     ??? It will be worthwhile to enable combine_stack_adjustments even for
     such machines.  */
  else if (n_popped)
    anti_adjust_stack (GEN_INT (n_popped));
}
645 /* Determine if the function identified by NAME and FNDECL is one with
646 special properties we wish to know about.
648 For example, if the function might return more than one time (setjmp), then
649 set RETURNS_TWICE to a nonzero value.
651 Similarly set LONGJMP for if the function is in the longjmp family.
653 Set MALLOC for any of the standard memory allocation functions which
654 allocate from the heap.
656 Set MAY_BE_ALLOCA for any memory allocation function that might allocate
657 space from the stack such as alloca. */
static int
special_function_p (fndecl, flags)
     tree fndecl;
     int flags;
{
  if (! (flags & ECF_MALLOC)
      && fndecl && DECL_NAME (fndecl)
      /* Quick length filter; the longest name we recognize is
         "__builtin_alloca" (16 chars) -- NOTE(review): presumably 17
         allows one extra char of slack, confirm against the name list.  */
      && IDENTIFIER_LENGTH (DECL_NAME (fndecl)) <= 17
      /* Exclude functions not at the file scope, or not `extern',
         since they are not the magic functions we would otherwise
         think they are.  */
      && DECL_CONTEXT (fndecl) == NULL_TREE && TREE_PUBLIC (fndecl))
    {
      char *name = IDENTIFIER_POINTER (DECL_NAME (fndecl));
      char *tname = name;

      /* We assume that alloca will always be called by name.  It
         makes no sense to pass it as a pointer-to-function to
         anything that does not understand its behavior.  */
      if (((IDENTIFIER_LENGTH (DECL_NAME (fndecl)) == 6
            && name[0] == 'a'
            && ! strcmp (name, "alloca"))
           || (IDENTIFIER_LENGTH (DECL_NAME (fndecl)) == 16
               && name[0] == '_'
               && ! strcmp (name, "__builtin_alloca"))))
        flags |= ECF_MAY_BE_ALLOCA;

      /* Disregard prefix _, __ or __x.  */
      if (name[0] == '_')
        {
          if (name[1] == '_' && name[2] == 'x')
            tname += 3;
          else if (name[1] == '_')
            tname += 2;
          else
            tname += 1;
        }

      /* The single-character tests below are cheap pre-filters before
         the full strcmp.  */
      if (tname[0] == 's')
        {
          if ((tname[1] == 'e'
               && (! strcmp (tname, "setjmp")
                   || ! strcmp (tname, "setjmp_syscall")))
              || (tname[1] == 'i'
                  && ! strcmp (tname, "sigsetjmp"))
              || (tname[1] == 'a'
                  && ! strcmp (tname, "savectx")))
            flags |= ECF_RETURNS_TWICE;

          if (tname[1] == 'i'
              && ! strcmp (tname, "siglongjmp"))
            flags |= ECF_LONGJMP;
        }
      else if ((tname[0] == 'q' && tname[1] == 's'
                && ! strcmp (tname, "qsetjmp"))
               || (tname[0] == 'v' && tname[1] == 'f'
                   && ! strcmp (tname, "vfork")))
        flags |= ECF_RETURNS_TWICE;

      else if (tname[0] == 'l' && tname[1] == 'o'
               && ! strcmp (tname, "longjmp"))
        flags |= ECF_LONGJMP;

      else if ((tname[0] == 'f' && tname[1] == 'o'
                && ! strcmp (tname, "fork"))
               /* Linux specific: __clone.  check NAME to insist on the
                  leading underscores, to avoid polluting the ISO / POSIX
                  namespace.  */
               || (name[0] == '_' && name[1] == '_'
                   && ! strcmp (tname, "clone"))
               /* Matches execl, execv, execlp, execle, execvp, execve.  */
               || (tname[0] == 'e' && tname[1] == 'x' && tname[2] == 'e'
                   && tname[3] == 'c' && (tname[4] == 'l' || tname[4] == 'v')
                   && (tname[5] == '\0'
                       || ((tname[5] == 'p' || tname[5] == 'e')
                           && tname[6] == '\0'))))
        flags |= ECF_FORK_OR_EXEC;

      /* Do not add any more malloc-like functions to this list,
         instead mark them as malloc functions using the malloc attribute.
         Note, realloc is not suitable for attribute malloc since
         it may return the same address across multiple calls.
         C++ operator new is not suitable because it is not required
         to return a unique pointer; indeed, the standard placement new
         just returns its argument.  */
      else if (TYPE_MODE (TREE_TYPE (TREE_TYPE (fndecl))) == Pmode
               && (! strcmp (tname, "malloc")
                   || ! strcmp (tname, "calloc")
                   || ! strcmp (tname, "strdup")))
        flags |= ECF_MALLOC;
    }
  return flags;
}
/* Return nonzero when the tree represents a call to a function that can
   return more than once -- setjmp and its relatives.  (The original
   comment said "longjmp", but the code tests ECF_RETURNS_TWICE, which
   special_function_p sets for the setjmp family.)  */

int
setjmp_call_p (fndecl)
     tree fndecl;
{
  return special_function_p (fndecl, 0) & ECF_RETURNS_TWICE;
}
760 /* Detect flags (function attributes) from the function type node. */
761 static int
762 flags_from_decl_or_type (exp)
763 tree exp;
765 int flags = 0;
766 /* ??? We can't set IS_MALLOC for function types? */
767 if (DECL_P (exp))
769 /* The function exp may have the `malloc' attribute. */
770 if (DECL_P (exp) && DECL_IS_MALLOC (exp))
771 flags |= ECF_MALLOC;
773 /* The function exp may have the `pure' attribute. */
774 if (DECL_P (exp) && DECL_IS_PURE (exp))
775 flags |= ECF_PURE;
777 if (TREE_NOTHROW (exp))
778 flags |= ECF_NOTHROW;
781 if (TREE_READONLY (exp) && !TREE_THIS_VOLATILE (exp))
782 flags |= ECF_CONST;
784 if (TREE_THIS_VOLATILE (exp))
785 flags |= ECF_NORETURN;
787 return flags;
791 /* Precompute all register parameters as described by ARGS, storing values
792 into fields within the ARGS array.
794 NUM_ACTUALS indicates the total number elements in the ARGS array.
796 Set REG_PARM_SEEN if we encounter a register parameter. */
static void
precompute_register_parameters (num_actuals, args, reg_parm_seen)
     int num_actuals;
     struct arg_data *args;
     int *reg_parm_seen;
{
  int i;

  *reg_parm_seen = 0;

  /* Consider only arguments that go (at least partly) in registers.  */
  for (i = 0; i < num_actuals; i++)
    if (args[i].reg != 0 && ! args[i].pass_on_stack)
      {
        *reg_parm_seen = 1;

        if (args[i].value == 0)
          {
            push_temp_slots ();
            args[i].value = expand_expr (args[i].tree_value, NULL_RTX,
                                         VOIDmode, 0);
            preserve_temp_slots (args[i].value);
            pop_temp_slots ();

            /* ANSI doesn't require a sequence point here,
               but PCC has one, so this will avoid some problems.  */
            emit_queue ();
          }

        /* If we are to promote the function arg to a wider mode,
           do it now.  */

        if (args[i].mode != TYPE_MODE (TREE_TYPE (args[i].tree_value)))
          args[i].value
            = convert_modes (args[i].mode,
                             TYPE_MODE (TREE_TYPE (args[i].tree_value)),
                             args[i].value, args[i].unsignedp);

        /* If the value is expensive, and we are inside an appropriately
           short loop, put the value into a pseudo and then put the pseudo
           into the hard reg.

           For small register classes, also do this if this call uses
           register parameters.  This is to avoid reload conflicts while
           loading the parameters registers.  */

        if ((! (GET_CODE (args[i].value) == REG
                || (GET_CODE (args[i].value) == SUBREG
                    && GET_CODE (SUBREG_REG (args[i].value)) == REG)))
            && args[i].mode != BLKmode
            && rtx_cost (args[i].value, SET) > 2
            && ((SMALL_REGISTER_CLASSES && *reg_parm_seen)
                || preserve_subexpressions_p ()))
          args[i].value = copy_to_mode_reg (args[i].mode, args[i].value);
      }
}
854 #ifdef REG_PARM_STACK_SPACE
856 /* The argument list is the property of the called routine and it
857 may clobber it. If the fixed area has been used for previous
858 parameters, we must save and restore it. */
static rtx
save_fixed_argument_area (reg_parm_stack_space, argblock,
                          low_to_save, high_to_save)
     int reg_parm_stack_space;
     rtx argblock;
     int *low_to_save;
     int *high_to_save;
{
  int i;
  rtx save_area = NULL_RTX;

  /* Compute the boundary of the area that needs to be saved, if any.
     NOTE(review): this relies on the caller initializing *LOW_TO_SAVE
     to -1 before the call -- confirm at call sites.  */
#ifdef ARGS_GROW_DOWNWARD
  for (i = 0; i < reg_parm_stack_space + 1; i++)
#else
  for (i = 0; i < reg_parm_stack_space; i++)
#endif
    {
      if (i >= highest_outgoing_arg_in_use
          || stack_usage_map[i] == 0)
        continue;

      if (*low_to_save == -1)
        *low_to_save = i;

      *high_to_save = i;
    }

  if (*low_to_save >= 0)
    {
      int num_to_save = *high_to_save - *low_to_save + 1;
      enum machine_mode save_mode
        = mode_for_size (num_to_save * BITS_PER_UNIT, MODE_INT, 1);
      rtx stack_area;

      /* If we don't have the required alignment, must do this in BLKmode.  */
      if ((*low_to_save & (MIN (GET_MODE_SIZE (save_mode),
                                BIGGEST_ALIGNMENT / UNITS_PER_WORD) - 1)))
        save_mode = BLKmode;

#ifdef ARGS_GROW_DOWNWARD
      stack_area = gen_rtx_MEM (save_mode,
                                memory_address (save_mode,
                                                plus_constant (argblock,
                                                               - *high_to_save)));
#else
      stack_area = gen_rtx_MEM (save_mode,
                                memory_address (save_mode,
                                                plus_constant (argblock,
                                                               *low_to_save)));
#endif
      if (save_mode == BLKmode)
        {
          save_area = assign_stack_temp (BLKmode, num_to_save, 0);
          /* Cannot use emit_block_move here because it can be done by a
             library call which in turn gets into this place again and deadly
             infinite recursion happens.  */
          move_by_pieces (validize_mem (save_area), stack_area, num_to_save,
                          PARM_BOUNDARY);
        }
      else
        {
          save_area = gen_reg_rtx (save_mode);
          emit_move_insn (save_area, stack_area);
        }
    }
  return save_area;
}
/* Restore the fixed argument area previously saved by
   save_fixed_argument_area.  SAVE_AREA is what that function returned;
   ARGBLOCK, HIGH_TO_SAVE and LOW_TO_SAVE describe the stack slice to
   copy back into.  */
static void
restore_fixed_argument_area (save_area, argblock, high_to_save, low_to_save)
     rtx save_area;
     rtx argblock;
     int high_to_save;
     int low_to_save;
{
  enum machine_mode save_mode = GET_MODE (save_area);
#ifdef ARGS_GROW_DOWNWARD
  rtx stack_area
    = gen_rtx_MEM (save_mode,
                   memory_address (save_mode,
                                   plus_constant (argblock,
                                                  - high_to_save)));
#else
  rtx stack_area
    = gen_rtx_MEM (save_mode,
                   memory_address (save_mode,
                                   plus_constant (argblock,
                                                  low_to_save)));
#endif

  if (save_mode != BLKmode)
    emit_move_insn (stack_area, save_area);
  else
    /* Cannot use emit_block_move here because it can be done by a library
       call which in turn gets into this place again and deadly infinite
       recursion happens.  */
    move_by_pieces (stack_area, validize_mem (save_area),
                    high_to_save - low_to_save + 1, PARM_BOUNDARY);
}
961 /* For any elements in ARGS that refer to parameters to be passed in
962 registers, but not in memory, and whose alignment does not permit a
963 direct copy into registers, copy the values into a group of pseudos
964 which we will later copy into the appropriate hard registers.
966 Pseudos for each unaligned argument will be stored into the array
967 args[argnum].aligned_regs. The caller is responsible for deallocating
968 the aligned_regs array if it is nonzero. */
static void
store_unaligned_arguments_into_pseudos (args, num_actuals)
     struct arg_data *args;
     int num_actuals;
{
  int i, j;

  /* Only BLKmode register args whose type alignment is below what a
     direct register copy needs are handled here.  */
  for (i = 0; i < num_actuals; i++)
    if (args[i].reg != 0 && ! args[i].pass_on_stack
        && args[i].mode == BLKmode
        && (TYPE_ALIGN (TREE_TYPE (args[i].tree_value))
            < (unsigned int) MIN (BIGGEST_ALIGNMENT, BITS_PER_WORD)))
      {
        int bytes = int_size_in_bytes (TREE_TYPE (args[i].tree_value));
        int big_endian_correction = 0;

        /* One word-sized pseudo per word of the argument (or per
           register, if only PARTIAL registers are used).  */
        args[i].n_aligned_regs
          = args[i].partial ? args[i].partial
            : (bytes + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;

        args[i].aligned_regs = (rtx *) xmalloc (sizeof (rtx)
                                                * args[i].n_aligned_regs);

        /* Structures smaller than a word are aligned to the least
           significant byte (to the right).  On a BYTES_BIG_ENDIAN machine,
           this means we must skip the empty high order bytes when
           calculating the bit offset.  */
        if (BYTES_BIG_ENDIAN && bytes < UNITS_PER_WORD)
          big_endian_correction = (BITS_PER_WORD - (bytes * BITS_PER_UNIT));

        for (j = 0; j < args[i].n_aligned_regs; j++)
          {
            rtx reg = gen_reg_rtx (word_mode);
            rtx word = operand_subword_force (args[i].value, j, BLKmode);
            int bitsize = MIN (bytes * BITS_PER_UNIT, BITS_PER_WORD);
            int bitalign = TYPE_ALIGN (TREE_TYPE (args[i].tree_value));

            args[i].aligned_regs[j] = reg;

            /* There is no need to restrict this code to loading items
               in TYPE_ALIGN sized hunks.  The bitfield instructions can
               load up entire word sized registers efficiently.

               ??? This may not be needed anymore.
               We use to emit a clobber here but that doesn't let later
               passes optimize the instructions we emit.  By storing 0 into
               the register later passes know the first AND to zero out the
               bitfield being set in the register is unnecessary.  The store
               of 0 will be deleted as will at least the first AND.  */

            emit_move_insn (reg, const0_rtx);

            bytes -= bitsize / BITS_PER_UNIT;
            store_bit_field (reg, bitsize, big_endian_correction, word_mode,
                             extract_bit_field (word, bitsize, 0, 1, NULL_RTX,
                                                word_mode, word_mode, bitalign,
                                                BITS_PER_WORD),
                             bitalign, BITS_PER_WORD);
          }
      }
}
/* Fill in ARGS_SIZE and ARGS array based on the parameters found in
   ACTPARMS.

   NUM_ACTUALS is the total number of parameters.

   N_NAMED_ARGS is the total number of named arguments.

   FNDECL is the tree code for the target of this call (if known)

   ARGS_SO_FAR holds state needed by the target to know where to place
   the next argument.

   REG_PARM_STACK_SPACE is the number of bytes of stack space reserved
   for arguments which are passed in registers.

   OLD_STACK_LEVEL is a pointer to an rtx which holds the old stack level
   and may be modified by this routine.

   OLD_PENDING_ADJ, MUST_PREALLOCATE and FLAGS are pointers to integer
   flags which may be modified by this routine.  */

static void
initialize_argument_information (num_actuals, args, args_size, n_named_args,
				 actparms, fndecl, args_so_far,
				 reg_parm_stack_space, old_stack_level,
				 old_pending_adj, must_preallocate,
				 ecf_flags)
     int num_actuals ATTRIBUTE_UNUSED;
     struct arg_data *args;
     struct args_size *args_size;
     int n_named_args ATTRIBUTE_UNUSED;
     tree actparms;
     tree fndecl;
     CUMULATIVE_ARGS *args_so_far;
     int reg_parm_stack_space;
     rtx *old_stack_level;
     int *old_pending_adj;
     int *must_preallocate;
     int *ecf_flags;
{
  /* 1 if scanning parms front to back, -1 if scanning back to front.  */
  int inc;

  /* Count arg position in order args appear.  */
  int argpos;

  struct args_size alignment_pad;
  int i;
  tree p;

  args_size->constant = 0;
  args_size->var = 0;

  /* In this loop, we consider args in the order they are written.
     We fill up ARGS from the front or from the back if necessary
     so that in any case the first arg to be pushed ends up at the front.  */

  if (PUSH_ARGS_REVERSED)
    {
      i = num_actuals - 1, inc = -1;
      /* In this case, must reverse order of args
	 so that we compute and push the last arg first.  */
    }
  else
    {
      i = 0, inc = 1;
    }

  /* I counts args in order (to be) pushed; ARGPOS counts in order written.  */
  for (p = actparms, argpos = 0; p; p = TREE_CHAIN (p), i += inc, argpos++)
    {
      tree type = TREE_TYPE (TREE_VALUE (p));
      int unsignedp;
      enum machine_mode mode;

      args[i].tree_value = TREE_VALUE (p);

      /* Replace erroneous argument with constant zero.  */
      if (type == error_mark_node || !COMPLETE_TYPE_P (type))
	args[i].tree_value = integer_zero_node, type = integer_type_node;

      /* If TYPE is a transparent union, pass things the way we would
	 pass the first field of the union.  We have already verified that
	 the modes are the same.  */
      if (TYPE_TRANSPARENT_UNION (type))
	type = TREE_TYPE (TYPE_FIELDS (type));

      /* Decide where to pass this arg.

	 args[i].reg is nonzero if all or part is passed in registers.

	 args[i].partial is nonzero if part but not all is passed in registers,
	 and the exact value says how many words are passed in registers.

	 args[i].pass_on_stack is nonzero if the argument must at least be
	 computed on the stack.  It may then be loaded back into registers
	 if args[i].reg is nonzero.

	 These decisions are driven by the FUNCTION_... macros and must agree
	 with those made by function.c.  */

      /* See if this argument should be passed by invisible reference.  */
      if ((TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST
	   && contains_placeholder_p (TYPE_SIZE (type)))
	  || TREE_ADDRESSABLE (type)
#ifdef FUNCTION_ARG_PASS_BY_REFERENCE
	  || FUNCTION_ARG_PASS_BY_REFERENCE (*args_so_far, TYPE_MODE (type),
					     type, argpos < n_named_args)
#endif
	  )
	{
	  /* If we're compiling a thunk, pass through invisible
	     references instead of making a copy.  */
	  if (current_function_is_thunk
#ifdef FUNCTION_ARG_CALLEE_COPIES
	      || (FUNCTION_ARG_CALLEE_COPIES (*args_so_far, TYPE_MODE (type),
					     type, argpos < n_named_args)
		  /* If it's in a register, we must make a copy of it too.  */
		  /* ??? Is this a sufficient test?  Is there a better one? */
		  && !(TREE_CODE (args[i].tree_value) == VAR_DECL
		       && REG_P (DECL_RTL (args[i].tree_value)))
		  && ! TREE_ADDRESSABLE (type))
#endif
	      )
	    {
	      /* C++ uses a TARGET_EXPR to indicate that we want to make a
		 new object from the argument.  If we are passing by
		 invisible reference, the callee will do that for us, so we
		 can strip off the TARGET_EXPR.  This is not always safe,
		 but it is safe in the only case where this is a useful
		 optimization; namely, when the argument is a plain object.
		 In that case, the frontend is just asking the backend to
		 make a bitwise copy of the argument.  */

	      if (TREE_CODE (args[i].tree_value) == TARGET_EXPR
		  && (DECL_P (TREE_OPERAND (args[i].tree_value, 1)))
		  && ! REG_P (DECL_RTL (TREE_OPERAND (args[i].tree_value, 1))))
		args[i].tree_value = TREE_OPERAND (args[i].tree_value, 1);

	      /* Pass the address of the object; the callee owns the copy.  */
	      args[i].tree_value = build1 (ADDR_EXPR,
					   build_pointer_type (type),
					   args[i].tree_value);
	      type = build_pointer_type (type);
	    }
	  else
	    {
	      /* We make a copy of the object and pass the address to the
		 function being called.  */
	      rtx copy;

	      if (!COMPLETE_TYPE_P (type)
		  || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST
		  || (flag_stack_check && ! STACK_CHECK_BUILTIN
		      && (0 < compare_tree_int (TYPE_SIZE_UNIT (type),
						STACK_CHECK_MAX_VAR_SIZE))))
		{
		  /* This is a variable-sized object.  Make space on the stack
		     for it.  */
		  rtx size_rtx = expr_size (TREE_VALUE (p));

		  if (*old_stack_level == 0)
		    {
		      /* First dynamic allocation for this call: record the
			 stack level and pending adjustment so the caller can
			 restore them afterwards.  */
		      emit_stack_save (SAVE_BLOCK, old_stack_level, NULL_RTX);
		      *old_pending_adj = pending_stack_adjust;
		      pending_stack_adjust = 0;
		    }

		  copy = gen_rtx_MEM (BLKmode,
				      allocate_dynamic_stack_space (size_rtx,
								    NULL_RTX,
								    TYPE_ALIGN (type)));
		}
	      else
		{
		  int size = int_size_in_bytes (type);
		  copy = assign_stack_temp (TYPE_MODE (type), size, 0);
		}

	      MEM_SET_IN_STRUCT_P (copy, AGGREGATE_TYPE_P (type));

	      store_expr (args[i].tree_value, copy, 0);
	      /* The call now writes memory (the copy), so it can no longer
		 be treated as const or pure.  */
	      *ecf_flags &= ~(ECF_CONST | ECF_PURE);

	      args[i].tree_value = build1 (ADDR_EXPR,
					   build_pointer_type (type),
					   make_tree (type, copy));
	      type = build_pointer_type (type);
	    }
	}

      mode = TYPE_MODE (type);
      unsignedp = TREE_UNSIGNED (type);

#ifdef PROMOTE_FUNCTION_ARGS
      mode = promote_mode (type, mode, &unsignedp, 1);
#endif

      args[i].unsignedp = unsignedp;
      args[i].mode = mode;

#ifdef FUNCTION_INCOMING_ARG
      /* If this is a sibling call and the machine has register windows, the
	 register window has to be unwound before calling the routine, so
	 arguments have to go into the incoming registers.  */
      if (*ecf_flags & ECF_SIBCALL)
	args[i].reg = FUNCTION_INCOMING_ARG (*args_so_far, mode, type,
					     argpos < n_named_args);
      else
#endif
	args[i].reg = FUNCTION_ARG (*args_so_far, mode, type,
				    argpos < n_named_args);

#ifdef FUNCTION_ARG_PARTIAL_NREGS
      if (args[i].reg)
	args[i].partial
	  = FUNCTION_ARG_PARTIAL_NREGS (*args_so_far, mode, type,
					argpos < n_named_args);
#endif

      args[i].pass_on_stack = MUST_PASS_IN_STACK (mode, type);

      /* If FUNCTION_ARG returned a (parallel [(expr_list (nil) ...) ...]),
	 it means that we are to pass this arg in the register(s) designated
	 by the PARALLEL, but also to pass it in the stack.  */
      if (args[i].reg && GET_CODE (args[i].reg) == PARALLEL
	  && XEXP (XVECEXP (args[i].reg, 0, 0), 0) == 0)
	args[i].pass_on_stack = 1;

      /* If this is an addressable type, we must preallocate the stack
	 since we must evaluate the object into its final location.

	 If this is to be passed in both registers and the stack, it is simpler
	 to preallocate.  */
      if (TREE_ADDRESSABLE (type)
	  || (args[i].pass_on_stack && args[i].reg != 0))
	*must_preallocate = 1;

      /* If this is an addressable type, we cannot pre-evaluate it.  Thus,
	 we cannot consider this function call constant.  */
      if (TREE_ADDRESSABLE (type))
	*ecf_flags &= ~(ECF_CONST | ECF_PURE);

      /* Compute the stack-size of this argument.  */
      if (args[i].reg == 0 || args[i].partial != 0
	  || reg_parm_stack_space > 0
	  || args[i].pass_on_stack)
	locate_and_pad_parm (mode, type,
#ifdef STACK_PARMS_IN_REG_PARM_AREA
			     1,
#else
			     args[i].reg != 0,
#endif
			     fndecl, args_size, &args[i].offset,
			     &args[i].size, &alignment_pad);

#ifndef ARGS_GROW_DOWNWARD
      args[i].slot_offset = *args_size;
#endif

      args[i].alignment_pad = alignment_pad;

      /* If a part of the arg was put into registers,
	 don't include that part in the amount pushed.  */
      if (reg_parm_stack_space == 0 && ! args[i].pass_on_stack)
	args[i].size.constant -= ((args[i].partial * UNITS_PER_WORD)
				  / (PARM_BOUNDARY / BITS_PER_UNIT)
				  * (PARM_BOUNDARY / BITS_PER_UNIT));

      /* Update ARGS_SIZE, the total stack space for args so far.  */

      args_size->constant += args[i].size.constant;
      if (args[i].size.var)
	{
	  ADD_PARM_SIZE (*args_size, args[i].size.var);
	}

      /* Since the slot offset points to the bottom of the slot,
	 we must record it after incrementing if the args grow down.  */
#ifdef ARGS_GROW_DOWNWARD
      args[i].slot_offset = *args_size;

      args[i].slot_offset.constant = -args_size->constant;
      if (args_size->var)
	SUB_PARM_SIZE (args[i].slot_offset, args_size->var);
#endif

      /* Increment ARGS_SO_FAR, which has info about which arg-registers
	 have been used, etc.  */

      FUNCTION_ARG_ADVANCE (*args_so_far, TYPE_MODE (type), type,
			    argpos < n_named_args);
    }
}
/* Update ARGS_SIZE to contain the total size for the argument block.
   Return the original constant component of the argument block's size.

   REG_PARM_STACK_SPACE holds the number of bytes of stack space reserved
   for arguments passed in registers.  */

static int
compute_argument_block_size (reg_parm_stack_space, args_size,
			     preferred_stack_boundary)
     int reg_parm_stack_space;
     struct args_size *args_size;
     int preferred_stack_boundary ATTRIBUTE_UNUSED;
{
  int unadjusted_args_size = args_size->constant;

  /* For accumulate outgoing args mode we don't need to align, since the frame
     will be already aligned.  Align to STACK_BOUNDARY in order to prevent
     backends from generating misaligned frame sizes.  */
#ifdef STACK_BOUNDARY
  if (ACCUMULATE_OUTGOING_ARGS && preferred_stack_boundary > STACK_BOUNDARY)
    preferred_stack_boundary = STACK_BOUNDARY;
#endif

  /* Compute the actual size of the argument block required.  The variable
     and constant sizes must be combined, the size may have to be rounded,
     and there may be a minimum required size.  */

  if (args_size->var)
    {
      /* Variable-sized block: fold the constant part into the tree
	 expression and work with that.  */
      args_size->var = ARGS_SIZE_TREE (*args_size);
      args_size->constant = 0;

#ifdef PREFERRED_STACK_BOUNDARY
      preferred_stack_boundary /= BITS_PER_UNIT;
      if (preferred_stack_boundary > 1)
	{
	  /* We don't handle this case yet.  To handle it correctly we have
	     to add the delta, round and subtract the delta.
	     Currently no machine description requires this support.  */
	  if (stack_pointer_delta & (preferred_stack_boundary - 1))
	    abort();
	  args_size->var = round_up (args_size->var, preferred_stack_boundary);
	}
#endif

      if (reg_parm_stack_space > 0)
	{
	  args_size->var
	    = size_binop (MAX_EXPR, args_size->var,
			  ssize_int (reg_parm_stack_space));

#ifndef OUTGOING_REG_PARM_STACK_SPACE
	  /* The area corresponding to register parameters is not to count in
	     the size of the block we need.  So make the adjustment.  */
	  args_size->var
	    = size_binop (MINUS_EXPR, args_size->var,
			  ssize_int (reg_parm_stack_space));
#endif
	}
    }
  else
    {
#ifdef PREFERRED_STACK_BOUNDARY
      /* Round the constant size (plus any outstanding stack pointer
	 delta) up to the preferred boundary, then take the delta back
	 out so the net adjustment is aligned.  */
      preferred_stack_boundary /= BITS_PER_UNIT;
      if (preferred_stack_boundary < 1)
	preferred_stack_boundary = 1;
      args_size->constant = (((args_size->constant
			       + stack_pointer_delta
			       + preferred_stack_boundary - 1)
			      / preferred_stack_boundary
			      * preferred_stack_boundary)
			     - stack_pointer_delta);
#endif

      args_size->constant = MAX (args_size->constant,
				 reg_parm_stack_space);

#ifdef MAYBE_REG_PARM_STACK_SPACE
      if (reg_parm_stack_space == 0)
	args_size->constant = 0;
#endif

#ifndef OUTGOING_REG_PARM_STACK_SPACE
      args_size->constant -= reg_parm_stack_space;
#endif
    }
  return unadjusted_args_size;
}
/* Precompute parameters as needed for a function call.

   FLAGS is mask of ECF_* constants.

   NUM_ACTUALS is the number of arguments.

   ARGS is an array containing information for each argument; this routine
   fills in the INITIAL_VALUE and VALUE fields for each precomputed argument.
   */

static void
precompute_arguments (flags, num_actuals, args)
     int flags;
     int num_actuals;
     struct arg_data *args;
{
  int i;

  /* If this function call is cse'able, precompute all the parameters.
     Note that if the parameter is constructed into a temporary, this will
     cause an additional copy because the parameter will be constructed
     into a temporary location and then copied into the outgoing arguments.
     If a parameter contains a call to alloca and this function uses the
     stack, precompute the parameter.  */

  /* If we preallocated the stack space, and some arguments must be passed
     on the stack, then we must precompute any parameter which contains a
     function call which will store arguments on the stack.
     Otherwise, evaluating the parameter may clobber previous parameters
     which have already been stored into the stack.  (we have code to avoid
     such case by saving the outgoing stack arguments, but it results in
     worse code) */

  for (i = 0; i < num_actuals; i++)
    if ((flags & (ECF_CONST | ECF_PURE))
	|| calls_function (args[i].tree_value, !ACCUMULATE_OUTGOING_ARGS))
      {
	/* If this is an addressable type, we cannot pre-evaluate it.  */
	if (TREE_ADDRESSABLE (TREE_TYPE (args[i].tree_value)))
	  abort ();

	push_temp_slots ();

	args[i].value
	  = expand_expr (args[i].tree_value, NULL_RTX, VOIDmode, 0);

	/* Keep the expanded value alive past the temp-slot scope.  */
	preserve_temp_slots (args[i].value);
	pop_temp_slots ();

	/* ANSI doesn't require a sequence point here,
	   but PCC has one, so this will avoid some problems.  */
	emit_queue ();

	args[i].initial_value = args[i].value
	  = protect_from_queue (args[i].value, 0);

	/* If the declared mode differs from the mode it will be passed
	   in (e.g. because of argument promotion), convert now.  */
	if (TYPE_MODE (TREE_TYPE (args[i].tree_value)) != args[i].mode)
	  {
	    args[i].value
	      = convert_modes (args[i].mode,
			       TYPE_MODE (TREE_TYPE (args[i].tree_value)),
			       args[i].value, args[i].unsignedp);
#ifdef PROMOTE_FOR_CALL_ONLY
	    /* CSE will replace this only if it contains args[i].value
	       pseudo, so convert it down to the declared mode using
	       a SUBREG.  */
	    if (GET_CODE (args[i].value) == REG
		&& GET_MODE_CLASS (args[i].mode) == MODE_INT)
	      {
		args[i].initial_value
		  = gen_rtx_SUBREG (TYPE_MODE (TREE_TYPE (args[i].tree_value)),
				    args[i].value, 0);
		SUBREG_PROMOTED_VAR_P (args[i].initial_value) = 1;
		SUBREG_PROMOTED_UNSIGNED_P (args[i].initial_value)
		  = args[i].unsignedp;
	      }
#endif
	  }
      }
}
1496 /* Given the current state of MUST_PREALLOCATE and information about
1497 arguments to a function call in NUM_ACTUALS, ARGS and ARGS_SIZE,
1498 compute and return the final value for MUST_PREALLOCATE. */
1500 static int
1501 finalize_must_preallocate (must_preallocate, num_actuals, args, args_size)
1502 int must_preallocate;
1503 int num_actuals;
1504 struct arg_data *args;
1505 struct args_size *args_size;
1507 /* See if we have or want to preallocate stack space.
1509 If we would have to push a partially-in-regs parm
1510 before other stack parms, preallocate stack space instead.
1512 If the size of some parm is not a multiple of the required stack
1513 alignment, we must preallocate.
1515 If the total size of arguments that would otherwise create a copy in
1516 a temporary (such as a CALL) is more than half the total argument list
1517 size, preallocation is faster.
1519 Another reason to preallocate is if we have a machine (like the m88k)
1520 where stack alignment is required to be maintained between every
1521 pair of insns, not just when the call is made. However, we assume here
1522 that such machines either do not have push insns (and hence preallocation
1523 would occur anyway) or the problem is taken care of with
1524 PUSH_ROUNDING. */
1526 if (! must_preallocate)
1528 int partial_seen = 0;
1529 int copy_to_evaluate_size = 0;
1530 int i;
1532 for (i = 0; i < num_actuals && ! must_preallocate; i++)
1534 if (args[i].partial > 0 && ! args[i].pass_on_stack)
1535 partial_seen = 1;
1536 else if (partial_seen && args[i].reg == 0)
1537 must_preallocate = 1;
1539 if (TYPE_MODE (TREE_TYPE (args[i].tree_value)) == BLKmode
1540 && (TREE_CODE (args[i].tree_value) == CALL_EXPR
1541 || TREE_CODE (args[i].tree_value) == TARGET_EXPR
1542 || TREE_CODE (args[i].tree_value) == COND_EXPR
1543 || TREE_ADDRESSABLE (TREE_TYPE (args[i].tree_value))))
1544 copy_to_evaluate_size
1545 += int_size_in_bytes (TREE_TYPE (args[i].tree_value));
1548 if (copy_to_evaluate_size * 2 >= args_size->constant
1549 && args_size->constant > 0)
1550 must_preallocate = 1;
1552 return must_preallocate;
/* If we preallocated stack space, compute the address of each argument
   and store it into the ARGS array.

   We need not ensure it is a valid memory address here; it will be
   validized when it is used.

   ARGBLOCK is an rtx for the address of the outgoing arguments.  */

static void
compute_argument_addresses (args, argblock, num_actuals)
     struct arg_data *args;
     rtx argblock;
     int num_actuals;
{
  if (argblock)
    {
      rtx arg_reg = argblock;
      int i, arg_offset = 0;

      /* Split a (plus reg const) argblock into base register plus
	 constant offset so both pieces can be reused below.  */
      if (GET_CODE (argblock) == PLUS)
	arg_reg = XEXP (argblock, 0), arg_offset = INTVAL (XEXP (argblock, 1));

      for (i = 0; i < num_actuals; i++)
	{
	  rtx offset = ARGS_SIZE_RTX (args[i].offset);
	  rtx slot_offset = ARGS_SIZE_RTX (args[i].slot_offset);
	  rtx addr;

	  /* Skip this parm if it will not be passed on the stack.  */
	  if (! args[i].pass_on_stack && args[i].reg != 0)
	    continue;

	  /* args[i].stack is the MEM at offset OFFSET from the block.  */
	  if (GET_CODE (offset) == CONST_INT)
	    addr = plus_constant (arg_reg, INTVAL (offset));
	  else
	    addr = gen_rtx_PLUS (Pmode, arg_reg, offset);

	  addr = plus_constant (addr, arg_offset);
	  args[i].stack = gen_rtx_MEM (args[i].mode, addr);
	  MEM_SET_IN_STRUCT_P
	    (args[i].stack,
	     AGGREGATE_TYPE_P (TREE_TYPE (args[i].tree_value)));

	  /* args[i].stack_slot is the MEM for the whole slot, addressed
	     by SLOT_OFFSET (the bottom of the slot).  */
	  if (GET_CODE (slot_offset) == CONST_INT)
	    addr = plus_constant (arg_reg, INTVAL (slot_offset));
	  else
	    addr = gen_rtx_PLUS (Pmode, arg_reg, slot_offset);

	  addr = plus_constant (addr, arg_offset);
	  args[i].stack_slot = gen_rtx_MEM (args[i].mode, addr);
	}
    }
}
/* Given a FNDECL and EXP, return an rtx suitable for use as a target address
   in a call instruction.

   FNDECL is the tree node for the target function.  For an indirect call
   FNDECL will be NULL_TREE.

   EXP is the CALL_EXPR for this call.  */

static rtx
rtx_for_function_call (fndecl, exp)
     tree fndecl;
     tree exp;
{
  rtx funexp;

  /* Get the function to call, in the form of RTL.  */
  if (fndecl)
    {
      /* If this is the first use of the function, see if we need to
	 make an external definition for it.  */
      if (! TREE_USED (fndecl))
	{
	  assemble_external (fndecl);
	  TREE_USED (fndecl) = 1;
	}

      /* Get a SYMBOL_REF rtx for the function address.  */
      funexp = XEXP (DECL_RTL (fndecl), 0);
    }
  else
    /* Generate an rtx (probably a pseudo-register) for the address.  */
    {
      rtx funaddr;
      push_temp_slots ();
      funaddr = funexp =
	expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
      pop_temp_slots ();	/* FUNEXP can't be BLKmode */

      /* Check the function is executable.  */
      if (current_function_check_memory_usage)
	{
#ifdef POINTERS_EXTEND_UNSIGNED
	  /* It might be OK to convert funexp in place, but there's
	     a lot going on between here and when it happens naturally
	     that this seems safer.  */
	  funaddr = convert_memory_address (Pmode, funexp);
#endif
	  emit_library_call (chkr_check_exec_libfunc, 1,
			     VOIDmode, 1,
			     funaddr, Pmode);
	}
      emit_queue ();
    }
  return funexp;
}
/* Do the register loads required for any wholly-register parms or any
   parms which are passed both on the stack and in a register.  Their
   expressions were already evaluated.

   Mark all register-parms as living through the call, putting these USE
   insns in the CALL_INSN_FUNCTION_USAGE field.  */

static void
load_register_parameters (args, num_actuals, call_fusage)
     struct arg_data *args;
     int num_actuals;
     rtx *call_fusage;
{
  int i, j;

#ifdef LOAD_ARGS_REVERSED
  for (i = num_actuals - 1; i >= 0; i--)
#else
  for (i = 0; i < num_actuals; i++)
#endif
    {
      rtx reg = args[i].reg;
      int partial = args[i].partial;
      int nregs;

      if (reg)
	{
	  /* Set to non-negative if must move a word at a time, even if just
	     one word (e.g, partial == 1 && mode == DFmode).  Set to -1 if
	     we just use a normal move insn.  This value can be zero if the
	     argument is a zero size structure with no fields.  */
	  nregs = (partial ? partial
		   : (TYPE_MODE (TREE_TYPE (args[i].tree_value)) == BLKmode
		      ? ((int_size_in_bytes (TREE_TYPE (args[i].tree_value))
			  + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)
		      : -1));

	  /* Handle calls that pass values in multiple non-contiguous
	     locations.  The Irix 6 ABI has examples of this.  */

	  if (GET_CODE (reg) == PARALLEL)
	    emit_group_load (reg, args[i].value,
			     int_size_in_bytes (TREE_TYPE (args[i].tree_value)),
			     TYPE_ALIGN (TREE_TYPE (args[i].tree_value)));

	  /* If simple case, just do move.  If normal partial, store_one_arg
	     has already loaded the register for us.  In all other cases,
	     load the register(s) from memory.  */

	  else if (nregs == -1)
	    emit_move_insn (reg, args[i].value);

	  /* If we have pre-computed the values to put in the registers in
	     the case of non-aligned structures, copy them in now.  */

	  else if (args[i].n_aligned_regs != 0)
	    for (j = 0; j < args[i].n_aligned_regs; j++)
	      emit_move_insn (gen_rtx_REG (word_mode, REGNO (reg) + j),
			      args[i].aligned_regs[j]);

	  else if (partial == 0 || args[i].pass_on_stack)
	    move_block_to_reg (REGNO (reg),
			       validize_mem (args[i].value), nregs,
			       args[i].mode);

	  /* Now mark the loaded registers as live across the call.
	     Handle calls that pass values in multiple non-contiguous
	     locations.  The Irix 6 ABI has examples of this.  */
	  if (GET_CODE (reg) == PARALLEL)
	    use_group_regs (call_fusage, reg);
	  else if (nregs == -1)
	    use_reg (call_fusage, reg);
	  else
	    use_regs (call_fusage, REGNO (reg), nregs == 0 ? 1 : nregs);
	}
    }
}
/* Try to integrate (inline-expand) the function.  See
   expand_inline_function for documentation about the parameters.

   Returns the inlined result rtx on success, or (rtx) -1 if inlining
   failed, in which case FNDECL has been marked addressable so it will be
   compiled separately.  */

static rtx
try_to_integrate (fndecl, actparms, target, ignore, type, structure_value_addr)
     tree fndecl;
     tree actparms;
     rtx target;
     int ignore;
     tree type;
     rtx structure_value_addr;
{
  rtx temp;
  rtx before_call;
  int i;
  rtx old_stack_level = 0;
  int reg_parm_stack_space = 0;

#ifdef REG_PARM_STACK_SPACE
#ifdef MAYBE_REG_PARM_STACK_SPACE
  reg_parm_stack_space = MAYBE_REG_PARM_STACK_SPACE;
#else
  reg_parm_stack_space = REG_PARM_STACK_SPACE (fndecl);
#endif
#endif

  before_call = get_last_insn ();

  temp = expand_inline_function (fndecl, actparms, target,
				 ignore, type,
				 structure_value_addr);

  /* If inlining succeeded, return.  */
  if (temp != (rtx) (HOST_WIDE_INT) - 1)
    {
      if (ACCUMULATE_OUTGOING_ARGS)
	{
	  /* If the outgoing argument list must be preserved, push
	     the stack before executing the inlined function if it
	     makes any calls.  */

	  for (i = reg_parm_stack_space - 1; i >= 0; i--)
	    if (i < highest_outgoing_arg_in_use && stack_usage_map[i] != 0)
	      break;

	  if (stack_arg_under_construction || i >= 0)
	    {
	      rtx first_insn
		= before_call ? NEXT_INSN (before_call) : get_insns ();
	      rtx insn = NULL_RTX, seq;

	      /* Look for a call in the inline function code.
		 If DECL_SAVED_INSNS (fndecl)->outgoing_args_size is
		 nonzero then there is a call and it is not necessary
		 to scan the insns.  */

	      if (DECL_SAVED_INSNS (fndecl)->outgoing_args_size == 0)
		for (insn = first_insn; insn; insn = NEXT_INSN (insn))
		  if (GET_CODE (insn) == CALL_INSN)
		    break;

	      if (insn)
		{
		  /* Reserve enough stack space so that the largest
		     argument list of any function call in the inline
		     function does not overlap the argument list being
		     evaluated.  This is usually an overestimate because
		     allocate_dynamic_stack_space reserves space for an
		     outgoing argument list in addition to the requested
		     space, but there is no way to ask for stack space such
		     that an argument list of a certain length can be
		     safely constructed.

		     Add the stack space reserved for register arguments, if
		     any, in the inline function.  What is really needed is the
		     largest value of reg_parm_stack_space in the inline
		     function, but that is not available.  Using the current
		     value of reg_parm_stack_space is wrong, but gives
		     correct results on all supported machines.  */

		  int adjust = (DECL_SAVED_INSNS (fndecl)->outgoing_args_size
				+ reg_parm_stack_space);

		  start_sequence ();
		  emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);
		  allocate_dynamic_stack_space (GEN_INT (adjust),
						NULL_RTX, BITS_PER_UNIT);
		  seq = get_insns ();
		  end_sequence ();
		  /* Insert the allocation before the inlined body, and
		     restore the stack after it.  */
		  emit_insns_before (seq, first_insn);
		  emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX);
		}
	    }
	}

      /* If the result is equivalent to TARGET, return TARGET to simplify
	 checks in store_expr.  They can be equivalent but not equal in the
	 case of a function that returns BLKmode.  */
      if (temp != target && rtx_equal_p (temp, target))
	return target;
      return temp;
    }

  /* If inlining failed, mark FNDECL as needing to be compiled
     separately after all.  If function was declared inline,
     give a warning.  */
  if (DECL_INLINE (fndecl) && warn_inline && !flag_no_inline
      && optimize > 0 && !TREE_ADDRESSABLE (fndecl))
    {
      warning_with_decl (fndecl, "inlining failed in call to `%s'");
      warning ("called from here");
    }
  mark_addressable (fndecl);
  return (rtx) (HOST_WIDE_INT) - 1;
}
1858 /* Generate all the code for a function call
1859 and return an rtx for its value.
1860 Store the value in TARGET (specified as an rtx) if convenient.
1861 If the value is stored in TARGET then TARGET is returned.
1862 If IGNORE is nonzero, then we ignore the value of the function call. */
1865 expand_call (exp, target, ignore)
1866 tree exp;
1867 rtx target;
1868 int ignore;
1870 /* Nonzero if we are currently expanding a call. */
1871 static int currently_expanding_call = 0;
1873 /* List of actual parameters. */
1874 tree actparms = TREE_OPERAND (exp, 1);
1875 /* RTX for the function to be called. */
1876 rtx funexp;
1877 /* Sequence of insns to perform a tail recursive "call". */
1878 rtx tail_recursion_insns = NULL_RTX;
1879 /* Sequence of insns to perform a normal "call". */
1880 rtx normal_call_insns = NULL_RTX;
1881 /* Sequence of insns to perform a tail recursive "call". */
1882 rtx tail_call_insns = NULL_RTX;
1883 /* Data type of the function. */
1884 tree funtype;
1885 /* Declaration of the function being called,
1886 or 0 if the function is computed (not known by name). */
1887 tree fndecl = 0;
1888 char *name = 0;
1889 rtx insn;
1890 int try_tail_call;
1891 int pass;
1893 /* Register in which non-BLKmode value will be returned,
1894 or 0 if no value or if value is BLKmode. */
1895 rtx valreg;
1896 /* Address where we should return a BLKmode value;
1897 0 if value not BLKmode. */
1898 rtx structure_value_addr = 0;
1899 /* Nonzero if that address is being passed by treating it as
1900 an extra, implicit first parameter. Otherwise,
1901 it is passed by being copied directly into struct_value_rtx. */
1902 int structure_value_addr_parm = 0;
1903 /* Size of aggregate value wanted, or zero if none wanted
1904 or if we are using the non-reentrant PCC calling convention
1905 or expecting the value in registers. */
1906 HOST_WIDE_INT struct_value_size = 0;
1907 /* Nonzero if called function returns an aggregate in memory PCC style,
1908 by returning the address of where to find it. */
1909 int pcc_struct_value = 0;
1911 /* Number of actual parameters in this call, including struct value addr. */
1912 int num_actuals;
1913 /* Number of named args. Args after this are anonymous ones
1914 and they must all go on the stack. */
1915 int n_named_args;
1917 /* Vector of information about each argument.
1918 Arguments are numbered in the order they will be pushed,
1919 not the order they are written. */
1920 struct arg_data *args;
1922 /* Total size in bytes of all the stack-parms scanned so far. */
1923 struct args_size args_size;
1924 /* Size of arguments before any adjustments (such as rounding). */
1925 int unadjusted_args_size;
1926 /* Data on reg parms scanned so far. */
1927 CUMULATIVE_ARGS args_so_far;
1928 /* Nonzero if a reg parm has been scanned. */
1929 int reg_parm_seen;
1930 /* Nonzero if this is an indirect function call. */
1932 /* Nonzero if we must avoid push-insns in the args for this call.
1933 If stack space is allocated for register parameters, but not by the
1934 caller, then it is preallocated in the fixed part of the stack frame.
1935 So the entire argument block must then be preallocated (i.e., we
1936 ignore PUSH_ROUNDING in that case). */
1938 int must_preallocate = !PUSH_ARGS;
1940 /* Size of the stack reserved for parameter registers. */
1941 int reg_parm_stack_space = 0;
1943 /* Address of space preallocated for stack parms
1944 (on machines that lack push insns), or 0 if space not preallocated. */
1945 rtx argblock = 0;
1947 /* Mask of ECF_ flags. */
1948 int flags = 0;
1949 /* Nonzero if this is a call to an inline function. */
1950 int is_integrable = 0;
1951 #ifdef REG_PARM_STACK_SPACE
1952 /* Define the boundary of the register parm stack space that needs to be
1953 save, if any. */
1954 int low_to_save = -1, high_to_save;
1955 rtx save_area = 0; /* Place that it is saved */
1956 #endif
1958 int initial_highest_arg_in_use = highest_outgoing_arg_in_use;
1959 char *initial_stack_usage_map = stack_usage_map;
1960 int old_stack_arg_under_construction = 0;
1962 rtx old_stack_level = 0;
1963 int old_pending_adj = 0;
1964 int old_inhibit_defer_pop = inhibit_defer_pop;
1965 int old_stack_allocated;
1966 rtx call_fusage;
1967 register tree p;
1968 register int i;
1969 int preferred_stack_boundary;
1971 /* The value of the function call can be put in a hard register. But
1972 if -fcheck-memory-usage, code which invokes functions (and thus
1973 damages some hard registers) can be inserted before using the value.
1974 So, target is always a pseudo-register in that case. */
1975 if (current_function_check_memory_usage)
1976 target = 0;
1978 /* See if this is "nothrow" function call. */
1979 if (TREE_NOTHROW (exp))
1980 flags |= ECF_NOTHROW;
1982 /* See if we can find a DECL-node for the actual function.
1983 As a result, decide whether this is a call to an integrable function. */
1985 p = TREE_OPERAND (exp, 0);
1986 if (TREE_CODE (p) == ADDR_EXPR)
1988 fndecl = TREE_OPERAND (p, 0);
1989 if (TREE_CODE (fndecl) != FUNCTION_DECL)
1990 fndecl = 0;
1991 else
1993 if (!flag_no_inline
1994 && fndecl != current_function_decl
1995 && DECL_INLINE (fndecl)
1996 && DECL_SAVED_INSNS (fndecl)
1997 && DECL_SAVED_INSNS (fndecl)->inlinable)
1998 is_integrable = 1;
1999 else if (! TREE_ADDRESSABLE (fndecl))
2001 /* In case this function later becomes inlinable,
2002 record that there was already a non-inline call to it.
2004 Use abstraction instead of setting TREE_ADDRESSABLE
2005 directly. */
2006 if (DECL_INLINE (fndecl) && warn_inline && !flag_no_inline
2007 && optimize > 0)
2009 warning_with_decl (fndecl, "can't inline call to `%s'");
2010 warning ("called from here");
2012 mark_addressable (fndecl);
2015 flags |= flags_from_decl_or_type (fndecl);
2019 /* If we don't have specific function to call, see if we have a
2020 attributes set in the type. */
2021 if (fndecl == 0)
2022 flags |= flags_from_decl_or_type (TREE_TYPE (TREE_TYPE (p)));
2024 #ifdef REG_PARM_STACK_SPACE
2025 #ifdef MAYBE_REG_PARM_STACK_SPACE
2026 reg_parm_stack_space = MAYBE_REG_PARM_STACK_SPACE;
2027 #else
2028 reg_parm_stack_space = REG_PARM_STACK_SPACE (fndecl);
2029 #endif
2030 #endif
2032 #ifndef OUTGOING_REG_PARM_STACK_SPACE
2033 if (reg_parm_stack_space > 0 && PUSH_ARGS)
2034 must_preallocate = 1;
2035 #endif
2037 /* Warn if this value is an aggregate type,
2038 regardless of which calling convention we are using for it. */
2039 if (warn_aggregate_return && AGGREGATE_TYPE_P (TREE_TYPE (exp)))
2040 warning ("function call has aggregate value");
2042 /* Set up a place to return a structure. */
2044 /* Cater to broken compilers. */
2045 if (aggregate_value_p (exp))
2047 /* This call returns a big structure. */
2048 flags &= ~(ECF_CONST | ECF_PURE);
2050 #ifdef PCC_STATIC_STRUCT_RETURN
2052 pcc_struct_value = 1;
2053 /* Easier than making that case work right. */
2054 if (is_integrable)
2056 /* In case this is a static function, note that it has been
2057 used. */
2058 if (! TREE_ADDRESSABLE (fndecl))
2059 mark_addressable (fndecl);
2060 is_integrable = 0;
2063 #else /* not PCC_STATIC_STRUCT_RETURN */
2065 struct_value_size = int_size_in_bytes (TREE_TYPE (exp));
2067 if (target && GET_CODE (target) == MEM)
2068 structure_value_addr = XEXP (target, 0);
2069 else
2071 /* Assign a temporary to hold the value. */
2072 tree d;
2074 /* For variable-sized objects, we must be called with a target
2075 specified. If we were to allocate space on the stack here,
2076 we would have no way of knowing when to free it. */
2078 if (struct_value_size < 0)
2079 abort ();
2081 /* This DECL is just something to feed to mark_addressable;
2082 it doesn't get pushed. */
2083 d = build_decl (VAR_DECL, NULL_TREE, TREE_TYPE (exp));
2084 DECL_RTL (d) = assign_temp (TREE_TYPE (exp), 1, 0, 1);
2085 mark_addressable (d);
2086 mark_temp_addr_taken (DECL_RTL (d));
2087 structure_value_addr = XEXP (DECL_RTL (d), 0);
2088 TREE_USED (d) = 1;
2089 target = 0;
2092 #endif /* not PCC_STATIC_STRUCT_RETURN */
2095 /* If called function is inline, try to integrate it. */
2097 if (is_integrable)
2099 rtx temp = try_to_integrate (fndecl, actparms, target,
2100 ignore, TREE_TYPE (exp),
2101 structure_value_addr);
2102 if (temp != (rtx) (HOST_WIDE_INT) - 1)
2103 return temp;
2106 currently_expanding_call++;
 2108 /* Tail calls can make things harder to debug, and we've traditionally
2109 pushed these optimizations into -O2. Don't try if we're already
2110 expanding a call, as that means we're an argument. Similarly, if
2111 there's pending loops or cleanups we know there's code to follow
2112 the call. */
2114 try_tail_call = 0;
2115 if (flag_optimize_sibling_calls
2116 && currently_expanding_call == 1
2117 && stmt_loop_nest_empty ()
2118 && ! any_pending_cleanups (1))
2120 tree new_actparms = NULL_TREE;
2122 /* Ok, we're going to give the tail call the old college try.
2123 This means we're going to evaluate the function arguments
2124 up to three times. There are two degrees of badness we can
2125 encounter, those that can be unsaved and those that can't.
2126 (See unsafe_for_reeval commentary for details.)
2128 Generate a new argument list. Pass safe arguments through
2129 unchanged. For the easy badness wrap them in UNSAVE_EXPRs.
2130 For hard badness, evaluate them now and put their resulting
2131 rtx in a temporary VAR_DECL. */
2133 for (p = actparms; p; p = TREE_CHAIN (p))
2134 switch (unsafe_for_reeval (TREE_VALUE (p)))
2136 case 0: /* Safe. */
2137 new_actparms = tree_cons (TREE_PURPOSE (p), TREE_VALUE (p),
2138 new_actparms);
2139 break;
2141 case 1: /* Mildly unsafe. */
2142 new_actparms = tree_cons (TREE_PURPOSE (p),
2143 unsave_expr (TREE_VALUE (p)),
2144 new_actparms);
2145 break;
2147 case 2: /* Wildly unsafe. */
2149 tree var = build_decl (VAR_DECL, NULL_TREE,
2150 TREE_TYPE (TREE_VALUE (p)));
2151 DECL_RTL (var) = expand_expr (TREE_VALUE (p), NULL_RTX,
2152 VOIDmode, EXPAND_NORMAL);
2153 new_actparms = tree_cons (TREE_PURPOSE (p), var, new_actparms);
2155 break;
2157 default:
2158 abort ();
2161 /* We built the new argument chain backwards. */
2162 actparms = nreverse (new_actparms);
2164 /* Expanding one of those dangerous arguments could have added
2165 cleanups, but otherwise give it a whirl. */
2166 try_tail_call = ! any_pending_cleanups (1);
2169 /* Generate a tail recursion sequence when calling ourselves. */
2171 if (try_tail_call
2172 && TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
2173 && TREE_OPERAND (TREE_OPERAND (exp, 0), 0) == current_function_decl)
2175 /* We want to emit any pending stack adjustments before the tail
2176 recursion "call". That way we know any adjustment after the tail
2177 recursion call can be ignored if we indeed use the tail recursion
2178 call expansion. */
2179 int save_pending_stack_adjust = pending_stack_adjust;
2180 int save_stack_pointer_delta = stack_pointer_delta;
2182 /* Use a new sequence to hold any RTL we generate. We do not even
2183 know if we will use this RTL yet. The final decision can not be
2184 made until after RTL generation for the entire function is
2185 complete. */
2186 start_sequence ();
2188 /* Emit the pending stack adjustments before we expand any arguments. */
2189 do_pending_stack_adjust ();
2191 if (optimize_tail_recursion (actparms, get_last_insn ()))
2192 tail_recursion_insns = get_insns ();
2193 end_sequence ();
2195 /* Restore the original pending stack adjustment for the sibling and
2196 normal call cases below. */
2197 pending_stack_adjust = save_pending_stack_adjust;
2198 stack_pointer_delta = save_stack_pointer_delta;
2201 function_call_count++;
2203 if (fndecl && DECL_NAME (fndecl))
2204 name = IDENTIFIER_POINTER (DECL_NAME (fndecl));
2206 #ifdef PREFERRED_STACK_BOUNDARY
2207 preferred_stack_boundary = PREFERRED_STACK_BOUNDARY;
2208 #else
2209 preferred_stack_boundary = STACK_BOUNDARY;
2210 #endif
2212 /* Ensure current function's preferred stack boundary is at least
2213 what we need. We don't have to increase alignment for recursive
2214 functions. */
2215 if (cfun->preferred_stack_boundary < preferred_stack_boundary
2216 && fndecl != current_function_decl)
2217 cfun->preferred_stack_boundary = preferred_stack_boundary;
2219 /* See if this is a call to a function that can return more than once
2220 or a call to longjmp or malloc. */
2221 flags |= special_function_p (fndecl, flags);
2223 if (flags & ECF_MAY_BE_ALLOCA)
2224 current_function_calls_alloca = 1;
2226 /* Operand 0 is a pointer-to-function; get the type of the function. */
2227 funtype = TREE_TYPE (TREE_OPERAND (exp, 0));
2228 if (! POINTER_TYPE_P (funtype))
2229 abort ();
2230 funtype = TREE_TYPE (funtype);
2232 /* We want to make two insn chains; one for a sibling call, the other
2233 for a normal call. We will select one of the two chains after
2234 initial RTL generation is complete. */
2235 for (pass = 0; pass < 2; pass++)
2237 int sibcall_failure = 0;
 2238 /* We want to emit any pending stack adjustments before the tail
2239 recursion "call". That way we know any adjustment after the tail
2240 recursion call can be ignored if we indeed use the tail recursion
2241 call expansion. */
2242 int save_pending_stack_adjust;
2243 int save_stack_pointer_delta;
2244 rtx insns;
2245 rtx before_call, next_arg_reg;
2247 if (pass == 0)
2249 /* Various reasons we can not use a sibling call. */
2250 if (! try_tail_call
2251 #ifdef HAVE_sibcall_epilogue
2252 || ! HAVE_sibcall_epilogue
2253 #else
2254 || 1
2255 #endif
2256 /* The structure value address is used and modified in the
2257 loop below. It does not seem worth the effort to save and
2258 restore it as a state variable since few optimizable
2259 sibling calls will return a structure. */
2260 || structure_value_addr != NULL_RTX
2261 /* If the register holding the address is a callee saved
2262 register, then we lose. We have no way to prevent that,
2263 so we only allow calls to named functions. */
2264 /* ??? This could be done by having the insn constraints
2265 use a register class that is all call-clobbered. Any
2266 reload insns generated to fix things up would appear
2267 before the sibcall_epilogue. */
2268 || fndecl == NULL_TREE
2269 || ! FUNCTION_OK_FOR_SIBCALL (fndecl))
2270 continue;
2272 /* Emit any queued insns now; otherwise they would end up in
2273 only one of the alternates. */
2274 emit_queue ();
2276 /* We know at this point that there are not currently any
2277 pending cleanups. If, however, in the process of evaluating
2278 the arguments we were to create some, we'll need to be
2279 able to get rid of them. */
2280 expand_start_target_temps ();
2282 /* State variables we need to save and restore between
2283 iterations. */
2284 save_pending_stack_adjust = pending_stack_adjust;
2285 save_stack_pointer_delta = stack_pointer_delta;
2287 if (pass)
2288 flags &= ~ECF_SIBCALL;
2289 else
2290 flags |= ECF_SIBCALL;
2292 /* Other state variables that we must reinitialize each time
2293 through the loop (that are not initialized by the loop itself). */
2294 argblock = 0;
2295 call_fusage = 0;
2297 /* Start a new sequence for the normal call case.
2299 From this point on, if the sibling call fails, we want to set
2300 sibcall_failure instead of continuing the loop. */
2301 start_sequence ();
2303 /* When calling a const function, we must pop the stack args right away,
2304 so that the pop is deleted or moved with the call. */
2305 if (flags & (ECF_CONST | ECF_PURE))
2306 NO_DEFER_POP;
2308 /* Don't let pending stack adjusts add up to too much.
2309 Also, do all pending adjustments now if there is any chance
2310 this might be a call to alloca or if we are expanding a sibling
2311 call sequence. */
2312 if (pending_stack_adjust >= 32
2313 || (pending_stack_adjust > 0 && (flags & ECF_MAY_BE_ALLOCA))
2314 || pass == 0)
2315 do_pending_stack_adjust ();
2317 if (profile_arc_flag && (flags & ECF_FORK_OR_EXEC))
2319 /* A fork duplicates the profile information, and an exec discards
2320 it. We can't rely on fork/exec to be paired. So write out the
2321 profile information we have gathered so far, and clear it. */
2322 /* ??? When Linux's __clone is called with CLONE_VM set, profiling
2323 is subject to race conditions, just as with multithreaded
2324 programs. */
2326 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "__bb_fork_func"), 0,
2327 VOIDmode, 0);
2330 /* Push the temporary stack slot level so that we can free any
2331 temporaries we make. */
2332 push_temp_slots ();
2334 /* Start updating where the next arg would go.
2336 On some machines (such as the PA) indirect calls have a different
2337 calling convention than normal calls. The last argument in
2338 INIT_CUMULATIVE_ARGS tells the backend if this is an indirect call
2339 or not. */
2340 INIT_CUMULATIVE_ARGS (args_so_far, funtype, NULL_RTX, (fndecl == 0));
2342 /* If struct_value_rtx is 0, it means pass the address
2343 as if it were an extra parameter. */
2344 if (structure_value_addr && struct_value_rtx == 0)
2346 /* If structure_value_addr is a REG other than
 2347 virtual_outgoing_args_rtx, we can always use it. If it
2348 is not a REG, we must always copy it into a register.
2349 If it is virtual_outgoing_args_rtx, we must copy it to another
2350 register in some cases. */
2351 rtx temp = (GET_CODE (structure_value_addr) != REG
2352 || (ACCUMULATE_OUTGOING_ARGS
2353 && stack_arg_under_construction
2354 && structure_value_addr == virtual_outgoing_args_rtx)
2355 ? copy_addr_to_reg (structure_value_addr)
2356 : structure_value_addr);
2358 actparms
2359 = tree_cons (error_mark_node,
2360 make_tree (build_pointer_type (TREE_TYPE (funtype)),
2361 temp),
2362 actparms);
2363 structure_value_addr_parm = 1;
2366 /* Count the arguments and set NUM_ACTUALS. */
2367 for (p = actparms, i = 0; p; p = TREE_CHAIN (p)) i++;
2368 num_actuals = i;
2370 /* Compute number of named args.
2371 Normally, don't include the last named arg if anonymous args follow.
2372 We do include the last named arg if STRICT_ARGUMENT_NAMING is nonzero.
2373 (If no anonymous args follow, the result of list_length is actually
2374 one too large. This is harmless.)
2376 If PRETEND_OUTGOING_VARARGS_NAMED is set and STRICT_ARGUMENT_NAMING is
2377 zero, this machine will be able to place unnamed args that were
2378 passed in registers into the stack. So treat all args as named.
2379 This allows the insns emitting for a specific argument list to be
2380 independent of the function declaration.
2382 If PRETEND_OUTGOING_VARARGS_NAMED is not set, we do not have any
2383 reliable way to pass unnamed args in registers, so we must force
2384 them into memory. */
2386 if ((STRICT_ARGUMENT_NAMING
2387 || ! PRETEND_OUTGOING_VARARGS_NAMED)
2388 && TYPE_ARG_TYPES (funtype) != 0)
2389 n_named_args
2390 = (list_length (TYPE_ARG_TYPES (funtype))
2391 /* Don't include the last named arg. */
2392 - (STRICT_ARGUMENT_NAMING ? 0 : 1)
2393 /* Count the struct value address, if it is passed as a parm. */
2394 + structure_value_addr_parm);
2395 else
2396 /* If we know nothing, treat all args as named. */
2397 n_named_args = num_actuals;
2399 /* Make a vector to hold all the information about each arg. */
2400 args = (struct arg_data *) alloca (num_actuals
2401 * sizeof (struct arg_data));
2402 bzero ((char *) args, num_actuals * sizeof (struct arg_data));
 2404 /* Build up entries in the ARGS array, compute the size of the arguments
2405 into ARGS_SIZE, etc. */
2406 initialize_argument_information (num_actuals, args, &args_size,
2407 n_named_args, actparms, fndecl,
2408 &args_so_far, reg_parm_stack_space,
2409 &old_stack_level, &old_pending_adj,
2410 &must_preallocate, &flags);
2412 #ifdef FINAL_REG_PARM_STACK_SPACE
2413 reg_parm_stack_space = FINAL_REG_PARM_STACK_SPACE (args_size.constant,
2414 args_size.var);
2415 #endif
2417 if (args_size.var)
2419 /* If this function requires a variable-sized argument list, don't
2420 try to make a cse'able block for this call. We may be able to
2421 do this eventually, but it is too complicated to keep track of
2422 what insns go in the cse'able block and which don't.
2424 Also do not make a sibling call. */
2426 flags &= ~(ECF_CONST | ECF_PURE);
2427 must_preallocate = 1;
2428 sibcall_failure = 1;
2431 if (args_size.constant > current_function_args_size)
2433 /* If this function requires more stack slots than the current
2434 function, we cannot change it into a sibling call. */
2435 sibcall_failure = 1;
2438 /* Compute the actual size of the argument block required. The variable
2439 and constant sizes must be combined, the size may have to be rounded,
2440 and there may be a minimum required size. When generating a sibcall
2441 pattern, do not round up, since we'll be re-using whatever space our
2442 caller provided. */
2443 unadjusted_args_size
2444 = compute_argument_block_size (reg_parm_stack_space, &args_size,
2445 (pass == 0 ? 0
2446 : preferred_stack_boundary));
2448 /* If the callee pops its own arguments, then it must pop exactly
2449 the same number of arguments as the current function. */
2450 if (RETURN_POPS_ARGS (fndecl, funtype, unadjusted_args_size)
2451 != RETURN_POPS_ARGS (current_function_decl,
2452 TREE_TYPE (current_function_decl),
2453 current_function_args_size))
2454 sibcall_failure = 1;
2456 /* Now make final decision about preallocating stack space. */
2457 must_preallocate = finalize_must_preallocate (must_preallocate,
2458 num_actuals, args,
2459 &args_size);
2461 /* If the structure value address will reference the stack pointer, we
2462 must stabilize it. We don't need to do this if we know that we are
2463 not going to adjust the stack pointer in processing this call. */
2465 if (structure_value_addr
2466 && (reg_mentioned_p (virtual_stack_dynamic_rtx, structure_value_addr)
2467 || reg_mentioned_p (virtual_outgoing_args_rtx,
2468 structure_value_addr))
2469 && (args_size.var
2470 || (!ACCUMULATE_OUTGOING_ARGS && args_size.constant)))
2471 structure_value_addr = copy_to_reg (structure_value_addr);
2473 /* Precompute any arguments as needed. */
2474 if (pass)
2475 precompute_arguments (flags, num_actuals, args);
2477 /* Now we are about to start emitting insns that can be deleted
2478 if a libcall is deleted. */
2479 if (flags & (ECF_CONST | ECF_PURE | ECF_MALLOC))
2480 start_sequence ();
2482 old_stack_allocated = stack_pointer_delta - pending_stack_adjust;
2483 /* The argument block when performing a sibling call is the
2484 incoming argument block. */
2485 if (pass == 0)
2486 argblock = virtual_incoming_args_rtx;
2487 /* If we have no actual push instructions, or shouldn't use them,
2488 make space for all args right now. */
2490 else if (args_size.var != 0)
2492 if (old_stack_level == 0)
2494 emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);
2495 old_pending_adj = pending_stack_adjust;
2496 pending_stack_adjust = 0;
2497 /* stack_arg_under_construction says whether a stack arg is
2498 being constructed at the old stack level. Pushing the stack
2499 gets a clean outgoing argument block. */
2500 old_stack_arg_under_construction = stack_arg_under_construction;
2501 stack_arg_under_construction = 0;
2503 argblock = push_block (ARGS_SIZE_RTX (args_size), 0, 0);
2505 else
2507 /* Note that we must go through the motions of allocating an argument
2508 block even if the size is zero because we may be storing args
2509 in the area reserved for register arguments, which may be part of
2510 the stack frame. */
2512 int needed = args_size.constant;
2514 /* Store the maximum argument space used. It will be pushed by
2515 the prologue (if ACCUMULATE_OUTGOING_ARGS, or stack overflow
2516 checking). */
2518 if (needed > current_function_outgoing_args_size)
2519 current_function_outgoing_args_size = needed;
2521 if (must_preallocate)
2523 if (ACCUMULATE_OUTGOING_ARGS)
2525 /* Since the stack pointer will never be pushed, it is
2526 possible for the evaluation of a parm to clobber
2527 something we have already written to the stack.
2528 Since most function calls on RISC machines do not use
2529 the stack, this is uncommon, but must work correctly.
2531 Therefore, we save any area of the stack that was already
2532 written and that we are using. Here we set up to do this
2533 by making a new stack usage map from the old one. The
2534 actual save will be done by store_one_arg.
2536 Another approach might be to try to reorder the argument
2537 evaluations to avoid this conflicting stack usage. */
2539 #ifndef OUTGOING_REG_PARM_STACK_SPACE
2540 /* Since we will be writing into the entire argument area,
2541 the map must be allocated for its entire size, not just
2542 the part that is the responsibility of the caller. */
2543 needed += reg_parm_stack_space;
2544 #endif
2546 #ifdef ARGS_GROW_DOWNWARD
2547 highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
2548 needed + 1);
2549 #else
2550 highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
2551 needed);
2552 #endif
2553 stack_usage_map
2554 = (char *) alloca (highest_outgoing_arg_in_use);
2556 if (initial_highest_arg_in_use)
2557 bcopy (initial_stack_usage_map, stack_usage_map,
2558 initial_highest_arg_in_use);
2560 if (initial_highest_arg_in_use != highest_outgoing_arg_in_use)
2561 bzero (&stack_usage_map[initial_highest_arg_in_use],
2562 (highest_outgoing_arg_in_use
2563 - initial_highest_arg_in_use));
2564 needed = 0;
2566 /* The address of the outgoing argument list must not be
2567 copied to a register here, because argblock would be left
2568 pointing to the wrong place after the call to
2569 allocate_dynamic_stack_space below. */
2571 argblock = virtual_outgoing_args_rtx;
2573 else
2575 if (inhibit_defer_pop == 0)
2577 /* Try to reuse some or all of the pending_stack_adjust
2578 to get this space. Maybe we can avoid any pushing. */
2579 if (needed > pending_stack_adjust)
2581 needed -= pending_stack_adjust;
2582 pending_stack_adjust = 0;
2584 else
2586 pending_stack_adjust -= needed;
2587 needed = 0;
2590 /* Special case this because overhead of `push_block' in this
2591 case is non-trivial. */
2592 if (needed == 0)
2593 argblock = virtual_outgoing_args_rtx;
2594 else
2595 argblock = push_block (GEN_INT (needed), 0, 0);
2597 /* We only really need to call `copy_to_reg' in the case
2598 where push insns are going to be used to pass ARGBLOCK
2599 to a function call in ARGS. In that case, the stack
2600 pointer changes value from the allocation point to the
2601 call point, and hence the value of
2602 VIRTUAL_OUTGOING_ARGS_RTX changes as well. But might
2603 as well always do it. */
2604 argblock = copy_to_reg (argblock);
2606 /* The save/restore code in store_one_arg handles all
2607 cases except one:
2608 a constructor call (including a C function returning
2609 a BLKmode struct) to initialize an argument. */
2610 if (stack_arg_under_construction)
2612 #ifndef OUTGOING_REG_PARM_STACK_SPACE
2613 rtx push_size = GEN_INT (reg_parm_stack_space + args_size.constant);
2614 #else
2615 rtx push_size = GEN_INT (args_size.constant);
2616 #endif
2617 if (old_stack_level == 0)
2619 emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);
2620 old_pending_adj = pending_stack_adjust;
2621 pending_stack_adjust = 0;
2622 /* stack_arg_under_construction says whether a stack arg is
2623 being constructed at the old stack level. Pushing the stack
2624 gets a clean outgoing argument block. */
2625 old_stack_arg_under_construction = stack_arg_under_construction;
2626 stack_arg_under_construction = 0;
2627 /* Make a new map for the new argument list. */
2628 stack_usage_map = (char *)alloca (highest_outgoing_arg_in_use);
2629 bzero (stack_usage_map, highest_outgoing_arg_in_use);
2630 highest_outgoing_arg_in_use = 0;
2632 allocate_dynamic_stack_space (push_size, NULL_RTX, BITS_PER_UNIT);
2634 /* If argument evaluation might modify the stack pointer, copy the
2635 address of the argument list to a register. */
2636 for (i = 0; i < num_actuals; i++)
2637 if (args[i].pass_on_stack)
2639 argblock = copy_addr_to_reg (argblock);
2640 break;
2646 compute_argument_addresses (args, argblock, num_actuals);
2648 #ifdef PREFERRED_STACK_BOUNDARY
2649 /* If we push args individually in reverse order, perform stack alignment
2650 before the first push (the last arg). */
2651 if (PUSH_ARGS_REVERSED && argblock == 0
2652 && args_size.constant != unadjusted_args_size)
2654 /* When the stack adjustment is pending, we get better code
2655 by combining the adjustments. */
2656 if (pending_stack_adjust && ! (flags & (ECF_CONST | ECF_PURE))
2657 && ! inhibit_defer_pop)
2659 int adjust;
2660 args_size.constant = (unadjusted_args_size
2661 + ((pending_stack_adjust
2662 + args_size.constant
2663 - unadjusted_args_size)
2664 % (preferred_stack_boundary
2665 / BITS_PER_UNIT)));
2666 adjust = (pending_stack_adjust - args_size.constant
2667 + unadjusted_args_size);
2668 adjust_stack (GEN_INT (adjust));
2669 pending_stack_adjust = 0;
2671 else if (argblock == 0)
2672 anti_adjust_stack (GEN_INT (args_size.constant
2673 - unadjusted_args_size));
2675 /* Now that the stack is properly aligned, pops can't safely
2676 be deferred during the evaluation of the arguments. */
2677 NO_DEFER_POP;
2678 #endif
2680 /* Don't try to defer pops if preallocating, not even from the first arg,
2681 since ARGBLOCK probably refers to the SP. */
2682 if (argblock)
2683 NO_DEFER_POP;
2685 funexp = rtx_for_function_call (fndecl, exp);
2687 /* Figure out the register where the value, if any, will come back. */
2688 valreg = 0;
2689 if (TYPE_MODE (TREE_TYPE (exp)) != VOIDmode
2690 && ! structure_value_addr)
2692 if (pcc_struct_value)
2693 valreg = hard_function_value (build_pointer_type (TREE_TYPE (exp)),
2694 fndecl, (pass == 0));
2695 else
2696 valreg = hard_function_value (TREE_TYPE (exp), fndecl, (pass == 0));
2699 /* Precompute all register parameters. It isn't safe to compute anything
2700 once we have started filling any specific hard regs. */
2701 precompute_register_parameters (num_actuals, args, &reg_parm_seen);
2703 #ifdef REG_PARM_STACK_SPACE
2704 /* Save the fixed argument area if it's part of the caller's frame and
2705 is clobbered by argument setup for this call. */
2706 if (ACCUMULATE_OUTGOING_ARGS && pass)
2707 save_area = save_fixed_argument_area (reg_parm_stack_space, argblock,
2708 &low_to_save, &high_to_save);
2709 #endif
2711 /* Now store (and compute if necessary) all non-register parms.
2712 These come before register parms, since they can require block-moves,
2713 which could clobber the registers used for register parms.
2714 Parms which have partial registers are not stored here,
2715 but we do preallocate space here if they want that. */
2717 for (i = 0; i < num_actuals; i++)
2718 if (args[i].reg == 0 || args[i].pass_on_stack)
2719 store_one_arg (&args[i], argblock, flags,
2720 args_size.var != 0, reg_parm_stack_space);
2722 /* If we have a parm that is passed in registers but not in memory
2723 and whose alignment does not permit a direct copy into registers,
2724 make a group of pseudos that correspond to each register that we
2725 will later fill. */
2726 if (STRICT_ALIGNMENT)
2727 store_unaligned_arguments_into_pseudos (args, num_actuals);
2729 /* Now store any partially-in-registers parm.
2730 This is the last place a block-move can happen. */
2731 if (reg_parm_seen)
2732 for (i = 0; i < num_actuals; i++)
2733 if (args[i].partial != 0 && ! args[i].pass_on_stack)
2734 store_one_arg (&args[i], argblock, flags,
2735 args_size.var != 0, reg_parm_stack_space);
2737 #ifdef PREFERRED_STACK_BOUNDARY
2738 /* If we pushed args in forward order, perform stack alignment
2739 after pushing the last arg. */
2740 if (!PUSH_ARGS_REVERSED && argblock == 0)
2741 anti_adjust_stack (GEN_INT (args_size.constant
2742 - unadjusted_args_size));
2743 #endif
2745 /* If register arguments require space on the stack and stack space
2746 was not preallocated, allocate stack space here for arguments
2747 passed in registers. */
2748 #ifdef OUTGOING_REG_PARM_STACK_SPACE
2749 if (!ACCUMULATE_OUTGOING_ARGS
2750 && must_preallocate == 0 && reg_parm_stack_space > 0)
2751 anti_adjust_stack (GEN_INT (reg_parm_stack_space));
2752 #endif
2754 /* Pass the function the address in which to return a
2755 structure value. */
2756 if (pass != 0 && structure_value_addr && ! structure_value_addr_parm)
2758 emit_move_insn (struct_value_rtx,
2759 force_reg (Pmode,
2760 force_operand (structure_value_addr,
2761 NULL_RTX)));
2763 /* Mark the memory for the aggregate as write-only. */
2764 if (current_function_check_memory_usage)
2765 emit_library_call (chkr_set_right_libfunc, 1,
2766 VOIDmode, 3,
2767 structure_value_addr, ptr_mode,
2768 GEN_INT (struct_value_size),
2769 TYPE_MODE (sizetype),
2770 GEN_INT (MEMORY_USE_WO),
2771 TYPE_MODE (integer_type_node));
2773 if (GET_CODE (struct_value_rtx) == REG)
2774 use_reg (&call_fusage, struct_value_rtx);
2777 funexp = prepare_call_address (funexp, fndecl, &call_fusage,
2778 reg_parm_seen);
2780 load_register_parameters (args, num_actuals, &call_fusage);
2782 /* Perform postincrements before actually calling the function. */
2783 emit_queue ();
2785 /* Save a pointer to the last insn before the call, so that we can
2786 later safely search backwards to find the CALL_INSN. */
2787 before_call = get_last_insn ();
2789 /* Set up next argument register. For sibling calls on machines
2790 with register windows this should be the incoming register. */
2791 #ifdef FUNCTION_INCOMING_ARG
2792 if (pass == 0)
2793 next_arg_reg = FUNCTION_INCOMING_ARG (args_so_far, VOIDmode,
2794 void_type_node, 1);
2795 else
2796 #endif
2797 next_arg_reg = FUNCTION_ARG (args_so_far, VOIDmode,
2798 void_type_node, 1);
2800 /* All arguments and registers used for the call must be set up by
2801 now! */
2803 #ifdef PREFERRED_STACK_BOUNDARY
 2804 /* Stack must be properly aligned now. */
2805 if (stack_pointer_delta & (preferred_stack_boundary / BITS_PER_UNIT - 1))
2806 abort();
2807 #endif
2809 /* Generate the actual call instruction. */
2810 emit_call_1 (funexp, fndecl, funtype, unadjusted_args_size,
2811 args_size.constant, struct_value_size,
2812 next_arg_reg, valreg, old_inhibit_defer_pop, call_fusage,
2813 flags);
2815 /* Verify that we've deallocated all the stack we used. */
2816 if (pass
2817 && old_stack_allocated != stack_pointer_delta - pending_stack_adjust)
2818 abort();
2820 /* If call is cse'able, make appropriate pair of reg-notes around it.
2821 Test valreg so we don't crash; may safely ignore `const'
2822 if return type is void. Disable for PARALLEL return values, because
2823 we have no way to move such values into a pseudo register. */
2824 if ((flags & (ECF_CONST | ECF_PURE))
2825 && valreg != 0 && GET_CODE (valreg) != PARALLEL)
2827 rtx note = 0;
2828 rtx temp = gen_reg_rtx (GET_MODE (valreg));
2829 rtx insns;
2831 /* Mark the return value as a pointer if needed. */
2832 if (TREE_CODE (TREE_TYPE (exp)) == POINTER_TYPE)
2833 mark_reg_pointer (temp, TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp))));
2835 /* Construct an "equal form" for the value which mentions all the
2836 arguments in order as well as the function name. */
2837 for (i = 0; i < num_actuals; i++)
2838 note = gen_rtx_EXPR_LIST (VOIDmode, args[i].initial_value, note);
2839 note = gen_rtx_EXPR_LIST (VOIDmode, funexp, note);
2841 insns = get_insns ();
2842 end_sequence ();
2844 if (flags & ECF_PURE)
2845 note = gen_rtx_EXPR_LIST (VOIDmode,
2846 gen_rtx_USE (VOIDmode,
2847 gen_rtx_MEM (BLKmode,
2848 gen_rtx_SCRATCH (VOIDmode))), note);
2850 emit_libcall_block (insns, temp, valreg, note);
2852 valreg = temp;
2854 else if (flags & (ECF_CONST | ECF_PURE))
2856 /* Otherwise, just write out the sequence without a note. */
2857 rtx insns = get_insns ();
2859 end_sequence ();
2860 emit_insns (insns);
2862 else if (flags & ECF_MALLOC)
2864 rtx temp = gen_reg_rtx (GET_MODE (valreg));
2865 rtx last, insns;
2867 /* The return value from a malloc-like function is a pointer. */
2868 if (TREE_CODE (TREE_TYPE (exp)) == POINTER_TYPE)
2869 mark_reg_pointer (temp, BIGGEST_ALIGNMENT);
2871 emit_move_insn (temp, valreg);
2873 /* The return value from a malloc-like function can not alias
2874 anything else. */
2875 last = get_last_insn ();
2876 REG_NOTES (last) =
2877 gen_rtx_EXPR_LIST (REG_NOALIAS, temp, REG_NOTES (last));
2879 /* Write out the sequence. */
2880 insns = get_insns ();
2881 end_sequence ();
2882 emit_insns (insns);
2883 valreg = temp;
2886 /* For calls to `setjmp', etc., inform flow.c it should complain
2887 if nonvolatile values are live. For functions that cannot return,
2888 inform flow that control does not fall through. */
2890 if ((flags & (ECF_RETURNS_TWICE | ECF_NORETURN | ECF_LONGJMP)) || pass == 0)
2892 /* The barrier or NOTE_INSN_SETJMP note must be emitted
2893 immediately after the CALL_INSN. Some ports emit more
2894 than just a CALL_INSN above, so we must search for it here. */
2896 rtx last = get_last_insn ();
2897 while (GET_CODE (last) != CALL_INSN)
2899 last = PREV_INSN (last);
2900 /* There was no CALL_INSN? */
2901 if (last == before_call)
2902 abort ();
2905 if (flags & ECF_RETURNS_TWICE)
2907 emit_note_after (NOTE_INSN_SETJMP, last);
2908 current_function_calls_setjmp = 1;
2909 sibcall_failure = 1;
2911 else
2912 emit_barrier_after (last);
2915 if (flags & ECF_LONGJMP)
2916 current_function_calls_longjmp = 1, sibcall_failure = 1;
2918 /* If this function is returning into a memory location marked as
2919 readonly, it means it is initializing that location. But we normally
2920 treat functions as not clobbering such locations, so we need to
2921 specify that this one does. */
2922 if (target != 0 && GET_CODE (target) == MEM
2923 && structure_value_addr != 0 && RTX_UNCHANGING_P (target))
2924 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
2926 /* If value type not void, return an rtx for the value. */
2928 /* If there are cleanups to be called, don't use a hard reg as target.
2929 We need to double check this and see if it matters anymore. */
2930 if (any_pending_cleanups (1))
2932 if (target && REG_P (target)
2933 && REGNO (target) < FIRST_PSEUDO_REGISTER)
2934 target = 0;
2935 sibcall_failure = 1;
2938 if (TYPE_MODE (TREE_TYPE (exp)) == VOIDmode
2939 || ignore)
2941 target = const0_rtx;
2943 else if (structure_value_addr)
2945 if (target == 0 || GET_CODE (target) != MEM)
2947 target = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (exp)),
2948 memory_address (TYPE_MODE (TREE_TYPE (exp)),
2949 structure_value_addr));
2950 MEM_SET_IN_STRUCT_P (target,
2951 AGGREGATE_TYPE_P (TREE_TYPE (exp)));
2954 else if (pcc_struct_value)
2956 /* This is the special C++ case where we need to
2957 know what the true target was. We take care to
2958 never use this value more than once in one expression. */
2959 target = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (exp)),
2960 copy_to_reg (valreg));
2961 MEM_SET_IN_STRUCT_P (target, AGGREGATE_TYPE_P (TREE_TYPE (exp)));
2963 /* Handle calls that return values in multiple non-contiguous locations.
2964 The Irix 6 ABI has examples of this. */
2965 else if (GET_CODE (valreg) == PARALLEL)
2967 int bytes = int_size_in_bytes (TREE_TYPE (exp));
2969 if (target == 0)
2971 target = assign_stack_temp (TYPE_MODE (TREE_TYPE (exp)),
2972 bytes, 0);
2973 MEM_SET_IN_STRUCT_P (target, AGGREGATE_TYPE_P (TREE_TYPE (exp)));
2974 preserve_temp_slots (target);
2977 if (! rtx_equal_p (target, valreg))
2978 emit_group_store (target, valreg, bytes,
2979 TYPE_ALIGN (TREE_TYPE (exp)));
2981 /* We can not support sibling calls for this case. */
2982 sibcall_failure = 1;
2984 else if (target
2985 && GET_MODE (target) == TYPE_MODE (TREE_TYPE (exp))
2986 && GET_MODE (target) == GET_MODE (valreg))
2988 /* TARGET and VALREG cannot be equal at this point because the
2989 latter would not have REG_FUNCTION_VALUE_P true, while the
2990 former would if it were referring to the same register.
2992 If they refer to the same register, this move will be a no-op,
2993 except when function inlining is being done. */
2994 emit_move_insn (target, valreg);
2996 else if (TYPE_MODE (TREE_TYPE (exp)) == BLKmode)
2997 target = copy_blkmode_from_reg (target, valreg, TREE_TYPE (exp));
2998 else
2999 target = copy_to_reg (valreg);
3001 #ifdef PROMOTE_FUNCTION_RETURN
3002 /* If we promoted this return value, make the proper SUBREG. TARGET
3003 might be const0_rtx here, so be careful. */
3004 if (GET_CODE (target) == REG
3005 && TYPE_MODE (TREE_TYPE (exp)) != BLKmode
3006 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
3008 tree type = TREE_TYPE (exp);
3009 int unsignedp = TREE_UNSIGNED (type);
3011 /* If we don't promote as expected, something is wrong. */
3012 if (GET_MODE (target)
3013 != promote_mode (type, TYPE_MODE (type), &unsignedp, 1))
3014 abort ();
3016 target = gen_rtx_SUBREG (TYPE_MODE (type), target, 0);
3017 SUBREG_PROMOTED_VAR_P (target) = 1;
3018 SUBREG_PROMOTED_UNSIGNED_P (target) = unsignedp;
3020 #endif
3022 /* If size of args is variable or this was a constructor call for a stack
3023 argument, restore saved stack-pointer value. */
3025 if (old_stack_level)
3027 emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX);
3028 pending_stack_adjust = old_pending_adj;
3029 stack_arg_under_construction = old_stack_arg_under_construction;
3030 highest_outgoing_arg_in_use = initial_highest_arg_in_use;
3031 stack_usage_map = initial_stack_usage_map;
3032 sibcall_failure = 1;
3034 else if (ACCUMULATE_OUTGOING_ARGS && pass)
3036 #ifdef REG_PARM_STACK_SPACE
3037 if (save_area)
3039 restore_fixed_argument_area (save_area, argblock,
3040 high_to_save, low_to_save);
3042 #endif
3044 /* If we saved any argument areas, restore them. */
3045 for (i = 0; i < num_actuals; i++)
3046 if (args[i].save_area)
3048 enum machine_mode save_mode = GET_MODE (args[i].save_area);
3049 rtx stack_area
3050 = gen_rtx_MEM (save_mode,
3051 memory_address (save_mode,
3052 XEXP (args[i].stack_slot, 0)));
3054 if (save_mode != BLKmode)
3055 emit_move_insn (stack_area, args[i].save_area);
3056 else
3057 emit_block_move (stack_area,
3058 validize_mem (args[i].save_area),
3059 GEN_INT (args[i].size.constant),
3060 PARM_BOUNDARY);
3063 highest_outgoing_arg_in_use = initial_highest_arg_in_use;
3064 stack_usage_map = initial_stack_usage_map;
3067 /* If this was alloca, record the new stack level for nonlocal gotos.
3068 Check for the handler slots since we might not have a save area
3069 for non-local gotos. */
3071 if ((flags & ECF_MAY_BE_ALLOCA) && nonlocal_goto_handler_slots != 0)
3072 emit_stack_save (SAVE_NONLOCAL, &nonlocal_goto_stack_level, NULL_RTX);
3074 pop_temp_slots ();
3076 /* Free up storage we no longer need. */
3077 for (i = 0; i < num_actuals; ++i)
3078 if (args[i].aligned_regs)
3079 free (args[i].aligned_regs);
3081 if (pass == 0)
3083 /* Undo the fake expand_start_target_temps we did earlier. If
3084 there had been any cleanups created, we've already set
3085 sibcall_failure. */
3086 expand_end_target_temps ();
3089 insns = get_insns ();
3090 end_sequence ();
3092 if (pass == 0)
3094 tail_call_insns = insns;
3096 /* If something prevents making this a sibling call,
3097 zero out the sequence. */
3098 if (sibcall_failure)
3099 tail_call_insns = NULL_RTX;
3100 /* Restore the pending stack adjustment now that we have
3101 finished generating the sibling call sequence. */
3103 pending_stack_adjust = save_pending_stack_adjust;
3104 stack_pointer_delta = save_stack_pointer_delta;
3106 else
3107 normal_call_insns = insns;
3110 /* The function optimize_sibling_and_tail_recursive_calls doesn't
3111 handle CALL_PLACEHOLDERs inside other CALL_PLACEHOLDERs. This
3112 can happen if the arguments to this function call an inline
3113 function who's expansion contains another CALL_PLACEHOLDER.
3115 If there are any C_Ps in any of these sequences, replace them
3116 with their normal call. */
3118 for (insn = normal_call_insns; insn; insn = NEXT_INSN (insn))
3119 if (GET_CODE (insn) == CALL_INSN
3120 && GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
3121 replace_call_placeholder (insn, sibcall_use_normal);
3123 for (insn = tail_call_insns; insn; insn = NEXT_INSN (insn))
3124 if (GET_CODE (insn) == CALL_INSN
3125 && GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
3126 replace_call_placeholder (insn, sibcall_use_normal);
3128 for (insn = tail_recursion_insns; insn; insn = NEXT_INSN (insn))
3129 if (GET_CODE (insn) == CALL_INSN
3130 && GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
3131 replace_call_placeholder (insn, sibcall_use_normal);
3133 /* If this was a potential tail recursion site, then emit a
3134 CALL_PLACEHOLDER with the normal and the tail recursion streams.
3135 One of them will be selected later. */
3136 if (tail_recursion_insns || tail_call_insns)
3138 /* The tail recursion label must be kept around. We could expose
3139 its use in the CALL_PLACEHOLDER, but that creates unwanted edges
3140 and makes determining true tail recursion sites difficult.
3142 So we set LABEL_PRESERVE_P here, then clear it when we select
3143 one of the call sequences after rtl generation is complete. */
3144 if (tail_recursion_insns)
3145 LABEL_PRESERVE_P (tail_recursion_label) = 1;
3146 emit_call_insn (gen_rtx_CALL_PLACEHOLDER (VOIDmode, normal_call_insns,
3147 tail_call_insns,
3148 tail_recursion_insns,
3149 tail_recursion_label));
3151 else
3152 emit_insns (normal_call_insns);
3154 currently_expanding_call--;
3156 return target;
3159 /* Returns nonzero if FUN is the symbol for a library function which can
3160 not throw. */
3162 static int
3163 libfunc_nothrow (fun)
3164 rtx fun;
3166 if (fun == throw_libfunc
3167 || fun == rethrow_libfunc
3168 || fun == sjthrow_libfunc
3169 || fun == sjpopnthrow_libfunc)
3170 return 0;
3172 return 1;
/* Output a library call to function FUN (a SYMBOL_REF rtx).
   The RETVAL parameter specifies whether return value needs to be saved, other
   parameters are documented in the emit_library_call function below.  */

static rtx
emit_library_call_value_1 (retval, orgfun, value, fn_type, outmode, nargs, p)
     int retval;
     rtx orgfun;
     rtx value;
     int fn_type;
     enum machine_mode outmode;
     int nargs;
     va_list p;
{
  /* Total size in bytes of all the stack-parms scanned so far.  */
  struct args_size args_size;
  /* Size of arguments before any adjustments (such as rounding).  */
  struct args_size original_args_size;
  register int argnum;
  rtx fun;
  int inc;
  int count;
  struct args_size alignment_pad;
  rtx argblock = 0;
  CUMULATIVE_ARGS args_so_far;
  struct arg { rtx value; enum machine_mode mode; rtx reg; int partial;
	       struct args_size offset; struct args_size size; rtx save_area; };
  struct arg *argvec;
  int old_inhibit_defer_pop = inhibit_defer_pop;
  rtx call_fusage = 0;
  rtx mem_value = 0;
  rtx valreg;
  int pcc_struct_value = 0;
  int struct_value_size = 0;
  int flags = 0;
  int reg_parm_stack_space = 0;
  int needed;

#ifdef REG_PARM_STACK_SPACE
  /* Define the boundary of the register parm stack space that needs to be
     saved, if any.  */
  int low_to_save = -1, high_to_save = 0;
  rtx save_area = 0;		/* Place that it is saved */
#endif

  /* Size of the stack reserved for parameter registers.  */
  int initial_highest_arg_in_use = highest_outgoing_arg_in_use;
  char *initial_stack_usage_map = stack_usage_map;

#ifdef REG_PARM_STACK_SPACE
#ifdef MAYBE_REG_PARM_STACK_SPACE
  reg_parm_stack_space = MAYBE_REG_PARM_STACK_SPACE;
#else
  reg_parm_stack_space = REG_PARM_STACK_SPACE ((tree) 0);
#endif
#endif

  /* FN_TYPE encodes the call's CSE class: 1 = `const', 2 = `pure',
     anything else = normal.  */
  if (fn_type == 1)
    flags |= ECF_CONST;
  else if (fn_type == 2)
    flags |= ECF_PURE;
  fun = orgfun;

  if (libfunc_nothrow (fun))
    flags |= ECF_NOTHROW;

#ifdef PREFERRED_STACK_BOUNDARY
  /* Ensure current function's preferred stack boundary is at least
     what we need.  */
  if (cfun->preferred_stack_boundary < PREFERRED_STACK_BOUNDARY)
    cfun->preferred_stack_boundary = PREFERRED_STACK_BOUNDARY;
#endif

  /* If this kind of value comes back in memory,
     decide where in memory it should come back.  */
  if (outmode != VOIDmode && aggregate_value_p (type_for_mode (outmode, 0)))
    {
#ifdef PCC_STATIC_STRUCT_RETURN
      rtx pointer_reg
	= hard_function_value (build_pointer_type (type_for_mode (outmode, 0)),
			       0, 0);
      mem_value = gen_rtx_MEM (outmode, pointer_reg);
      pcc_struct_value = 1;
      if (value == 0)
	value = gen_reg_rtx (outmode);
#else /* not PCC_STATIC_STRUCT_RETURN */
      struct_value_size = GET_MODE_SIZE (outmode);
      if (value != 0 && GET_CODE (value) == MEM)
	mem_value = value;
      else
	mem_value = assign_stack_temp (outmode, GET_MODE_SIZE (outmode), 0);
#endif

      /* This call returns a big structure, so a libcall note cannot
	 describe its value; drop the CSE-ability flags.  */
      flags &= ~(ECF_CONST | ECF_PURE);
    }

  /* ??? Unfinished: must pass the memory address as an argument.  */

  /* Copy all the libcall-arguments out of the varargs data
     and into a vector ARGVEC.

     Compute how to pass each argument.  We only support a very small subset
     of the full argument passing conventions to limit complexity here since
     library functions shouldn't have many args.  */

  argvec = (struct arg *) alloca ((nargs + 1) * sizeof (struct arg));
  bzero ((char *) argvec, (nargs + 1) * sizeof (struct arg));

  INIT_CUMULATIVE_ARGS (args_so_far, NULL_TREE, fun, 0);

  args_size.constant = 0;
  args_size.var = 0;

  count = 0;

  /* Now we are about to start emitting insns that can be deleted
     if a libcall is deleted.  */
  if (flags & (ECF_CONST | ECF_PURE))
    start_sequence ();

  push_temp_slots ();

  /* If there's a structure value address to be passed,
     either pass it in the special place, or pass it as an extra argument.  */
  if (mem_value && struct_value_rtx == 0 && ! pcc_struct_value)
    {
      rtx addr = XEXP (mem_value, 0);
      /* The hidden structure-return address becomes an extra leading
	 argument.  */
      nargs++;

      /* Make sure it is a reasonable operand for a move or push insn.  */
      if (GET_CODE (addr) != REG && GET_CODE (addr) != MEM
	  && ! (CONSTANT_P (addr) && LEGITIMATE_CONSTANT_P (addr)))
	addr = force_operand (addr, NULL_RTX);

      argvec[count].value = addr;
      argvec[count].mode = Pmode;
      argvec[count].partial = 0;

      argvec[count].reg = FUNCTION_ARG (args_so_far, Pmode, NULL_TREE, 1);
#ifdef FUNCTION_ARG_PARTIAL_NREGS
      if (FUNCTION_ARG_PARTIAL_NREGS (args_so_far, Pmode, NULL_TREE, 1))
	abort ();
#endif

      locate_and_pad_parm (Pmode, NULL_TREE,
#ifdef STACK_PARMS_IN_REG_PARM_AREA
			   1,
#else
			   argvec[count].reg != 0,
#endif
			   NULL_TREE, &args_size, &argvec[count].offset,
			   &argvec[count].size, &alignment_pad);

      if (argvec[count].reg == 0 || argvec[count].partial != 0
	  || reg_parm_stack_space > 0)
	args_size.constant += argvec[count].size.constant;

      FUNCTION_ARG_ADVANCE (args_so_far, Pmode, (tree) 0, 1);

      count++;
    }

  for (; count < nargs; count++)
    {
      rtx val = va_arg (p, rtx);
      enum machine_mode mode = va_arg (p, enum machine_mode);

      /* We cannot convert the arg value to the mode the library wants here;
	 must do it earlier where we know the signedness of the arg.  */
      if (mode == BLKmode
	  || (GET_MODE (val) != mode && GET_MODE (val) != VOIDmode))
	abort ();

      /* On some machines, there's no way to pass a float to a library fcn.
	 Pass it as a double instead.  */
#ifdef LIBGCC_NEEDS_DOUBLE
      if (LIBGCC_NEEDS_DOUBLE && mode == SFmode)
	val = convert_modes (DFmode, SFmode, val, 0), mode = DFmode;
#endif

      /* There's no need to call protect_from_queue, because
	 either emit_move_insn or emit_push_insn will do that.  */

      /* Make sure it is a reasonable operand for a move or push insn.  */
      if (GET_CODE (val) != REG && GET_CODE (val) != MEM
	  && ! (CONSTANT_P (val) && LEGITIMATE_CONSTANT_P (val)))
	val = force_operand (val, NULL_RTX);

#ifdef FUNCTION_ARG_PASS_BY_REFERENCE
      if (FUNCTION_ARG_PASS_BY_REFERENCE (args_so_far, mode, NULL_TREE, 1))
	{
	  /* We do not support FUNCTION_ARG_CALLEE_COPIES here since it can
	     be viewed as just an efficiency improvement.  */
	  rtx slot = assign_stack_temp (mode, GET_MODE_SIZE (mode), 0);
	  emit_move_insn (slot, val);
	  val = force_operand (XEXP (slot, 0), NULL_RTX);
	  mode = Pmode;
	}
#endif

      argvec[count].value = val;
      argvec[count].mode = mode;

      argvec[count].reg = FUNCTION_ARG (args_so_far, mode, NULL_TREE, 1);

#ifdef FUNCTION_ARG_PARTIAL_NREGS
      argvec[count].partial
	= FUNCTION_ARG_PARTIAL_NREGS (args_so_far, mode, NULL_TREE, 1);
#else
      argvec[count].partial = 0;
#endif

      locate_and_pad_parm (mode, NULL_TREE,
#ifdef STACK_PARMS_IN_REG_PARM_AREA
			   1,
#else
			   argvec[count].reg != 0,
#endif
			   NULL_TREE, &args_size, &argvec[count].offset,
			   &argvec[count].size, &alignment_pad);

      if (argvec[count].size.var)
	abort ();

      if (reg_parm_stack_space == 0 && argvec[count].partial)
	argvec[count].size.constant -= argvec[count].partial * UNITS_PER_WORD;

      if (argvec[count].reg == 0 || argvec[count].partial != 0
	  || reg_parm_stack_space > 0)
	args_size.constant += argvec[count].size.constant;

      FUNCTION_ARG_ADVANCE (args_so_far, mode, (tree) 0, 1);
    }

#ifdef FINAL_REG_PARM_STACK_SPACE
  reg_parm_stack_space = FINAL_REG_PARM_STACK_SPACE (args_size.constant,
						     args_size.var);
#endif
  /* If this machine requires an external definition for library
     functions, write one out.  */
  assemble_external_libcall (fun);

  original_args_size = args_size;
#ifdef PREFERRED_STACK_BOUNDARY
  /* Round the block of outgoing arguments up to the preferred stack
     boundary, accounting for any pending adjustment already applied.  */
  args_size.constant = (((args_size.constant
			  + stack_pointer_delta
			  + STACK_BYTES - 1)
			 / STACK_BYTES
			 * STACK_BYTES)
			- stack_pointer_delta);
#endif

  args_size.constant = MAX (args_size.constant,
			    reg_parm_stack_space);

#ifndef OUTGOING_REG_PARM_STACK_SPACE
  args_size.constant -= reg_parm_stack_space;
#endif

  if (args_size.constant > current_function_outgoing_args_size)
    current_function_outgoing_args_size = args_size.constant;

  if (ACCUMULATE_OUTGOING_ARGS)
    {
      /* Since the stack pointer will never be pushed, it is possible for
	 the evaluation of a parm to clobber something we have already
	 written to the stack.  Since most function calls on RISC machines
	 do not use the stack, this is uncommon, but must work correctly.

	 Therefore, we save any area of the stack that was already written
	 and that we are using.  Here we set up to do this by making a new
	 stack usage map from the old one.

	 Another approach might be to try to reorder the argument
	 evaluations to avoid this conflicting stack usage.  */

      needed = args_size.constant;

#ifndef OUTGOING_REG_PARM_STACK_SPACE
      /* Since we will be writing into the entire argument area, the
	 map must be allocated for its entire size, not just the part that
	 is the responsibility of the caller.  */
      needed += reg_parm_stack_space;
#endif

#ifdef ARGS_GROW_DOWNWARD
      highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
					 needed + 1);
#else
      highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
					 needed);
#endif
      stack_usage_map = (char *) alloca (highest_outgoing_arg_in_use);

      if (initial_highest_arg_in_use)
	bcopy (initial_stack_usage_map, stack_usage_map,
	       initial_highest_arg_in_use);

      if (initial_highest_arg_in_use != highest_outgoing_arg_in_use)
	bzero (&stack_usage_map[initial_highest_arg_in_use],
	       highest_outgoing_arg_in_use - initial_highest_arg_in_use);
      needed = 0;

      /* The address of the outgoing argument list must not be copied to a
	 register here, because argblock would be left pointing to the
	 wrong place after the call to allocate_dynamic_stack_space below.  */

      argblock = virtual_outgoing_args_rtx;
    }
  else
    {
      if (!PUSH_ARGS)
	argblock = push_block (GEN_INT (args_size.constant), 0, 0);
    }

#ifdef PREFERRED_STACK_BOUNDARY
  /* If we push args individually in reverse order, perform stack alignment
     before the first push (the last arg).  */
  if (argblock == 0 && PUSH_ARGS_REVERSED)
    anti_adjust_stack (GEN_INT (args_size.constant
				- original_args_size.constant));
#endif

  if (PUSH_ARGS_REVERSED)
    {
      inc = -1;
      argnum = nargs - 1;
    }
  else
    {
      inc = 1;
      argnum = 0;
    }

#ifdef REG_PARM_STACK_SPACE
  if (ACCUMULATE_OUTGOING_ARGS)
    {
      /* The argument list is the property of the called routine and it
	 may clobber it.  If the fixed area has been used for previous
	 parameters, we must save and restore it.

	 Here we compute the boundary of the area that needs to be saved,
	 if any.  */

#ifdef ARGS_GROW_DOWNWARD
      for (count = 0; count < reg_parm_stack_space + 1; count++)
#else
      for (count = 0; count < reg_parm_stack_space; count++)
#endif
	{
	  if (count >= highest_outgoing_arg_in_use
	      || stack_usage_map[count] == 0)
	    continue;

	  if (low_to_save == -1)
	    low_to_save = count;

	  high_to_save = count;
	}

      if (low_to_save >= 0)
	{
	  int num_to_save = high_to_save - low_to_save + 1;
	  enum machine_mode save_mode
	    = mode_for_size (num_to_save * BITS_PER_UNIT, MODE_INT, 1);
	  rtx stack_area;

	  /* If we don't have the required alignment, must do this in BLKmode.  */
	  if ((low_to_save & (MIN (GET_MODE_SIZE (save_mode),
				   BIGGEST_ALIGNMENT / UNITS_PER_WORD) - 1)))
	    save_mode = BLKmode;

#ifdef ARGS_GROW_DOWNWARD
	  stack_area = gen_rtx_MEM (save_mode,
				    memory_address (save_mode,
						    plus_constant (argblock,
								   - high_to_save)));
#else
	  stack_area = gen_rtx_MEM (save_mode,
				    memory_address (save_mode,
						    plus_constant (argblock,
								   low_to_save)));
#endif
	  if (save_mode == BLKmode)
	    {
	      save_area = assign_stack_temp (BLKmode, num_to_save, 0);
	      emit_block_move (validize_mem (save_area), stack_area,
			       GEN_INT (num_to_save), PARM_BOUNDARY);
	    }
	  else
	    {
	      save_area = gen_reg_rtx (save_mode);
	      emit_move_insn (save_area, stack_area);
	    }
	}
    }
#endif

  /* Push the args that need to be pushed.  */

  /* ARGNUM indexes the ARGVEC array in the order in which the arguments
     are to be pushed.  */
  for (count = 0; count < nargs; count++, argnum += inc)
    {
      register enum machine_mode mode = argvec[argnum].mode;
      register rtx val = argvec[argnum].value;
      rtx reg = argvec[argnum].reg;
      int partial = argvec[argnum].partial;
      int lower_bound = 0, upper_bound = 0, i;

      /* Args passed entirely in registers are loaded later; here we
	 handle only args that need (at least partial) stack space.  */
      if (! (reg != 0 && partial == 0))
	{
	  if (ACCUMULATE_OUTGOING_ARGS)
	    {
	      /* If this is being stored into a pre-allocated, fixed-size,
		 stack area, save any previous data at that location.  */

#ifdef ARGS_GROW_DOWNWARD
	      /* stack_slot is negative, but we want to index stack_usage_map
		 with positive values.  */
	      upper_bound = -argvec[argnum].offset.constant + 1;
	      lower_bound = upper_bound - argvec[argnum].size.constant;
#else
	      lower_bound = argvec[argnum].offset.constant;
	      upper_bound = lower_bound + argvec[argnum].size.constant;
#endif

	      for (i = lower_bound; i < upper_bound; i++)
		if (stack_usage_map[i]
		    /* Don't store things in the fixed argument area at this
		       point; it has already been saved.  */
		    && i > reg_parm_stack_space)
		  break;

	      if (i != upper_bound)
		{
		  /* We need to make a save area.  See what mode we can make
		     it. */
		  enum machine_mode save_mode
		    = mode_for_size (argvec[argnum].size.constant
				     * BITS_PER_UNIT,
				     MODE_INT, 1);
		  rtx stack_area
		    = gen_rtx_MEM
		      (save_mode,
		       memory_address
		       (save_mode,
			plus_constant (argblock,
				       argvec[argnum].offset.constant)));
		  argvec[argnum].save_area = gen_reg_rtx (save_mode);

		  emit_move_insn (argvec[argnum].save_area, stack_area);
		}
	    }

	  emit_push_insn (val, mode, NULL_TREE, NULL_RTX, 0, partial, reg, 0,
			  argblock, GEN_INT (argvec[argnum].offset.constant),
			  reg_parm_stack_space, ARGS_SIZE_RTX (alignment_pad));

	  /* Now mark the segment we just used.  */
	  if (ACCUMULATE_OUTGOING_ARGS)
	    for (i = lower_bound; i < upper_bound; i++)
	      stack_usage_map[i] = 1;

	  NO_DEFER_POP;
	}
    }

#ifdef PREFERRED_STACK_BOUNDARY
  /* If we pushed args in forward order, perform stack alignment
     after pushing the last arg.  */
  if (argblock == 0 && !PUSH_ARGS_REVERSED)
    anti_adjust_stack (GEN_INT (args_size.constant
				- original_args_size.constant));
#endif

  if (PUSH_ARGS_REVERSED)
    argnum = nargs - 1;
  else
    argnum = 0;

  fun = prepare_call_address (fun, NULL_TREE, &call_fusage, 0);

  /* Now load any reg parms into their regs.  */

  /* ARGNUM indexes the ARGVEC array in the order in which the arguments
     are to be pushed.  */
  for (count = 0; count < nargs; count++, argnum += inc)
    {
      register rtx val = argvec[argnum].value;
      rtx reg = argvec[argnum].reg;
      int partial = argvec[argnum].partial;

      /* Handle calls that pass values in multiple non-contiguous
	 locations.  The PA64 has examples of this for library calls.  */
      if (reg != 0 && GET_CODE (reg) == PARALLEL)
	emit_group_load (reg, val,
			 GET_MODE_SIZE (GET_MODE (val)),
			 GET_MODE_ALIGNMENT (GET_MODE (val)));
      else if (reg != 0 && partial == 0)
	emit_move_insn (reg, val);

      NO_DEFER_POP;
    }

  /* Any regs containing parms remain in use through the call.  */
  for (count = 0; count < nargs; count++)
    {
      rtx reg = argvec[count].reg;
      if (reg != 0 && GET_CODE (reg) == PARALLEL)
	use_group_regs (&call_fusage, reg);
      else if (reg != 0)
	use_reg (&call_fusage, reg);
    }

  /* Pass the function the address in which to return a structure value.  */
  if (mem_value != 0 && struct_value_rtx != 0 && ! pcc_struct_value)
    {
      emit_move_insn (struct_value_rtx,
		      force_reg (Pmode,
				 force_operand (XEXP (mem_value, 0),
						NULL_RTX)));
      if (GET_CODE (struct_value_rtx) == REG)
	use_reg (&call_fusage, struct_value_rtx);
    }

  /* Don't allow popping to be deferred, since then
     cse'ing of library calls could delete a call and leave the pop.  */
  NO_DEFER_POP;
  valreg = (mem_value == 0 && outmode != VOIDmode
	    ? hard_libcall_value (outmode) : NULL_RTX);

#ifdef PREFERRED_STACK_BOUNDARY
  /* Stack must be properly aligned now.  */
  if (stack_pointer_delta & (PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT - 1))
    abort();
#endif

  /* We pass the old value of inhibit_defer_pop + 1 to emit_call_1, which
     will set inhibit_defer_pop to that value.  */
  /* The return type is needed to decide how many bytes the function pops.
     Signedness plays no role in that, so for simplicity, we pretend it's
     always signed.  We also assume that the list of arguments passed has
     no impact, so we pretend it is unknown.  */

  emit_call_1 (fun,
	       get_identifier (XSTR (orgfun, 0)),
	       build_function_type (outmode == VOIDmode ? void_type_node
				    : type_for_mode (outmode, 0), NULL_TREE),
	       original_args_size.constant, args_size.constant,
	       struct_value_size,
	       FUNCTION_ARG (args_so_far, VOIDmode, void_type_node, 1),
	       valreg,
	       old_inhibit_defer_pop + 1, call_fusage, flags);

  /* Now restore inhibit_defer_pop to its actual original value.  */
  OK_DEFER_POP;

  /* If call is cse'able, make appropriate pair of reg-notes around it.
     Test valreg so we don't crash; may safely ignore `const'
     if return type is void.  Disable for PARALLEL return values, because
     we have no way to move such values into a pseudo register.  */
  if ((flags & (ECF_CONST | ECF_PURE))
      && valreg != 0 && GET_CODE (valreg) != PARALLEL)
    {
      rtx note = 0;
      rtx temp = gen_reg_rtx (GET_MODE (valreg));
      rtx insns;
      int i;

      /* Construct an "equal form" for the value which mentions all the
	 arguments in order as well as the function name.  */
      for (i = 0; i < nargs; i++)
	note = gen_rtx_EXPR_LIST (VOIDmode, argvec[i].value, note);
      note = gen_rtx_EXPR_LIST (VOIDmode, fun, note);

      insns = get_insns ();
      end_sequence ();

      if (flags & ECF_PURE)
	note = gen_rtx_EXPR_LIST (VOIDmode,
				  gen_rtx_USE (VOIDmode,
					       gen_rtx_MEM (BLKmode,
							    gen_rtx_SCRATCH (VOIDmode))), note);

      emit_libcall_block (insns, temp, valreg, note);

      valreg = temp;
    }
  else if (flags & (ECF_CONST | ECF_PURE))
    {
      /* Otherwise, just write out the sequence without a note.  */
      rtx insns = get_insns ();

      end_sequence ();
      emit_insns (insns);
    }
  pop_temp_slots ();

  /* Copy the value to the right place.  */
  if (outmode != VOIDmode && retval)
    {
      if (mem_value)
	{
	  if (value == 0)
	    value = mem_value;
	  if (value != mem_value)
	    emit_move_insn (value, mem_value);
	}
      else if (value != 0)
	emit_move_insn (value, hard_libcall_value (outmode));
      else
	value = hard_libcall_value (outmode);
    }

  if (ACCUMULATE_OUTGOING_ARGS)
    {
#ifdef REG_PARM_STACK_SPACE
      if (save_area)
	{
	  enum machine_mode save_mode = GET_MODE (save_area);
#ifdef ARGS_GROW_DOWNWARD
	  rtx stack_area
	    = gen_rtx_MEM (save_mode,
			   memory_address (save_mode,
					   plus_constant (argblock,
							  - high_to_save)));
#else
	  rtx stack_area
	    = gen_rtx_MEM (save_mode,
			   memory_address (save_mode,
					   plus_constant (argblock, low_to_save)));
#endif
	  if (save_mode != BLKmode)
	    emit_move_insn (stack_area, save_area);
	  else
	    emit_block_move (stack_area, validize_mem (save_area),
			     GEN_INT (high_to_save - low_to_save + 1),
			     PARM_BOUNDARY);
	}
#endif

      /* If we saved any argument areas, restore them.  */
      for (count = 0; count < nargs; count++)
	if (argvec[count].save_area)
	  {
	    enum machine_mode save_mode = GET_MODE (argvec[count].save_area);
	    rtx stack_area
	      = gen_rtx_MEM (save_mode,
			     memory_address
			     (save_mode,
			      plus_constant (argblock,
					     argvec[count].offset.constant)));

	    emit_move_insn (stack_area, argvec[count].save_area);
	  }

      highest_outgoing_arg_in_use = initial_highest_arg_in_use;
      stack_usage_map = initial_stack_usage_map;
    }

  return value;
}
/* Output a library call to function FUN (a SYMBOL_REF rtx)
   for a value of mode OUTMODE,
   with NARGS different arguments, passed as alternating rtx values
   and machine_modes to convert them to.
   The rtx values should have been passed through protect_from_queue already.

   FN_TYPE is zero for `normal' calls, one for `const' calls, which
   will be enclosed in REG_LIBCALL/REG_RETVAL notes, and two for `pure'
   calls, that are handled like `const' calls with extra
   (use (memory (scratch))).

   NOTE(review): an older comment here mentioned a NO_QUEUE argument;
   no such parameter exists in this signature — verify against callers.  */

void
emit_library_call VPARAMS((rtx orgfun, int fn_type, enum machine_mode outmode,
			   int nargs, ...))
{
  /* On pre-ANSI hosts VPARAMS hides the named parameters, so they must
     be redeclared here and pulled out of the va_list by hand.  */
#ifndef ANSI_PROTOTYPES
  rtx orgfun;
  int fn_type;
  enum machine_mode outmode;
  int nargs;
#endif
  va_list p;

  VA_START (p, nargs);

#ifndef ANSI_PROTOTYPES
  orgfun = va_arg (p, rtx);
  fn_type = va_arg (p, int);
  outmode = va_arg (p, enum machine_mode);
  nargs = va_arg (p, int);
#endif

  /* RETVAL == 0: the caller does not want the return value saved.  */
  emit_library_call_value_1 (0, orgfun, NULL_RTX, fn_type, outmode, nargs, p);

  va_end (p);
}
/* Like emit_library_call except that an extra argument, VALUE,
   comes second and says where to store the result.
   (If VALUE is zero, this function chooses a convenient way
   to return the value.)

   This function returns an rtx for where the value is to be found.
   If VALUE is nonzero, VALUE is returned.  */

rtx
emit_library_call_value VPARAMS((rtx orgfun, rtx value, int fn_type,
				 enum machine_mode outmode, int nargs, ...))
{
  /* On pre-ANSI hosts VPARAMS hides the named parameters, so they must
     be redeclared here and pulled out of the va_list by hand.  */
#ifndef ANSI_PROTOTYPES
  rtx orgfun;
  rtx value;
  int fn_type;
  enum machine_mode outmode;
  int nargs;
#endif
  va_list p;

  VA_START (p, nargs);

#ifndef ANSI_PROTOTYPES
  orgfun = va_arg (p, rtx);
  value = va_arg (p, rtx);
  fn_type = va_arg (p, int);
  outmode = va_arg (p, enum machine_mode);
  nargs = va_arg (p, int);
#endif

  /* RETVAL == 1: the return value must be saved into VALUE (or a
     location chosen by emit_library_call_value_1 if VALUE is zero).  */
  value = emit_library_call_value_1 (1, orgfun, value, fn_type, outmode, nargs, p);

  va_end (p);

  return value;
}
3917 #if 0
3918 /* Return an rtx which represents a suitable home on the stack
3919 given TYPE, the type of the argument looking for a home.
3920 This is called only for BLKmode arguments.
3922 SIZE is the size needed for this target.
3923 ARGS_ADDR is the address of the bottom of the argument block for this call.
3924 OFFSET describes this parameter's offset into ARGS_ADDR. It is meaningless
3925 if this machine uses push insns. */
3927 static rtx
3928 target_for_arg (type, size, args_addr, offset)
3929 tree type;
3930 rtx size;
3931 rtx args_addr;
3932 struct args_size offset;
3934 rtx target;
3935 rtx offset_rtx = ARGS_SIZE_RTX (offset);
3937 /* We do not call memory_address if possible,
3938 because we want to address as close to the stack
3939 as possible. For non-variable sized arguments,
3940 this will be stack-pointer relative addressing. */
3941 if (GET_CODE (offset_rtx) == CONST_INT)
3942 target = plus_constant (args_addr, INTVAL (offset_rtx));
3943 else
3945 /* I have no idea how to guarantee that this
3946 will work in the presence of register parameters. */
3947 target = gen_rtx_PLUS (Pmode, args_addr, offset_rtx);
3948 target = memory_address (QImode, target);
3951 return gen_rtx_MEM (BLKmode, target);
3953 #endif
/* Store a single argument for a function call
   into the register or memory area where it must be passed.
   *ARG describes the argument value and where to pass it.

   ARGBLOCK is the address of the stack-block for all the arguments,
   or 0 on a machine where arguments are pushed individually.

   FLAGS is a bitmask of ECF_* call flags; ECF_MAY_BE_ALLOCA says this
   could be a call to `alloca', so we must be careful about how the
   stack is used, and ECF_SIBCALL marks a sibling (tail) call.

   VARIABLE_SIZE nonzero says that this was a variable-sized outgoing
   argument stack.  This is used if ACCUMULATE_OUTGOING_ARGS to indicate
   that we need not worry about saving and restoring the stack.

   REG_PARM_STACK_SPACE is the size of the area of the stack reserved
   for arguments that are also passed in registers.  */
3971 static void
3972 store_one_arg (arg, argblock, flags, variable_size,
3973 reg_parm_stack_space)
3974 struct arg_data *arg;
3975 rtx argblock;
3976 int flags;
3977 int variable_size ATTRIBUTE_UNUSED;
3978 int reg_parm_stack_space;
3980 register tree pval = arg->tree_value;
3981 rtx reg = 0;
3982 int partial = 0;
3983 int used = 0;
3984 int i, lower_bound = 0, upper_bound = 0;
3986 if (TREE_CODE (pval) == ERROR_MARK)
3987 return;
3989 /* Push a new temporary level for any temporaries we make for
3990 this argument. */
3991 push_temp_slots ();
3993 if (ACCUMULATE_OUTGOING_ARGS && !(flags & ECF_SIBCALL))
3995 /* If this is being stored into a pre-allocated, fixed-size, stack area,
3996 save any previous data at that location. */
3997 if (argblock && ! variable_size && arg->stack)
3999 #ifdef ARGS_GROW_DOWNWARD
4000 /* stack_slot is negative, but we want to index stack_usage_map
4001 with positive values. */
4002 if (GET_CODE (XEXP (arg->stack_slot, 0)) == PLUS)
4003 upper_bound = -INTVAL (XEXP (XEXP (arg->stack_slot, 0), 1)) + 1;
4004 else
4005 upper_bound = 0;
4007 lower_bound = upper_bound - arg->size.constant;
4008 #else
4009 if (GET_CODE (XEXP (arg->stack_slot, 0)) == PLUS)
4010 lower_bound = INTVAL (XEXP (XEXP (arg->stack_slot, 0), 1));
4011 else
4012 lower_bound = 0;
4014 upper_bound = lower_bound + arg->size.constant;
4015 #endif
4017 for (i = lower_bound; i < upper_bound; i++)
4018 if (stack_usage_map[i]
4019 /* Don't store things in the fixed argument area at this point;
4020 it has already been saved. */
4021 && i > reg_parm_stack_space)
4022 break;
4024 if (i != upper_bound)
4026 /* We need to make a save area. See what mode we can make it. */
4027 enum machine_mode save_mode
4028 = mode_for_size (arg->size.constant * BITS_PER_UNIT, MODE_INT, 1);
4029 rtx stack_area
4030 = gen_rtx_MEM (save_mode,
4031 memory_address (save_mode,
4032 XEXP (arg->stack_slot, 0)));
4034 if (save_mode == BLKmode)
4036 arg->save_area = assign_stack_temp (BLKmode,
4037 arg->size.constant, 0);
4038 MEM_SET_IN_STRUCT_P (arg->save_area,
4039 AGGREGATE_TYPE_P (TREE_TYPE
4040 (arg->tree_value)));
4041 preserve_temp_slots (arg->save_area);
4042 emit_block_move (validize_mem (arg->save_area), stack_area,
4043 GEN_INT (arg->size.constant),
4044 PARM_BOUNDARY);
4046 else
4048 arg->save_area = gen_reg_rtx (save_mode);
4049 emit_move_insn (arg->save_area, stack_area);
4053 /* Now that we have saved any slots that will be overwritten by this
4054 store, mark all slots this store will use. We must do this before
4055 we actually expand the argument since the expansion itself may
4056 trigger library calls which might need to use the same stack slot. */
4057 if (argblock && ! variable_size && arg->stack)
4058 for (i = lower_bound; i < upper_bound; i++)
4059 stack_usage_map[i] = 1;
4062 /* If this isn't going to be placed on both the stack and in registers,
4063 set up the register and number of words. */
4064 if (! arg->pass_on_stack)
4065 reg = arg->reg, partial = arg->partial;
4067 if (reg != 0 && partial == 0)
4068 /* Being passed entirely in a register. We shouldn't be called in
4069 this case. */
4070 abort ();
4072 /* If this arg needs special alignment, don't load the registers
4073 here. */
4074 if (arg->n_aligned_regs != 0)
4075 reg = 0;
4077 /* If this is being passed partially in a register, we can't evaluate
4078 it directly into its stack slot. Otherwise, we can. */
4079 if (arg->value == 0)
4081 /* stack_arg_under_construction is nonzero if a function argument is
4082 being evaluated directly into the outgoing argument list and
4083 expand_call must take special action to preserve the argument list
4084 if it is called recursively.
4086 For scalar function arguments stack_usage_map is sufficient to
4087 determine which stack slots must be saved and restored. Scalar
4088 arguments in general have pass_on_stack == 0.
4090 If this argument is initialized by a function which takes the
4091 address of the argument (a C++ constructor or a C function
4092 returning a BLKmode structure), then stack_usage_map is
4093 insufficient and expand_call must push the stack around the
4094 function call. Such arguments have pass_on_stack == 1.
4096 Note that it is always safe to set stack_arg_under_construction,
4097 but this generates suboptimal code if set when not needed. */
4099 if (arg->pass_on_stack)
4100 stack_arg_under_construction++;
4102 arg->value = expand_expr (pval,
4103 (partial
4104 || TYPE_MODE (TREE_TYPE (pval)) != arg->mode)
4105 ? NULL_RTX : arg->stack,
4106 VOIDmode, 0);
4108 /* If we are promoting object (or for any other reason) the mode
4109 doesn't agree, convert the mode. */
4111 if (arg->mode != TYPE_MODE (TREE_TYPE (pval)))
4112 arg->value = convert_modes (arg->mode, TYPE_MODE (TREE_TYPE (pval)),
4113 arg->value, arg->unsignedp);
4115 if (arg->pass_on_stack)
4116 stack_arg_under_construction--;
4119 /* Don't allow anything left on stack from computation
4120 of argument to alloca. */
4121 if (flags & ECF_MAY_BE_ALLOCA)
4122 do_pending_stack_adjust ();
4124 if (arg->value == arg->stack)
4126 /* If the value is already in the stack slot, we are done. */
4127 if (current_function_check_memory_usage && GET_CODE (arg->stack) == MEM)
4129 emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
4130 XEXP (arg->stack, 0), Pmode,
4131 ARGS_SIZE_RTX (arg->size),
4132 TYPE_MODE (sizetype),
4133 GEN_INT (MEMORY_USE_RW),
4134 TYPE_MODE (integer_type_node));
4137 else if (arg->mode != BLKmode)
4139 register int size;
4141 /* Argument is a scalar, not entirely passed in registers.
4142 (If part is passed in registers, arg->partial says how much
4143 and emit_push_insn will take care of putting it there.)
4145 Push it, and if its size is less than the
4146 amount of space allocated to it,
4147 also bump stack pointer by the additional space.
4148 Note that in C the default argument promotions
4149 will prevent such mismatches. */
4151 size = GET_MODE_SIZE (arg->mode);
4152 /* Compute how much space the push instruction will push.
4153 On many machines, pushing a byte will advance the stack
4154 pointer by a halfword. */
4155 #ifdef PUSH_ROUNDING
4156 size = PUSH_ROUNDING (size);
4157 #endif
4158 used = size;
4160 /* Compute how much space the argument should get:
4161 round up to a multiple of the alignment for arguments. */
4162 if (none != FUNCTION_ARG_PADDING (arg->mode, TREE_TYPE (pval)))
4163 used = (((size + PARM_BOUNDARY / BITS_PER_UNIT - 1)
4164 / (PARM_BOUNDARY / BITS_PER_UNIT))
4165 * (PARM_BOUNDARY / BITS_PER_UNIT));
4167 /* This isn't already where we want it on the stack, so put it there.
4168 This can either be done with push or copy insns. */
4169 emit_push_insn (arg->value, arg->mode, TREE_TYPE (pval), NULL_RTX, 0,
4170 partial, reg, used - size, argblock,
4171 ARGS_SIZE_RTX (arg->offset), reg_parm_stack_space,
4172 ARGS_SIZE_RTX (arg->alignment_pad));
4174 else
4176 /* BLKmode, at least partly to be pushed. */
4178 register int excess;
4179 rtx size_rtx;
4181 /* Pushing a nonscalar.
4182 If part is passed in registers, PARTIAL says how much
4183 and emit_push_insn will take care of putting it there. */
4185 /* Round its size up to a multiple
4186 of the allocation unit for arguments. */
4188 if (arg->size.var != 0)
4190 excess = 0;
4191 size_rtx = ARGS_SIZE_RTX (arg->size);
4193 else
4195 /* PUSH_ROUNDING has no effect on us, because
4196 emit_push_insn for BLKmode is careful to avoid it. */
4197 excess = (arg->size.constant - int_size_in_bytes (TREE_TYPE (pval))
4198 + partial * UNITS_PER_WORD);
4199 size_rtx = expr_size (pval);
4202 emit_push_insn (arg->value, arg->mode, TREE_TYPE (pval), size_rtx,
4203 TYPE_ALIGN (TREE_TYPE (pval)), partial, reg, excess,
4204 argblock, ARGS_SIZE_RTX (arg->offset),
4205 reg_parm_stack_space,
4206 ARGS_SIZE_RTX (arg->alignment_pad));
4210 /* Unless this is a partially-in-register argument, the argument is now
4211 in the stack.
4213 ??? Note that this can change arg->value from arg->stack to
4214 arg->stack_slot and it matters when they are not the same.
4215 It isn't totally clear that this is correct in all cases. */
4216 if (partial == 0)
4217 arg->value = arg->stack_slot;
4219 /* Once we have pushed something, pops can't safely
4220 be deferred during the rest of the arguments. */
4221 NO_DEFER_POP;
4223 /* ANSI doesn't require a sequence point here,
4224 but PCC has one, so this will avoid some problems. */
4225 emit_queue ();
4227 /* Free any temporary slots made in processing this argument. Show
4228 that we might have taken the address of something and pushed that
4229 as an operand. */
4230 preserve_temp_slots (NULL_RTX);
4231 free_temp_slots ();
4232 pop_temp_slots ();