/* Convert function calls to rtl insns, for GNU C compiler.
   Copyright (C) 1989, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
   1999, 2000, 2001 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */
#include "config.h"
#include "system.h"
#include "rtl.h"
#include "tree.h"
#include "flags.h"
#include "expr.h"
#include "libfuncs.h"
#include "function.h"
#include "regs.h"
#include "toplev.h"
#include "output.h"
#include "tm_p.h"
#include "timevar.h"
#include "sbitmap.h"
#include "langhooks.h"
#include "target.h"
#if !defined FUNCTION_OK_FOR_SIBCALL
#define FUNCTION_OK_FOR_SIBCALL(DECL) 1
#endif

/* Decide whether a function's arguments should be processed
   from first to last or from last to first.

   They should if the stack and args grow in opposite directions, but
   only if we have push insns.  */

#ifdef PUSH_ROUNDING

#ifndef PUSH_ARGS_REVERSED
#if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
#define PUSH_ARGS_REVERSED PUSH_ARGS
#endif
#endif

#endif

#ifndef PUSH_ARGS_REVERSED
#define PUSH_ARGS_REVERSED 0
#endif

#ifndef STACK_POINTER_OFFSET
#define STACK_POINTER_OFFSET 0
#endif

/* Like PREFERRED_STACK_BOUNDARY but in units of bytes, not bits.  */
#define STACK_BYTES (PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT)
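
/* Illustrative arithmetic (the values are target-defined, not fixed):
   on a target with PREFERRED_STACK_BOUNDARY == 128 and BITS_PER_UNIT == 8,
   STACK_BYTES evaluates to 16, i.e. argument blocks are rounded to
   16-byte multiples.  */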

/* Data structure and subroutines used within expand_call.  */

struct arg_data
{
  /* Tree node for this argument.  */
  tree tree_value;
  /* Mode for value; TYPE_MODE unless promoted.  */
  enum machine_mode mode;
  /* Current RTL value for argument, or 0 if it isn't precomputed.  */
  rtx value;
  /* Initially-computed RTL value for argument; only for const functions.  */
  rtx initial_value;
  /* Register to pass this argument in, 0 if passed on stack, or a
     PARALLEL if the arg is to be copied into multiple non-contiguous
     registers.  */
  rtx reg;
  /* Register to pass this argument in when generating tail call sequence.
     This is not the same register as for normal calls on machines with
     register windows.  */
  rtx tail_call_reg;
  /* If REG was promoted from the actual mode of the argument expression,
     indicates whether the promotion is sign- or zero-extended.  */
  int unsignedp;
  /* Number of registers to use.  0 means put the whole arg in registers.
     Also 0 if not passed in registers.  */
  int partial;
  /* Nonzero if argument must be passed on stack.
     Note that some arguments may be passed on the stack
     even though pass_on_stack is zero, just because FUNCTION_ARG says so.
     pass_on_stack identifies arguments that *cannot* go in registers.  */
  int pass_on_stack;
  /* Offset of this argument from beginning of stack-args.  */
  struct args_size offset;
  /* Similar, but offset to the start of the stack slot.  Different from
     OFFSET if this arg pads downward.  */
  struct args_size slot_offset;
  /* Size of this argument on the stack, rounded up for any padding it gets;
     parts of the argument passed in registers do not count.
     If REG_PARM_STACK_SPACE is defined, then register parms
     are counted here as well.  */
  struct args_size size;
  /* Location on the stack at which parameter should be stored.  The store
     has already been done if STACK == VALUE.  */
  rtx stack;
  /* Location on the stack of the start of this argument slot.  This can
     differ from STACK if this arg pads downward.  This location is known
     to be aligned to FUNCTION_ARG_BOUNDARY.  */
  rtx stack_slot;
  /* Place that this stack area has been saved, if needed.  */
  rtx save_area;
  /* If an argument's alignment does not permit direct copying into registers,
     copy in smaller-sized pieces into pseudos.  These are stored in a
     block pointed to by this field.  The next field says how many
     word-sized pseudos we made.  */
  rtx *aligned_regs;
  int n_aligned_regs;
  /* The amount that the stack pointer needs to be adjusted to
     force alignment for the next argument.  */
  struct args_size alignment_pad;
};

/* A vector of one char per byte of stack space.  A byte is nonzero if
   the corresponding stack location has been used.
   This vector is used to prevent a function call within an argument from
   clobbering any stack already set up.  */
static char *stack_usage_map;

/* Size of STACK_USAGE_MAP.  */
static int highest_outgoing_arg_in_use;

/* A bitmap of virtual-incoming stack space.  A bit is set if the
   corresponding stack location's tail call argument has already been stored
   into the stack.  This bitmap is used to prevent sibling call optimization
   if the function tries to use the parent's incoming argument slots when
   they have already been overwritten with tail call arguments.  */
static sbitmap stored_args_map;

/* stack_arg_under_construction is nonzero when an argument may be
   initialized with a constructor call (including a C function that
   returns a BLKmode struct) and expand_call must take special action
   to make sure the object being constructed does not overlap the
   argument list for the constructor call.  */
int stack_arg_under_construction;

static int calls_function PARAMS ((tree, int));
static int calls_function_1 PARAMS ((tree, int));

/* Nonzero if this is a call to a `const' function.  */
#define ECF_CONST		1
/* Nonzero if this is a call to a `volatile' function, i.e. one that
   does not return.  */
#define ECF_NORETURN		2
/* Nonzero if this is a call to malloc or a related function.  */
#define ECF_MALLOC		4
/* Nonzero if it is plausible that this is a call to alloca.  */
#define ECF_MAY_BE_ALLOCA	8
/* Nonzero if this is a call to a function that won't throw an exception.  */
#define ECF_NOTHROW		16
/* Nonzero if this is a call to setjmp or a related function.  */
#define ECF_RETURNS_TWICE	32
/* Nonzero if this is a call to `longjmp'.  */
#define ECF_LONGJMP		64
/* Nonzero if this is a syscall that makes a new process in the image of
   the current one.  */
#define ECF_FORK_OR_EXEC	128
/* Nonzero if this call is eligible for sibling (tail) call optimization.  */
#define ECF_SIBCALL		256
/* Nonzero if this is a call to a "pure" function (like a const function,
   but one that may read memory).  */
#define ECF_PURE		512
/* Nonzero if this is a call to a function that returns with the stack
   pointer depressed.  */
#define ECF_SP_DEPRESSED	1024
/* Nonzero if this call is known to always return.  */
#define ECF_ALWAYS_RETURN	2048
/* Create libcall block around the call.  */
#define ECF_LIBCALL_BLOCK	4096
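
/* The ECF_* values are independent bits, so callers accumulate them with
   `|' and test them with `&'.  A hypothetical example: a call to a `const',
   non-throwing function would carry
   flags == (ECF_CONST | ECF_NOTHROW | ECF_LIBCALL_BLOCK),
   and (flags & ECF_NORETURN) would be zero for it.  */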

static void emit_call_1		PARAMS ((rtx, tree, tree, HOST_WIDE_INT,
					 HOST_WIDE_INT, HOST_WIDE_INT, rtx,
					 rtx, int, rtx, int,
					 CUMULATIVE_ARGS *));
static void precompute_register_parameters	PARAMS ((int,
							 struct arg_data *,
							 int *));
static int store_one_arg	PARAMS ((struct arg_data *, rtx, int, int,
					 int));
static void store_unaligned_arguments_into_pseudos PARAMS ((struct arg_data *,
							    int));
static int finalize_must_preallocate		PARAMS ((int, int,
							 struct arg_data *,
							 struct args_size *));
static void precompute_arguments		PARAMS ((int, int,
							 struct arg_data *));
static int compute_argument_block_size		PARAMS ((int,
							 struct args_size *,
							 int));
static void initialize_argument_information	PARAMS ((int,
							 struct arg_data *,
							 struct args_size *,
							 int, tree, tree,
							 CUMULATIVE_ARGS *,
							 int, rtx *, int *,
							 int *, int *));
static void compute_argument_addresses		PARAMS ((struct arg_data *,
							 rtx, int));
static rtx rtx_for_function_call		PARAMS ((tree, tree));
static void load_register_parameters		PARAMS ((struct arg_data *,
							 int, rtx *, int));
static rtx emit_library_call_value_1		PARAMS ((int, rtx, rtx,
							 enum libcall_type,
							 enum machine_mode,
							 int, va_list));
static int special_function_p			PARAMS ((tree, int));
static int flags_from_decl_or_type		PARAMS ((tree));
static rtx try_to_integrate			PARAMS ((tree, tree, rtx,
							 int, tree, rtx));
static int check_sibcall_argument_overlap_1	PARAMS ((rtx));
static int check_sibcall_argument_overlap	PARAMS ((rtx, struct arg_data *));

static int combine_pending_stack_adjustment_and_call
						PARAMS ((int, struct args_size *, int));

#ifdef REG_PARM_STACK_SPACE
static rtx save_fixed_argument_area	PARAMS ((int, rtx, int *, int *));
static void restore_fixed_argument_area	PARAMS ((rtx, rtx, int, int));
#endif

/* If WHICH is 1, return 1 if EXP contains a call to the built-in function
   `alloca'.

   If WHICH is 0, return 1 if EXP contains a call to any function.
   Actually, we only need to return 1 if evaluating EXP would require pushing
   arguments on the stack, but that is too difficult to compute, so we just
   assume any function call might require the stack.  */

static tree calls_function_save_exprs;

static int
calls_function (exp, which)
     tree exp;
     int which;
{
  int val;

  calls_function_save_exprs = 0;
  val = calls_function_1 (exp, which);
  calls_function_save_exprs = 0;
  return val;
}

/* Recursive function to do the work of above function.  */

static int
calls_function_1 (exp, which)
     tree exp;
     int which;
{
  int i;
  enum tree_code code = TREE_CODE (exp);
  int class = TREE_CODE_CLASS (code);
  int length = first_rtl_op (code);

  /* If this code is language-specific, we don't know what it will do.  */
  if ((int) code >= NUM_TREE_CODES)
    return 1;

  switch (code)
    {
    case CALL_EXPR:
      if (which == 0)
	return 1;
      else if ((TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
		== FUNCTION_TYPE)
	       && (TYPE_RETURNS_STACK_DEPRESSED
		   (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
	return 1;
      else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
	       && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
		   == FUNCTION_DECL)
	       && (special_function_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
				       0)
		   & ECF_MAY_BE_ALLOCA))
	return 1;

      break;

    case CONSTRUCTOR:
      {
	tree tem;

	for (tem = CONSTRUCTOR_ELTS (exp); tem != 0; tem = TREE_CHAIN (tem))
	  if (calls_function_1 (TREE_VALUE (tem), which))
	    return 1;
      }

      return 0;

    case SAVE_EXPR:
      if (SAVE_EXPR_RTL (exp) != 0)
	return 0;
      if (value_member (exp, calls_function_save_exprs))
	return 0;
      calls_function_save_exprs = tree_cons (NULL_TREE, exp,
					     calls_function_save_exprs);
      return (TREE_OPERAND (exp, 0) != 0
	      && calls_function_1 (TREE_OPERAND (exp, 0), which));

    case BLOCK:
      {
	tree local;
	tree subblock;

	for (local = BLOCK_VARS (exp); local; local = TREE_CHAIN (local))
	  if (DECL_INITIAL (local) != 0
	      && calls_function_1 (DECL_INITIAL (local), which))
	    return 1;

	for (subblock = BLOCK_SUBBLOCKS (exp);
	     subblock;
	     subblock = TREE_CHAIN (subblock))
	  if (calls_function_1 (subblock, which))
	    return 1;
      }
      return 0;

    case TREE_LIST:
      for (; exp != 0; exp = TREE_CHAIN (exp))
	if (calls_function_1 (TREE_VALUE (exp), which))
	  return 1;
      return 0;

    default:
      break;
    }

  /* Only expressions, references, and blocks can contain calls.  */
  if (! IS_EXPR_CODE_CLASS (class) && class != 'r' && class != 'b')
    return 0;

  for (i = 0; i < length; i++)
    if (TREE_OPERAND (exp, i) != 0
	&& calls_function_1 (TREE_OPERAND (exp, i), which))
      return 1;

  return 0;
}
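
/* Usage sketch (hypothetical expressions): for the argument of
   `f (alloca (n))', calls_function (exp, 1) returns 1 because evaluating
   the argument may call alloca; for `f (g (n))', with g an ordinary
   function, it returns 0 when WHICH == 1 but 1 when WHICH == 0, since any
   call at all satisfies WHICH == 0.  */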

/* Force FUNEXP into a form suitable for the address of a CALL,
   and return that as an rtx.  Also load the static chain register
   if FNDECL is a nested function.

   CALL_FUSAGE points to a variable holding the prospective
   CALL_INSN_FUNCTION_USAGE information.  */

rtx
prepare_call_address (funexp, fndecl, call_fusage, reg_parm_seen, sibcallp)
     rtx funexp;
     tree fndecl;
     rtx *call_fusage;
     int reg_parm_seen;
     int sibcallp;
{
  rtx static_chain_value = 0;

  funexp = protect_from_queue (funexp, 0);

  if (fndecl != 0)
    /* Get possible static chain value for nested function in C.  */
    static_chain_value = lookup_static_chain (fndecl);

  /* Make a valid memory address and copy constants thru pseudo-regs,
     but not for a constant address if -fno-function-cse.  */
  if (GET_CODE (funexp) != SYMBOL_REF)
    /* If we are using registers for parameters, force the
       function address into a register now.  */
    funexp = ((SMALL_REGISTER_CLASSES && reg_parm_seen)
	      ? force_not_mem (memory_address (FUNCTION_MODE, funexp))
	      : memory_address (FUNCTION_MODE, funexp));
  else if (! sibcallp)
    {
#ifndef NO_FUNCTION_CSE
      if (optimize && ! flag_no_function_cse)
#ifdef NO_RECURSIVE_FUNCTION_CSE
	if (fndecl != current_function_decl)
#endif
	  funexp = force_reg (Pmode, funexp);
#endif
    }

  if (static_chain_value != 0)
    {
      emit_move_insn (static_chain_rtx, static_chain_value);

      if (GET_CODE (static_chain_rtx) == REG)
	use_reg (call_fusage, static_chain_rtx);
    }

  return funexp;
}

/* Generate instructions to call function FUNEXP,
   and optionally pop the results.
   The CALL_INSN is the first insn generated.

   FNDECL is the declaration node of the function.  This is given to the
   macro RETURN_POPS_ARGS to determine whether this function pops its own args.

   FUNTYPE is the data type of the function.  This is given to the macro
   RETURN_POPS_ARGS to determine whether this function pops its own args.
   We used to allow an identifier for library functions, but that doesn't
   work when the return type is an aggregate type and the calling convention
   says that the pointer to this aggregate is to be popped by the callee.

   STACK_SIZE is the number of bytes of arguments on the stack,
   ROUNDED_STACK_SIZE is that number rounded up to
   PREFERRED_STACK_BOUNDARY; zero if the size is variable.  This is
   both to put into the call insn and to generate explicit popping
   code if necessary.

   STRUCT_VALUE_SIZE is the number of bytes wanted in a structure value.
   It is zero if this call doesn't want a structure value.

   NEXT_ARG_REG is the rtx that results from executing
     FUNCTION_ARG (args_so_far, VOIDmode, void_type_node, 1)
   just after all the args have had their registers assigned.
   This could be whatever you like, but normally it is the first
   arg-register beyond those used for args in this call,
   or 0 if all the arg-registers are used in this call.
   It is passed on to `gen_call' so you can put this info in the call insn.

   VALREG is a hard register in which a value is returned,
   or 0 if the call does not return a value.

   OLD_INHIBIT_DEFER_POP is the value that `inhibit_defer_pop' had before
   the args to this call were processed.
   We restore `inhibit_defer_pop' to that value.

   CALL_FUSAGE is either empty or an EXPR_LIST of USE expressions that
   denote registers used by the called function.  */

static void
emit_call_1 (funexp, fndecl, funtype, stack_size, rounded_stack_size,
	     struct_value_size, next_arg_reg, valreg, old_inhibit_defer_pop,
	     call_fusage, ecf_flags, args_so_far)
     rtx funexp;
     tree fndecl ATTRIBUTE_UNUSED;
     tree funtype ATTRIBUTE_UNUSED;
     HOST_WIDE_INT stack_size ATTRIBUTE_UNUSED;
     HOST_WIDE_INT rounded_stack_size;
     HOST_WIDE_INT struct_value_size ATTRIBUTE_UNUSED;
     rtx next_arg_reg ATTRIBUTE_UNUSED;
     rtx valreg;
     int old_inhibit_defer_pop;
     rtx call_fusage;
     int ecf_flags;
     CUMULATIVE_ARGS *args_so_far ATTRIBUTE_UNUSED;
{
  rtx rounded_stack_size_rtx = GEN_INT (rounded_stack_size);
  rtx call_insn;
  int already_popped = 0;
  HOST_WIDE_INT n_popped = RETURN_POPS_ARGS (fndecl, funtype, stack_size);
#if defined (HAVE_call) && defined (HAVE_call_value)
  rtx struct_value_size_rtx;
  struct_value_size_rtx = GEN_INT (struct_value_size);
#endif

#ifdef CALL_POPS_ARGS
  n_popped += CALL_POPS_ARGS (* args_so_far);
#endif

  /* Ensure address is valid.  SYMBOL_REF is already valid, so no need,
     and we don't want to load it into a register as an optimization,
     because prepare_call_address already did it if it should be done.  */
  if (GET_CODE (funexp) != SYMBOL_REF)
    funexp = memory_address (FUNCTION_MODE, funexp);

#if defined (HAVE_sibcall_pop) && defined (HAVE_sibcall_value_pop)
  if ((ecf_flags & ECF_SIBCALL)
      && HAVE_sibcall_pop && HAVE_sibcall_value_pop
      && (n_popped > 0 || stack_size == 0))
    {
      rtx n_pop = GEN_INT (n_popped);
      rtx pat;

      /* If this subroutine pops its own args, record that in the call insn
	 if possible, for the sake of frame pointer elimination.  */

      if (valreg)
	pat = GEN_SIBCALL_VALUE_POP (valreg,
				     gen_rtx_MEM (FUNCTION_MODE, funexp),
				     rounded_stack_size_rtx, next_arg_reg,
				     n_pop);
      else
	pat = GEN_SIBCALL_POP (gen_rtx_MEM (FUNCTION_MODE, funexp),
			       rounded_stack_size_rtx, next_arg_reg, n_pop);

      emit_call_insn (pat);
      already_popped = 1;
    }
  else
#endif

#if defined (HAVE_call_pop) && defined (HAVE_call_value_pop)
  /* If the target has "call" or "call_value" insns, then prefer them
     if no arguments are actually popped.  If the target does not have
     "call" or "call_value" insns, then we must use the popping versions
     even if the call has no arguments to pop.  */
#if defined (HAVE_call) && defined (HAVE_call_value)
  if (HAVE_call && HAVE_call_value && HAVE_call_pop && HAVE_call_value_pop
      && n_popped > 0 && ! (ecf_flags & ECF_SP_DEPRESSED))
#else
  if (HAVE_call_pop && HAVE_call_value_pop)
#endif
    {
      rtx n_pop = GEN_INT (n_popped);
      rtx pat;

      /* If this subroutine pops its own args, record that in the call insn
	 if possible, for the sake of frame pointer elimination.  */

      if (valreg)
	pat = GEN_CALL_VALUE_POP (valreg,
				  gen_rtx_MEM (FUNCTION_MODE, funexp),
				  rounded_stack_size_rtx, next_arg_reg, n_pop);
      else
	pat = GEN_CALL_POP (gen_rtx_MEM (FUNCTION_MODE, funexp),
			    rounded_stack_size_rtx, next_arg_reg, n_pop);

      emit_call_insn (pat);
      already_popped = 1;
    }
  else
#endif

#if defined (HAVE_sibcall) && defined (HAVE_sibcall_value)
  if ((ecf_flags & ECF_SIBCALL)
      && HAVE_sibcall && HAVE_sibcall_value)
    {
      if (valreg)
	emit_call_insn (GEN_SIBCALL_VALUE (valreg,
					   gen_rtx_MEM (FUNCTION_MODE, funexp),
					   rounded_stack_size_rtx,
					   next_arg_reg, NULL_RTX));
      else
	emit_call_insn (GEN_SIBCALL (gen_rtx_MEM (FUNCTION_MODE, funexp),
				     rounded_stack_size_rtx, next_arg_reg,
				     struct_value_size_rtx));
    }
  else
#endif

#if defined (HAVE_call) && defined (HAVE_call_value)
  if (HAVE_call && HAVE_call_value)
    {
      if (valreg)
	emit_call_insn (GEN_CALL_VALUE (valreg,
					gen_rtx_MEM (FUNCTION_MODE, funexp),
					rounded_stack_size_rtx, next_arg_reg,
					NULL_RTX));
      else
	emit_call_insn (GEN_CALL (gen_rtx_MEM (FUNCTION_MODE, funexp),
				  rounded_stack_size_rtx, next_arg_reg,
				  struct_value_size_rtx));
    }
  else
#endif
    abort ();

  /* Find the CALL insn we just emitted.  */
  for (call_insn = get_last_insn ();
       call_insn && GET_CODE (call_insn) != CALL_INSN;
       call_insn = PREV_INSN (call_insn))
    ;

  if (! call_insn)
    abort ();

  /* Mark memory as used for "pure" function call.  */
  if (ecf_flags & ECF_PURE)
    call_fusage
      = gen_rtx_EXPR_LIST
	(VOIDmode,
	 gen_rtx_USE (VOIDmode,
		      gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode))),
	 call_fusage);

  /* Put the register usage information on the CALL.  If there is already
     some usage information, put ours at the end.  */
  if (CALL_INSN_FUNCTION_USAGE (call_insn))
    {
      rtx link;

      for (link = CALL_INSN_FUNCTION_USAGE (call_insn); XEXP (link, 1) != 0;
	   link = XEXP (link, 1))
	;

      XEXP (link, 1) = call_fusage;
    }
  else
    CALL_INSN_FUNCTION_USAGE (call_insn) = call_fusage;

  /* If this is a const call, then set the insn's unchanging bit.  */
  if (ecf_flags & (ECF_CONST | ECF_PURE))
    CONST_OR_PURE_CALL_P (call_insn) = 1;

  /* If this call can't throw, attach a REG_EH_REGION reg note to that
     effect.  */
  if (ecf_flags & ECF_NOTHROW)
    REG_NOTES (call_insn) = gen_rtx_EXPR_LIST (REG_EH_REGION, const0_rtx,
					       REG_NOTES (call_insn));

  if (ecf_flags & ECF_NORETURN)
    REG_NOTES (call_insn) = gen_rtx_EXPR_LIST (REG_NORETURN, const0_rtx,
					       REG_NOTES (call_insn));
  if (ecf_flags & ECF_ALWAYS_RETURN)
    REG_NOTES (call_insn) = gen_rtx_EXPR_LIST (REG_ALWAYS_RETURN, const0_rtx,
					       REG_NOTES (call_insn));

  if (ecf_flags & ECF_RETURNS_TWICE)
    {
      REG_NOTES (call_insn) = gen_rtx_EXPR_LIST (REG_SETJMP, const0_rtx,
						 REG_NOTES (call_insn));
      current_function_calls_setjmp = 1;
    }

  SIBLING_CALL_P (call_insn) = ((ecf_flags & ECF_SIBCALL) != 0);

  /* Restore this now, so that we do defer pops for this call's args
     if the context of the call as a whole permits.  */
  inhibit_defer_pop = old_inhibit_defer_pop;

  if (n_popped > 0)
    {
      if (!already_popped)
	CALL_INSN_FUNCTION_USAGE (call_insn)
	  = gen_rtx_EXPR_LIST (VOIDmode,
			       gen_rtx_CLOBBER (VOIDmode, stack_pointer_rtx),
			       CALL_INSN_FUNCTION_USAGE (call_insn));
      rounded_stack_size -= n_popped;
      rounded_stack_size_rtx = GEN_INT (rounded_stack_size);
      stack_pointer_delta -= n_popped;
    }

  if (!ACCUMULATE_OUTGOING_ARGS)
    {
      /* If returning from the subroutine does not automatically pop the args,
	 we need an instruction to pop them sooner or later.
	 Perhaps do it now; perhaps just record how much space to pop later.

	 If returning from the subroutine does pop the args, indicate that the
	 stack pointer will be changed.  */

      if (rounded_stack_size != 0)
	{
	  if (ecf_flags & ECF_SP_DEPRESSED)
	    /* Just pretend we did the pop.  */
	    stack_pointer_delta -= rounded_stack_size;
	  else if (flag_defer_pop && inhibit_defer_pop == 0
		   && ! (ecf_flags & (ECF_CONST | ECF_PURE)))
	    pending_stack_adjust += rounded_stack_size;
	  else
	    adjust_stack (rounded_stack_size_rtx);
	}
    }
  /* When we accumulate outgoing args, we must avoid any stack manipulations.
     Restore the stack pointer to its original value now.  Usually
     ACCUMULATE_OUTGOING_ARGS targets don't get here, but there are exceptions.
     On the i386, ACCUMULATE_OUTGOING_ARGS can be enabled on demand, and
     popping variants of functions exist as well.

     ??? We may optimize similar to defer_pop above, but it is
     probably not worthwhile.

     ??? It will be worthwhile to enable combine_stack_adjustments even for
     such machines.  */
  else if (n_popped)
    anti_adjust_stack (GEN_INT (n_popped));
}

/* Determine if the function identified by NAME and FNDECL is one with
   special properties we wish to know about.

   For example, if the function might return more than one time (setjmp), then
   set RETURNS_TWICE to a nonzero value.

   Similarly, set LONGJMP if the function is in the longjmp family.

   Set MALLOC for any of the standard memory allocation functions which
   allocate from the heap.

   Set MAY_BE_ALLOCA for any memory allocation function that might allocate
   space from the stack such as alloca.  */

static int
special_function_p (fndecl, flags)
     tree fndecl;
     int flags;
{
  if (! (flags & ECF_MALLOC)
      && fndecl && DECL_NAME (fndecl)
      && IDENTIFIER_LENGTH (DECL_NAME (fndecl)) <= 17
      /* Exclude functions not at the file scope, or not `extern',
	 since they are not the magic functions we would otherwise
	 think they are.  */
      && DECL_CONTEXT (fndecl) == NULL_TREE && TREE_PUBLIC (fndecl))
    {
      const char *name = IDENTIFIER_POINTER (DECL_NAME (fndecl));
      const char *tname = name;

      /* We assume that alloca will always be called by name.  It
	 makes no sense to pass it as a pointer-to-function to
	 anything that does not understand its behavior.  */
      if (((IDENTIFIER_LENGTH (DECL_NAME (fndecl)) == 6
	    && name[0] == 'a'
	    && ! strcmp (name, "alloca"))
	   || (IDENTIFIER_LENGTH (DECL_NAME (fndecl)) == 16
	       && name[0] == '_'
	       && ! strcmp (name, "__builtin_alloca"))))
	flags |= ECF_MAY_BE_ALLOCA;

      /* Disregard prefix _, __ or __x.  */
      if (name[0] == '_')
	{
	  if (name[1] == '_' && name[2] == 'x')
	    tname += 3;
	  else if (name[1] == '_')
	    tname += 2;
	  else
	    tname += 1;
	}

      if (tname[0] == 's')
	{
	  if ((tname[1] == 'e'
	       && (! strcmp (tname, "setjmp")
		   || ! strcmp (tname, "setjmp_syscall")))
	      || (tname[1] == 'i'
		  && ! strcmp (tname, "sigsetjmp"))
	      || (tname[1] == 'a'
		  && ! strcmp (tname, "savectx")))
	    flags |= ECF_RETURNS_TWICE;

	  if (tname[1] == 'i'
	      && ! strcmp (tname, "siglongjmp"))
	    flags |= ECF_LONGJMP;
	}
      else if ((tname[0] == 'q' && tname[1] == 's'
		&& ! strcmp (tname, "qsetjmp"))
	       || (tname[0] == 'v' && tname[1] == 'f'
		   && ! strcmp (tname, "vfork")))
	flags |= ECF_RETURNS_TWICE;

      else if (tname[0] == 'l' && tname[1] == 'o'
	       && ! strcmp (tname, "longjmp"))
	flags |= ECF_LONGJMP;

      else if ((tname[0] == 'f' && tname[1] == 'o'
		&& ! strcmp (tname, "fork"))
	       /* Linux specific: __clone.  Check NAME to insist on the
		  leading underscores, to avoid polluting the ISO / POSIX
		  namespace.  */
	       || (name[0] == '_' && name[1] == '_'
		   && ! strcmp (tname, "clone"))
	       || (tname[0] == 'e' && tname[1] == 'x' && tname[2] == 'e'
		   && tname[3] == 'c' && (tname[4] == 'l' || tname[4] == 'v')
		   && (tname[5] == '\0'
		       || ((tname[5] == 'p' || tname[5] == 'e')
			   && tname[6] == '\0'))))
	flags |= ECF_FORK_OR_EXEC;

      /* Do not add any more malloc-like functions to this list;
	 instead mark them as malloc functions using the malloc attribute.
	 Note, realloc is not suitable for attribute malloc since
	 it may return the same address across multiple calls.
	 C++ operator new is not suitable because it is not required
	 to return a unique pointer; indeed, the standard placement new
	 just returns its argument.  */
      else if (TYPE_MODE (TREE_TYPE (TREE_TYPE (fndecl))) == Pmode
	       && (! strcmp (tname, "malloc")
		   || ! strcmp (tname, "calloc")
		   || ! strcmp (tname, "strdup")))
	flags |= ECF_MALLOC;
    }
  return flags;
}

/* Return nonzero when FNDECL represents a call to setjmp or a related
   function that may return twice.  */

int
setjmp_call_p (fndecl)
     tree fndecl;
{
  return special_function_p (fndecl, 0) & ECF_RETURNS_TWICE;
}

/* Return true when EXP is a call to alloca (or may plausibly be one).  */
bool
alloca_call_p (exp)
     tree exp;
{
  if (TREE_CODE (exp) == CALL_EXPR
      && TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
      && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
	  == FUNCTION_DECL)
      && (special_function_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
			      0) & ECF_MAY_BE_ALLOCA))
    return true;
  return false;
}
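
/* Usage sketch (hypothetical trees): for the CALL_EXPR built from
   `alloca (64)' or `__builtin_alloca (64)', alloca_call_p returns true;
   for an ordinary call such as `malloc (64)' it returns false, since
   special_function_p does not set ECF_MAY_BE_ALLOCA for it.  */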

/* Detect flags (function attributes) from the function decl or type node.  */

static int
flags_from_decl_or_type (exp)
     tree exp;
{
  int flags = 0;
  tree type = exp;
  /* ??? We can't set IS_MALLOC for function types?  */
  if (DECL_P (exp))
    {
      type = TREE_TYPE (exp);

      /* The function exp may have the `malloc' attribute.  */
      if (DECL_P (exp) && DECL_IS_MALLOC (exp))
	flags |= ECF_MALLOC;

      /* The function exp may have the `pure' attribute.  */
      if (DECL_P (exp) && DECL_IS_PURE (exp))
	flags |= ECF_PURE | ECF_LIBCALL_BLOCK;

      if (TREE_NOTHROW (exp))
	flags |= ECF_NOTHROW;
    }

  if (TREE_READONLY (exp) && ! TREE_THIS_VOLATILE (exp))
    flags |= ECF_CONST | ECF_LIBCALL_BLOCK;

  if (TREE_THIS_VOLATILE (exp))
    flags |= ECF_NORETURN;

  /* Mark if the function returns with the stack pointer depressed.  We
     cannot consider it pure or constant in that case.  */
  if (TREE_CODE (type) == FUNCTION_TYPE && TYPE_RETURNS_STACK_DEPRESSED (type))
    {
      flags |= ECF_SP_DEPRESSED;
      flags &= ~(ECF_PURE | ECF_CONST | ECF_LIBCALL_BLOCK);
    }

  return flags;
}
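
/* An illustration, not an exhaustive account: a declaration such as
   `int f (int) __attribute__ ((const));' is TREE_READONLY, so this
   returns ECF_CONST | ECF_LIBCALL_BLOCK for it, while a `volatile'
   (noreturn) declaration yields ECF_NORETURN instead.  */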

/* Precompute all register parameters as described by ARGS, storing values
   into fields within the ARGS array.

   NUM_ACTUALS indicates the total number of elements in the ARGS array.

   Set REG_PARM_SEEN if we encounter a register parameter.  */

static void
precompute_register_parameters (num_actuals, args, reg_parm_seen)
     int num_actuals;
     struct arg_data *args;
     int *reg_parm_seen;
{
  int i;

  *reg_parm_seen = 0;

  for (i = 0; i < num_actuals; i++)
    if (args[i].reg != 0 && ! args[i].pass_on_stack)
      {
	*reg_parm_seen = 1;

	if (args[i].value == 0)
	  {
	    push_temp_slots ();
	    args[i].value = expand_expr (args[i].tree_value, NULL_RTX,
					 VOIDmode, 0);
	    preserve_temp_slots (args[i].value);
	    pop_temp_slots ();

	    /* ANSI doesn't require a sequence point here,
	       but PCC has one, so this will avoid some problems.  */
	    emit_queue ();
	  }

	/* If the value is a non-legitimate constant, force it into a
	   pseudo now.  TLS symbols sometimes need a call to resolve.  */
	if (CONSTANT_P (args[i].value)
	    && !LEGITIMATE_CONSTANT_P (args[i].value))
	  args[i].value = force_reg (args[i].mode, args[i].value);

	/* If we are to promote the function arg to a wider mode,
	   do it now.  */

	if (args[i].mode != TYPE_MODE (TREE_TYPE (args[i].tree_value)))
	  args[i].value
	    = convert_modes (args[i].mode,
			     TYPE_MODE (TREE_TYPE (args[i].tree_value)),
			     args[i].value, args[i].unsignedp);

	/* If the value is expensive, and we are inside an appropriately
	   short loop, put the value into a pseudo and then put the pseudo
	   into the hard reg.

	   For small register classes, also do this if this call uses
	   register parameters.  This is to avoid reload conflicts while
	   loading the parameter registers.  */

	if ((! (GET_CODE (args[i].value) == REG
		|| (GET_CODE (args[i].value) == SUBREG
		    && GET_CODE (SUBREG_REG (args[i].value)) == REG)))
	    && args[i].mode != BLKmode
	    && rtx_cost (args[i].value, SET) > COSTS_N_INSNS (1)
	    && ((SMALL_REGISTER_CLASSES && *reg_parm_seen)
		|| preserve_subexpressions_p ()))
	  args[i].value = copy_to_mode_reg (args[i].mode, args[i].value);
      }
}

#ifdef REG_PARM_STACK_SPACE

/* The argument list is the property of the called routine and it
   may clobber it.  If the fixed area has been used for previous
   parameters, we must save and restore it.  */

static rtx
save_fixed_argument_area (reg_parm_stack_space, argblock,
			  low_to_save, high_to_save)
     int reg_parm_stack_space;
     rtx argblock;
     int *low_to_save;
     int *high_to_save;
{
  int i;
  rtx save_area = NULL_RTX;

  /* Compute the boundary of the area that needs to be saved, if any.  */
#ifdef ARGS_GROW_DOWNWARD
  for (i = 0; i < reg_parm_stack_space + 1; i++)
#else
  for (i = 0; i < reg_parm_stack_space; i++)
#endif
    {
      if (i >= highest_outgoing_arg_in_use
	  || stack_usage_map[i] == 0)
	continue;

      if (*low_to_save == -1)
	*low_to_save = i;

      *high_to_save = i;
    }

  if (*low_to_save >= 0)
    {
      int num_to_save = *high_to_save - *low_to_save + 1;
      enum machine_mode save_mode
	= mode_for_size (num_to_save * BITS_PER_UNIT, MODE_INT, 1);
      rtx stack_area;

      /* If we don't have the required alignment, must do this in BLKmode.  */
      if ((*low_to_save & (MIN (GET_MODE_SIZE (save_mode),
				BIGGEST_ALIGNMENT / UNITS_PER_WORD) - 1)))
	save_mode = BLKmode;

#ifdef ARGS_GROW_DOWNWARD
      stack_area
	= gen_rtx_MEM (save_mode,
		       memory_address (save_mode,
				       plus_constant (argblock,
						      - *high_to_save)));
#else
      stack_area = gen_rtx_MEM (save_mode,
				memory_address (save_mode,
						plus_constant (argblock,
							       *low_to_save)));
#endif

      set_mem_align (stack_area, PARM_BOUNDARY);
      if (save_mode == BLKmode)
	{
	  save_area = assign_stack_temp (BLKmode, num_to_save, 0);
	  emit_block_move (validize_mem (save_area), stack_area,
			   GEN_INT (num_to_save), BLOCK_OP_CALL_PARM);
	}
      else
	{
	  save_area = gen_reg_rtx (save_mode);
	  emit_move_insn (save_area, stack_area);
	}
    }

  return save_area;
}

static void
restore_fixed_argument_area (save_area, argblock, high_to_save, low_to_save)
     rtx save_area;
     rtx argblock;
     int high_to_save;
     int low_to_save;
{
  enum machine_mode save_mode = GET_MODE (save_area);
#ifdef ARGS_GROW_DOWNWARD
  rtx stack_area
    = gen_rtx_MEM (save_mode,
		   memory_address (save_mode,
				   plus_constant (argblock,
						  - high_to_save)));
#else
  rtx stack_area
    = gen_rtx_MEM (save_mode,
		   memory_address (save_mode,
				   plus_constant (argblock,
						  low_to_save)));
#endif

  if (save_mode != BLKmode)
    emit_move_insn (stack_area, save_area);
  else
    emit_block_move (stack_area, validize_mem (save_area),
		     GEN_INT (high_to_save - low_to_save + 1),
		     BLOCK_OP_CALL_PARM);
}
#endif /* REG_PARM_STACK_SPACE */

/* For any elements in ARGS that refer to parameters passed in registers,
   but not in memory, and whose alignment does not permit a direct copy
   into registers, copy the values into a group of pseudos which we will
   later copy into the appropriate hard registers.

   Pseudos for each unaligned argument will be stored into the array
   args[argnum].aligned_regs.  The caller is responsible for deallocating
   the aligned_regs array if it is nonzero.  */

static void
store_unaligned_arguments_into_pseudos (args, num_actuals)
     struct arg_data *args;
     int num_actuals;
{
  int i, j;

  for (i = 0; i < num_actuals; i++)
    if (args[i].reg != 0 && ! args[i].pass_on_stack
	&& args[i].mode == BLKmode
	&& (TYPE_ALIGN (TREE_TYPE (args[i].tree_value))
	    < (unsigned int) MIN (BIGGEST_ALIGNMENT, BITS_PER_WORD)))
      {
	int bytes = int_size_in_bytes (TREE_TYPE (args[i].tree_value));
	int big_endian_correction = 0;

	args[i].n_aligned_regs
	  = args[i].partial ? args[i].partial
	    : (bytes + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;

	args[i].aligned_regs = (rtx *) xmalloc (sizeof (rtx)
						* args[i].n_aligned_regs);

	/* Structures smaller than a word are aligned to the least
	   significant byte (to the right).  On a BYTES_BIG_ENDIAN machine,
	   this means we must skip the empty high order bytes when
	   calculating the bit offset.  */
	if (BYTES_BIG_ENDIAN
	    && !FUNCTION_ARG_REG_LITTLE_ENDIAN
	    && bytes < UNITS_PER_WORD)
	  big_endian_correction = (BITS_PER_WORD - (bytes * BITS_PER_UNIT));

	for (j = 0; j < args[i].n_aligned_regs; j++)
	  {
	    rtx reg = gen_reg_rtx (word_mode);
	    rtx word = operand_subword_force (args[i].value, j, BLKmode);
	    int bitsize = MIN (bytes * BITS_PER_UNIT, BITS_PER_WORD);

	    args[i].aligned_regs[j] = reg;

	    /* There is no need to restrict this code to loading items
	       in TYPE_ALIGN sized hunks.  The bitfield instructions can
	       load up entire word sized registers efficiently.

	       ??? This may not be needed anymore.
	       We used to emit a clobber here but that doesn't let later
	       passes optimize the instructions we emit.  By storing 0 into
	       the register later passes know the first AND to zero out the
	       bitfield being set in the register is unnecessary.  The store
	       of 0 will be deleted as will at least the first AND.  */

	    emit_move_insn (reg, const0_rtx);

	    bytes -= bitsize / BITS_PER_UNIT;
	    store_bit_field (reg, bitsize, big_endian_correction, word_mode,
			     extract_bit_field (word, bitsize, 0, 1, NULL_RTX,
						word_mode, word_mode,
						BITS_PER_WORD),
			     BITS_PER_WORD);
	  }
      }
}
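
/* Worked example with illustrative numbers: a 3-byte structure on a
   32-bit big-endian target gives big_endian_correction
   == 32 - 3 * 8 == 8, so the 24 significant bits are stored starting
   8 bits in from the most significant end, skipping the unused
   high-order byte of the word register.  */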

/* Fill in ARGS_SIZE and ARGS array based on the parameters found in
   ACTPARMS.

   NUM_ACTUALS is the total number of parameters.

   N_NAMED_ARGS is the total number of named arguments.

   FNDECL is the tree code for the target of this call (if known).

   ARGS_SO_FAR holds state needed by the target to know where to place
   the next argument.

   REG_PARM_STACK_SPACE is the number of bytes of stack space reserved
   for arguments which are passed in registers.

   OLD_STACK_LEVEL is a pointer to an rtx which holds the old stack level
   and may be modified by this routine.

   OLD_PENDING_ADJ, MUST_PREALLOCATE and FLAGS are pointers to integer
   flags which may be modified by this routine.  */

static void
initialize_argument_information (num_actuals, args, args_size, n_named_args,
				 actparms, fndecl, args_so_far,
				 reg_parm_stack_space, old_stack_level,
				 old_pending_adj, must_preallocate,
				 ecf_flags)
     int num_actuals ATTRIBUTE_UNUSED;
     struct arg_data *args;
     struct args_size *args_size;
     int n_named_args ATTRIBUTE_UNUSED;
     tree actparms;
     tree fndecl;
     CUMULATIVE_ARGS *args_so_far;
     int reg_parm_stack_space;
     rtx *old_stack_level;
     int *old_pending_adj;
     int *must_preallocate;
     int *ecf_flags;
{
  /* 1 if scanning parms front to back, -1 if scanning back to front.  */
  int inc;

  /* Count arg position in order args appear.  */
  int argpos;

  struct args_size alignment_pad;
  int i;
  tree p;

  args_size->constant = 0;
  args_size->var = 0;

  /* In this loop, we consider args in the order they are written.
     We fill up ARGS from the front or from the back if necessary
     so that in any case the first arg to be pushed ends up at the front.  */

  if (PUSH_ARGS_REVERSED)
    {
      i = num_actuals - 1, inc = -1;
      /* In this case, must reverse order of args
	 so that we compute and push the last arg first.  */
    }
  else
    {
      i = 0, inc = 1;
    }

  /* I counts args in order (to be) pushed; ARGPOS counts in order written.  */
  for (p = actparms, argpos = 0; p; p = TREE_CHAIN (p), i += inc, argpos++)
    {
      tree type = TREE_TYPE (TREE_VALUE (p));
      int unsignedp;
      enum machine_mode mode;

      args[i].tree_value = TREE_VALUE (p);

      /* Replace erroneous argument with constant zero.  */
      if (type == error_mark_node || !COMPLETE_TYPE_P (type))
	args[i].tree_value = integer_zero_node, type = integer_type_node;

      /* If TYPE is a transparent union, pass things the way we would
	 pass the first field of the union.  We have already verified that
	 the modes are the same.  */
      if (TREE_CODE (type) == UNION_TYPE && TYPE_TRANSPARENT_UNION (type))
	type = TREE_TYPE (TYPE_FIELDS (type));

      /* Decide where to pass this arg.

	 args[i].reg is nonzero if all or part is passed in registers.

	 args[i].partial is nonzero if part but not all is passed in registers,
	 and the exact value says how many words are passed in registers.

	 args[i].pass_on_stack is nonzero if the argument must at least be
	 computed on the stack.  It may then be loaded back into registers
	 if args[i].reg is nonzero.

	 These decisions are driven by the FUNCTION_... macros and must agree
	 with those made by function.c.  */

      /* See if this argument should be passed by invisible reference.  */
      if ((TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST
	   && contains_placeholder_p (TYPE_SIZE (type)))
	  || TREE_ADDRESSABLE (type)
#ifdef FUNCTION_ARG_PASS_BY_REFERENCE
	  || FUNCTION_ARG_PASS_BY_REFERENCE (*args_so_far, TYPE_MODE (type),
					     type, argpos < n_named_args)
#endif
	  )
	{
	  /* If we're compiling a thunk, pass through invisible
	     references instead of making a copy.  */
	  if (current_function_is_thunk
#ifdef FUNCTION_ARG_CALLEE_COPIES
	      || (FUNCTION_ARG_CALLEE_COPIES (*args_so_far, TYPE_MODE (type),
					      type, argpos < n_named_args)
		  /* If it's in a register, we must make a copy of it too.  */
		  /* ??? Is this a sufficient test?  Is there a better one? */
		  && !(TREE_CODE (args[i].tree_value) == VAR_DECL
		       && REG_P (DECL_RTL (args[i].tree_value)))
		  && ! TREE_ADDRESSABLE (type))
#endif
	      )
	    {
	      /* C++ uses a TARGET_EXPR to indicate that we want to make a
		 new object from the argument.  If we are passing by
		 invisible reference, the callee will do that for us, so we
		 can strip off the TARGET_EXPR.  This is not always safe,
		 but it is safe in the only case where this is a useful
		 optimization; namely, when the argument is a plain object.
		 In that case, the frontend is just asking the backend to
		 make a bitwise copy of the argument.  */

	      if (TREE_CODE (args[i].tree_value) == TARGET_EXPR
		  && (DECL_P (TREE_OPERAND (args[i].tree_value, 1)))
		  && ! REG_P (DECL_RTL (TREE_OPERAND (args[i].tree_value, 1))))
		args[i].tree_value = TREE_OPERAND (args[i].tree_value, 1);

	      args[i].tree_value = build1 (ADDR_EXPR,
					   build_pointer_type (type),
					   args[i].tree_value);
	      type = build_pointer_type (type);
	    }
	  else if (TREE_CODE (args[i].tree_value) == TARGET_EXPR)
	    {
	      /* In the V3 C++ ABI, parameters are destroyed in the caller.
		 We implement this by passing the address of the temporary
		 rather than expanding it into another allocated slot.  */
	      args[i].tree_value = build1 (ADDR_EXPR,
					   build_pointer_type (type),
					   args[i].tree_value);
	      type = build_pointer_type (type);
	    }
	  else
	    {
	      /* We make a copy of the object and pass the address to the
		 function being called.  */
	      rtx copy;

	      if (!COMPLETE_TYPE_P (type)
		  || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST
		  || (flag_stack_check && ! STACK_CHECK_BUILTIN
		      && (0 < compare_tree_int (TYPE_SIZE_UNIT (type),
						STACK_CHECK_MAX_VAR_SIZE))))
		{
		  /* This is a variable-sized object.  Make space on the stack
		     for it.  */
		  rtx size_rtx = expr_size (TREE_VALUE (p));

		  if (*old_stack_level == 0)
		    {
		      emit_stack_save (SAVE_BLOCK, old_stack_level, NULL_RTX);
		      *old_pending_adj = pending_stack_adjust;
		      pending_stack_adjust = 0;
		    }

		  copy = gen_rtx_MEM (BLKmode,
				      allocate_dynamic_stack_space
				      (size_rtx, NULL_RTX, TYPE_ALIGN (type)));
		  set_mem_attributes (copy, type, 1);
		}
	      else
		copy = assign_temp (type, 0, 1, 0);

	      store_expr (args[i].tree_value, copy, 0);
	      *ecf_flags &= ~(ECF_CONST | ECF_PURE | ECF_LIBCALL_BLOCK);

	      args[i].tree_value = build1 (ADDR_EXPR,
					   build_pointer_type (type),
					   make_tree (type, copy));
	      type = build_pointer_type (type);
	    }
	}

      mode = TYPE_MODE (type);
      unsignedp = TREE_UNSIGNED (type);

#ifdef PROMOTE_FUNCTION_ARGS
      mode = promote_mode (type, mode, &unsignedp, 1);
#endif

      args[i].unsignedp = unsignedp;
      args[i].mode = mode;

      args[i].reg = FUNCTION_ARG (*args_so_far, mode, type,
				  argpos < n_named_args);
#ifdef FUNCTION_INCOMING_ARG
      /* If this is a sibling call and the machine has register windows, the
	 register window has to be unwound before calling the routine, so
	 arguments have to go into the incoming registers.  */
      args[i].tail_call_reg = FUNCTION_INCOMING_ARG (*args_so_far, mode, type,
						     argpos < n_named_args);
#else
      args[i].tail_call_reg = args[i].reg;
#endif

#ifdef FUNCTION_ARG_PARTIAL_NREGS
      if (args[i].reg)
	args[i].partial
	  = FUNCTION_ARG_PARTIAL_NREGS (*args_so_far, mode, type,
					argpos < n_named_args);
#endif

      args[i].pass_on_stack = MUST_PASS_IN_STACK (mode, type);

      /* If FUNCTION_ARG returned a (parallel [(expr_list (nil) ...) ...]),
	 it means that we are to pass this arg in the register(s) designated
	 by the PARALLEL, but also to pass it in the stack.  */
      if (args[i].reg && GET_CODE (args[i].reg) == PARALLEL
	  && XEXP (XVECEXP (args[i].reg, 0, 0), 0) == 0)
	args[i].pass_on_stack = 1;

      /* If this is an addressable type, we must preallocate the stack
	 since we must evaluate the object into its final location.

	 If this is to be passed in both registers and the stack, it is simpler
	 to preallocate.  */
      if (TREE_ADDRESSABLE (type)
	  || (args[i].pass_on_stack && args[i].reg != 0))
	*must_preallocate = 1;

      /* If this is an addressable type, we cannot pre-evaluate it.  Thus,
	 we cannot consider this function call constant.  */
      if (TREE_ADDRESSABLE (type))
	*ecf_flags &= ~ECF_LIBCALL_BLOCK;

      /* Compute the stack-size of this argument.  */
      if (args[i].reg == 0 || args[i].partial != 0
	  || reg_parm_stack_space > 0
	  || args[i].pass_on_stack)
	locate_and_pad_parm (mode, type,
#ifdef STACK_PARMS_IN_REG_PARM_AREA
			     1,
#else
			     args[i].reg != 0,
#endif
			     fndecl, args_size, &args[i].offset,
			     &args[i].size, &alignment_pad);

#ifndef ARGS_GROW_DOWNWARD
      args[i].slot_offset = *args_size;
#endif

      args[i].alignment_pad = alignment_pad;

      /* If a part of the arg was put into registers,
	 don't include that part in the amount pushed.  */
      if (reg_parm_stack_space == 0 && ! args[i].pass_on_stack)
	args[i].size.constant -= ((args[i].partial * UNITS_PER_WORD)
				  / (PARM_BOUNDARY / BITS_PER_UNIT)
				  * (PARM_BOUNDARY / BITS_PER_UNIT));

      /* Update ARGS_SIZE, the total stack space for args so far.  */

      args_size->constant += args[i].size.constant;
      if (args[i].size.var)
	{
	  ADD_PARM_SIZE (*args_size, args[i].size.var);
	}

      /* Since the slot offset points to the bottom of the slot,
	 we must record it after incrementing if the args grow down.  */
#ifdef ARGS_GROW_DOWNWARD
      args[i].slot_offset = *args_size;

      args[i].slot_offset.constant = -args_size->constant;
      if (args_size->var)
	SUB_PARM_SIZE (args[i].slot_offset, args_size->var);
#endif

      /* Increment ARGS_SO_FAR, which has info about which arg-registers
	 have been used, etc.  */

      FUNCTION_ARG_ADVANCE (*args_so_far, TYPE_MODE (type), type,
			    argpos < n_named_args);
    }
}

/* Update ARGS_SIZE to contain the total size for the argument block.
   Return the original constant component of the argument block's size.

   REG_PARM_STACK_SPACE holds the number of bytes of stack space reserved
   for arguments passed in registers.  */

static int
compute_argument_block_size (reg_parm_stack_space, args_size,
			     preferred_stack_boundary)
     int reg_parm_stack_space;
     struct args_size *args_size;
     int preferred_stack_boundary ATTRIBUTE_UNUSED;
{
  int unadjusted_args_size = args_size->constant;

  /* For accumulate outgoing args mode we don't need to align, since the frame
     will be already aligned.  Align to STACK_BOUNDARY in order to prevent
     backends from generating misaligned frame sizes.  */
  if (ACCUMULATE_OUTGOING_ARGS && preferred_stack_boundary > STACK_BOUNDARY)
    preferred_stack_boundary = STACK_BOUNDARY;

  /* Compute the actual size of the argument block required.  The variable
     and constant sizes must be combined, the size may have to be rounded,
     and there may be a minimum required size.  */

  if (args_size->var)
    {
      args_size->var = ARGS_SIZE_TREE (*args_size);
      args_size->constant = 0;

      preferred_stack_boundary /= BITS_PER_UNIT;
      if (preferred_stack_boundary > 1)
	{
	  /* We don't handle this case yet.  To handle it correctly we have
	     to add the delta, round and subtract the delta.
	     Currently no machine description requires this support.  */
	  if (stack_pointer_delta & (preferred_stack_boundary - 1))
	    abort ();
	  args_size->var = round_up (args_size->var, preferred_stack_boundary);
	}

      if (reg_parm_stack_space > 0)
	{
	  args_size->var
	    = size_binop (MAX_EXPR, args_size->var,
			  ssize_int (reg_parm_stack_space));

#ifndef OUTGOING_REG_PARM_STACK_SPACE
	  /* The area corresponding to register parameters is not to count in
	     the size of the block we need.  So make the adjustment.  */
	  args_size->var
	    = size_binop (MINUS_EXPR, args_size->var,
			  ssize_int (reg_parm_stack_space));
#endif
	}
    }
  else
    {
      preferred_stack_boundary /= BITS_PER_UNIT;
      if (preferred_stack_boundary < 1)
	preferred_stack_boundary = 1;
      args_size->constant = (((args_size->constant
			       + stack_pointer_delta
			       + preferred_stack_boundary - 1)
			      / preferred_stack_boundary
			      * preferred_stack_boundary)
			     - stack_pointer_delta);

      args_size->constant = MAX (args_size->constant,
				 reg_parm_stack_space);

#ifdef MAYBE_REG_PARM_STACK_SPACE
      if (reg_parm_stack_space == 0)
	args_size->constant = 0;
#endif

#ifndef OUTGOING_REG_PARM_STACK_SPACE
      args_size->constant -= reg_parm_stack_space;
#endif
    }
  return unadjusted_args_size;
}
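
/* Rounding example with illustrative numbers: for args_size->constant == 20,
   stack_pointer_delta == 4 and a 16-byte preferred boundary, the expression
   above rounds 20 + 4 up to 32 and subtracts the delta again, giving 28, so
   that delta + size remains 16-byte aligned after the arguments are pushed.  */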

/* Precompute parameters as needed for a function call.

   FLAGS is mask of ECF_* constants.

   NUM_ACTUALS is the number of arguments.

   ARGS is an array containing information for each argument; this
   routine fills in the INITIAL_VALUE and VALUE fields for each
   precomputed argument.  */

static void
precompute_arguments (flags, num_actuals, args)
     int flags;
     int num_actuals;
     struct arg_data *args;
{
  int i;

  /* If this function call is cse'able, precompute all the parameters.
     Note that if the parameter is constructed into a temporary, this will
     cause an additional copy because the parameter will be constructed
     into a temporary location and then copied into the outgoing arguments.
     If a parameter contains a call to alloca and this function uses the
     stack, precompute the parameter.  */

  /* If we preallocated the stack space, and some arguments must be passed
     on the stack, then we must precompute any parameter which contains a
     function call which will store arguments on the stack.
     Otherwise, evaluating the parameter may clobber previous parameters
     which have already been stored into the stack.  (We have code to avoid
     such a case by saving the outgoing stack arguments, but it results in
     worse code.)  */

  for (i = 0; i < num_actuals; i++)
    if ((flags & ECF_LIBCALL_BLOCK)
	|| calls_function (args[i].tree_value, !ACCUMULATE_OUTGOING_ARGS))
      {
	enum machine_mode mode;

	/* If this is an addressable type, we cannot pre-evaluate it.  */
	if (TREE_ADDRESSABLE (TREE_TYPE (args[i].tree_value)))
	  abort ();

	args[i].value
	  = expand_expr (args[i].tree_value, NULL_RTX, VOIDmode, 0);

	/* ANSI doesn't require a sequence point here,
	   but PCC has one, so this will avoid some problems.  */
	emit_queue ();

	args[i].initial_value = args[i].value
	  = protect_from_queue (args[i].value, 0);

	mode = TYPE_MODE (TREE_TYPE (args[i].tree_value));
	if (mode != args[i].mode)
	  {
	    args[i].value
	      = convert_modes (args[i].mode, mode,
			       args[i].value, args[i].unsignedp);
#ifdef PROMOTE_FOR_CALL_ONLY
	    /* CSE will replace this only if it contains args[i].value
	       pseudo, so convert it down to the declared mode using
	       a SUBREG.  */
	    if (GET_CODE (args[i].value) == REG
		&& GET_MODE_CLASS (args[i].mode) == MODE_INT)
	      {
		args[i].initial_value
		  = gen_lowpart_SUBREG (mode, args[i].value);
		SUBREG_PROMOTED_VAR_P (args[i].initial_value) = 1;
		SUBREG_PROMOTED_UNSIGNED_SET (args[i].initial_value,
					      args[i].unsignedp);
	      }
#endif
	  }
      }
}

/* Given the current state of MUST_PREALLOCATE and information about
   arguments to a function call in NUM_ACTUALS, ARGS and ARGS_SIZE,
   compute and return the final value for MUST_PREALLOCATE.  */

static int
finalize_must_preallocate (must_preallocate, num_actuals, args, args_size)
     int must_preallocate;
     int num_actuals;
     struct arg_data *args;
     struct args_size *args_size;
{
  /* See if we have or want to preallocate stack space.

     If we would have to push a partially-in-regs parm
     before other stack parms, preallocate stack space instead.

     If the size of some parm is not a multiple of the required stack
     alignment, we must preallocate.

     If the total size of arguments that would otherwise create a copy in
     a temporary (such as a CALL) is more than half the total argument list
     size, preallocation is faster.

     Another reason to preallocate is if we have a machine (like the m88k)
     where stack alignment is required to be maintained between every
     pair of insns, not just when the call is made.  However, we assume here
     that such machines either do not have push insns (and hence preallocation
     would occur anyway) or the problem is taken care of with
     PUSH_ROUNDING.  */

  if (! must_preallocate)
    {
      int partial_seen = 0;
      int copy_to_evaluate_size = 0;
      int i;

      for (i = 0; i < num_actuals && ! must_preallocate; i++)
	{
	  if (args[i].partial > 0 && ! args[i].pass_on_stack)
	    partial_seen = 1;
	  else if (partial_seen && args[i].reg == 0)
	    must_preallocate = 1;

	  if (TYPE_MODE (TREE_TYPE (args[i].tree_value)) == BLKmode
	      && (TREE_CODE (args[i].tree_value) == CALL_EXPR
		  || TREE_CODE (args[i].tree_value) == TARGET_EXPR
		  || TREE_CODE (args[i].tree_value) == COND_EXPR
		  || TREE_ADDRESSABLE (TREE_TYPE (args[i].tree_value))))
	    copy_to_evaluate_size
	      += int_size_in_bytes (TREE_TYPE (args[i].tree_value));
	}

      if (copy_to_evaluate_size * 2 >= args_size->constant
	  && args_size->constant > 0)
	must_preallocate = 1;
    }
  return must_preallocate;
}
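
/* A numeric illustration of the final heuristic: if the BLKmode arguments
   that would need evaluation into temporaries (CALL_EXPRs and the like)
   cover 24 bytes of a 40-byte argument block, then 24 * 2 >= 40 and we
   preallocate rather than pay for the extra copies.  */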

/* If we preallocated stack space, compute the address of each argument
   and store it into the ARGS array.

   We need not ensure it is a valid memory address here; it will be
   validized when it is used.

   ARGBLOCK is an rtx for the address of the outgoing arguments.  */

static void
compute_argument_addresses (args, argblock, num_actuals)
     struct arg_data *args;
     rtx argblock;
     int num_actuals;
{
  if (argblock)
    {
      rtx arg_reg = argblock;
      int i, arg_offset = 0;

      if (GET_CODE (argblock) == PLUS)
	arg_reg = XEXP (argblock, 0), arg_offset = INTVAL (XEXP (argblock, 1));

      for (i = 0; i < num_actuals; i++)
	{
	  rtx offset = ARGS_SIZE_RTX (args[i].offset);
	  rtx slot_offset = ARGS_SIZE_RTX (args[i].slot_offset);
	  rtx addr;

	  /* Skip this parm if it will not be passed on the stack.  */
	  if (! args[i].pass_on_stack && args[i].reg != 0)
	    continue;

	  if (GET_CODE (offset) == CONST_INT)
	    addr = plus_constant (arg_reg, INTVAL (offset));
	  else
	    addr = gen_rtx_PLUS (Pmode, arg_reg, offset);

	  addr = plus_constant (addr, arg_offset);
	  args[i].stack = gen_rtx_MEM (args[i].mode, addr);
	  set_mem_attributes (args[i].stack,
			      TREE_TYPE (args[i].tree_value), 1);

	  if (GET_CODE (slot_offset) == CONST_INT)
	    addr = plus_constant (arg_reg, INTVAL (slot_offset));
	  else
	    addr = gen_rtx_PLUS (Pmode, arg_reg, slot_offset);

	  addr = plus_constant (addr, arg_offset);
	  args[i].stack_slot = gen_rtx_MEM (args[i].mode, addr);
	  set_mem_attributes (args[i].stack_slot,
			      TREE_TYPE (args[i].tree_value), 1);

	  /* Function incoming arguments may overlap with sibling call
	     outgoing arguments and we cannot allow reordering of reads
	     from function arguments with stores to outgoing arguments
	     of sibling calls.  */
	  set_mem_alias_set (args[i].stack, 0);
	  set_mem_alias_set (args[i].stack_slot, 0);
	}
    }
}

/* Given a FNDECL and EXP, return an rtx suitable for use as a target address
   in a call instruction.

   FNDECL is the tree node for the target function.  For an indirect call
   FNDECL will be NULL_TREE.

   EXP is the CALL_EXPR for this call.  */

static rtx
rtx_for_function_call (fndecl, exp)
     tree fndecl;
     tree exp;
{
  rtx funexp;

  /* Get the function to call, in the form of RTL.  */
  if (fndecl)
    {
      /* If this is the first use of the function, see if we need to
	 make an external definition for it.  */
      if (! TREE_USED (fndecl))
	{
	  assemble_external (fndecl);
	  TREE_USED (fndecl) = 1;
	}

      /* Get a SYMBOL_REF rtx for the function address.  */
      funexp = XEXP (DECL_RTL (fndecl), 0);
    }
  else
    /* Generate an rtx (probably a pseudo-register) for the address.  */
    {
      rtx funaddr;
      push_temp_slots ();
      funaddr = funexp
	= expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
      pop_temp_slots ();	/* FUNEXP can't be BLKmode.  */
      emit_queue ();
    }
  return funexp;
}

/* Do the register loads required for any wholly-register parms or any
   parms which are passed both on the stack and in a register.  Their
   expressions were already evaluated.

   Mark all register-parms as living through the call, putting these USE
   insns in the CALL_INSN_FUNCTION_USAGE field.  */

static void
load_register_parameters (args, num_actuals, call_fusage, flags)
     struct arg_data *args;
     int num_actuals;
     rtx *call_fusage;
     int flags;
{
  int i, j;

#ifdef LOAD_ARGS_REVERSED
  for (i = num_actuals - 1; i >= 0; i--)
#else
  for (i = 0; i < num_actuals; i++)
#endif
    {
      rtx reg = ((flags & ECF_SIBCALL)
		 ? args[i].tail_call_reg : args[i].reg);
      int partial = args[i].partial;
      int nregs;

      if (reg)
	{
	  /* Set to non-negative if we must move a word at a time, even if
	     just one word (e.g., partial == 1 && mode == DFmode).  Set to -1
	     if we just use a normal move insn.  This value can be zero if the
	     argument is a zero size structure with no fields.  */
	  nregs = (partial ? partial
		   : (TYPE_MODE (TREE_TYPE (args[i].tree_value)) == BLKmode
		      ? ((int_size_in_bytes (TREE_TYPE (args[i].tree_value))
			  + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)
		      : -1));

	  /* Handle calls that pass values in multiple non-contiguous
	     locations.  The Irix 6 ABI has examples of this.  */

	  if (GET_CODE (reg) == PARALLEL)
	    emit_group_load (reg, args[i].value,
			     int_size_in_bytes (TREE_TYPE (args[i].tree_value)));

	  /* If simple case, just do move.  If normal partial, store_one_arg
	     has already loaded the register for us.  In all other cases,
	     load the register(s) from memory.  */

	  else if (nregs == -1)
	    emit_move_insn (reg, args[i].value);

	  /* If we have pre-computed the values to put in the registers in
	     the case of non-aligned structures, copy them in now.  */

	  else if (args[i].n_aligned_regs != 0)
	    for (j = 0; j < args[i].n_aligned_regs; j++)
	      emit_move_insn (gen_rtx_REG (word_mode, REGNO (reg) + j),
			      args[i].aligned_regs[j]);

	  else if (partial == 0 || args[i].pass_on_stack)
	    move_block_to_reg (REGNO (reg),
			       validize_mem (args[i].value), nregs,
			       args[i].mode);

	  /* Handle calls that pass values in multiple non-contiguous
	     locations.  The Irix 6 ABI has examples of this.  */
	  if (GET_CODE (reg) == PARALLEL)
	    use_group_regs (call_fusage, reg);
	  else if (nregs == -1)
	    use_reg (call_fusage, reg);
	  else
	    use_regs (call_fusage, REGNO (reg), nregs == 0 ? 1 : nregs);
	}
    }
}
1809 /* Try to integrate function. See expand_inline_function for documentation
1810 about the parameters. */
1812 static rtx
1813 try_to_integrate (fndecl, actparms, target, ignore, type, structure_value_addr)
1814 tree fndecl;
1815 tree actparms;
1816 rtx target;
1817 int ignore;
1818 tree type;
1819 rtx structure_value_addr;
1821 rtx temp;
1822 rtx before_call;
1823 int i;
1824 rtx old_stack_level = 0;
1825 int reg_parm_stack_space = 0;
1827 #ifdef REG_PARM_STACK_SPACE
1828 #ifdef MAYBE_REG_PARM_STACK_SPACE
1829 reg_parm_stack_space = MAYBE_REG_PARM_STACK_SPACE;
1830 #else
1831 reg_parm_stack_space = REG_PARM_STACK_SPACE (fndecl);
1832 #endif
1833 #endif
1835 before_call = get_last_insn ();
1837 timevar_push (TV_INTEGRATION);
1839 temp = expand_inline_function (fndecl, actparms, target,
1840 ignore, type,
1841 structure_value_addr);
1843 timevar_pop (TV_INTEGRATION);
1845 /* If inlining succeeded, return. */
1846 if (temp != (rtx) (size_t) - 1)
1848 if (ACCUMULATE_OUTGOING_ARGS)
1850 /* If the outgoing argument list must be preserved, push
1851 the stack before executing the inlined function if it
1852 makes any calls. */
1854 for (i = reg_parm_stack_space - 1; i >= 0; i--)
1855 if (i < highest_outgoing_arg_in_use && stack_usage_map[i] != 0)
1856 break;
1858 if (stack_arg_under_construction || i >= 0)
1860 rtx first_insn
1861 = before_call ? NEXT_INSN (before_call) : get_insns ();
1862 rtx insn = NULL_RTX, seq;
1864 /* Look for a call in the inline function code.
1865 If DECL_SAVED_INSNS (fndecl)->outgoing_args_size is
1866 nonzero then there is a call and it is not necessary
1867 to scan the insns. */
1869 if (DECL_SAVED_INSNS (fndecl)->outgoing_args_size == 0)
1870 for (insn = first_insn; insn; insn = NEXT_INSN (insn))
1871 if (GET_CODE (insn) == CALL_INSN)
1872 break;
1874 if (insn)
1876 /* Reserve enough stack space so that the largest
1877 argument list of any function call in the inline
1878 function does not overlap the argument list being
1879 evaluated. This is usually an overestimate because
1880 allocate_dynamic_stack_space reserves space for an
1881 outgoing argument list in addition to the requested
1882 space, but there is no way to ask for stack space such
1883 that an argument list of a certain length can be
1884 safely constructed.
1886 Add the stack space reserved for register arguments, if
1887 any, in the inline function. What is really needed is the
1888 largest value of reg_parm_stack_space in the inline
1889 function, but that is not available. Using the current
1890 value of reg_parm_stack_space is wrong, but gives
1891 correct results on all supported machines. */
1893 int adjust = (DECL_SAVED_INSNS (fndecl)->outgoing_args_size
1894 + reg_parm_stack_space);
1896 start_sequence ();
1897 emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);
1898 allocate_dynamic_stack_space (GEN_INT (adjust),
1899 NULL_RTX, BITS_PER_UNIT);
1900 seq = get_insns ();
1901 end_sequence ();
1902 emit_insn_before (seq, first_insn);
1903 emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX);
1908 /* If the result is equivalent to TARGET, return TARGET to simplify
1909 checks in store_expr. They can be equivalent but not equal in the
1910 case of a function that returns BLKmode. */
1911 if (temp != target && rtx_equal_p (temp, target))
1912 return target;
1913 return temp;
1916 /* If inlining failed, mark FNDECL as needing to be compiled
1917 separately after all. If function was declared inline,
1918 give a warning. */
1919 if (DECL_INLINE (fndecl) && warn_inline && !flag_no_inline
1920 && optimize > 0 && !TREE_ADDRESSABLE (fndecl))
1922 warning_with_decl (fndecl, "inlining failed in call to `%s'");
1923 warning ("called from here");
1925 (*lang_hooks.mark_addressable) (fndecl);
1926 return (rtx) (size_t) - 1;
1929 /* We need to pop PENDING_STACK_ADJUST bytes. But, if the arguments
1930 wouldn't fill up an even multiple of PREFERRED_UNIT_STACK_BOUNDARY
1931 bytes, then we would need to push some additional bytes to pad the
1932 arguments. So, we compute an adjust to the stack pointer for an
1933 amount that will leave the stack under-aligned by UNADJUSTED_ARGS_SIZE
1934 bytes. Then, when the arguments are pushed the stack will be perfectly
1935 aligned. ARGS_SIZE->CONSTANT is set to the number of bytes that should
1936 be popped after the call. Returns the adjustment. */
1938 static int
1939 combine_pending_stack_adjustment_and_call (unadjusted_args_size,
1940 args_size,
1941 preferred_unit_stack_boundary)
1942 int unadjusted_args_size;
1943 struct args_size *args_size;
1944 int preferred_unit_stack_boundary;
1946 /* The number of bytes to pop so that the stack will be
1947 under-aligned by UNADJUSTED_ARGS_SIZE bytes. */
1948 HOST_WIDE_INT adjustment;
1949 /* The alignment of the stack after the arguments are pushed, if we
1950 just pushed the arguments without adjust the stack here. */
1951 HOST_WIDE_INT unadjusted_alignment;
1953 unadjusted_alignment
1954 = ((stack_pointer_delta + unadjusted_args_size)
1955 % preferred_unit_stack_boundary);
1957 /* We want to get rid of as many of the PENDING_STACK_ADJUST bytes
1958 as possible -- leaving just enough left to cancel out the
1959 UNADJUSTED_ALIGNMENT. In other words, we want to ensure that the
1960 PENDING_STACK_ADJUST is non-negative, and congruent to
1961 -UNADJUSTED_ALIGNMENT modulo the PREFERRED_UNIT_STACK_BOUNDARY. */
1963 /* Begin by trying to pop all the bytes. */
1964 unadjusted_alignment
1965 = (unadjusted_alignment
1966 - (pending_stack_adjust % preferred_unit_stack_boundary));
1967 adjustment = pending_stack_adjust;
1968 /* Push enough additional bytes that the stack will be aligned
1969 after the arguments are pushed. */
1970 if (preferred_unit_stack_boundary > 1)
1972 if (unadjusted_alignment > 0)
1973 adjustment -= preferred_unit_stack_boundary - unadjusted_alignment;
1974 else
1975 adjustment += unadjusted_alignment;
1978 /* Now, sets ARGS_SIZE->CONSTANT so that we pop the right number of
1979 bytes after the call. The right number is the entire
1980 PENDING_STACK_ADJUST less our ADJUSTMENT plus the amount required
1981 by the arguments in the first place. */
1982 args_size->constant
1983 = pending_stack_adjust - adjustment + unadjusted_args_size;
1985 return adjustment;
1988 /* Scan X expression if it does not dereference any argument slots
1989 we already clobbered by tail call arguments (as noted in stored_args_map
1990 bitmap).
1991 Return nonzero if X expression dereferences such argument slots,
1992 zero otherwise. */
1994 static int
1995 check_sibcall_argument_overlap_1 (x)
1996 rtx x;
1998 RTX_CODE code;
1999 int i, j;
2000 unsigned int k;
2001 const char *fmt;
2003 if (x == NULL_RTX)
2004 return 0;
2006 code = GET_CODE (x);
2008 if (code == MEM)
2010 if (XEXP (x, 0) == current_function_internal_arg_pointer)
2011 i = 0;
2012 else if (GET_CODE (XEXP (x, 0)) == PLUS
2013 && XEXP (XEXP (x, 0), 0) ==
2014 current_function_internal_arg_pointer
2015 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT)
2016 i = INTVAL (XEXP (XEXP (x, 0), 1));
2017 else
2018 return 0;
2020 #ifdef ARGS_GROW_DOWNWARD
2021 i = -i - GET_MODE_SIZE (GET_MODE (x));
2022 #endif
2024 for (k = 0; k < GET_MODE_SIZE (GET_MODE (x)); k++)
2025 if (i + k < stored_args_map->n_bits
2026 && TEST_BIT (stored_args_map, i + k))
2027 return 1;
2029 return 0;
2032 /* Scan all subexpressions. */
2033 fmt = GET_RTX_FORMAT (code);
2034 for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
2036 if (*fmt == 'e')
2038 if (check_sibcall_argument_overlap_1 (XEXP (x, i)))
2039 return 1;
2041 else if (*fmt == 'E')
2043 for (j = 0; j < XVECLEN (x, i); j++)
2044 if (check_sibcall_argument_overlap_1 (XVECEXP (x, i, j)))
2045 return 1;
2048 return 0;
2051 /* Scan sequence after INSN if it does not dereference any argument slots
2052 we already clobbered by tail call arguments (as noted in stored_args_map
2053 bitmap). Add stack slots for ARG to stored_args_map bitmap afterwards.
2054 Return nonzero if sequence after INSN dereferences such argument slots,
2055 zero otherwise. */
2057 static int
2058 check_sibcall_argument_overlap (insn, arg)
2059 rtx insn;
2060 struct arg_data *arg;
2062 int low, high;
2064 if (insn == NULL_RTX)
2065 insn = get_insns ();
2066 else
2067 insn = NEXT_INSN (insn);
2069 for (; insn; insn = NEXT_INSN (insn))
2070 if (INSN_P (insn)
2071 && check_sibcall_argument_overlap_1 (PATTERN (insn)))
2072 break;
2074 #ifdef ARGS_GROW_DOWNWARD
2075 low = -arg->slot_offset.constant - arg->size.constant;
2076 #else
2077 low = arg->slot_offset.constant;
2078 #endif
2080 for (high = low + arg->size.constant; low < high; low++)
2081 SET_BIT (stored_args_map, low);
2082 return insn != NULL_RTX;
2085 /* Generate all the code for a function call
2086 and return an rtx for its value.
2087 Store the value in TARGET (specified as an rtx) if convenient.
2088 If the value is stored in TARGET then TARGET is returned.
2089 If IGNORE is nonzero, then we ignore the value of the function call. */
2092 expand_call (exp, target, ignore)
2093 tree exp;
2094 rtx target;
2095 int ignore;
2097 /* Nonzero if we are currently expanding a call. */
2098 static int currently_expanding_call = 0;
2100 /* List of actual parameters. */
2101 tree actparms = TREE_OPERAND (exp, 1);
2102 /* RTX for the function to be called. */
2103 rtx funexp;
2104 /* Sequence of insns to perform a tail recursive "call". */
2105 rtx tail_recursion_insns = NULL_RTX;
2106 /* Sequence of insns to perform a normal "call". */
2107 rtx normal_call_insns = NULL_RTX;
2108 /* Sequence of insns to perform a tail recursive "call". */
2109 rtx tail_call_insns = NULL_RTX;
2110 /* Data type of the function. */
2111 tree funtype;
2112 /* Declaration of the function being called,
2113 or 0 if the function is computed (not known by name). */
2114 tree fndecl = 0;
2115 rtx insn;
2116 int try_tail_call = 1;
2117 int try_tail_recursion = 1;
2118 int pass;
2120 /* Register in which non-BLKmode value will be returned,
2121 or 0 if no value or if value is BLKmode. */
2122 rtx valreg;
2123 /* Address where we should return a BLKmode value;
2124 0 if value not BLKmode. */
2125 rtx structure_value_addr = 0;
2126 /* Nonzero if that address is being passed by treating it as
2127 an extra, implicit first parameter. Otherwise,
2128 it is passed by being copied directly into struct_value_rtx. */
2129 int structure_value_addr_parm = 0;
2130 /* Size of aggregate value wanted, or zero if none wanted
2131 or if we are using the non-reentrant PCC calling convention
2132 or expecting the value in registers. */
2133 HOST_WIDE_INT struct_value_size = 0;
2134 /* Nonzero if called function returns an aggregate in memory PCC style,
2135 by returning the address of where to find it. */
2136 int pcc_struct_value = 0;
2138 /* Number of actual parameters in this call, including struct value addr. */
2139 int num_actuals;
2140 /* Number of named args. Args after this are anonymous ones
2141 and they must all go on the stack. */
2142 int n_named_args;
2144 /* Vector of information about each argument.
2145 Arguments are numbered in the order they will be pushed,
2146 not the order they are written. */
2147 struct arg_data *args;
2149 /* Total size in bytes of all the stack-parms scanned so far. */
2150 struct args_size args_size;
2151 struct args_size adjusted_args_size;
2152 /* Size of arguments before any adjustments (such as rounding). */
2153 int unadjusted_args_size;
2154 /* Data on reg parms scanned so far. */
2155 CUMULATIVE_ARGS args_so_far;
2156 /* Nonzero if a reg parm has been scanned. */
2157 int reg_parm_seen;
2158 /* Nonzero if this is an indirect function call. */
2160 /* Nonzero if we must avoid push-insns in the args for this call.
2161 If stack space is allocated for register parameters, but not by the
2162 caller, then it is preallocated in the fixed part of the stack frame.
2163 So the entire argument block must then be preallocated (i.e., we
2164 ignore PUSH_ROUNDING in that case). */
2166 int must_preallocate = !PUSH_ARGS;
2168 /* Size of the stack reserved for parameter registers. */
2169 int reg_parm_stack_space = 0;
2171 /* Address of space preallocated for stack parms
2172 (on machines that lack push insns), or 0 if space not preallocated. */
2173 rtx argblock = 0;
2175 /* Mask of ECF_ flags. */
2176 int flags = 0;
2177 /* Nonzero if this is a call to an inline function. */
2178 int is_integrable = 0;
2179 #ifdef REG_PARM_STACK_SPACE
2180 /* Define the boundary of the register parm stack space that needs to be
2181 save, if any. */
2182 int low_to_save = -1, high_to_save;
2183 rtx save_area = 0; /* Place that it is saved */
2184 #endif
2186 int initial_highest_arg_in_use = highest_outgoing_arg_in_use;
2187 char *initial_stack_usage_map = stack_usage_map;
2188 int old_stack_arg_under_construction = 0;
2190 rtx old_stack_level = 0;
2191 int old_pending_adj = 0;
2192 int old_inhibit_defer_pop = inhibit_defer_pop;
2193 int old_stack_allocated;
2194 rtx call_fusage;
2195 tree p = TREE_OPERAND (exp, 0);
2196 int i;
2197 /* The alignment of the stack, in bits. */
2198 HOST_WIDE_INT preferred_stack_boundary;
2199 /* The alignment of the stack, in bytes. */
2200 HOST_WIDE_INT preferred_unit_stack_boundary;
2202 /* See if this is "nothrow" function call. */
2203 if (TREE_NOTHROW (exp))
2204 flags |= ECF_NOTHROW;
2206 /* See if we can find a DECL-node for the actual function.
2207 As a result, decide whether this is a call to an integrable function. */
2209 fndecl = get_callee_fndecl (exp);
2210 if (fndecl)
2212 if (!flag_no_inline
2213 && fndecl != current_function_decl
2214 && DECL_INLINE (fndecl)
2215 && DECL_SAVED_INSNS (fndecl)
2216 && DECL_SAVED_INSNS (fndecl)->inlinable)
2217 is_integrable = 1;
2218 else if (! TREE_ADDRESSABLE (fndecl))
2220 /* In case this function later becomes inlinable,
2221 record that there was already a non-inline call to it.
2223 Use abstraction instead of setting TREE_ADDRESSABLE
2224 directly. */
2225 if (DECL_INLINE (fndecl) && warn_inline && !flag_no_inline
2226 && optimize > 0)
2228 warning_with_decl (fndecl, "can't inline call to `%s'");
2229 warning ("called from here");
2231 (*lang_hooks.mark_addressable) (fndecl);
2234 flags |= flags_from_decl_or_type (fndecl);
2237 /* If we don't have specific function to call, see if we have a
2238 attributes set in the type. */
2239 else
2240 flags |= flags_from_decl_or_type (TREE_TYPE (TREE_TYPE (p)));
2242 #ifdef REG_PARM_STACK_SPACE
2243 #ifdef MAYBE_REG_PARM_STACK_SPACE
2244 reg_parm_stack_space = MAYBE_REG_PARM_STACK_SPACE;
2245 #else
2246 reg_parm_stack_space = REG_PARM_STACK_SPACE (fndecl);
2247 #endif
2248 #endif
2250 #ifndef OUTGOING_REG_PARM_STACK_SPACE
2251 if (reg_parm_stack_space > 0 && PUSH_ARGS)
2252 must_preallocate = 1;
2253 #endif
2255 /* Warn if this value is an aggregate type,
2256 regardless of which calling convention we are using for it. */
2257 if (warn_aggregate_return && AGGREGATE_TYPE_P (TREE_TYPE (exp)))
2258 warning ("function call has aggregate value");
2260 /* Set up a place to return a structure. */
2262 /* Cater to broken compilers. */
2263 if (aggregate_value_p (exp))
2265 /* This call returns a big structure. */
2266 flags &= ~(ECF_CONST | ECF_PURE | ECF_LIBCALL_BLOCK);
2268 #ifdef PCC_STATIC_STRUCT_RETURN
2270 pcc_struct_value = 1;
2271 /* Easier than making that case work right. */
2272 if (is_integrable)
2274 /* In case this is a static function, note that it has been
2275 used. */
2276 if (! TREE_ADDRESSABLE (fndecl))
2277 (*lang_hooks.mark_addressable) (fndecl);
2278 is_integrable = 0;
2281 #else /* not PCC_STATIC_STRUCT_RETURN */
2283 struct_value_size = int_size_in_bytes (TREE_TYPE (exp));
2285 if (target && GET_CODE (target) == MEM)
2286 structure_value_addr = XEXP (target, 0);
2287 else
2289 /* For variable-sized objects, we must be called with a target
2290 specified. If we were to allocate space on the stack here,
2291 we would have no way of knowing when to free it. */
2292 rtx d = assign_temp (TREE_TYPE (exp), 1, 1, 1);
2294 mark_temp_addr_taken (d);
2295 structure_value_addr = XEXP (d, 0);
2296 target = 0;
2299 #endif /* not PCC_STATIC_STRUCT_RETURN */
2302 /* If called function is inline, try to integrate it. */
2304 if (is_integrable)
2306 rtx temp = try_to_integrate (fndecl, actparms, target,
2307 ignore, TREE_TYPE (exp),
2308 structure_value_addr);
2309 if (temp != (rtx) (size_t) - 1)
2310 return temp;
2313 /* Figure out the amount to which the stack should be aligned. */
2314 preferred_stack_boundary = PREFERRED_STACK_BOUNDARY;
2316 /* Operand 0 is a pointer-to-function; get the type of the function. */
2317 funtype = TREE_TYPE (TREE_OPERAND (exp, 0));
2318 if (! POINTER_TYPE_P (funtype))
2319 abort ();
2320 funtype = TREE_TYPE (funtype);
2322 /* See if this is a call to a function that can return more than once
2323 or a call to longjmp or malloc. */
2324 flags |= special_function_p (fndecl, flags);
2326 if (flags & ECF_MAY_BE_ALLOCA)
2327 current_function_calls_alloca = 1;
2329 /* If struct_value_rtx is 0, it means pass the address
2330 as if it were an extra parameter. */
2331 if (structure_value_addr && struct_value_rtx == 0)
2333 /* If structure_value_addr is a REG other than
2334 virtual_outgoing_args_rtx, we can use always use it. If it
2335 is not a REG, we must always copy it into a register.
2336 If it is virtual_outgoing_args_rtx, we must copy it to another
2337 register in some cases. */
2338 rtx temp = (GET_CODE (structure_value_addr) != REG
2339 || (ACCUMULATE_OUTGOING_ARGS
2340 && stack_arg_under_construction
2341 && structure_value_addr == virtual_outgoing_args_rtx)
2342 ? copy_addr_to_reg (structure_value_addr)
2343 : structure_value_addr);
2345 actparms
2346 = tree_cons (error_mark_node,
2347 make_tree (build_pointer_type (TREE_TYPE (funtype)),
2348 temp),
2349 actparms);
2350 structure_value_addr_parm = 1;
2353 /* Count the arguments and set NUM_ACTUALS. */
2354 for (p = actparms, num_actuals = 0; p; p = TREE_CHAIN (p))
2355 num_actuals++;
2357 /* Compute number of named args.
2358 Normally, don't include the last named arg if anonymous args follow.
2359 We do include the last named arg if STRICT_ARGUMENT_NAMING is nonzero.
2360 (If no anonymous args follow, the result of list_length is actually
2361 one too large. This is harmless.)
2363 If PRETEND_OUTGOING_VARARGS_NAMED is set and STRICT_ARGUMENT_NAMING is
2364 zero, this machine will be able to place unnamed args that were
2365 passed in registers into the stack. So treat all args as named.
2366 This allows the insns emitting for a specific argument list to be
2367 independent of the function declaration.
2369 If PRETEND_OUTGOING_VARARGS_NAMED is not set, we do not have any
2370 reliable way to pass unnamed args in registers, so we must force
2371 them into memory. */
2373 if ((STRICT_ARGUMENT_NAMING
2374 || ! PRETEND_OUTGOING_VARARGS_NAMED)
2375 && TYPE_ARG_TYPES (funtype) != 0)
2376 n_named_args
2377 = (list_length (TYPE_ARG_TYPES (funtype))
2378 /* Don't include the last named arg. */
2379 - (STRICT_ARGUMENT_NAMING ? 0 : 1)
2380 /* Count the struct value address, if it is passed as a parm. */
2381 + structure_value_addr_parm);
2382 else
2383 /* If we know nothing, treat all args as named. */
2384 n_named_args = num_actuals;
2386 /* Start updating where the next arg would go.
2388 On some machines (such as the PA) indirect calls have a different
2389 calling convention than normal calls. The last argument in
2390 INIT_CUMULATIVE_ARGS tells the backend if this is an indirect call
2391 or not. */
2392 INIT_CUMULATIVE_ARGS (args_so_far, funtype, NULL_RTX, (fndecl == 0));
2394 /* Make a vector to hold all the information about each arg. */
2395 args = (struct arg_data *) alloca (num_actuals * sizeof (struct arg_data));
2396 memset ((char *) args, 0, num_actuals * sizeof (struct arg_data));
2398 /* Build up entries in the ARGS array, compute the size of the
2399 arguments into ARGS_SIZE, etc. */
2400 initialize_argument_information (num_actuals, args, &args_size,
2401 n_named_args, actparms, fndecl,
2402 &args_so_far, reg_parm_stack_space,
2403 &old_stack_level, &old_pending_adj,
2404 &must_preallocate, &flags);
2406 if (args_size.var)
2408 /* If this function requires a variable-sized argument list, don't
2409 try to make a cse'able block for this call. We may be able to
2410 do this eventually, but it is too complicated to keep track of
2411 what insns go in the cse'able block and which don't. */
2413 flags &= ~ECF_LIBCALL_BLOCK;
2414 must_preallocate = 1;
2417 /* Now make final decision about preallocating stack space. */
2418 must_preallocate = finalize_must_preallocate (must_preallocate,
2419 num_actuals, args,
2420 &args_size);
2422 /* If the structure value address will reference the stack pointer, we
2423 must stabilize it. We don't need to do this if we know that we are
2424 not going to adjust the stack pointer in processing this call. */
2426 if (structure_value_addr
2427 && (reg_mentioned_p (virtual_stack_dynamic_rtx, structure_value_addr)
2428 || reg_mentioned_p (virtual_outgoing_args_rtx,
2429 structure_value_addr))
2430 && (args_size.var
2431 || (!ACCUMULATE_OUTGOING_ARGS && args_size.constant)))
2432 structure_value_addr = copy_to_reg (structure_value_addr);
2434 /* Tail calls can make things harder to debug, and we're traditionally
2435 pushed these optimizations into -O2. Don't try if we're already
2436 expanding a call, as that means we're an argument. Don't try if
2437 there's cleanups, as we know there's code to follow the call.
2439 If rtx_equal_function_value_matters is false, that means we've
2440 finished with regular parsing. Which means that some of the
2441 machinery we use to generate tail-calls is no longer in place.
2442 This is most often true of sjlj-exceptions, which we couldn't
2443 tail-call to anyway. */
2445 if (currently_expanding_call++ != 0
2446 || !flag_optimize_sibling_calls
2447 || !rtx_equal_function_value_matters
2448 || any_pending_cleanups (1)
2449 || args_size.var)
2450 try_tail_call = try_tail_recursion = 0;
2452 /* Tail recursion fails, when we are not dealing with recursive calls. */
2453 if (!try_tail_recursion
2454 || TREE_CODE (TREE_OPERAND (exp, 0)) != ADDR_EXPR
2455 || TREE_OPERAND (TREE_OPERAND (exp, 0), 0) != current_function_decl)
2456 try_tail_recursion = 0;
2458 /* Rest of purposes for tail call optimizations to fail. */
2459 if (
2460 #ifdef HAVE_sibcall_epilogue
2461 !HAVE_sibcall_epilogue
2462 #else
2464 #endif
2465 || !try_tail_call
2466 /* Doing sibling call optimization needs some work, since
2467 structure_value_addr can be allocated on the stack.
2468 It does not seem worth the effort since few optimizable
2469 sibling calls will return a structure. */
2470 || structure_value_addr != NULL_RTX
2471 /* If the register holding the address is a callee saved
2472 register, then we lose. We have no way to prevent that,
2473 so we only allow calls to named functions. */
2474 /* ??? This could be done by having the insn constraints
2475 use a register class that is all call-clobbered. Any
2476 reload insns generated to fix things up would appear
2477 before the sibcall_epilogue. */
2478 || fndecl == NULL_TREE
2479 || (flags & (ECF_RETURNS_TWICE | ECF_LONGJMP | ECF_NORETURN))
2480 || !FUNCTION_OK_FOR_SIBCALL (fndecl)
2481 /* If this function requires more stack slots than the current
2482 function, we cannot change it into a sibling call. */
2483 || args_size.constant > current_function_args_size
2484 /* If the callee pops its own arguments, then it must pop exactly
2485 the same number of arguments as the current function. */
2486 || RETURN_POPS_ARGS (fndecl, funtype, args_size.constant)
2487 != RETURN_POPS_ARGS (current_function_decl,
2488 TREE_TYPE (current_function_decl),
2489 current_function_args_size))
2490 try_tail_call = 0;
2492 if (try_tail_call || try_tail_recursion)
2494 int end, inc;
2495 actparms = NULL_TREE;
2496 /* Ok, we're going to give the tail call the old college try.
2497 This means we're going to evaluate the function arguments
2498 up to three times. There are two degrees of badness we can
2499 encounter, those that can be unsaved and those that can't.
2500 (See unsafe_for_reeval commentary for details.)
2502 Generate a new argument list. Pass safe arguments through
2503 unchanged. For the easy badness wrap them in UNSAVE_EXPRs.
2504 For hard badness, evaluate them now and put their resulting
2505 rtx in a temporary VAR_DECL.
2507 initialize_argument_information has ordered the array for the
2508 order to be pushed, and we must remember this when reconstructing
2509 the original argument order. */
2511 if (PUSH_ARGS_REVERSED)
2513 inc = 1;
2514 i = 0;
2515 end = num_actuals;
2517 else
2519 inc = -1;
2520 i = num_actuals - 1;
2521 end = -1;
2524 for (; i != end; i += inc)
2526 switch (unsafe_for_reeval (args[i].tree_value))
2528 case 0: /* Safe. */
2529 break;
2531 case 1: /* Mildly unsafe. */
2532 args[i].tree_value = unsave_expr (args[i].tree_value);
2533 break;
2535 case 2: /* Wildly unsafe. */
2537 tree var = build_decl (VAR_DECL, NULL_TREE,
2538 TREE_TYPE (args[i].tree_value));
2539 SET_DECL_RTL (var,
2540 expand_expr (args[i].tree_value, NULL_RTX,
2541 VOIDmode, EXPAND_NORMAL));
2542 args[i].tree_value = var;
2544 break;
2546 default:
2547 abort ();
2549 /* We need to build actparms for optimize_tail_recursion. We can
2550 safely trash away TREE_PURPOSE, since it is unused by this
2551 function. */
2552 if (try_tail_recursion)
2553 actparms = tree_cons (NULL_TREE, args[i].tree_value, actparms);
2555 /* Expanding one of those dangerous arguments could have added
2556 cleanups, but otherwise give it a whirl. */
2557 if (any_pending_cleanups (1))
2558 try_tail_call = try_tail_recursion = 0;
2561 /* Generate a tail recursion sequence when calling ourselves. */
2563 if (try_tail_recursion)
2565 /* We want to emit any pending stack adjustments before the tail
2566 recursion "call". That way we know any adjustment after the tail
2567 recursion call can be ignored if we indeed use the tail recursion
2568 call expansion. */
2569 int save_pending_stack_adjust = pending_stack_adjust;
2570 int save_stack_pointer_delta = stack_pointer_delta;
2572 /* Emit any queued insns now; otherwise they would end up in
2573 only one of the alternates. */
2574 emit_queue ();
2576 /* Use a new sequence to hold any RTL we generate. We do not even
2577 know if we will use this RTL yet. The final decision can not be
2578 made until after RTL generation for the entire function is
2579 complete. */
2580 start_sequence ();
2581 /* If expanding any of the arguments creates cleanups, we can't
2582 do a tailcall. So, we'll need to pop the pending cleanups
2583 list. If, however, all goes well, and there are no cleanups
2584 then the call to expand_start_target_temps will have no
2585 effect. */
2586 expand_start_target_temps ();
2587 if (optimize_tail_recursion (actparms, get_last_insn ()))
2589 if (any_pending_cleanups (1))
2590 try_tail_call = try_tail_recursion = 0;
2591 else
2592 tail_recursion_insns = get_insns ();
2594 expand_end_target_temps ();
2595 end_sequence ();
2597 /* Restore the original pending stack adjustment for the sibling and
2598 normal call cases below. */
2599 pending_stack_adjust = save_pending_stack_adjust;
2600 stack_pointer_delta = save_stack_pointer_delta;
2603 if (profile_arc_flag && (flags & ECF_FORK_OR_EXEC))
2605 /* A fork duplicates the profile information, and an exec discards
2606 it. We can't rely on fork/exec to be paired. So write out the
2607 profile information we have gathered so far, and clear it. */
2608 /* ??? When Linux's __clone is called with CLONE_VM set, profiling
2609 is subject to race conditions, just as with multithreaded
2610 programs. */
2612 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "__bb_fork_func"),
2613 LCT_ALWAYS_RETURN,
2614 VOIDmode, 0);
2617 /* Ensure current function's preferred stack boundary is at least
2618 what we need. We don't have to increase alignment for recursive
2619 functions. */
2620 if (cfun->preferred_stack_boundary < preferred_stack_boundary
2621 && fndecl != current_function_decl)
2622 cfun->preferred_stack_boundary = preferred_stack_boundary;
2624 preferred_unit_stack_boundary = preferred_stack_boundary / BITS_PER_UNIT;
2626 function_call_count++;
2628 /* We want to make two insn chains; one for a sibling call, the other
2629 for a normal call. We will select one of the two chains after
2630 initial RTL generation is complete. */
2631 for (pass = 0; pass < 2; pass++)
2633 int sibcall_failure = 0;
2634 /* We want to emit any pending stack adjustments before the tail
2635 recursion "call". That way we know any adjustment after the tail
2636 recursion call can be ignored if we indeed use the tail recursion
2637 call expansion. */
2638 int save_pending_stack_adjust = 0;
2639 int save_stack_pointer_delta = 0;
2640 rtx insns;
2641 rtx before_call, next_arg_reg;
2643 if (pass == 0)
2645 if (! try_tail_call)
2646 continue;
2648 /* Emit any queued insns now; otherwise they would end up in
2649 only one of the alternates. */
2650 emit_queue ();
2652 /* State variables we need to save and restore between
2653 iterations. */
2654 save_pending_stack_adjust = pending_stack_adjust;
2655 save_stack_pointer_delta = stack_pointer_delta;
2657 if (pass)
2658 flags &= ~ECF_SIBCALL;
2659 else
2660 flags |= ECF_SIBCALL;
2662 /* Other state variables that we must reinitialize each time
2663 through the loop (that are not initialized by the loop itself). */
2664 argblock = 0;
2665 call_fusage = 0;
2667 /* Start a new sequence for the normal call case.
2669 From this point on, if the sibling call fails, we want to set
2670 sibcall_failure instead of continuing the loop. */
2671 start_sequence ();
2673 if (pass == 0)
2675 /* We know at this point that there are not currently any
2676 pending cleanups. If, however, in the process of evaluating
2677 the arguments we were to create some, we'll need to be
2678 able to get rid of them. */
2679 expand_start_target_temps ();
2682 /* Don't let pending stack adjusts add up to too much.
2683 Also, do all pending adjustments now if there is any chance
2684 this might be a call to alloca or if we are expanding a sibling
2685 call sequence or if we are calling a function that is to return
2686 with stack pointer depressed. */
2687 if (pending_stack_adjust >= 32
2688 || (pending_stack_adjust > 0
2689 && (flags & (ECF_MAY_BE_ALLOCA | ECF_SP_DEPRESSED)))
2690 || pass == 0)
2691 do_pending_stack_adjust ();
2693 /* When calling a const function, we must pop the stack args right away,
2694 so that the pop is deleted or moved with the call. */
2695 if (pass && (flags & ECF_LIBCALL_BLOCK))
2696 NO_DEFER_POP;
2698 #ifdef FINAL_REG_PARM_STACK_SPACE
2699 reg_parm_stack_space = FINAL_REG_PARM_STACK_SPACE (args_size.constant,
2700 args_size.var);
2701 #endif
2702 /* Precompute any arguments as needed. */
2703 if (pass)
2704 precompute_arguments (flags, num_actuals, args);
2706 /* Now we are about to start emitting insns that can be deleted
2707 if a libcall is deleted. */
2708 if (pass && (flags & (ECF_LIBCALL_BLOCK | ECF_MALLOC)))
2709 start_sequence ();
2711 adjusted_args_size = args_size;
2712 /* Compute the actual size of the argument block required. The variable
2713 and constant sizes must be combined, the size may have to be rounded,
2714 and there may be a minimum required size. When generating a sibcall
2715 pattern, do not round up, since we'll be re-using whatever space our
2716 caller provided. */
2717 unadjusted_args_size
2718 = compute_argument_block_size (reg_parm_stack_space,
2719 &adjusted_args_size,
2720 (pass == 0 ? 0
2721 : preferred_stack_boundary));
2723 old_stack_allocated = stack_pointer_delta - pending_stack_adjust;
2725 /* The argument block when performing a sibling call is the
2726 incoming argument block. */
2727 if (pass == 0)
2729 argblock = virtual_incoming_args_rtx;
2730 argblock
2731 #ifdef STACK_GROWS_DOWNWARD
2732 = plus_constant (argblock, current_function_pretend_args_size);
2733 #else
2734 = plus_constant (argblock, -current_function_pretend_args_size);
2735 #endif
2736 stored_args_map = sbitmap_alloc (args_size.constant);
2737 sbitmap_zero (stored_args_map);
2740 /* If we have no actual push instructions, or shouldn't use them,
2741 make space for all args right now. */
2742 else if (adjusted_args_size.var != 0)
2744 if (old_stack_level == 0)
2746 emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);
2747 old_pending_adj = pending_stack_adjust;
2748 pending_stack_adjust = 0;
2749 /* stack_arg_under_construction says whether a stack arg is
2750 being constructed at the old stack level. Pushing the stack
2751 gets a clean outgoing argument block. */
2752 old_stack_arg_under_construction = stack_arg_under_construction;
2753 stack_arg_under_construction = 0;
2755 argblock = push_block (ARGS_SIZE_RTX (adjusted_args_size), 0, 0);
2757 else
2759 /* Note that we must go through the motions of allocating an argument
2760 block even if the size is zero because we may be storing args
2761 in the area reserved for register arguments, which may be part of
2762 the stack frame. */
2764 int needed = adjusted_args_size.constant;
2766 /* Store the maximum argument space used. It will be pushed by
2767 the prologue (if ACCUMULATE_OUTGOING_ARGS, or stack overflow
2768 checking). */
2770 if (needed > current_function_outgoing_args_size)
2771 current_function_outgoing_args_size = needed;
2773 if (must_preallocate)
2775 if (ACCUMULATE_OUTGOING_ARGS)
2777 /* Since the stack pointer will never be pushed, it is
2778 possible for the evaluation of a parm to clobber
2779 something we have already written to the stack.
2780 Since most function calls on RISC machines do not use
2781 the stack, this is uncommon, but must work correctly.
2783 Therefore, we save any area of the stack that was already
2784 written and that we are using. Here we set up to do this
2785 by making a new stack usage map from the old one. The
2786 actual save will be done by store_one_arg.
2788 Another approach might be to try to reorder the argument
2789 evaluations to avoid this conflicting stack usage. */
2791 #ifndef OUTGOING_REG_PARM_STACK_SPACE
2792 /* Since we will be writing into the entire argument area,
2793 the map must be allocated for its entire size, not just
2794 the part that is the responsibility of the caller. */
2795 needed += reg_parm_stack_space;
2796 #endif
2798 #ifdef ARGS_GROW_DOWNWARD
2799 highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
2800 needed + 1);
2801 #else
2802 highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
2803 needed);
2804 #endif
2805 stack_usage_map
2806 = (char *) alloca (highest_outgoing_arg_in_use);
2808 if (initial_highest_arg_in_use)
2809 memcpy (stack_usage_map, initial_stack_usage_map,
2810 initial_highest_arg_in_use);
2812 if (initial_highest_arg_in_use != highest_outgoing_arg_in_use)
2813 memset (&stack_usage_map[initial_highest_arg_in_use], 0,
2814 (highest_outgoing_arg_in_use
2815 - initial_highest_arg_in_use));
2816 needed = 0;
2818 /* The address of the outgoing argument list must not be
2819 copied to a register here, because argblock would be left
2820 pointing to the wrong place after the call to
2821 allocate_dynamic_stack_space below. */
2823 argblock = virtual_outgoing_args_rtx;
2825 else
2827 if (inhibit_defer_pop == 0)
2829 /* Try to reuse some or all of the pending_stack_adjust
2830 to get this space. */
2831 needed
2832 = (combine_pending_stack_adjustment_and_call
2833 (unadjusted_args_size,
2834 &adjusted_args_size,
2835 preferred_unit_stack_boundary));
2837 /* combine_pending_stack_adjustment_and_call computes
2838 an adjustment before the arguments are allocated.
2839 Account for them and see whether or not the stack
2840 needs to go up or down. */
2841 needed = unadjusted_args_size - needed;
2843 if (needed < 0)
2845 /* We're releasing stack space. */
2846 /* ??? We can avoid any adjustment at all if we're
2847 already aligned. FIXME. */
2848 pending_stack_adjust = -needed;
2849 do_pending_stack_adjust ();
2850 needed = 0;
2852 else
2853 /* We need to allocate space. We'll do that in
2854 push_block below. */
2855 pending_stack_adjust = 0;
2858 /* Special case this because overhead of `push_block' in
2859 this case is non-trivial. */
2860 if (needed == 0)
2861 argblock = virtual_outgoing_args_rtx;
2862 else
2863 argblock = push_block (GEN_INT (needed), 0, 0);
2865 /* We only really need to call `copy_to_reg' in the case
2866 where push insns are going to be used to pass ARGBLOCK
2867 to a function call in ARGS. In that case, the stack
2868 pointer changes value from the allocation point to the
2869 call point, and hence the value of
2870 VIRTUAL_OUTGOING_ARGS_RTX changes as well. But might
2871 as well always do it. */
2872 argblock = copy_to_reg (argblock);
2874 /* The save/restore code in store_one_arg handles all
2875 cases except one: a constructor call (including a C
2876 function returning a BLKmode struct) to initialize
2877 an argument. */
2878 if (stack_arg_under_construction)
2880 #ifndef OUTGOING_REG_PARM_STACK_SPACE
2881 rtx push_size = GEN_INT (reg_parm_stack_space
2882 + adjusted_args_size.constant);
2883 #else
2884 rtx push_size = GEN_INT (adjusted_args_size.constant);
2885 #endif
2886 if (old_stack_level == 0)
2888 emit_stack_save (SAVE_BLOCK, &old_stack_level,
2889 NULL_RTX);
2890 old_pending_adj = pending_stack_adjust;
2891 pending_stack_adjust = 0;
2892 /* stack_arg_under_construction says whether a stack
2893 arg is being constructed at the old stack level.
2894 Pushing the stack gets a clean outgoing argument
2895 block. */
2896 old_stack_arg_under_construction
2897 = stack_arg_under_construction;
2898 stack_arg_under_construction = 0;
2899 /* Make a new map for the new argument list. */
2900 stack_usage_map = (char *)
2901 alloca (highest_outgoing_arg_in_use);
2902 memset (stack_usage_map, 0, highest_outgoing_arg_in_use);
2903 highest_outgoing_arg_in_use = 0;
2905 allocate_dynamic_stack_space (push_size, NULL_RTX,
2906 BITS_PER_UNIT);
2908 /* If argument evaluation might modify the stack pointer,
2909 copy the address of the argument list to a register. */
2910 for (i = 0; i < num_actuals; i++)
2911 if (args[i].pass_on_stack)
2913 argblock = copy_addr_to_reg (argblock);
2914 break;
2920 compute_argument_addresses (args, argblock, num_actuals);
2922 /* If we push args individually in reverse order, perform stack alignment
2923 before the first push (the last arg). */
2924 if (PUSH_ARGS_REVERSED && argblock == 0
2925 && adjusted_args_size.constant != unadjusted_args_size)
2927 /* When the stack adjustment is pending, we get better code
2928 by combining the adjustments. */
2929 if (pending_stack_adjust
2930 && ! (flags & ECF_LIBCALL_BLOCK)
2931 && ! inhibit_defer_pop)
2933 pending_stack_adjust
2934 = (combine_pending_stack_adjustment_and_call
2935 (unadjusted_args_size,
2936 &adjusted_args_size,
2937 preferred_unit_stack_boundary));
2938 do_pending_stack_adjust ();
2940 else if (argblock == 0)
2941 anti_adjust_stack (GEN_INT (adjusted_args_size.constant
2942 - unadjusted_args_size));
2944 /* Now that the stack is properly aligned, pops can't safely
2945 be deferred during the evaluation of the arguments. */
2946 NO_DEFER_POP;
2948 funexp = rtx_for_function_call (fndecl, exp);
2950 /* Figure out the register where the value, if any, will come back. */
2951 valreg = 0;
2952 if (TYPE_MODE (TREE_TYPE (exp)) != VOIDmode
2953 && ! structure_value_addr)
2955 if (pcc_struct_value)
2956 valreg = hard_function_value (build_pointer_type (TREE_TYPE (exp)),
2957 fndecl, (pass == 0));
2958 else
2959 valreg = hard_function_value (TREE_TYPE (exp), fndecl, (pass == 0));
2962 /* Precompute all register parameters. It isn't safe to compute anything
2963 once we have started filling any specific hard regs. */
2964 precompute_register_parameters (num_actuals, args, &reg_parm_seen);
2966 #ifdef REG_PARM_STACK_SPACE
2967 /* Save the fixed argument area if it's part of the caller's frame and
2968 is clobbered by argument setup for this call. */
2969 if (ACCUMULATE_OUTGOING_ARGS && pass)
2970 save_area = save_fixed_argument_area (reg_parm_stack_space, argblock,
2971 &low_to_save, &high_to_save);
2972 #endif
2974 /* Now store (and compute if necessary) all non-register parms.
2975 These come before register parms, since they can require block-moves,
2976 which could clobber the registers used for register parms.
2977 Parms which have partial registers are not stored here,
2978 but we do preallocate space here if they want that. */
2980 for (i = 0; i < num_actuals; i++)
2981 if (args[i].reg == 0 || args[i].pass_on_stack)
2983 rtx before_arg = get_last_insn ();
2985 if (store_one_arg (&args[i], argblock, flags,
2986 adjusted_args_size.var != 0,
2987 reg_parm_stack_space)
2988 || (pass == 0
2989 && check_sibcall_argument_overlap (before_arg,
2990 &args[i])))
2991 sibcall_failure = 1;
2994 /* If we have a parm that is passed in registers but not in memory
2995 and whose alignment does not permit a direct copy into registers,
2996 make a group of pseudos that correspond to each register that we
2997 will later fill. */
2998 if (STRICT_ALIGNMENT)
2999 store_unaligned_arguments_into_pseudos (args, num_actuals);
3001 /* Now store any partially-in-registers parm.
3002 This is the last place a block-move can happen. */
3003 if (reg_parm_seen)
3004 for (i = 0; i < num_actuals; i++)
3005 if (args[i].partial != 0 && ! args[i].pass_on_stack)
3007 rtx before_arg = get_last_insn ();
3009 if (store_one_arg (&args[i], argblock, flags,
3010 adjusted_args_size.var != 0,
3011 reg_parm_stack_space)
3012 || (pass == 0
3013 && check_sibcall_argument_overlap (before_arg,
3014 &args[i])))
3015 sibcall_failure = 1;
3018 /* If we pushed args in forward order, perform stack alignment
3019 after pushing the last arg. */
3020 if (!PUSH_ARGS_REVERSED && argblock == 0)
3021 anti_adjust_stack (GEN_INT (adjusted_args_size.constant
3022 - unadjusted_args_size));
3024 /* If register arguments require space on the stack and stack space
3025 was not preallocated, allocate stack space here for arguments
3026 passed in registers. */
3027 #ifdef OUTGOING_REG_PARM_STACK_SPACE
3028 if (!ACCUMULATE_OUTGOING_ARGS
3029 && must_preallocate == 0 && reg_parm_stack_space > 0)
3030 anti_adjust_stack (GEN_INT (reg_parm_stack_space));
3031 #endif
3033 /* Pass the function the address in which to return a
3034 structure value. */
3035 if (pass != 0 && structure_value_addr && ! structure_value_addr_parm)
3037 emit_move_insn (struct_value_rtx,
3038 force_reg (Pmode,
3039 force_operand (structure_value_addr,
3040 NULL_RTX)));
3042 if (GET_CODE (struct_value_rtx) == REG)
3043 use_reg (&call_fusage, struct_value_rtx);
3046 funexp = prepare_call_address (funexp, fndecl, &call_fusage,
3047 reg_parm_seen, pass == 0);
3049 load_register_parameters (args, num_actuals, &call_fusage, flags);
3051 /* Perform postincrements before actually calling the function. */
3052 emit_queue ();
3054 /* Save a pointer to the last insn before the call, so that we can
3055 later safely search backwards to find the CALL_INSN. */
3056 before_call = get_last_insn ();
3058 /* Set up next argument register. For sibling calls on machines
3059 with register windows this should be the incoming register. */
3060 #ifdef FUNCTION_INCOMING_ARG
3061 if (pass == 0)
3062 next_arg_reg = FUNCTION_INCOMING_ARG (args_so_far, VOIDmode,
3063 void_type_node, 1);
3064 else
3065 #endif
3066 next_arg_reg = FUNCTION_ARG (args_so_far, VOIDmode,
3067 void_type_node, 1);
3069 /* All arguments and registers used for the call must be set up by
3070 now! */
3072 /* Stack must be properly aligned now. */
3073 if (pass && stack_pointer_delta % preferred_unit_stack_boundary)
3074 abort ();
3076 /* Generate the actual call instruction. */
3077 emit_call_1 (funexp, fndecl, funtype, unadjusted_args_size,
3078 adjusted_args_size.constant, struct_value_size,
3079 next_arg_reg, valreg, old_inhibit_defer_pop, call_fusage,
3080 flags, & args_so_far);
3082 /* Verify that we've deallocated all the stack we used. */
3083 if (pass
3084 && old_stack_allocated != stack_pointer_delta - pending_stack_adjust)
3085 abort ();
3087 /* If call is cse'able, make appropriate pair of reg-notes around it.
3088 Test valreg so we don't crash; may safely ignore `const'
3089 if return type is void. Disable for PARALLEL return values, because
3090 we have no way to move such values into a pseudo register. */
3091 if (pass && (flags & ECF_LIBCALL_BLOCK))
3093 rtx insns;
3095 if (valreg == 0 || GET_CODE (valreg) == PARALLEL)
3097 insns = get_insns ();
3098 end_sequence ();
3099 emit_insn (insns);
3101 else
3103 rtx note = 0;
3104 rtx temp = gen_reg_rtx (GET_MODE (valreg));
3106 /* Mark the return value as a pointer if needed. */
3107 if (TREE_CODE (TREE_TYPE (exp)) == POINTER_TYPE)
3108 mark_reg_pointer (temp,
3109 TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp))));
3111 /* Construct an "equal form" for the value which mentions all the
3112 arguments in order as well as the function name. */
3113 for (i = 0; i < num_actuals; i++)
3114 note = gen_rtx_EXPR_LIST (VOIDmode,
3115 args[i].initial_value, note);
3116 note = gen_rtx_EXPR_LIST (VOIDmode, funexp, note);
3118 insns = get_insns ();
3119 end_sequence ();
3121 if (flags & ECF_PURE)
3122 note = gen_rtx_EXPR_LIST (VOIDmode,
3123 gen_rtx_USE (VOIDmode,
3124 gen_rtx_MEM (BLKmode,
3125 gen_rtx_SCRATCH (VOIDmode))),
3126 note);
3128 emit_libcall_block (insns, temp, valreg, note);
3130 valreg = temp;
3133 else if (pass && (flags & ECF_MALLOC))
3135 rtx temp = gen_reg_rtx (GET_MODE (valreg));
3136 rtx last, insns;
3138 /* The return value from a malloc-like function is a pointer. */
3139 if (TREE_CODE (TREE_TYPE (exp)) == POINTER_TYPE)
3140 mark_reg_pointer (temp, BIGGEST_ALIGNMENT);
3142 emit_move_insn (temp, valreg);
3144 /* The return value from a malloc-like function can not alias
3145 anything else. */
3146 last = get_last_insn ();
3147 REG_NOTES (last) =
3148 gen_rtx_EXPR_LIST (REG_NOALIAS, temp, REG_NOTES (last));
3150 /* Write out the sequence. */
3151 insns = get_insns ();
3152 end_sequence ();
3153 emit_insn (insns);
3154 valreg = temp;
3157 /* For calls to `setjmp', etc., inform flow.c it should complain
3158 if nonvolatile values are live. For functions that cannot return,
3159 inform flow that control does not fall through. */
3161 if ((flags & (ECF_NORETURN | ECF_LONGJMP)) || pass == 0)
3163 /* The barrier must be emitted
3164 immediately after the CALL_INSN. Some ports emit more
3165 than just a CALL_INSN above, so we must search for it here. */
3167 rtx last = get_last_insn ();
3168 while (GET_CODE (last) != CALL_INSN)
3170 last = PREV_INSN (last);
3171 /* There was no CALL_INSN? */
3172 if (last == before_call)
3173 abort ();
3176 emit_barrier_after (last);
3179 if (flags & ECF_LONGJMP)
3180 current_function_calls_longjmp = 1;
3182 /* If this function is returning into a memory location marked as
3183 readonly, it means it is initializing that location. But we normally
3184 treat functions as not clobbering such locations, so we need to
3185 specify that this one does. */
3186 if (target != 0 && GET_CODE (target) == MEM
3187 && structure_value_addr != 0 && RTX_UNCHANGING_P (target))
3188 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
3190 /* If value type not void, return an rtx for the value. */
3192 /* If there are cleanups to be called, don't use a hard reg as target.
3193 We need to double check this and see if it matters anymore. */
3194 if (any_pending_cleanups (1))
3196 if (target && REG_P (target)
3197 && REGNO (target) < FIRST_PSEUDO_REGISTER)
3198 target = 0;
3199 sibcall_failure = 1;
3202 if (TYPE_MODE (TREE_TYPE (exp)) == VOIDmode
3203 || ignore)
3204 target = const0_rtx;
3205 else if (structure_value_addr)
3207 if (target == 0 || GET_CODE (target) != MEM)
3209 target
3210 = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (exp)),
3211 memory_address (TYPE_MODE (TREE_TYPE (exp)),
3212 structure_value_addr));
3213 set_mem_attributes (target, exp, 1);
3216 else if (pcc_struct_value)
3218 /* This is the special C++ case where we need to
3219 know what the true target was. We take care to
3220 never use this value more than once in one expression. */
3221 target = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (exp)),
3222 copy_to_reg (valreg));
3223 set_mem_attributes (target, exp, 1);
3225 /* Handle calls that return values in multiple non-contiguous locations.
3226 The Irix 6 ABI has examples of this. */
3227 else if (GET_CODE (valreg) == PARALLEL)
3229 if (target == 0)
3231 /* This will only be assigned once, so it can be readonly. */
3232 tree nt = build_qualified_type (TREE_TYPE (exp),
3233 (TYPE_QUALS (TREE_TYPE (exp))
3234 | TYPE_QUAL_CONST));
3236 target = assign_temp (nt, 0, 1, 1);
3237 preserve_temp_slots (target);
3240 if (! rtx_equal_p (target, valreg))
3241 emit_group_store (target, valreg,
3242 int_size_in_bytes (TREE_TYPE (exp)));
3244 /* We can not support sibling calls for this case. */
3245 sibcall_failure = 1;
3247 else if (target
3248 && GET_MODE (target) == TYPE_MODE (TREE_TYPE (exp))
3249 && GET_MODE (target) == GET_MODE (valreg))
3251 /* TARGET and VALREG cannot be equal at this point because the
3252 latter would not have REG_FUNCTION_VALUE_P true, while the
3253 former would if it were referring to the same register.
3255 If they refer to the same register, this move will be a no-op,
3256 except when function inlining is being done. */
3257 emit_move_insn (target, valreg);
3259 else if (TYPE_MODE (TREE_TYPE (exp)) == BLKmode)
3261 target = copy_blkmode_from_reg (target, valreg, TREE_TYPE (exp));
3263 /* We can not support sibling calls for this case. */
3264 sibcall_failure = 1;
3266 else
3267 target = copy_to_reg (valreg);
3269 #ifdef PROMOTE_FUNCTION_RETURN
3270 /* If we promoted this return value, make the proper SUBREG. TARGET
3271 might be const0_rtx here, so be careful. */
3272 if (GET_CODE (target) == REG
3273 && TYPE_MODE (TREE_TYPE (exp)) != BLKmode
3274 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
3276 tree type = TREE_TYPE (exp);
3277 int unsignedp = TREE_UNSIGNED (type);
3278 int offset = 0;
3280 /* If we don't promote as expected, something is wrong. */
3281 if (GET_MODE (target)
3282 != promote_mode (type, TYPE_MODE (type), &unsignedp, 1))
3283 abort ();
3285 if ((WORDS_BIG_ENDIAN || BYTES_BIG_ENDIAN)
3286 && GET_MODE_SIZE (GET_MODE (target))
3287 > GET_MODE_SIZE (TYPE_MODE (type)))
3289 offset = GET_MODE_SIZE (GET_MODE (target))
3290 - GET_MODE_SIZE (TYPE_MODE (type));
3291 if (! BYTES_BIG_ENDIAN)
3292 offset = (offset / UNITS_PER_WORD) * UNITS_PER_WORD;
3293 else if (! WORDS_BIG_ENDIAN)
3294 offset %= UNITS_PER_WORD;
3296 target = gen_rtx_SUBREG (TYPE_MODE (type), target, offset);
3297 SUBREG_PROMOTED_VAR_P (target) = 1;
3298 SUBREG_PROMOTED_UNSIGNED_SET (target, unsignedp);
3300 #endif
3302 /* If size of args is variable or this was a constructor call for a stack
3303 argument, restore saved stack-pointer value. */
3305 if (old_stack_level && ! (flags & ECF_SP_DEPRESSED))
3307 emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX);
3308 pending_stack_adjust = old_pending_adj;
3309 stack_arg_under_construction = old_stack_arg_under_construction;
3310 highest_outgoing_arg_in_use = initial_highest_arg_in_use;
3311 stack_usage_map = initial_stack_usage_map;
3312 sibcall_failure = 1;
3314 else if (ACCUMULATE_OUTGOING_ARGS && pass)
3316 #ifdef REG_PARM_STACK_SPACE
3317 if (save_area)
3319 restore_fixed_argument_area (save_area, argblock,
3320 high_to_save, low_to_save);
3322 #endif
3324 /* If we saved any argument areas, restore them. */
3325 for (i = 0; i < num_actuals; i++)
3326 if (args[i].save_area)
3328 enum machine_mode save_mode = GET_MODE (args[i].save_area);
3329 rtx stack_area
3330 = gen_rtx_MEM (save_mode,
3331 memory_address (save_mode,
3332 XEXP (args[i].stack_slot, 0)));
3334 if (save_mode != BLKmode)
3335 emit_move_insn (stack_area, args[i].save_area);
3336 else
3337 emit_block_move (stack_area, args[i].save_area,
3338 GEN_INT (args[i].size.constant),
3339 BLOCK_OP_CALL_PARM);
3342 highest_outgoing_arg_in_use = initial_highest_arg_in_use;
3343 stack_usage_map = initial_stack_usage_map;
3346 /* If this was alloca, record the new stack level for nonlocal gotos.
3347 Check for the handler slots since we might not have a save area
3348 for non-local gotos. */
3350 if ((flags & ECF_MAY_BE_ALLOCA) && nonlocal_goto_handler_slots != 0)
3351 emit_stack_save (SAVE_NONLOCAL, &nonlocal_goto_stack_level, NULL_RTX);
3353 /* Free up storage we no longer need. */
3354 for (i = 0; i < num_actuals; ++i)
3355 if (args[i].aligned_regs)
3356 free (args[i].aligned_regs);
3358 if (pass == 0)
3360 /* Undo the fake expand_start_target_temps we did earlier. If
3361 there had been any cleanups created, we've already set
3362 sibcall_failure. */
3363 expand_end_target_temps ();
3366 insns = get_insns ();
3367 end_sequence ();
3369 if (pass == 0)
3371 tail_call_insns = insns;
3373 /* Restore the pending stack adjustment now that we have
3374 finished generating the sibling call sequence. */
3376 pending_stack_adjust = save_pending_stack_adjust;
3377 stack_pointer_delta = save_stack_pointer_delta;
3379 /* Prepare arg structure for next iteration. */
3380 for (i = 0; i < num_actuals; i++)
3382 args[i].value = 0;
3383 args[i].aligned_regs = 0;
3384 args[i].stack = 0;
3387 sbitmap_free (stored_args_map);
3389 else
3390 normal_call_insns = insns;
3392 /* If something prevents making this a sibling call,
3393 zero out the sequence. */
3394 if (sibcall_failure)
3395 tail_call_insns = NULL_RTX;
3398 /* The function optimize_sibling_and_tail_recursive_calls doesn't
3399 handle CALL_PLACEHOLDERs inside other CALL_PLACEHOLDERs. This
3400 can happen if the arguments to this function call an inline
3401 function who's expansion contains another CALL_PLACEHOLDER.
3403 If there are any C_Ps in any of these sequences, replace them
3404 with their normal call. */
3406 for (insn = normal_call_insns; insn; insn = NEXT_INSN (insn))
3407 if (GET_CODE (insn) == CALL_INSN
3408 && GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
3409 replace_call_placeholder (insn, sibcall_use_normal);
3411 for (insn = tail_call_insns; insn; insn = NEXT_INSN (insn))
3412 if (GET_CODE (insn) == CALL_INSN
3413 && GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
3414 replace_call_placeholder (insn, sibcall_use_normal);
3416 for (insn = tail_recursion_insns; insn; insn = NEXT_INSN (insn))
3417 if (GET_CODE (insn) == CALL_INSN
3418 && GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
3419 replace_call_placeholder (insn, sibcall_use_normal);
3421 /* If this was a potential tail recursion site, then emit a
3422 CALL_PLACEHOLDER with the normal and the tail recursion streams.
3423 One of them will be selected later. */
3424 if (tail_recursion_insns || tail_call_insns)
3426 /* The tail recursion label must be kept around. We could expose
3427 its use in the CALL_PLACEHOLDER, but that creates unwanted edges
3428 and makes determining true tail recursion sites difficult.
3430 So we set LABEL_PRESERVE_P here, then clear it when we select
3431 one of the call sequences after rtl generation is complete. */
3432 if (tail_recursion_insns)
3433 LABEL_PRESERVE_P (tail_recursion_label) = 1;
3434 emit_call_insn (gen_rtx_CALL_PLACEHOLDER (VOIDmode, normal_call_insns,
3435 tail_call_insns,
3436 tail_recursion_insns,
3437 tail_recursion_label));
3439 else
3440 emit_insn (normal_call_insns);
3442 currently_expanding_call--;
3444 /* If this function returns with the stack pointer depressed, ensure
3445 this block saves and restores the stack pointer, show it was
3446 changed, and adjust for any outgoing arg space. */
3447 if (flags & ECF_SP_DEPRESSED)
3449 clear_pending_stack_adjust ();
3450 emit_insn (gen_rtx (CLOBBER, VOIDmode, stack_pointer_rtx));
3451 emit_move_insn (virtual_stack_dynamic_rtx, stack_pointer_rtx);
3452 save_stack_pointer ();
3455 return target;
3458 /* Output a library call to function FUN (a SYMBOL_REF rtx).
3459 The RETVAL parameter specifies whether return value needs to be saved, other
3460 parameters are documented in the emit_library_call function below. */
3462 static rtx
3463 emit_library_call_value_1 (retval, orgfun, value, fn_type, outmode, nargs, p)
3464 int retval;
3465 rtx orgfun;
3466 rtx value;
3467 enum libcall_type fn_type;
3468 enum machine_mode outmode;
3469 int nargs;
3470 va_list p;
3472 /* Total size in bytes of all the stack-parms scanned so far. */
3473 struct args_size args_size;
3474 /* Size of arguments before any adjustments (such as rounding). */
3475 struct args_size original_args_size;
3476 int argnum;
3477 rtx fun;
3478 int inc;
3479 int count;
3480 struct args_size alignment_pad;
3481 rtx argblock = 0;
3482 CUMULATIVE_ARGS args_so_far;
3483 struct arg
3485 rtx value;
3486 enum machine_mode mode;
3487 rtx reg;
3488 int partial;
3489 struct args_size offset;
3490 struct args_size size;
3491 rtx save_area;
3493 struct arg *argvec;
3494 int old_inhibit_defer_pop = inhibit_defer_pop;
3495 rtx call_fusage = 0;
3496 rtx mem_value = 0;
3497 rtx valreg;
3498 int pcc_struct_value = 0;
3499 int struct_value_size = 0;
3500 int flags;
3501 int reg_parm_stack_space = 0;
3502 int needed;
3503 rtx before_call;
3504 tree tfom; /* type_for_mode (outmode, 0) */
3506 #ifdef REG_PARM_STACK_SPACE
3507 /* Define the boundary of the register parm stack space that needs to be
3508 save, if any. */
3509 int low_to_save = -1, high_to_save = 0;
3510 rtx save_area = 0; /* Place that it is saved. */
3511 #endif
3513 /* Size of the stack reserved for parameter registers. */
3514 int initial_highest_arg_in_use = highest_outgoing_arg_in_use;
3515 char *initial_stack_usage_map = stack_usage_map;
3517 #ifdef REG_PARM_STACK_SPACE
3518 #ifdef MAYBE_REG_PARM_STACK_SPACE
3519 reg_parm_stack_space = MAYBE_REG_PARM_STACK_SPACE;
3520 #else
3521 reg_parm_stack_space = REG_PARM_STACK_SPACE ((tree) 0);
3522 #endif
3523 #endif
3525 /* By default, library functions can not throw. */
3526 flags = ECF_NOTHROW;
3528 switch (fn_type)
3530 case LCT_NORMAL:
3531 break;
3532 case LCT_CONST:
3533 flags |= ECF_CONST;
3534 break;
3535 case LCT_PURE:
3536 flags |= ECF_PURE;
3537 break;
3538 case LCT_CONST_MAKE_BLOCK:
3539 flags |= ECF_CONST | ECF_LIBCALL_BLOCK;
3540 break;
3541 case LCT_PURE_MAKE_BLOCK:
3542 flags |= ECF_PURE | ECF_LIBCALL_BLOCK;
3543 break;
3544 case LCT_NORETURN:
3545 flags |= ECF_NORETURN;
3546 break;
3547 case LCT_THROW:
3548 flags = ECF_NORETURN;
3549 break;
3550 case LCT_ALWAYS_RETURN:
3551 flags = ECF_ALWAYS_RETURN;
3552 break;
3553 case LCT_RETURNS_TWICE:
3554 flags = ECF_RETURNS_TWICE;
3555 break;
3557 fun = orgfun;
3559 /* Ensure current function's preferred stack boundary is at least
3560 what we need. */
3561 if (cfun->preferred_stack_boundary < PREFERRED_STACK_BOUNDARY)
3562 cfun->preferred_stack_boundary = PREFERRED_STACK_BOUNDARY;
3564 /* If this kind of value comes back in memory,
3565 decide where in memory it should come back. */
3566 if (outmode != VOIDmode)
3568 tfom = (*lang_hooks.types.type_for_mode) (outmode, 0);
3569 if (aggregate_value_p (tfom))
3571 #ifdef PCC_STATIC_STRUCT_RETURN
3572 rtx pointer_reg
3573 = hard_function_value (build_pointer_type (tfom), 0, 0);
3574 mem_value = gen_rtx_MEM (outmode, pointer_reg);
3575 pcc_struct_value = 1;
3576 if (value == 0)
3577 value = gen_reg_rtx (outmode);
3578 #else /* not PCC_STATIC_STRUCT_RETURN */
3579 struct_value_size = GET_MODE_SIZE (outmode);
3580 if (value != 0 && GET_CODE (value) == MEM)
3581 mem_value = value;
3582 else
3583 mem_value = assign_temp (tfom, 0, 1, 1);
3584 #endif
3585 /* This call returns a big structure. */
3586 flags &= ~(ECF_CONST | ECF_PURE | ECF_LIBCALL_BLOCK);
3589 else
3590 tfom = void_type_node;
3592 /* ??? Unfinished: must pass the memory address as an argument. */
3594 /* Copy all the libcall-arguments out of the varargs data
3595 and into a vector ARGVEC.
3597 Compute how to pass each argument. We only support a very small subset
3598 of the full argument passing conventions to limit complexity here since
3599 library functions shouldn't have many args. */
3601 argvec = (struct arg *) alloca ((nargs + 1) * sizeof (struct arg));
3602 memset ((char *) argvec, 0, (nargs + 1) * sizeof (struct arg));
3604 #ifdef INIT_CUMULATIVE_LIBCALL_ARGS
3605 INIT_CUMULATIVE_LIBCALL_ARGS (args_so_far, outmode, fun);
3606 #else
3607 INIT_CUMULATIVE_ARGS (args_so_far, NULL_TREE, fun, 0);
3608 #endif
3610 args_size.constant = 0;
3611 args_size.var = 0;
3613 count = 0;
3615 /* Now we are about to start emitting insns that can be deleted
3616 if a libcall is deleted. */
3617 if (flags & ECF_LIBCALL_BLOCK)
3618 start_sequence ();
3620 push_temp_slots ();
3622 /* If there's a structure value address to be passed,
3623 either pass it in the special place, or pass it as an extra argument. */
3624 if (mem_value && struct_value_rtx == 0 && ! pcc_struct_value)
3626 rtx addr = XEXP (mem_value, 0);
3627 nargs++;
3629 /* Make sure it is a reasonable operand for a move or push insn. */
3630 if (GET_CODE (addr) != REG && GET_CODE (addr) != MEM
3631 && ! (CONSTANT_P (addr) && LEGITIMATE_CONSTANT_P (addr)))
3632 addr = force_operand (addr, NULL_RTX);
3634 argvec[count].value = addr;
3635 argvec[count].mode = Pmode;
3636 argvec[count].partial = 0;
3638 argvec[count].reg = FUNCTION_ARG (args_so_far, Pmode, NULL_TREE, 1);
3639 #ifdef FUNCTION_ARG_PARTIAL_NREGS
3640 if (FUNCTION_ARG_PARTIAL_NREGS (args_so_far, Pmode, NULL_TREE, 1))
3641 abort ();
3642 #endif
3644 locate_and_pad_parm (Pmode, NULL_TREE,
3645 #ifdef STACK_PARMS_IN_REG_PARM_AREA
3647 #else
3648 argvec[count].reg != 0,
3649 #endif
3650 NULL_TREE, &args_size, &argvec[count].offset,
3651 &argvec[count].size, &alignment_pad);
3653 if (argvec[count].reg == 0 || argvec[count].partial != 0
3654 || reg_parm_stack_space > 0)
3655 args_size.constant += argvec[count].size.constant;
3657 FUNCTION_ARG_ADVANCE (args_so_far, Pmode, (tree) 0, 1);
3659 count++;
3662 for (; count < nargs; count++)
3664 rtx val = va_arg (p, rtx);
3665 enum machine_mode mode = va_arg (p, enum machine_mode);
3667 /* We cannot convert the arg value to the mode the library wants here;
3668 must do it earlier where we know the signedness of the arg. */
3669 if (mode == BLKmode
3670 || (GET_MODE (val) != mode && GET_MODE (val) != VOIDmode))
3671 abort ();
3673 /* On some machines, there's no way to pass a float to a library fcn.
3674 Pass it as a double instead. */
3675 #ifdef LIBGCC_NEEDS_DOUBLE
3676 if (LIBGCC_NEEDS_DOUBLE && mode == SFmode)
3677 val = convert_modes (DFmode, SFmode, val, 0), mode = DFmode;
3678 #endif
3680 /* There's no need to call protect_from_queue, because
3681 either emit_move_insn or emit_push_insn will do that. */
3683 /* Make sure it is a reasonable operand for a move or push insn. */
3684 if (GET_CODE (val) != REG && GET_CODE (val) != MEM
3685 && ! (CONSTANT_P (val) && LEGITIMATE_CONSTANT_P (val)))
3686 val = force_operand (val, NULL_RTX);
3688 #ifdef FUNCTION_ARG_PASS_BY_REFERENCE
3689 if (FUNCTION_ARG_PASS_BY_REFERENCE (args_so_far, mode, NULL_TREE, 1))
3691 rtx slot;
3692 int must_copy = 1
3693 #ifdef FUNCTION_ARG_CALLEE_COPIES
3694 && ! FUNCTION_ARG_CALLEE_COPIES (args_so_far, mode,
3695 NULL_TREE, 1)
3696 #endif
3699 /* loop.c won't look at CALL_INSN_FUNCTION_USAGE of const/pure
3700 functions, so we have to pretend this isn't such a function. */
3701 if (flags & ECF_LIBCALL_BLOCK)
3703 rtx insns = get_insns ();
3704 end_sequence ();
3705 emit_insn (insns);
3707 flags &= ~(ECF_CONST | ECF_PURE | ECF_LIBCALL_BLOCK);
3709 /* If this was a CONST function, it is now PURE since
3710 it now reads memory. */
3711 if (flags & ECF_CONST)
3713 flags &= ~ECF_CONST;
3714 flags |= ECF_PURE;
3717 if (GET_MODE (val) == MEM && ! must_copy)
3718 slot = val;
3719 else if (must_copy)
3721 slot = assign_temp ((*lang_hooks.types.type_for_mode) (mode, 0),
3722 0, 1, 1);
3723 emit_move_insn (slot, val);
3725 else
3727 tree type = (*lang_hooks.types.type_for_mode) (mode, 0);
3729 slot = gen_rtx_MEM (mode,
3730 expand_expr (build1 (ADDR_EXPR,
3731 build_pointer_type
3732 (type),
3733 make_tree (type, val)),
3734 NULL_RTX, VOIDmode, 0));
3737 call_fusage = gen_rtx_EXPR_LIST (VOIDmode,
3738 gen_rtx_USE (VOIDmode, slot),
3739 call_fusage);
3740 if (must_copy)
3741 call_fusage = gen_rtx_EXPR_LIST (VOIDmode,
3742 gen_rtx_CLOBBER (VOIDmode,
3743 slot),
3744 call_fusage);
3746 mode = Pmode;
3747 val = force_operand (XEXP (slot, 0), NULL_RTX);
3749 #endif
3751 argvec[count].value = val;
3752 argvec[count].mode = mode;
3754 argvec[count].reg = FUNCTION_ARG (args_so_far, mode, NULL_TREE, 1);
3756 #ifdef FUNCTION_ARG_PARTIAL_NREGS
3757 argvec[count].partial
3758 = FUNCTION_ARG_PARTIAL_NREGS (args_so_far, mode, NULL_TREE, 1);
3759 #else
3760 argvec[count].partial = 0;
3761 #endif
3763 locate_and_pad_parm (mode, NULL_TREE,
3764 #ifdef STACK_PARMS_IN_REG_PARM_AREA
3766 #else
3767 argvec[count].reg != 0,
3768 #endif
3769 NULL_TREE, &args_size, &argvec[count].offset,
3770 &argvec[count].size, &alignment_pad);
3772 if (argvec[count].size.var)
3773 abort ();
3775 if (reg_parm_stack_space == 0 && argvec[count].partial)
3776 argvec[count].size.constant -= argvec[count].partial * UNITS_PER_WORD;
3778 if (argvec[count].reg == 0 || argvec[count].partial != 0
3779 || reg_parm_stack_space > 0)
3780 args_size.constant += argvec[count].size.constant;
3782 FUNCTION_ARG_ADVANCE (args_so_far, mode, (tree) 0, 1);
3785 #ifdef FINAL_REG_PARM_STACK_SPACE
3786 reg_parm_stack_space = FINAL_REG_PARM_STACK_SPACE (args_size.constant,
3787 args_size.var);
3788 #endif
3789 /* If this machine requires an external definition for library
3790 functions, write one out. */
3791 assemble_external_libcall (fun);
3793 original_args_size = args_size;
3794 args_size.constant = (((args_size.constant
3795 + stack_pointer_delta
3796 + STACK_BYTES - 1)
3797 / STACK_BYTES
3798 * STACK_BYTES)
3799 - stack_pointer_delta);
3801 args_size.constant = MAX (args_size.constant,
3802 reg_parm_stack_space);
3804 #ifndef OUTGOING_REG_PARM_STACK_SPACE
3805 args_size.constant -= reg_parm_stack_space;
3806 #endif
3808 if (args_size.constant > current_function_outgoing_args_size)
3809 current_function_outgoing_args_size = args_size.constant;
3811 if (ACCUMULATE_OUTGOING_ARGS)
3813 /* Since the stack pointer will never be pushed, it is possible for
3814 the evaluation of a parm to clobber something we have already
3815 written to the stack. Since most function calls on RISC machines
3816 do not use the stack, this is uncommon, but must work correctly.
3818 Therefore, we save any area of the stack that was already written
3819 and that we are using. Here we set up to do this by making a new
3820 stack usage map from the old one.
3822 Another approach might be to try to reorder the argument
3823 evaluations to avoid this conflicting stack usage. */
3825 needed = args_size.constant;
3827 #ifndef OUTGOING_REG_PARM_STACK_SPACE
3828 /* Since we will be writing into the entire argument area, the
3829 map must be allocated for its entire size, not just the part that
3830 is the responsibility of the caller. */
3831 needed += reg_parm_stack_space;
3832 #endif
3834 #ifdef ARGS_GROW_DOWNWARD
3835 highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
3836 needed + 1);
3837 #else
3838 highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
3839 needed);
3840 #endif
3841 stack_usage_map = (char *) alloca (highest_outgoing_arg_in_use);
3843 if (initial_highest_arg_in_use)
3844 memcpy (stack_usage_map, initial_stack_usage_map,
3845 initial_highest_arg_in_use);
3847 if (initial_highest_arg_in_use != highest_outgoing_arg_in_use)
3848 memset (&stack_usage_map[initial_highest_arg_in_use], 0,
3849 highest_outgoing_arg_in_use - initial_highest_arg_in_use);
3850 needed = 0;
3852 /* We must be careful to use virtual regs before they're instantiated,
3853 and real regs afterwards. Loop optimization, for example, can create
3854 new libcalls after we've instantiated the virtual regs, and if we
3855 use virtuals anyway, they won't match the rtl patterns. */
3857 if (virtuals_instantiated)
3858 argblock = plus_constant (stack_pointer_rtx, STACK_POINTER_OFFSET);
3859 else
3860 argblock = virtual_outgoing_args_rtx;
3862 else
3864 if (!PUSH_ARGS)
3865 argblock = push_block (GEN_INT (args_size.constant), 0, 0);
3868 /* If we push args individually in reverse order, perform stack alignment
3869 before the first push (the last arg). */
3870 if (argblock == 0 && PUSH_ARGS_REVERSED)
3871 anti_adjust_stack (GEN_INT (args_size.constant
3872 - original_args_size.constant));
3874 if (PUSH_ARGS_REVERSED)
3876 inc = -1;
3877 argnum = nargs - 1;
3879 else
3881 inc = 1;
3882 argnum = 0;
3885 #ifdef REG_PARM_STACK_SPACE
3886 if (ACCUMULATE_OUTGOING_ARGS)
3888 /* The argument list is the property of the called routine and it
3889 may clobber it. If the fixed area has been used for previous
3890 parameters, we must save and restore it.
3892 Here we compute the boundary of the that needs to be saved, if any. */
3894 #ifdef ARGS_GROW_DOWNWARD
3895 for (count = 0; count < reg_parm_stack_space + 1; count++)
3896 #else
3897 for (count = 0; count < reg_parm_stack_space; count++)
3898 #endif
3900 if (count >= highest_outgoing_arg_in_use
3901 || stack_usage_map[count] == 0)
3902 continue;
3904 if (low_to_save == -1)
3905 low_to_save = count;
3907 high_to_save = count;
3910 if (low_to_save >= 0)
3912 int num_to_save = high_to_save - low_to_save + 1;
3913 enum machine_mode save_mode
3914 = mode_for_size (num_to_save * BITS_PER_UNIT, MODE_INT, 1);
3915 rtx stack_area;
3917 /* If we don't have the required alignment, must do this in BLKmode. */
3918 if ((low_to_save & (MIN (GET_MODE_SIZE (save_mode),
3919 BIGGEST_ALIGNMENT / UNITS_PER_WORD) - 1)))
3920 save_mode = BLKmode;
3922 #ifdef ARGS_GROW_DOWNWARD
3923 stack_area = gen_rtx_MEM (save_mode,
3924 memory_address (save_mode,
3925 plus_constant (argblock,
3926 -high_to_save)));
3927 #else
3928 stack_area = gen_rtx_MEM (save_mode,
3929 memory_address (save_mode,
3930 plus_constant (argblock,
3931 low_to_save)));
3932 #endif
3933 if (save_mode == BLKmode)
3935 save_area = assign_stack_temp (BLKmode, num_to_save, 0);
3936 set_mem_align (save_area, PARM_BOUNDARY);
3937 emit_block_move (save_area, stack_area, GEN_INT (num_to_save),
3938 BLOCK_OP_CALL_PARM);
3940 else
3942 save_area = gen_reg_rtx (save_mode);
3943 emit_move_insn (save_area, stack_area);
3947 #endif
3949 /* Push the args that need to be pushed. */
3951 /* ARGNUM indexes the ARGVEC array in the order in which the arguments
3952 are to be pushed. */
3953 for (count = 0; count < nargs; count++, argnum += inc)
3955 enum machine_mode mode = argvec[argnum].mode;
3956 rtx val = argvec[argnum].value;
3957 rtx reg = argvec[argnum].reg;
3958 int partial = argvec[argnum].partial;
3959 int lower_bound = 0, upper_bound = 0, i;
3961 if (! (reg != 0 && partial == 0))
3963 if (ACCUMULATE_OUTGOING_ARGS)
3965 /* If this is being stored into a pre-allocated, fixed-size,
3966 stack area, save any previous data at that location. */
3968 #ifdef ARGS_GROW_DOWNWARD
3969 /* stack_slot is negative, but we want to index stack_usage_map
3970 with positive values. */
3971 upper_bound = -argvec[argnum].offset.constant + 1;
3972 lower_bound = upper_bound - argvec[argnum].size.constant;
3973 #else
3974 lower_bound = argvec[argnum].offset.constant;
3975 upper_bound = lower_bound + argvec[argnum].size.constant;
3976 #endif
3978 for (i = lower_bound; i < upper_bound; i++)
3979 if (stack_usage_map[i]
3980 /* Don't store things in the fixed argument area at this
3981 point; it has already been saved. */
3982 && i > reg_parm_stack_space)
3983 break;
3985 if (i != upper_bound)
3987 /* We need to make a save area. See what mode we can make
3988 it. */
3989 enum machine_mode save_mode
3990 = mode_for_size (argvec[argnum].size.constant
3991 * BITS_PER_UNIT,
3992 MODE_INT, 1);
3993 rtx stack_area
3994 = gen_rtx_MEM
3995 (save_mode,
3996 memory_address
3997 (save_mode,
3998 plus_constant (argblock,
3999 argvec[argnum].offset.constant)));
4000 argvec[argnum].save_area = gen_reg_rtx (save_mode);
4002 emit_move_insn (argvec[argnum].save_area, stack_area);
4006 emit_push_insn (val, mode, NULL_TREE, NULL_RTX, PARM_BOUNDARY,
4007 partial, reg, 0, argblock,
4008 GEN_INT (argvec[argnum].offset.constant),
4009 reg_parm_stack_space, ARGS_SIZE_RTX (alignment_pad));
4011 /* Now mark the segment we just used. */
4012 if (ACCUMULATE_OUTGOING_ARGS)
4013 for (i = lower_bound; i < upper_bound; i++)
4014 stack_usage_map[i] = 1;
4016 NO_DEFER_POP;
4020 /* If we pushed args in forward order, perform stack alignment
4021 after pushing the last arg. */
4022 if (argblock == 0 && !PUSH_ARGS_REVERSED)
4023 anti_adjust_stack (GEN_INT (args_size.constant
4024 - original_args_size.constant));
4026 if (PUSH_ARGS_REVERSED)
4027 argnum = nargs - 1;
4028 else
4029 argnum = 0;
4031 fun = prepare_call_address (fun, NULL_TREE, &call_fusage, 0, 0);
4033 /* Now load any reg parms into their regs. */
4035 /* ARGNUM indexes the ARGVEC array in the order in which the arguments
4036 are to be pushed. */
4037 for (count = 0; count < nargs; count++, argnum += inc)
4039 rtx val = argvec[argnum].value;
4040 rtx reg = argvec[argnum].reg;
4041 int partial = argvec[argnum].partial;
4043 /* Handle calls that pass values in multiple non-contiguous
4044 locations. The PA64 has examples of this for library calls. */
4045 if (reg != 0 && GET_CODE (reg) == PARALLEL)
4046 emit_group_load (reg, val, GET_MODE_SIZE (GET_MODE (val)));
4047 else if (reg != 0 && partial == 0)
4048 emit_move_insn (reg, val);
4050 NO_DEFER_POP;
4053 /* Any regs containing parms remain in use through the call. */
4054 for (count = 0; count < nargs; count++)
4056 rtx reg = argvec[count].reg;
4057 if (reg != 0 && GET_CODE (reg) == PARALLEL)
4058 use_group_regs (&call_fusage, reg);
4059 else if (reg != 0)
4060 use_reg (&call_fusage, reg);
4063 /* Pass the function the address in which to return a structure value. */
4064 if (mem_value != 0 && struct_value_rtx != 0 && ! pcc_struct_value)
4066 emit_move_insn (struct_value_rtx,
4067 force_reg (Pmode,
4068 force_operand (XEXP (mem_value, 0),
4069 NULL_RTX)));
4070 if (GET_CODE (struct_value_rtx) == REG)
4071 use_reg (&call_fusage, struct_value_rtx);
4074 /* Don't allow popping to be deferred, since then
4075 cse'ing of library calls could delete a call and leave the pop. */
4076 NO_DEFER_POP;
4077 valreg = (mem_value == 0 && outmode != VOIDmode
4078 ? hard_libcall_value (outmode) : NULL_RTX);
4080 /* Stack must be properly aligned now. */
4081 if (stack_pointer_delta & (PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT - 1))
4082 abort ();
4084 before_call = get_last_insn ();
4086 /* We pass the old value of inhibit_defer_pop + 1 to emit_call_1, which
4087 will set inhibit_defer_pop to that value. */
4088 /* The return type is needed to decide how many bytes the function pops.
4089 Signedness plays no role in that, so for simplicity, we pretend it's
4090 always signed. We also assume that the list of arguments passed has
4091 no impact, so we pretend it is unknown. */
4093 emit_call_1 (fun,
4094 get_identifier (XSTR (orgfun, 0)),
4095 build_function_type (tfom, NULL_TREE),
4096 original_args_size.constant, args_size.constant,
4097 struct_value_size,
4098 FUNCTION_ARG (args_so_far, VOIDmode, void_type_node, 1),
4099 valreg,
4100 old_inhibit_defer_pop + 1, call_fusage, flags, & args_so_far);
4102 /* For calls to `setjmp', etc., inform flow.c it should complain
4103 if nonvolatile values are live. For functions that cannot return,
4104 inform flow that control does not fall through. */
4106 if (flags & (ECF_NORETURN | ECF_LONGJMP))
4108 /* The barrier note must be emitted
4109 immediately after the CALL_INSN. Some ports emit more than
4110 just a CALL_INSN above, so we must search for it here. */
4112 rtx last = get_last_insn ();
4113 while (GET_CODE (last) != CALL_INSN)
4115 last = PREV_INSN (last);
4116 /* There was no CALL_INSN? */
4117 if (last == before_call)
4118 abort ();
4121 emit_barrier_after (last);
4124 /* Now restore inhibit_defer_pop to its actual original value. */
4125 OK_DEFER_POP;
4127 /* If call is cse'able, make appropriate pair of reg-notes around it.
4128 Test valreg so we don't crash; may safely ignore `const'
4129 if return type is void. Disable for PARALLEL return values, because
4130 we have no way to move such values into a pseudo register. */
4131 if (flags & ECF_LIBCALL_BLOCK)
4133 rtx insns;
4135 if (valreg == 0 || GET_CODE (valreg) == PARALLEL)
4137 insns = get_insns ();
4138 end_sequence ();
4139 emit_insn (insns);
4141 else
4143 rtx note = 0;
4144 rtx temp = gen_reg_rtx (GET_MODE (valreg));
4145 int i;
4147 /* Construct an "equal form" for the value which mentions all the
4148 arguments in order as well as the function name. */
4149 for (i = 0; i < nargs; i++)
4150 note = gen_rtx_EXPR_LIST (VOIDmode, argvec[i].value, note);
4151 note = gen_rtx_EXPR_LIST (VOIDmode, fun, note);
4153 insns = get_insns ();
4154 end_sequence ();
4156 if (flags & ECF_PURE)
4157 note = gen_rtx_EXPR_LIST (VOIDmode,
4158 gen_rtx_USE (VOIDmode,
4159 gen_rtx_MEM (BLKmode,
4160 gen_rtx_SCRATCH (VOIDmode))),
4161 note);
4163 emit_libcall_block (insns, temp, valreg, note);
4165 valreg = temp;
4168 pop_temp_slots ();
4170 /* Copy the value to the right place. */
4171 if (outmode != VOIDmode && retval)
4173 if (mem_value)
4175 if (value == 0)
4176 value = mem_value;
4177 if (value != mem_value)
4178 emit_move_insn (value, mem_value);
4180 else if (value != 0)
4181 emit_move_insn (value, valreg);
4182 else
4183 value = valreg;
4186 if (ACCUMULATE_OUTGOING_ARGS)
4188 #ifdef REG_PARM_STACK_SPACE
4189 if (save_area)
4191 enum machine_mode save_mode = GET_MODE (save_area);
4192 #ifdef ARGS_GROW_DOWNWARD
4193 rtx stack_area
4194 = gen_rtx_MEM (save_mode,
4195 memory_address (save_mode,
4196 plus_constant (argblock,
4197 - high_to_save)));
4198 #else
4199 rtx stack_area
4200 = gen_rtx_MEM (save_mode,
4201 memory_address (save_mode,
4202 plus_constant (argblock, low_to_save)));
4203 #endif
4205 set_mem_align (stack_area, PARM_BOUNDARY);
4206 if (save_mode != BLKmode)
4207 emit_move_insn (stack_area, save_area);
4208 else
4209 emit_block_move (stack_area, save_area,
4210 GEN_INT (high_to_save - low_to_save + 1),
4211 BLOCK_OP_CALL_PARM);
4213 #endif
4215 /* If we saved any argument areas, restore them. */
4216 for (count = 0; count < nargs; count++)
4217 if (argvec[count].save_area)
4219 enum machine_mode save_mode = GET_MODE (argvec[count].save_area);
4220 rtx stack_area
4221 = gen_rtx_MEM (save_mode,
4222 memory_address
4223 (save_mode,
4224 plus_constant (argblock,
4225 argvec[count].offset.constant)));
4227 emit_move_insn (stack_area, argvec[count].save_area);
4230 highest_outgoing_arg_in_use = initial_highest_arg_in_use;
4231 stack_usage_map = initial_stack_usage_map;
4234 return value;
4238 /* Output a library call to function FUN (a SYMBOL_REF rtx)
4239 (emitting the queue unless NO_QUEUE is nonzero),
4240 for a value of mode OUTMODE,
4241 with NARGS different arguments, passed as alternating rtx values
4242 and machine_modes to convert them to.
4243 The rtx values should have been passed through protect_from_queue already.
4245 FN_TYPE should be LCT_NORMAL for `normal' calls, LCT_CONST for `const'
4246 calls, LCT_PURE for `pure' calls, LCT_CONST_MAKE_BLOCK for `const' calls
4247 which should be enclosed in REG_LIBCALL/REG_RETVAL notes,
4248 LCT_PURE_MAKE_BLOCK for `purep' calls which should be enclosed in
4249 REG_LIBCALL/REG_RETVAL notes with extra (use (memory (scratch)),
4250 or other LCT_ value for other types of library calls. */
4252 void
4253 emit_library_call VPARAMS((rtx orgfun, enum libcall_type fn_type,
4254 enum machine_mode outmode, int nargs, ...))
4256 VA_OPEN (p, nargs);
4257 VA_FIXEDARG (p, rtx, orgfun);
4258 VA_FIXEDARG (p, int, fn_type);
4259 VA_FIXEDARG (p, enum machine_mode, outmode);
4260 VA_FIXEDARG (p, int, nargs);
4262 emit_library_call_value_1 (0, orgfun, NULL_RTX, fn_type, outmode, nargs, p);
4264 VA_CLOSE (p);
4267 /* Like emit_library_call except that an extra argument, VALUE,
4268 comes second and says where to store the result.
4269 (If VALUE is zero, this function chooses a convenient way
4270 to return the value.
4272 This function returns an rtx for where the value is to be found.
4273 If VALUE is nonzero, VALUE is returned. */
4276 emit_library_call_value VPARAMS((rtx orgfun, rtx value,
4277 enum libcall_type fn_type,
4278 enum machine_mode outmode, int nargs, ...))
4280 rtx result;
4282 VA_OPEN (p, nargs);
4283 VA_FIXEDARG (p, rtx, orgfun);
4284 VA_FIXEDARG (p, rtx, value);
4285 VA_FIXEDARG (p, int, fn_type);
4286 VA_FIXEDARG (p, enum machine_mode, outmode);
4287 VA_FIXEDARG (p, int, nargs);
4289 result = emit_library_call_value_1 (1, orgfun, value, fn_type, outmode,
4290 nargs, p);
4292 VA_CLOSE (p);
4294 return result;
4297 /* Store a single argument for a function call
4298 into the register or memory area where it must be passed.
4299 *ARG describes the argument value and where to pass it.
4301 ARGBLOCK is the address of the stack-block for all the arguments,
4302 or 0 on a machine where arguments are pushed individually.
4304 MAY_BE_ALLOCA nonzero says this could be a call to `alloca'
4305 so must be careful about how the stack is used.
4307 VARIABLE_SIZE nonzero says that this was a variable-sized outgoing
4308 argument stack. This is used if ACCUMULATE_OUTGOING_ARGS to indicate
4309 that we need not worry about saving and restoring the stack.
4311 FNDECL is the declaration of the function we are calling.
4313 Return nonzero if this arg should cause sibcall failure,
4314 zero otherwise. */
4316 static int
4317 store_one_arg (arg, argblock, flags, variable_size, reg_parm_stack_space)
4318 struct arg_data *arg;
4319 rtx argblock;
4320 int flags;
4321 int variable_size ATTRIBUTE_UNUSED;
4322 int reg_parm_stack_space;
4324 tree pval = arg->tree_value;
4325 rtx reg = 0;
4326 int partial = 0;
4327 int used = 0;
4328 int i, lower_bound = 0, upper_bound = 0;
4329 int sibcall_failure = 0;
4331 if (TREE_CODE (pval) == ERROR_MARK)
4332 return 1;
4334 /* Push a new temporary level for any temporaries we make for
4335 this argument. */
4336 push_temp_slots ();
4338 if (ACCUMULATE_OUTGOING_ARGS && !(flags & ECF_SIBCALL))
4340 /* If this is being stored into a pre-allocated, fixed-size, stack area,
4341 save any previous data at that location. */
4342 if (argblock && ! variable_size && arg->stack)
4344 #ifdef ARGS_GROW_DOWNWARD
4345 /* stack_slot is negative, but we want to index stack_usage_map
4346 with positive values. */
4347 if (GET_CODE (XEXP (arg->stack_slot, 0)) == PLUS)
4348 upper_bound = -INTVAL (XEXP (XEXP (arg->stack_slot, 0), 1)) + 1;
4349 else
4350 upper_bound = 0;
4352 lower_bound = upper_bound - arg->size.constant;
4353 #else
4354 if (GET_CODE (XEXP (arg->stack_slot, 0)) == PLUS)
4355 lower_bound = INTVAL (XEXP (XEXP (arg->stack_slot, 0), 1));
4356 else
4357 lower_bound = 0;
4359 upper_bound = lower_bound + arg->size.constant;
4360 #endif
4362 for (i = lower_bound; i < upper_bound; i++)
4363 if (stack_usage_map[i]
4364 /* Don't store things in the fixed argument area at this point;
4365 it has already been saved. */
4366 && i > reg_parm_stack_space)
4367 break;
4369 if (i != upper_bound)
4371 /* We need to make a save area. See what mode we can make it. */
4372 enum machine_mode save_mode
4373 = mode_for_size (arg->size.constant * BITS_PER_UNIT, MODE_INT, 1);
4374 rtx stack_area
4375 = gen_rtx_MEM (save_mode,
4376 memory_address (save_mode,
4377 XEXP (arg->stack_slot, 0)));
4379 if (save_mode == BLKmode)
4381 tree ot = TREE_TYPE (arg->tree_value);
4382 tree nt = build_qualified_type (ot, (TYPE_QUALS (ot)
4383 | TYPE_QUAL_CONST));
4385 arg->save_area = assign_temp (nt, 0, 1, 1);
4386 preserve_temp_slots (arg->save_area);
4387 emit_block_move (validize_mem (arg->save_area), stack_area,
4388 expr_size (arg->tree_value),
4389 BLOCK_OP_CALL_PARM);
4391 else
4393 arg->save_area = gen_reg_rtx (save_mode);
4394 emit_move_insn (arg->save_area, stack_area);
4398 /* Now that we have saved any slots that will be overwritten by this
4399 store, mark all slots this store will use. We must do this before
4400 we actually expand the argument since the expansion itself may
4401 trigger library calls which might need to use the same stack slot. */
4402 if (argblock && ! variable_size && arg->stack)
4403 for (i = lower_bound; i < upper_bound; i++)
4404 stack_usage_map[i] = 1;
4407 /* If this isn't going to be placed on both the stack and in registers,
4408 set up the register and number of words. */
4409 if (! arg->pass_on_stack)
4411 if (flags & ECF_SIBCALL)
4412 reg = arg->tail_call_reg;
4413 else
4414 reg = arg->reg;
4415 partial = arg->partial;
4418 if (reg != 0 && partial == 0)
4419 /* Being passed entirely in a register. We shouldn't be called in
4420 this case. */
4421 abort ();
4423 /* If this arg needs special alignment, don't load the registers
4424 here. */
4425 if (arg->n_aligned_regs != 0)
4426 reg = 0;
4428 /* If this is being passed partially in a register, we can't evaluate
4429 it directly into its stack slot. Otherwise, we can. */
4430 if (arg->value == 0)
4432 /* stack_arg_under_construction is nonzero if a function argument is
4433 being evaluated directly into the outgoing argument list and
4434 expand_call must take special action to preserve the argument list
4435 if it is called recursively.
4437 For scalar function arguments stack_usage_map is sufficient to
4438 determine which stack slots must be saved and restored. Scalar
4439 arguments in general have pass_on_stack == 0.
4441 If this argument is initialized by a function which takes the
4442 address of the argument (a C++ constructor or a C function
4443 returning a BLKmode structure), then stack_usage_map is
4444 insufficient and expand_call must push the stack around the
4445 function call. Such arguments have pass_on_stack == 1.
4447 Note that it is always safe to set stack_arg_under_construction,
4448 but this generates suboptimal code if set when not needed. */
4450 if (arg->pass_on_stack)
4451 stack_arg_under_construction++;
4453 arg->value = expand_expr (pval,
4454 (partial
4455 || TYPE_MODE (TREE_TYPE (pval)) != arg->mode)
4456 ? NULL_RTX : arg->stack,
4457 VOIDmode, 0);
4459 /* If we are promoting object (or for any other reason) the mode
4460 doesn't agree, convert the mode. */
4462 if (arg->mode != TYPE_MODE (TREE_TYPE (pval)))
4463 arg->value = convert_modes (arg->mode, TYPE_MODE (TREE_TYPE (pval)),
4464 arg->value, arg->unsignedp);
4466 if (arg->pass_on_stack)
4467 stack_arg_under_construction--;
4470 /* Don't allow anything left on stack from computation
4471 of argument to alloca. */
4472 if (flags & ECF_MAY_BE_ALLOCA)
4473 do_pending_stack_adjust ();
4475 if (arg->value == arg->stack)
4476 /* If the value is already in the stack slot, we are done. */
4478 else if (arg->mode != BLKmode)
4480 int size;
4482 /* Argument is a scalar, not entirely passed in registers.
4483 (If part is passed in registers, arg->partial says how much
4484 and emit_push_insn will take care of putting it there.)
4486 Push it, and if its size is less than the
4487 amount of space allocated to it,
4488 also bump stack pointer by the additional space.
4489 Note that in C the default argument promotions
4490 will prevent such mismatches. */
4492 size = GET_MODE_SIZE (arg->mode);
4493 /* Compute how much space the push instruction will push.
4494 On many machines, pushing a byte will advance the stack
4495 pointer by a halfword. */
4496 #ifdef PUSH_ROUNDING
4497 size = PUSH_ROUNDING (size);
4498 #endif
4499 used = size;
4501 /* Compute how much space the argument should get:
4502 round up to a multiple of the alignment for arguments. */
4503 if (none != FUNCTION_ARG_PADDING (arg->mode, TREE_TYPE (pval)))
4504 used = (((size + PARM_BOUNDARY / BITS_PER_UNIT - 1)
4505 / (PARM_BOUNDARY / BITS_PER_UNIT))
4506 * (PARM_BOUNDARY / BITS_PER_UNIT));
4508 /* This isn't already where we want it on the stack, so put it there.
4509 This can either be done with push or copy insns. */
4510 emit_push_insn (arg->value, arg->mode, TREE_TYPE (pval), NULL_RTX,
4511 PARM_BOUNDARY, partial, reg, used - size, argblock,
4512 ARGS_SIZE_RTX (arg->offset), reg_parm_stack_space,
4513 ARGS_SIZE_RTX (arg->alignment_pad));
4515 /* Unless this is a partially-in-register argument, the argument is now
4516 in the stack. */
4517 if (partial == 0)
4518 arg->value = arg->stack;
4520 else
4522 /* BLKmode, at least partly to be pushed. */
4524 unsigned int parm_align;
4525 int excess;
4526 rtx size_rtx;
4528 /* Pushing a nonscalar.
4529 If part is passed in registers, PARTIAL says how much
4530 and emit_push_insn will take care of putting it there. */
4532 /* Round its size up to a multiple
4533 of the allocation unit for arguments. */
4535 if (arg->size.var != 0)
4537 excess = 0;
4538 size_rtx = ARGS_SIZE_RTX (arg->size);
4540 else
4542 /* PUSH_ROUNDING has no effect on us, because
4543 emit_push_insn for BLKmode is careful to avoid it. */
4544 excess = (arg->size.constant - int_size_in_bytes (TREE_TYPE (pval))
4545 + partial * UNITS_PER_WORD);
4546 size_rtx = expand_expr (size_in_bytes (TREE_TYPE (pval)),
4547 NULL_RTX, TYPE_MODE (sizetype), 0);
4550 /* Some types will require stricter alignment, which will be
4551 provided for elsewhere in argument layout. */
4552 parm_align = MAX (PARM_BOUNDARY, TYPE_ALIGN (TREE_TYPE (pval)));
4554 /* When an argument is padded down, the block is aligned to
4555 PARM_BOUNDARY, but the actual argument isn't. */
4556 if (FUNCTION_ARG_PADDING (arg->mode, TREE_TYPE (pval)) == downward)
4558 if (arg->size.var)
4559 parm_align = BITS_PER_UNIT;
4560 else if (excess)
4562 unsigned int excess_align = (excess & -excess) * BITS_PER_UNIT;
4563 parm_align = MIN (parm_align, excess_align);
4567 if ((flags & ECF_SIBCALL) && GET_CODE (arg->value) == MEM)
4569 /* emit_push_insn might not work properly if arg->value and
4570 argblock + arg->offset areas overlap. */
4571 rtx x = arg->value;
4572 int i = 0;
4574 if (XEXP (x, 0) == current_function_internal_arg_pointer
4575 || (GET_CODE (XEXP (x, 0)) == PLUS
4576 && XEXP (XEXP (x, 0), 0) ==
4577 current_function_internal_arg_pointer
4578 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT))
4580 if (XEXP (x, 0) != current_function_internal_arg_pointer)
4581 i = INTVAL (XEXP (XEXP (x, 0), 1));
4583 /* expand_call should ensure this */
4584 if (arg->offset.var || GET_CODE (size_rtx) != CONST_INT)
4585 abort ();
4587 if (arg->offset.constant > i)
4589 if (arg->offset.constant < i + INTVAL (size_rtx))
4590 sibcall_failure = 1;
4592 else if (arg->offset.constant < i)
4594 if (i < arg->offset.constant + INTVAL (size_rtx))
4595 sibcall_failure = 1;
4600 /* Special handling is required if part of the parameter lies in the
4601 register parameter area. The argument may be copied into the stack
4602 slot using memcpy(), but the original contents of the register
4603 parameter area will be restored after the memcpy() call.
4605 To ensure that the part that lies in the register parameter area
4606 is copied correctly, we emit a separate push for that part. This
4607 push should be small enough to avoid a call to memcpy(). */
4608 #ifndef STACK_PARMS_IN_REG_PARM_AREA
4609 if (arg->reg && arg->pass_on_stack)
4610 #else
4611 if (1)
4612 #endif
4614 if (arg->offset.constant < reg_parm_stack_space && arg->offset.var)
4615 error ("variable offset is passed partially in stack and in reg");
4616 else if (arg->offset.constant < reg_parm_stack_space && arg->size.var)
4617 error ("variable size is passed partially in stack and in reg");
4618 else if (arg->offset.constant < reg_parm_stack_space
4619 && ((arg->offset.constant + arg->size.constant)
4620 > reg_parm_stack_space))
4622 rtx size_rtx1 = GEN_INT (reg_parm_stack_space - arg->offset.constant);
4623 emit_push_insn (arg->value, arg->mode, TREE_TYPE (pval), size_rtx1,
4624 parm_align, partial, reg, excess, argblock,
4625 ARGS_SIZE_RTX (arg->offset), reg_parm_stack_space,
4626 ARGS_SIZE_RTX (arg->alignment_pad));
4631 emit_push_insn (arg->value, arg->mode, TREE_TYPE (pval), size_rtx,
4632 parm_align, partial, reg, excess, argblock,
4633 ARGS_SIZE_RTX (arg->offset), reg_parm_stack_space,
4634 ARGS_SIZE_RTX (arg->alignment_pad));
4636 /* Unless this is a partially-in-register argument, the argument is now
4637 in the stack.
4639 ??? Unlike the case above, in which we want the actual
4640 address of the data, so that we can load it directly into a
4641 register, here we want the address of the stack slot, so that
4642 it's properly aligned for word-by-word copying or something
4643 like that. It's not clear that this is always correct. */
4644 if (partial == 0)
4645 arg->value = arg->stack_slot;
4648 /* Once we have pushed something, pops can't safely
4649 be deferred during the rest of the arguments. */
4650 NO_DEFER_POP;
4652 /* ANSI doesn't require a sequence point here,
4653 but PCC has one, so this will avoid some problems. */
4654 emit_queue ();
4656 /* Free any temporary slots made in processing this argument. Show
4657 that we might have taken the address of something and pushed that
4658 as an operand. */
4659 preserve_temp_slots (NULL_RTX);
4660 free_temp_slots ();
4661 pop_temp_slots ();
4663 return sibcall_failure;