/* Convert function calls to rtl insns, for GNU C compiler.
   Copyright (C) 1989, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
   1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "coretypes.h"
#include "langhooks.h"
#include "tree-flow.h"
/* Like PREFERRED_STACK_BOUNDARY but in units of bytes, not bits.  */
#define STACK_BYTES (PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT)
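/* For example (illustrative values, not from any particular target):
   with PREFERRED_STACK_BOUNDARY == 128 and BITS_PER_UNIT == 8,
   STACK_BYTES is 16, i.e. outgoing argument blocks are rounded to
   16-byte multiples.  */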
/* Data structure and subroutines used within expand_call.  */

struct arg_data
{
  /* Tree node for this argument.  */
  tree tree_value;
  /* Mode for value; TYPE_MODE unless promoted.  */
  enum machine_mode mode;
  /* Current RTL value for argument, or 0 if it isn't precomputed.  */
  rtx value;
  /* Initially-computed RTL value for argument; only for const functions.  */
  rtx initial_value;
  /* Register to pass this argument in, 0 if passed on stack, or a
     PARALLEL if the arg is to be copied into multiple non-contiguous
     registers.  */
  rtx reg;
  /* Register to pass this argument in when generating tail call sequence.
     This is not the same register as for normal calls on machines with
     register windows.  */
  rtx tail_call_reg;
  /* If REG is a PARALLEL, this is a copy of VALUE pulled into the correct
     form for emit_group_move.  */
  rtx parallel_value;
  /* If REG was promoted from the actual mode of the argument expression,
     indicates whether the promotion is sign- or zero-extended.  */
  int unsignedp;
  /* Number of bytes to put in registers.  0 means put the whole arg
     in registers.  Also 0 if not passed in registers.  */
  int partial;
  /* Nonzero if argument must be passed on stack.
     Note that some arguments may be passed on the stack
     even though pass_on_stack is zero, just because FUNCTION_ARG says so.
     pass_on_stack identifies arguments that *cannot* go in registers.  */
  int pass_on_stack;
  /* Some fields packaged up for locate_and_pad_parm.  */
  struct locate_and_pad_arg_data locate;
  /* Location on the stack at which parameter should be stored.  The store
     has already been done if STACK == VALUE.  */
  rtx stack;
  /* Location on the stack of the start of this argument slot.  This can
     differ from STACK if this arg pads downward.  This location is known
     to be aligned to FUNCTION_ARG_BOUNDARY.  */
  rtx stack_slot;
  /* Place that this stack area has been saved, if needed.  */
  rtx save_area;
  /* If an argument's alignment does not permit direct copying into registers,
     copy in smaller-sized pieces into pseudos.  These are stored in a
     block pointed to by this field.  The next field says how many
     word-sized pseudos we made.  */
  rtx *aligned_regs;
  int n_aligned_regs;
};
/* A vector of one char per byte of stack space.  A byte is nonzero if
   the corresponding stack location has been used.
   This vector is used to prevent a function call within an argument from
   clobbering any stack already set up.  */
static char *stack_usage_map;

/* Size of STACK_USAGE_MAP.  */
static int highest_outgoing_arg_in_use;

/* A bitmap of virtual-incoming stack space.  A bit is set if the
   corresponding stack location's tail call argument has already been
   stored into the stack.  This bitmap is used to prevent sibling call
   optimization if the function tries to use its parent's incoming
   argument slots when they have already been overwritten with tail call
   arguments.  */
static sbitmap stored_args_map;

/* stack_arg_under_construction is nonzero when an argument may be
   initialized with a constructor call (including a C function that
   returns a BLKmode struct) and expand_call must take special action
   to make sure the object being constructed does not overlap the
   argument list for the constructor call.  */
static int stack_arg_under_construction;
static void emit_call_1 (rtx, tree, tree, tree, HOST_WIDE_INT, HOST_WIDE_INT,
                         HOST_WIDE_INT, rtx, rtx, int, rtx, int,
                         CUMULATIVE_ARGS *);
static void precompute_register_parameters (int, struct arg_data *, int *);
static int store_one_arg (struct arg_data *, rtx, int, int, int);
static void store_unaligned_arguments_into_pseudos (struct arg_data *, int);
static int finalize_must_preallocate (int, int, struct arg_data *,
                                      struct args_size *);
static void precompute_arguments (int, struct arg_data *);
static int compute_argument_block_size (int, struct args_size *, tree, tree, int);
static void initialize_argument_information (int, struct arg_data *,
                                             struct args_size *, int,
                                             tree, tree,
                                             tree, tree, CUMULATIVE_ARGS *, int,
                                             rtx *, int *, int *, int *,
                                             bool *, bool);
static void compute_argument_addresses (struct arg_data *, rtx, int);
static rtx rtx_for_function_call (tree, tree);
static void load_register_parameters (struct arg_data *, int, rtx *, int,
                                      int, int *);
static rtx emit_library_call_value_1 (int, rtx, rtx, enum libcall_type,
                                      enum machine_mode, int, va_list);
static int special_function_p (const_tree, int);
static int check_sibcall_argument_overlap_1 (rtx);
static int check_sibcall_argument_overlap (rtx, struct arg_data *, int);
static int combine_pending_stack_adjustment_and_call (int, struct args_size *,
                                                      unsigned int);
static tree split_complex_types (tree);

#ifdef REG_PARM_STACK_SPACE
static rtx save_fixed_argument_area (int, rtx, int *, int *);
static void restore_fixed_argument_area (rtx, rtx, int, int);
#endif
/* Force FUNEXP into a form suitable for the address of a CALL,
   and return that as an rtx.  Also load the static chain register
   if FNDECL is a nested function.

   CALL_FUSAGE points to a variable holding the prospective
   CALL_INSN_FUNCTION_USAGE information.  */

prepare_call_address (tree fndecl, rtx funexp, rtx static_chain_value,
                      rtx *call_fusage, int reg_parm_seen, int sibcallp)

  /* Make a valid memory address and copy constants through pseudo-regs,
     but not for a constant address if -fno-function-cse.  */
  if (GET_CODE (funexp) != SYMBOL_REF)
    /* If we are using registers for parameters, force the
       function address into a register now.  */
    funexp = ((SMALL_REGISTER_CLASSES && reg_parm_seen)
              ? force_not_mem (memory_address (FUNCTION_MODE, funexp))
              : memory_address (FUNCTION_MODE, funexp));

#ifndef NO_FUNCTION_CSE
      if (optimize && ! flag_no_function_cse)
        funexp = force_reg (Pmode, funexp);
#endif

  if (static_chain_value != 0)

      chain = targetm.calls.static_chain (fndecl, false);
      static_chain_value = convert_memory_address (Pmode, static_chain_value);

      emit_move_insn (chain, static_chain_value);

      use_reg (call_fusage, chain);

/* Generate instructions to call function FUNEXP,
   and optionally pop the results.
   The CALL_INSN is the first insn generated.

   FNDECL is the declaration node of the function.  This is given to the
   macro RETURN_POPS_ARGS to determine whether this function pops its own args.

   FUNTYPE is the data type of the function.  This is given to the macro
   RETURN_POPS_ARGS to determine whether this function pops its own args.
   We used to allow an identifier for library functions, but that doesn't
   work when the return type is an aggregate type and the calling convention
   says that the pointer to this aggregate is to be popped by the callee.

   STACK_SIZE is the number of bytes of arguments on the stack,
   ROUNDED_STACK_SIZE is that number rounded up to
   PREFERRED_STACK_BOUNDARY; zero if the size is variable.  This is
   both to put into the call insn and to generate explicit popping
   code if necessary.

   STRUCT_VALUE_SIZE is the number of bytes wanted in a structure value.
   It is zero if this call doesn't want a structure value.

   NEXT_ARG_REG is the rtx that results from executing
     FUNCTION_ARG (args_so_far, VOIDmode, void_type_node, 1)
   just after all the args have had their registers assigned.
   This could be whatever you like, but normally it is the first
   arg-register beyond those used for args in this call,
   or 0 if all the arg-registers are used in this call.
   It is passed on to `gen_call' so you can put this info in the call insn.

   VALREG is a hard register in which a value is returned,
   or 0 if the call does not return a value.

   OLD_INHIBIT_DEFER_POP is the value that `inhibit_defer_pop' had before
   the args to this call were processed.
   We restore `inhibit_defer_pop' to that value.

   CALL_FUSAGE is either empty or an EXPR_LIST of USE expressions that
   denote registers used by the called function.  */
emit_call_1 (rtx funexp, tree fntree ATTRIBUTE_UNUSED, tree fndecl ATTRIBUTE_UNUSED,
             tree funtype ATTRIBUTE_UNUSED,
             HOST_WIDE_INT stack_size ATTRIBUTE_UNUSED,
             HOST_WIDE_INT rounded_stack_size,
             HOST_WIDE_INT struct_value_size ATTRIBUTE_UNUSED,
             rtx next_arg_reg ATTRIBUTE_UNUSED, rtx valreg,
             int old_inhibit_defer_pop, rtx call_fusage, int ecf_flags,
             CUMULATIVE_ARGS *args_so_far ATTRIBUTE_UNUSED)

  rtx rounded_stack_size_rtx = GEN_INT (rounded_stack_size);
  int already_popped = 0;
  HOST_WIDE_INT n_popped = RETURN_POPS_ARGS (fndecl, funtype, stack_size);
#if defined (HAVE_call) && defined (HAVE_call_value)
  rtx struct_value_size_rtx;
  struct_value_size_rtx = GEN_INT (struct_value_size);

#ifdef CALL_POPS_ARGS
  n_popped += CALL_POPS_ARGS (* args_so_far);

  /* Ensure address is valid.  SYMBOL_REF is already valid, so no need,
     and we don't want to load it into a register as an optimization,
     because prepare_call_address already did it if it should be done.  */
  if (GET_CODE (funexp) != SYMBOL_REF)
    funexp = memory_address (FUNCTION_MODE, funexp);

#if defined (HAVE_sibcall_pop) && defined (HAVE_sibcall_value_pop)
  if ((ecf_flags & ECF_SIBCALL)
      && HAVE_sibcall_pop && HAVE_sibcall_value_pop
      && (n_popped > 0 || stack_size == 0))

      rtx n_pop = GEN_INT (n_popped);

      /* If this subroutine pops its own args, record that in the call insn
         if possible, for the sake of frame pointer elimination.  */

        pat = GEN_SIBCALL_VALUE_POP (valreg,
                                     gen_rtx_MEM (FUNCTION_MODE, funexp),
                                     rounded_stack_size_rtx, next_arg_reg,
                                     n_pop);

        pat = GEN_SIBCALL_POP (gen_rtx_MEM (FUNCTION_MODE, funexp),
                               rounded_stack_size_rtx, next_arg_reg, n_pop);

      emit_call_insn (pat);
#if defined (HAVE_call_pop) && defined (HAVE_call_value_pop)
  /* If the target has "call" or "call_value" insns, then prefer them
     if no arguments are actually popped.  If the target does not have
     "call" or "call_value" insns, then we must use the popping versions
     even if the call has no arguments to pop.  */
#if defined (HAVE_call) && defined (HAVE_call_value)
  if (HAVE_call && HAVE_call_value && HAVE_call_pop && HAVE_call_value_pop
      && n_popped > 0)
#else
  if (HAVE_call_pop && HAVE_call_value_pop)
#endif

      rtx n_pop = GEN_INT (n_popped);

      /* If this subroutine pops its own args, record that in the call insn
         if possible, for the sake of frame pointer elimination.  */

        pat = GEN_CALL_VALUE_POP (valreg,
                                  gen_rtx_MEM (FUNCTION_MODE, funexp),
                                  rounded_stack_size_rtx, next_arg_reg, n_pop);

        pat = GEN_CALL_POP (gen_rtx_MEM (FUNCTION_MODE, funexp),
                            rounded_stack_size_rtx, next_arg_reg, n_pop);

      emit_call_insn (pat);

#if defined (HAVE_sibcall) && defined (HAVE_sibcall_value)
  if ((ecf_flags & ECF_SIBCALL)
      && HAVE_sibcall && HAVE_sibcall_value)

      emit_call_insn (GEN_SIBCALL_VALUE (valreg,
                                         gen_rtx_MEM (FUNCTION_MODE, funexp),
                                         rounded_stack_size_rtx,
                                         next_arg_reg, NULL_RTX));

      emit_call_insn (GEN_SIBCALL (gen_rtx_MEM (FUNCTION_MODE, funexp),
                                   rounded_stack_size_rtx, next_arg_reg,
                                   struct_value_size_rtx));

#if defined (HAVE_call) && defined (HAVE_call_value)
  if (HAVE_call && HAVE_call_value)

      emit_call_insn (GEN_CALL_VALUE (valreg,
                                      gen_rtx_MEM (FUNCTION_MODE, funexp),
                                      rounded_stack_size_rtx, next_arg_reg,
                                      NULL_RTX));

      emit_call_insn (GEN_CALL (gen_rtx_MEM (FUNCTION_MODE, funexp),
                                rounded_stack_size_rtx, next_arg_reg,
                                struct_value_size_rtx));
  /* Find the call we just emitted.  */
  call_insn = last_call_insn ();

  /* Put the register usage information there.  */
  add_function_usage_to (call_insn, call_fusage);

  /* If this is a const call, then set the insn's unchanging bit.  */
  if (ecf_flags & ECF_CONST)
    RTL_CONST_CALL_P (call_insn) = 1;

  /* If this is a pure call, then set the insn's unchanging bit.  */
  if (ecf_flags & ECF_PURE)
    RTL_PURE_CALL_P (call_insn) = 1;

  /* If this is a looping const or pure call, then set the insn's bit
     for that.  */
  if (ecf_flags & ECF_LOOPING_CONST_OR_PURE)
    RTL_LOOPING_CONST_OR_PURE_CALL_P (call_insn) = 1;

  /* Create a nothrow REG_EH_REGION note, if needed.  */
  make_reg_eh_region_note (call_insn, ecf_flags, 0);

  if (ecf_flags & ECF_NORETURN)
    add_reg_note (call_insn, REG_NORETURN, const0_rtx);

  if (ecf_flags & ECF_RETURNS_TWICE)

      add_reg_note (call_insn, REG_SETJMP, const0_rtx);
      cfun->calls_setjmp = 1;

  SIBLING_CALL_P (call_insn) = ((ecf_flags & ECF_SIBCALL) != 0);

  /* Restore this now, so that we do defer pops for this call's args
     if the context of the call as a whole permits.  */
  inhibit_defer_pop = old_inhibit_defer_pop;

      CALL_INSN_FUNCTION_USAGE (call_insn)
        = gen_rtx_EXPR_LIST (VOIDmode,
                             gen_rtx_CLOBBER (VOIDmode, stack_pointer_rtx),
                             CALL_INSN_FUNCTION_USAGE (call_insn));
      rounded_stack_size -= n_popped;
      rounded_stack_size_rtx = GEN_INT (rounded_stack_size);
      stack_pointer_delta -= n_popped;

      /* If a pop is needed, stack realignment must use DRAP.  */
      if (SUPPORTS_STACK_ALIGNMENT)
        crtl->need_drap = true;

  if (!ACCUMULATE_OUTGOING_ARGS)

      /* If returning from the subroutine does not automatically pop the args,
         we need an instruction to pop them sooner or later.
         Perhaps do it now; perhaps just record how much space to pop later.

         If returning from the subroutine does pop the args, indicate that the
         stack pointer will be changed.  */

      if (rounded_stack_size != 0)

          if (ecf_flags & ECF_NORETURN)
            /* Just pretend we did the pop.  */
            stack_pointer_delta -= rounded_stack_size;
          else if (flag_defer_pop && inhibit_defer_pop == 0
                   && ! (ecf_flags & (ECF_CONST | ECF_PURE)))
            pending_stack_adjust += rounded_stack_size;

            adjust_stack (rounded_stack_size_rtx);

  /* When we accumulate outgoing args, we must avoid any stack manipulations.
     Restore the stack pointer to its original value now.  Usually
     ACCUMULATE_OUTGOING_ARGS targets don't get here, but there are exceptions.
     On i386 ACCUMULATE_OUTGOING_ARGS can be enabled on demand, and
     popping variants of functions exist as well.

     ??? We may optimize similar to defer_pop above, but it is
     probably not worthwhile.

     ??? It will be worthwhile to enable combine_stack_adjustments even for
     such machines.  */
    anti_adjust_stack (GEN_INT (n_popped));
/* Determine if the function identified by NAME and FNDECL is one with
   special properties we wish to know about.

   For example, if the function might return more than one time (setjmp), then
   set RETURNS_TWICE to a nonzero value.

   Similarly set NORETURN if the function is in the longjmp family.

   Set MAY_BE_ALLOCA for any memory allocation function that might allocate
   space from the stack such as alloca.  */

special_function_p (const_tree fndecl, int flags)

  if (fndecl && DECL_NAME (fndecl)
      && IDENTIFIER_LENGTH (DECL_NAME (fndecl)) <= 17
      /* Exclude functions not at the file scope, or not `extern',
         since they are not the magic functions we would otherwise
         think they are.
         FIXME: this should be handled with attributes, not with this
         hacky imitation of DECL_ASSEMBLER_NAME.  It's (also) wrong
         because you can declare fork() inside a function if you
         wish.  */
      && (DECL_CONTEXT (fndecl) == NULL_TREE
          || TREE_CODE (DECL_CONTEXT (fndecl)) == TRANSLATION_UNIT_DECL)
      && TREE_PUBLIC (fndecl))

      const char *name = IDENTIFIER_POINTER (DECL_NAME (fndecl));
      const char *tname = name;

      /* We assume that alloca will always be called by name.  It
         makes no sense to pass it as a pointer-to-function to
         anything that does not understand its behavior.  */
      if (((IDENTIFIER_LENGTH (DECL_NAME (fndecl)) == 6
            && ! strcmp (name, "alloca"))
           || (IDENTIFIER_LENGTH (DECL_NAME (fndecl)) == 16
               && ! strcmp (name, "__builtin_alloca"))))
        flags |= ECF_MAY_BE_ALLOCA;

      /* Disregard prefix _, __, __x or __builtin_.  */
          && !strncmp (name + 3, "uiltin_", 7))
        else if (name[1] == '_' && name[2] == 'x')
        else if (name[1] == '_')

          && (! strcmp (tname, "setjmp")
              || ! strcmp (tname, "setjmp_syscall")))
          && ! strcmp (tname, "sigsetjmp"))
          && ! strcmp (tname, "savectx")))
        flags |= ECF_RETURNS_TWICE;

          && ! strcmp (tname, "siglongjmp"))
        flags |= ECF_NORETURN;

      else if ((tname[0] == 'q' && tname[1] == 's'
                && ! strcmp (tname, "qsetjmp"))
               || (tname[0] == 'v' && tname[1] == 'f'
                   && ! strcmp (tname, "vfork"))
               || (tname[0] == 'g' && tname[1] == 'e'
                   && !strcmp (tname, "getcontext")))
        flags |= ECF_RETURNS_TWICE;

      else if (tname[0] == 'l' && tname[1] == 'o'
               && ! strcmp (tname, "longjmp"))
        flags |= ECF_NORETURN;
/* Return nonzero when FNDECL represents a call to setjmp.  */

setjmp_call_p (const_tree fndecl)

  return special_function_p (fndecl, 0) & ECF_RETURNS_TWICE;

/* Return true if STMT is an alloca call.  */

gimple_alloca_call_p (const_gimple stmt)

  if (!is_gimple_call (stmt))

  fndecl = gimple_call_fndecl (stmt);
  if (fndecl && (special_function_p (fndecl, 0) & ECF_MAY_BE_ALLOCA))

/* Return true when EXP contains an alloca call.  */

alloca_call_p (const_tree exp)

  if (TREE_CODE (exp) == CALL_EXPR
      && TREE_CODE (CALL_EXPR_FN (exp)) == ADDR_EXPR
      && (TREE_CODE (TREE_OPERAND (CALL_EXPR_FN (exp), 0)) == FUNCTION_DECL)
      && (special_function_p (TREE_OPERAND (CALL_EXPR_FN (exp), 0), 0)
          & ECF_MAY_BE_ALLOCA))

/* Detect flags (function attributes) from the function decl or type node.  */

flags_from_decl_or_type (const_tree exp)

  const_tree type = exp;

      type = TREE_TYPE (exp);

      /* The function exp may have the `malloc' attribute.  */
      if (DECL_IS_MALLOC (exp))

      /* The function exp may have the `returns_twice' attribute.  */
      if (DECL_IS_RETURNS_TWICE (exp))
        flags |= ECF_RETURNS_TWICE;

      /* Process the pure and const attributes.  */
      if (TREE_READONLY (exp) && ! TREE_THIS_VOLATILE (exp))

      if (DECL_PURE_P (exp))

      if (DECL_LOOPING_CONST_OR_PURE_P (exp))
        flags |= ECF_LOOPING_CONST_OR_PURE;

      if (DECL_IS_NOVOPS (exp))

      if (TREE_NOTHROW (exp))
        flags |= ECF_NOTHROW;

      flags = special_function_p (exp, flags);

  else if (TYPE_P (exp) && TYPE_READONLY (exp) && ! TREE_THIS_VOLATILE (exp))

  if (TREE_THIS_VOLATILE (exp))
    flags |= ECF_NORETURN;

/* Detect flags from a CALL_EXPR.  */

call_expr_flags (const_tree t)

  tree decl = get_callee_fndecl (t);

    flags = flags_from_decl_or_type (decl);

      t = TREE_TYPE (CALL_EXPR_FN (t));
      if (t && TREE_CODE (t) == POINTER_TYPE)
        flags = flags_from_decl_or_type (TREE_TYPE (t));
/* Precompute all register parameters as described by ARGS, storing values
   into fields within the ARGS array.

   NUM_ACTUALS indicates the total number of elements in the ARGS array.

   Set REG_PARM_SEEN if we encounter a register parameter.  */

precompute_register_parameters (int num_actuals, struct arg_data *args,
                                int *reg_parm_seen)

  for (i = 0; i < num_actuals; i++)
    if (args[i].reg != 0 && ! args[i].pass_on_stack)

        if (args[i].value == 0)

            args[i].value = expand_normal (args[i].tree_value);
            preserve_temp_slots (args[i].value);

        /* If the value is a non-legitimate constant, force it into a
           pseudo now.  TLS symbols sometimes need a call to resolve.  */
        if (CONSTANT_P (args[i].value)
            && !LEGITIMATE_CONSTANT_P (args[i].value))
          args[i].value = force_reg (args[i].mode, args[i].value);

        /* If we are to promote the function arg to a wider mode,
           do it now.  */
        if (args[i].mode != TYPE_MODE (TREE_TYPE (args[i].tree_value)))
          args[i].value
            = convert_modes (args[i].mode,
                             TYPE_MODE (TREE_TYPE (args[i].tree_value)),
                             args[i].value, args[i].unsignedp);

        /* If we're going to have to load the value by parts, pull the
           parts into pseudos.  The part extraction process can involve
           non-trivial computation.  */
        if (GET_CODE (args[i].reg) == PARALLEL)

            tree type = TREE_TYPE (args[i].tree_value);
            args[i].parallel_value
              = emit_group_load_into_temps (args[i].reg, args[i].value,
                                            type, int_size_in_bytes (type));

        /* If the value is expensive, and we are inside an appropriately
           short loop, put the value into a pseudo and then put the pseudo
           into the call.

           For small register classes, also do this if this call uses
           register parameters.  This is to avoid reload conflicts while
           loading the parameters registers.  */

        else if ((! (REG_P (args[i].value)
                     || (GET_CODE (args[i].value) == SUBREG
                         && REG_P (SUBREG_REG (args[i].value)))))
                 && args[i].mode != BLKmode
                 && rtx_cost (args[i].value, SET, optimize_insn_for_speed_p ())
                 && ((SMALL_REGISTER_CLASSES && *reg_parm_seen)
          args[i].value = copy_to_mode_reg (args[i].mode, args[i].value);
#ifdef REG_PARM_STACK_SPACE

/* The argument list is the property of the called routine and it
   may clobber it.  If the fixed area has been used for previous
   parameters, we must save and restore it.  */

save_fixed_argument_area (int reg_parm_stack_space, rtx argblock, int *low_to_save, int *high_to_save)

  /* Compute the boundary of the area that needs to be saved, if any.  */
  high = reg_parm_stack_space;
#ifdef ARGS_GROW_DOWNWARD

  if (high > highest_outgoing_arg_in_use)
    high = highest_outgoing_arg_in_use;

  for (low = 0; low < high; low++)
    if (stack_usage_map[low] != 0)

        enum machine_mode save_mode;

        while (stack_usage_map[--high] == 0)

        *high_to_save = high;

        num_to_save = high - low + 1;
        save_mode = mode_for_size (num_to_save * BITS_PER_UNIT, MODE_INT, 1);
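        /* Illustrative note (not in the original sources): with
           num_to_save == 4 and 8-bit units this asks for a 32-bit integer
           mode, typically SImode; if no integer mode is wide enough,
           BLKmode is returned and the BLKmode path below performs a block
           move instead of a single register move.  */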
        /* If we don't have the required alignment, must do this
           in BLKmode.  */
        if ((low & (MIN (GET_MODE_SIZE (save_mode),
                         BIGGEST_ALIGNMENT / UNITS_PER_WORD) - 1)))

#ifdef ARGS_GROW_DOWNWARD

        stack_area = gen_rtx_MEM (save_mode,
                                  memory_address (save_mode,
                                                  plus_constant (argblock,
                                                                 delta)));
        set_mem_align (stack_area, PARM_BOUNDARY);
        if (save_mode == BLKmode)

            save_area = assign_stack_temp (BLKmode, num_to_save, 0);
            emit_block_move (validize_mem (save_area), stack_area,
                             GEN_INT (num_to_save), BLOCK_OP_CALL_PARM);

            save_area = gen_reg_rtx (save_mode);
            emit_move_insn (save_area, stack_area);
restore_fixed_argument_area (rtx save_area, rtx argblock, int high_to_save, int low_to_save)

  enum machine_mode save_mode = GET_MODE (save_area);

#ifdef ARGS_GROW_DOWNWARD
  delta = -high_to_save;

  stack_area = gen_rtx_MEM (save_mode,
                            memory_address (save_mode,
                                            plus_constant (argblock, delta)));
  set_mem_align (stack_area, PARM_BOUNDARY);

  if (save_mode != BLKmode)
    emit_move_insn (stack_area, save_area);

    emit_block_move (stack_area, validize_mem (save_area),
                     GEN_INT (high_to_save - low_to_save + 1),
                     BLOCK_OP_CALL_PARM);

#endif /* REG_PARM_STACK_SPACE */
/* If any elements in ARGS refer to parameters that are to be passed in
   registers, but not in memory, and whose alignment does not permit a
   direct copy into registers, copy the values into a group of pseudos
   which we will later copy into the appropriate hard registers.

   Pseudos for each unaligned argument will be stored into the array
   args[argnum].aligned_regs.  The caller is responsible for deallocating
   the aligned_regs array if it is nonzero.  */

store_unaligned_arguments_into_pseudos (struct arg_data *args, int num_actuals)

  for (i = 0; i < num_actuals; i++)
    if (args[i].reg != 0 && ! args[i].pass_on_stack
        && args[i].mode == BLKmode
        && MEM_P (args[i].value)
        && (MEM_ALIGN (args[i].value)
            < (unsigned int) MIN (BIGGEST_ALIGNMENT, BITS_PER_WORD)))

        int bytes = int_size_in_bytes (TREE_TYPE (args[i].tree_value));
        int endian_correction = 0;

            gcc_assert (args[i].partial % UNITS_PER_WORD == 0);
            args[i].n_aligned_regs = args[i].partial / UNITS_PER_WORD;

            args[i].n_aligned_regs
              = (bytes + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
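        /* Illustrative note (not in the original sources): a 6-byte BLKmode
           argument on a target with 4-byte words needs
           (6 + 4 - 1) / 4 == 2 word-sized pseudos.  */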
        args[i].aligned_regs = XNEWVEC (rtx, args[i].n_aligned_regs);

        /* Structures smaller than a word are normally aligned to the
           least significant byte.  On a BYTES_BIG_ENDIAN machine,
           this means we must skip the empty high order bytes when
           calculating the bit offset.  */
        if (bytes < UNITS_PER_WORD
#ifdef BLOCK_REG_PADDING
            && (BLOCK_REG_PADDING (args[i].mode,
                                   TREE_TYPE (args[i].tree_value), 1)

          endian_correction = BITS_PER_WORD - bytes * BITS_PER_UNIT;
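        /* Worked example (illustrative, not from the original sources):
           a 3-byte struct on a big-endian target with 32-bit words gives
           endian_correction = 32 - 3 * 8 = 8, so the bytes are stored
           starting at bit 8 rather than bit 0 of the word.  */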
        for (j = 0; j < args[i].n_aligned_regs; j++)

            rtx reg = gen_reg_rtx (word_mode);
            rtx word = operand_subword_force (args[i].value, j, BLKmode);
            int bitsize = MIN (bytes * BITS_PER_UNIT, BITS_PER_WORD);

            args[i].aligned_regs[j] = reg;
            word = extract_bit_field (word, bitsize, 0, 1, NULL_RTX,
                                      word_mode, word_mode);

            /* There is no need to restrict this code to loading items
               in TYPE_ALIGN sized hunks.  The bitfield instructions can
               load up entire word sized registers efficiently.

               ??? This may not be needed anymore.
               We used to emit a clobber here but that doesn't let later
               passes optimize the instructions we emit.  By storing 0 into
               the register later passes know the first AND to zero out the
               bitfield being set in the register is unnecessary.  The store
               of 0 will be deleted as will at least the first AND.  */

            emit_move_insn (reg, const0_rtx);

            bytes -= bitsize / BITS_PER_UNIT;
            store_bit_field (reg, bitsize, endian_correction, word_mode,
                             word);
/* Fill in ARGS_SIZE and ARGS array based on the parameters found in
   the CALL_EXPR EXP.

   NUM_ACTUALS is the total number of parameters.

   N_NAMED_ARGS is the total number of named arguments.

   STRUCT_VALUE_ADDR_VALUE is the implicit argument for a struct return
   value.

   FNDECL is the tree code for the target of this call (if known)

   ARGS_SO_FAR holds state needed by the target to know where to place
   the next argument.

   REG_PARM_STACK_SPACE is the number of bytes of stack space reserved
   for arguments which are passed in registers.

   OLD_STACK_LEVEL is a pointer to an rtx which holds the old stack level
   and may be modified by this routine.

   OLD_PENDING_ADJ, MUST_PREALLOCATE and FLAGS are pointers to integer
   flags which may be modified by this routine.

   MAY_TAILCALL is cleared if we encounter an invisible pass-by-reference
   that requires allocation of stack space.

   CALL_FROM_THUNK_P is true if this call is the jump from a thunk to
   the thunked-to function.  */
initialize_argument_information (int num_actuals ATTRIBUTE_UNUSED,
                                 struct arg_data *args,
                                 struct args_size *args_size,
                                 int n_named_args ATTRIBUTE_UNUSED,
                                 tree exp, tree struct_value_addr_value,
                                 tree fndecl, tree fntype,
                                 CUMULATIVE_ARGS *args_so_far,
                                 int reg_parm_stack_space,
                                 rtx *old_stack_level, int *old_pending_adj,
                                 int *must_preallocate, int *ecf_flags,
                                 bool *may_tailcall, bool call_from_thunk_p)

  location_t loc = EXPR_LOCATION (exp);
  /* 1 if scanning parms front to back, -1 if scanning back to front.  */

  /* Count arg position in order args appear.  */

  args_size->constant = 0;

  /* In this loop, we consider args in the order they are written.
     We fill up ARGS from the front or from the back if necessary
     so that in any case the first arg to be pushed ends up at the front.  */

  if (PUSH_ARGS_REVERSED)

      i = num_actuals - 1, inc = -1;
      /* In this case, we must reverse the order of args
         so that we compute and push the last arg first.  */

  /* First fill in the actual arguments in the ARGS array, splitting
     complex arguments if necessary.  */

    call_expr_arg_iterator iter;

    if (struct_value_addr_value)

        args[j].tree_value = struct_value_addr_value;

    FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)

        tree argtype = TREE_TYPE (arg);
        if (targetm.calls.split_complex_arg
            && TREE_CODE (argtype) == COMPLEX_TYPE
            && targetm.calls.split_complex_arg (argtype))

            tree subtype = TREE_TYPE (argtype);
            args[j].tree_value = build1 (REALPART_EXPR, subtype, arg);
            args[j].tree_value = build1 (IMAGPART_EXPR, subtype, arg);

          args[j].tree_value = arg;
  /* I counts args in order (to be) pushed; ARGPOS counts in order written.  */
  for (argpos = 0; argpos < num_actuals; i += inc, argpos++)

      tree type = TREE_TYPE (args[i].tree_value);
      enum machine_mode mode;

      /* Replace erroneous argument with constant zero.  */
      if (type == error_mark_node || !COMPLETE_TYPE_P (type))
        args[i].tree_value = integer_zero_node, type = integer_type_node;

      /* If TYPE is a transparent union, pass things the way we would
         pass the first field of the union.  We have already verified that
         the modes are the same.  */
      if (TREE_CODE (type) == UNION_TYPE && TYPE_TRANSPARENT_UNION (type))
        type = TREE_TYPE (TYPE_FIELDS (type));

      /* Decide where to pass this arg.

         args[i].reg is nonzero if all or part is passed in registers.

         args[i].partial is nonzero if part but not all is passed in registers,
         and the exact value says how many bytes are passed in registers.

         args[i].pass_on_stack is nonzero if the argument must at least be
         computed on the stack.  It may then be loaded back into registers
         if args[i].reg is nonzero.

         These decisions are driven by the FUNCTION_... macros and must agree
         with those made by function.c.  */

      /* See if this argument should be passed by invisible reference.  */
      if (pass_by_reference (args_so_far, TYPE_MODE (type),
                             type, argpos < n_named_args))

          callee_copies
            = reference_callee_copied (args_so_far, TYPE_MODE (type),
                                       type, argpos < n_named_args);

          /* If we're compiling a thunk, pass through invisible references
             instead of making a copy.  */
          if (call_from_thunk_p
              && !TREE_ADDRESSABLE (type)
              && (base = get_base_address (args[i].tree_value))
              && TREE_CODE (base) != SSA_NAME
              && (!DECL_P (base) || MEM_P (DECL_RTL (base)))))

              /* We can't use sibcalls if a callee-copied argument is
                 stored in the current function's frame.  */
              if (!call_from_thunk_p && DECL_P (base) && !TREE_STATIC (base))
                *may_tailcall = false;

              args[i].tree_value = build_fold_addr_expr_loc (loc,
                                                             args[i].tree_value);
              type = TREE_TYPE (args[i].tree_value);

              if (*ecf_flags & ECF_CONST)
                *ecf_flags &= ~(ECF_CONST | ECF_LOOPING_CONST_OR_PURE);
          /* We make a copy of the object and pass the address to the
             function being called.  */

          if (!COMPLETE_TYPE_P (type)
              || TREE_CODE (TYPE_SIZE_UNIT (type)) != INTEGER_CST
              || (flag_stack_check == GENERIC_STACK_CHECK
                  && compare_tree_int (TYPE_SIZE_UNIT (type),
                                       STACK_CHECK_MAX_VAR_SIZE) > 0))

              /* This is a variable-sized object.  Make space on the stack
                 for it.  */
              rtx size_rtx = expr_size (args[i].tree_value);

              if (*old_stack_level == 0)

                  emit_stack_save (SAVE_BLOCK, old_stack_level, NULL_RTX);
                  *old_pending_adj = pending_stack_adjust;
                  pending_stack_adjust = 0;

              copy = gen_rtx_MEM (BLKmode,
                                  allocate_dynamic_stack_space
                                  (size_rtx, NULL_RTX, TYPE_ALIGN (type)));
              set_mem_attributes (copy, type, 1);

            copy = assign_temp (type, 0, 1, 0);

          store_expr (args[i].tree_value, copy, 0, false);

          /* Just change the const function to pure and then let
             the next test clear the pure based on
             callee_copies.  */
          if (*ecf_flags & ECF_CONST)

              *ecf_flags &= ~ECF_CONST;
              *ecf_flags |= ECF_PURE;

          if (!callee_copies && *ecf_flags & ECF_PURE)
            *ecf_flags &= ~(ECF_PURE | ECF_LOOPING_CONST_OR_PURE);

          args[i].tree_value
            = build_fold_addr_expr_loc (loc, make_tree (type, copy));
          type = TREE_TYPE (args[i].tree_value);
          *may_tailcall = false;
      unsignedp = TYPE_UNSIGNED (type);
      mode = promote_function_mode (type, TYPE_MODE (type), &unsignedp,
                                    fndecl ? TREE_TYPE (fndecl) : fntype, 0);

      args[i].unsignedp = unsignedp;
      args[i].mode = mode;

      args[i].reg = FUNCTION_ARG (*args_so_far, mode, type,
                                  argpos < n_named_args);
#ifdef FUNCTION_INCOMING_ARG
      /* If this is a sibling call and the machine has register windows, the
         register window has to be unwound before calling the routine, so
         arguments have to go into the incoming registers.  */
      args[i].tail_call_reg = FUNCTION_INCOMING_ARG (*args_so_far, mode, type,
                                                     argpos < n_named_args);

      args[i].tail_call_reg = args[i].reg;

        args[i].partial
          = targetm.calls.arg_partial_bytes (args_so_far, mode, type,
                                             argpos < n_named_args);

      args[i].pass_on_stack = targetm.calls.must_pass_in_stack (mode, type);

      /* If FUNCTION_ARG returned a (parallel [(expr_list (nil) ...) ...]),
         it means that we are to pass this arg in the register(s) designated
         by the PARALLEL, but also to pass it in the stack.  */
      if (args[i].reg && GET_CODE (args[i].reg) == PARALLEL
          && XEXP (XVECEXP (args[i].reg, 0, 0), 0) == 0)
        args[i].pass_on_stack = 1;

      /* If this is an addressable type, we must preallocate the stack
         since we must evaluate the object into its final location.

         If this is to be passed in both registers and the stack, it is
         simpler to preallocate.  */
      if (TREE_ADDRESSABLE (type)
          || (args[i].pass_on_stack && args[i].reg != 0))
        *must_preallocate = 1;

      /* Compute the stack-size of this argument.  */
      if (args[i].reg == 0 || args[i].partial != 0
          || reg_parm_stack_space > 0
          || args[i].pass_on_stack)
        locate_and_pad_parm (mode, type,
#ifdef STACK_PARMS_IN_REG_PARM_AREA

                             args[i].pass_on_stack ? 0 : args[i].partial,
                             fndecl, args_size, &args[i].locate);
#ifdef BLOCK_REG_PADDING

        /* The argument is passed entirely in registers.  See at which
           end it should be padded.  */
        args[i].locate.where_pad =
          BLOCK_REG_PADDING (mode, type,
                             int_size_in_bytes (type) <= UNITS_PER_WORD);

      /* Update ARGS_SIZE, the total stack space for args so far.  */

      args_size->constant += args[i].locate.size.constant;
      if (args[i].locate.size.var)
        ADD_PARM_SIZE (*args_size, args[i].locate.size.var);

      /* Increment ARGS_SO_FAR, which has info about which arg-registers
         have been used, etc.  */

      FUNCTION_ARG_ADVANCE (*args_so_far, TYPE_MODE (type), type,
                            argpos < n_named_args);
/* Update ARGS_SIZE to contain the total size for the argument block.
   Return the original constant component of the argument block's size.

   REG_PARM_STACK_SPACE holds the number of bytes of stack space reserved
   for arguments passed in registers.  */

compute_argument_block_size (int reg_parm_stack_space,
                             struct args_size *args_size,
                             tree fndecl ATTRIBUTE_UNUSED,
                             tree fntype ATTRIBUTE_UNUSED,
                             int preferred_stack_boundary ATTRIBUTE_UNUSED)

  int unadjusted_args_size = args_size->constant;

  /* For accumulate outgoing args mode we don't need to align, since the frame
     will already be aligned.  Align to STACK_BOUNDARY in order to prevent
     backends from generating misaligned frame sizes.  */
  if (ACCUMULATE_OUTGOING_ARGS && preferred_stack_boundary > STACK_BOUNDARY)
    preferred_stack_boundary = STACK_BOUNDARY;

  /* Compute the actual size of the argument block required.  The variable
     and constant sizes must be combined, the size may have to be rounded,
     and there may be a minimum required size.  */

      args_size->var = ARGS_SIZE_TREE (*args_size);
      args_size->constant = 0;

      preferred_stack_boundary /= BITS_PER_UNIT;
      if (preferred_stack_boundary > 1)

          /* We don't handle this case yet.  To handle it correctly we have
             to add the delta, round and subtract the delta.
             Currently no machine description requires this support.  */
          gcc_assert (!(stack_pointer_delta & (preferred_stack_boundary - 1)));
          args_size->var = round_up (args_size->var, preferred_stack_boundary);

      if (reg_parm_stack_space > 0)

          args_size->var
            = size_binop (MAX_EXPR, args_size->var,
                          ssize_int (reg_parm_stack_space));

          /* The area corresponding to register parameters is not to count in
             the size of the block we need.  So make the adjustment.  */
          if (! OUTGOING_REG_PARM_STACK_SPACE ((!fndecl ? fntype : TREE_TYPE (fndecl))))
            args_size->var
              = size_binop (MINUS_EXPR, args_size->var,
                            ssize_int (reg_parm_stack_space));

      preferred_stack_boundary /= BITS_PER_UNIT;
      if (preferred_stack_boundary < 1)
        preferred_stack_boundary = 1;
      args_size->constant = (((args_size->constant
                               + stack_pointer_delta
                               + preferred_stack_boundary - 1)
                              / preferred_stack_boundary
                              * preferred_stack_boundary)
                             - stack_pointer_delta);
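      /* Worked example (illustrative numbers, not from the original
         sources): with preferred_stack_boundary == 16, stack_pointer_delta
         == 4 and a 20-byte constant size, this computes
         ((20 + 4 + 15) / 16) * 16 - 4 = 32 - 4 = 28, so that pushing 28
         bytes of arguments leaves the stack pointer on a 16-byte
         boundary.  */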
      args_size->constant = MAX (args_size->constant,
                                 reg_parm_stack_space);

      if (! OUTGOING_REG_PARM_STACK_SPACE ((!fndecl ? fntype : TREE_TYPE (fndecl))))
        args_size->constant -= reg_parm_stack_space;

  return unadjusted_args_size;
/* Precompute parameters as needed for a function call.

   FLAGS is mask of ECF_* constants.

   NUM_ACTUALS is the number of arguments.

   ARGS is an array containing information for each argument; this
   routine fills in the INITIAL_VALUE and VALUE fields for each
   precomputed argument.  */

precompute_arguments (int num_actuals, struct arg_data *args)

  /* If this is a libcall, then precompute all arguments so that we do not
     get extraneous instructions emitted as part of the libcall sequence.  */

  /* If we preallocated the stack space, and some arguments must be passed
     on the stack, then we must precompute any parameter which contains a
     function call which will store arguments on the stack.
     Otherwise, evaluating the parameter may clobber previous parameters
     which have already been stored into the stack.  (we have code to avoid
     such case by saving the outgoing stack arguments, but it results in
     worse code)  */
  if (!ACCUMULATE_OUTGOING_ARGS)

  for (i = 0; i < num_actuals; i++)

      enum machine_mode mode;

      if (TREE_CODE (args[i].tree_value) != CALL_EXPR)

      /* If this is an addressable type, we cannot pre-evaluate it.  */
      type = TREE_TYPE (args[i].tree_value);
      gcc_assert (!TREE_ADDRESSABLE (type));

      args[i].initial_value = args[i].value
        = expand_normal (args[i].tree_value);

      mode = TYPE_MODE (type);
      if (mode != args[i].mode)

          int unsignedp = args[i].unsignedp;
          args[i].value
            = convert_modes (args[i].mode, mode,
                             args[i].value, args[i].unsignedp);

          /* CSE will replace this only if it contains args[i].value
             pseudo, so convert it down to the declared mode using
             a SUBREG.  */
          if (REG_P (args[i].value)
              && GET_MODE_CLASS (args[i].mode) == MODE_INT
              && promote_mode (type, mode, &unsignedp) != args[i].mode)

              args[i].initial_value
                = gen_lowpart_SUBREG (mode, args[i].value);
              SUBREG_PROMOTED_VAR_P (args[i].initial_value) = 1;
              SUBREG_PROMOTED_UNSIGNED_SET (args[i].initial_value,
                                            unsignedp);
/* Given the current state of MUST_PREALLOCATE and information about
   arguments to a function call in NUM_ACTUALS, ARGS and ARGS_SIZE,
   compute and return the final value for MUST_PREALLOCATE.  */

finalize_must_preallocate (int must_preallocate, int num_actuals,
                           struct arg_data *args, struct args_size *args_size)

  /* See if we have or want to preallocate stack space.

     If we would have to push a partially-in-regs parm
     before other stack parms, preallocate stack space instead.

     If the size of some parm is not a multiple of the required stack
     alignment, we must preallocate.

     If the total size of arguments that would otherwise create a copy in
     a temporary (such as a CALL) is more than half the total argument list
     size, preallocation is faster.

     Another reason to preallocate is if we have a machine (like the m88k)
     where stack alignment is required to be maintained between every
     pair of insns, not just when the call is made.  However, we assume here
     that such machines either do not have push insns (and hence preallocation
     would occur anyway) or the problem is taken care of with
     PUSH_ROUNDING.  */

  if (! must_preallocate)

      int partial_seen = 0;
      int copy_to_evaluate_size = 0;

      for (i = 0; i < num_actuals && ! must_preallocate; i++)

          if (args[i].partial > 0 && ! args[i].pass_on_stack)

          else if (partial_seen && args[i].reg == 0)
            must_preallocate = 1;

          if (TYPE_MODE (TREE_TYPE (args[i].tree_value)) == BLKmode
              && (TREE_CODE (args[i].tree_value) == CALL_EXPR
                  || TREE_CODE (args[i].tree_value) == TARGET_EXPR
                  || TREE_CODE (args[i].tree_value) == COND_EXPR
                  || TREE_ADDRESSABLE (TREE_TYPE (args[i].tree_value))))
            copy_to_evaluate_size
              += int_size_in_bytes (TREE_TYPE (args[i].tree_value));

      if (copy_to_evaluate_size * 2 >= args_size->constant
          && args_size->constant > 0)
        must_preallocate = 1;

  return must_preallocate;
/* If we preallocated stack space, compute the address of each argument
   and store it into the ARGS array.

   We need not ensure it is a valid memory address here; it will be
   validized when it is used.

   ARGBLOCK is an rtx for the address of the outgoing arguments.  */

compute_argument_addresses (struct arg_data *args, rtx argblock, int num_actuals)

      rtx arg_reg = argblock;
      int i, arg_offset = 0;

      if (GET_CODE (argblock) == PLUS)
        arg_reg = XEXP (argblock, 0), arg_offset = INTVAL (XEXP (argblock, 1));

      for (i = 0; i < num_actuals; i++)

          rtx offset = ARGS_SIZE_RTX (args[i].locate.offset);
          rtx slot_offset = ARGS_SIZE_RTX (args[i].locate.slot_offset);

          unsigned int align, boundary;
          unsigned int units_on_stack = 0;
          enum machine_mode partial_mode = VOIDmode;

          /* Skip this parm if it will not be passed on the stack.  */
          if (! args[i].pass_on_stack
              && args[i].partial == 0)

          if (CONST_INT_P (offset))
            addr = plus_constant (arg_reg, INTVAL (offset));

            addr = gen_rtx_PLUS (Pmode, arg_reg, offset);

          addr = plus_constant (addr, arg_offset);

          if (args[i].partial != 0)

              /* Only part of the parameter is being passed on the stack.
                 Generate a simple memory reference of the correct size.  */
              units_on_stack = args[i].locate.size.constant;
              partial_mode = mode_for_size (units_on_stack * BITS_PER_UNIT,
                                            MODE_INT, 1);
              args[i].stack = gen_rtx_MEM (partial_mode, addr);
              set_mem_size (args[i].stack, GEN_INT (units_on_stack));

              args[i].stack = gen_rtx_MEM (args[i].mode, addr);
              set_mem_attributes (args[i].stack,
                                  TREE_TYPE (args[i].tree_value), 1);

          align = BITS_PER_UNIT;
          boundary = args[i].locate.boundary;
          if (args[i].locate.where_pad != downward)

          else if (CONST_INT_P (offset))

              align = INTVAL (offset) * BITS_PER_UNIT | boundary;
              align = align & -align;

          set_mem_align (args[i].stack, align);

          if (CONST_INT_P (slot_offset))
            addr = plus_constant (arg_reg, INTVAL (slot_offset));

            addr = gen_rtx_PLUS (Pmode, arg_reg, slot_offset);

          addr = plus_constant (addr, arg_offset);

          if (args[i].partial != 0)

              /* Only part of the parameter is being passed on the stack.
                 Generate a simple memory reference of the correct size.  */
              args[i].stack_slot = gen_rtx_MEM (partial_mode, addr);
              set_mem_size (args[i].stack_slot, GEN_INT (units_on_stack));

              args[i].stack_slot = gen_rtx_MEM (args[i].mode, addr);
              set_mem_attributes (args[i].stack_slot,
                                  TREE_TYPE (args[i].tree_value), 1);

          set_mem_align (args[i].stack_slot, args[i].locate.boundary);

          /* Function incoming arguments may overlap with sibling call
             outgoing arguments and we cannot allow reordering of reads
             from function arguments with stores to outgoing arguments
             of sibling calls.  */
          set_mem_alias_set (args[i].stack, 0);
          set_mem_alias_set (args[i].stack_slot, 0);
/* Given a FNDECL and EXP, return an rtx suitable for use as a target address
   in a call instruction.

   FNDECL is the tree node for the target function.  For an indirect call
   FNDECL will be NULL_TREE.

   ADDR is the operand 0 of CALL_EXPR for this call.  */

rtx_for_function_call (tree fndecl, tree addr)

  /* Get the function to call, in the form of RTL.  */

      /* If this is the first use of the function, see if we need to
         make an external definition for it.  */
      if (!TREE_USED (fndecl) && fndecl != current_function_decl)

          assemble_external (fndecl);
          TREE_USED (fndecl) = 1;

      /* Get a SYMBOL_REF rtx for the function address.  */
      funexp = XEXP (DECL_RTL (fndecl), 0);

    /* Generate an rtx (probably a pseudo-register) for the address.  */

      funexp = expand_normal (addr);
      pop_temp_slots ();	/* FUNEXP can't be BLKmode.  */

/* Return true if and only if SIZE storage units (usually bytes)
   starting from address ADDR overlap with already clobbered argument
   area.  This function is used to determine if we should give up a
   sibcall.  */

mem_overlaps_already_clobbered_arg_p (rtx addr, unsigned HOST_WIDE_INT size)

  if (addr == crtl->args.internal_arg_pointer)

  else if (GET_CODE (addr) == PLUS
           && XEXP (addr, 0) == crtl->args.internal_arg_pointer
           && CONST_INT_P (XEXP (addr, 1)))
    i = INTVAL (XEXP (addr, 1));
  /* Return true for arg pointer based indexed addressing.  */
  else if (GET_CODE (addr) == PLUS
           && (XEXP (addr, 0) == crtl->args.internal_arg_pointer
               || XEXP (addr, 1) == crtl->args.internal_arg_pointer))

#ifdef ARGS_GROW_DOWNWARD

    unsigned HOST_WIDE_INT k;

    for (k = 0; k < size; k++)
      if (i + k < stored_args_map->n_bits
          && TEST_BIT (stored_args_map, i + k))
/* Do the register loads required for any wholly-register parms or any
   parms which are passed both on the stack and in a register.  Their
   expressions were already evaluated.

   Mark all register-parms as living through the call, putting these USE
   insns in the CALL_INSN_FUNCTION_USAGE field.

   When IS_SIBCALL, perform the check_sibcall_argument_overlap
   checking, setting *SIBCALL_FAILURE if appropriate.  */

load_register_parameters (struct arg_data *args, int num_actuals,
                          rtx *call_fusage, int flags, int is_sibcall,
                          int *sibcall_failure)

  for (i = 0; i < num_actuals; i++)

      rtx reg = ((flags & ECF_SIBCALL)
                 ? args[i].tail_call_reg : args[i].reg);

          int partial = args[i].partial;

          rtx before_arg = get_last_insn ();
          /* Set non-negative if we must move a word at a time, even if
             just one word (e.g., partial == 4 && mode == DFmode).  Set
             to -1 if we just use a normal move insn.  This value can be
             zero if the argument is a zero size structure.  */

          if (GET_CODE (reg) == PARALLEL)

              gcc_assert (partial % UNITS_PER_WORD == 0);
              nregs = partial / UNITS_PER_WORD;

          else if (TYPE_MODE (TREE_TYPE (args[i].tree_value)) == BLKmode)

              size = int_size_in_bytes (TREE_TYPE (args[i].tree_value));
              nregs = (size + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;

            size = GET_MODE_SIZE (args[i].mode);

          /* Handle calls that pass values in multiple non-contiguous
             locations.  The Irix 6 ABI has examples of this.  */

          if (GET_CODE (reg) == PARALLEL)
            emit_group_move (reg, args[i].parallel_value);

          /* If simple case, just do move.  If normal partial, store_one_arg
             has already loaded the register for us.  In all other cases,
             load the register(s) from memory.  */

          else if (nregs == -1)

              emit_move_insn (reg, args[i].value);
#ifdef BLOCK_REG_PADDING
              /* Handle case where we have a value that needs shifting
                 up to the msb.  e.g. a QImode value and we're padding
                 upward on a BYTES_BIG_ENDIAN machine.  */
              if (size < UNITS_PER_WORD
                  && (args[i].locate.where_pad
                      == (BYTES_BIG_ENDIAN ? upward : downward)))

                  int shift = (UNITS_PER_WORD - size) * BITS_PER_UNIT;

                  /* Assigning REG here rather than a temp makes CALL_FUSAGE
                     report the whole reg as used.  Strictly speaking, the
                     call only uses SIZE bytes at the msb end, but it doesn't
                     seem worth generating rtl to say that.  */
                  reg = gen_rtx_REG (word_mode, REGNO (reg));
                  x = expand_shift (LSHIFT_EXPR, word_mode, reg,
                                    build_int_cst (NULL_TREE, shift),
                                    reg, 1);

                  emit_move_insn (reg, x);
          /* If we have pre-computed the values to put in the registers in
             the case of non-aligned structures, copy them in now.  */

          else if (args[i].n_aligned_regs != 0)
            for (j = 0; j < args[i].n_aligned_regs; j++)
              emit_move_insn (gen_rtx_REG (word_mode, REGNO (reg) + j),
                              args[i].aligned_regs[j]);

          else if (partial == 0 || args[i].pass_on_stack)

              rtx mem = validize_mem (args[i].value);

              /* Check for overlap with already clobbered argument area.  */
                  && mem_overlaps_already_clobbered_arg_p (XEXP (args[i].value, 0),
                *sibcall_failure = 1;

              /* Handle a BLKmode that needs shifting.  */
              if (nregs == 1 && size < UNITS_PER_WORD
#ifdef BLOCK_REG_PADDING
                  && args[i].locate.where_pad == downward

                  rtx tem = operand_subword_force (mem, 0, args[i].mode);
                  rtx ri = gen_rtx_REG (word_mode, REGNO (reg));
                  rtx x = gen_reg_rtx (word_mode);
                  int shift = (UNITS_PER_WORD - size) * BITS_PER_UNIT;
                  enum tree_code dir = BYTES_BIG_ENDIAN ? RSHIFT_EXPR
                                                        : LSHIFT_EXPR;

                  emit_move_insn (x, tem);
                  x = expand_shift (dir, word_mode, x,
                                    build_int_cst (NULL_TREE, shift),
                                    x, 0);

                  emit_move_insn (ri, x);

                move_block_to_reg (REGNO (reg), mem, nregs, args[i].mode);

          /* When a parameter is a block, and perhaps in other cases, it is
             possible that it did a load from an argument slot that was
             already clobbered.  */
              && check_sibcall_argument_overlap (before_arg, &args[i], 0))
            *sibcall_failure = 1;

          /* Handle calls that pass values in multiple non-contiguous
             locations.  The Irix 6 ABI has examples of this.  */
          if (GET_CODE (reg) == PARALLEL)
            use_group_regs (call_fusage, reg);
          else if (nregs == -1)
            use_reg (call_fusage, reg);

            use_regs (call_fusage, REGNO (reg), nregs);
/* We need to pop PENDING_STACK_ADJUST bytes.  But, if the arguments
   wouldn't fill up an even multiple of PREFERRED_UNIT_STACK_BOUNDARY
   bytes, then we would need to push some additional bytes to pad the
   arguments.  So, we compute an adjust to the stack pointer for an
   amount that will leave the stack under-aligned by UNADJUSTED_ARGS_SIZE
   bytes.  Then, when the arguments are pushed the stack will be perfectly
   aligned.  ARGS_SIZE->CONSTANT is set to the number of bytes that should
   be popped after the call.  Returns the adjustment.  */

combine_pending_stack_adjustment_and_call (int unadjusted_args_size,
                                           struct args_size *args_size,
                                           unsigned int preferred_unit_stack_boundary)

  /* The number of bytes to pop so that the stack will be
     under-aligned by UNADJUSTED_ARGS_SIZE bytes.  */
  HOST_WIDE_INT adjustment;
  /* The alignment of the stack after the arguments are pushed, if we
     just pushed the arguments without adjusting the stack here.  */
  unsigned HOST_WIDE_INT unadjusted_alignment;

  unadjusted_alignment
    = ((stack_pointer_delta + unadjusted_args_size)
       % preferred_unit_stack_boundary);

  /* We want to get rid of as many of the PENDING_STACK_ADJUST bytes
     as possible -- leaving just enough left to cancel out the
     UNADJUSTED_ALIGNMENT.  In other words, we want to ensure that the
     PENDING_STACK_ADJUST is non-negative, and congruent to
     -UNADJUSTED_ALIGNMENT modulo the PREFERRED_UNIT_STACK_BOUNDARY.  */

  /* Begin by trying to pop all the bytes.  */
  unadjusted_alignment
    = (unadjusted_alignment
       - (pending_stack_adjust % preferred_unit_stack_boundary));
  adjustment = pending_stack_adjust;
  /* Push enough additional bytes that the stack will be aligned
     after the arguments are pushed.  */
  if (preferred_unit_stack_boundary > 1)

      if (unadjusted_alignment > 0)
        adjustment -= preferred_unit_stack_boundary - unadjusted_alignment;

        adjustment += unadjusted_alignment;

  /* Now, set ARGS_SIZE->CONSTANT so that we pop the right number of
     bytes after the call.  The right number is the entire
     PENDING_STACK_ADJUST less our ADJUSTMENT plus the amount required
     by the arguments in the first place.  */
  args_size->constant
    = pending_stack_adjust - adjustment + unadjusted_args_size;
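  /* Worked example (illustrative numbers, not from the original sources):
     with preferred_unit_stack_boundary == 16, stack_pointer_delta == 0,
     unadjusted_args_size == 20 and pending_stack_adjust == 32, the
     unadjusted alignment is 20 % 16 = 4, so only 32 - (16 - 4) = 20 bytes
     are popped now; after the 20 bytes of arguments are pushed the stack
     is aligned again, and 32 - 20 + 20 = 32 bytes are popped after the
     call.  */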
/* Scan X expression if it does not dereference any argument slots
   we already clobbered by tail call arguments (as noted in stored_args_map
   bitmap).  Return nonzero if X expression dereferences such argument
   slots, zero otherwise.  */

check_sibcall_argument_overlap_1 (rtx x)

  code = GET_CODE (x);

    return mem_overlaps_already_clobbered_arg_p (XEXP (x, 0),
                                                 GET_MODE_SIZE (GET_MODE (x)));

  /* Scan all subexpressions.  */
  fmt = GET_RTX_FORMAT (code);
  for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)

        if (check_sibcall_argument_overlap_1 (XEXP (x, i)))

      else if (*fmt == 'E')

          for (j = 0; j < XVECLEN (x, i); j++)
            if (check_sibcall_argument_overlap_1 (XVECEXP (x, i, j)))

/* Scan sequence after INSN if it does not dereference any argument slots
   we already clobbered by tail call arguments (as noted in stored_args_map
   bitmap).  If MARK_STORED_ARGS_MAP, add stack slots for ARG to the
   stored_args_map bitmap afterwards (when ARG is a register,
   MARK_STORED_ARGS_MAP should be 0).  Return nonzero if the sequence after
   INSN dereferences such argument slots, zero otherwise.  */

check_sibcall_argument_overlap (rtx insn, struct arg_data *arg, int mark_stored_args_map)

  if (insn == NULL_RTX)
    insn = get_insns ();

    insn = NEXT_INSN (insn);

  for (; insn; insn = NEXT_INSN (insn))
        && check_sibcall_argument_overlap_1 (PATTERN (insn)))

  if (mark_stored_args_map)

#ifdef ARGS_GROW_DOWNWARD
      low = -arg->locate.slot_offset.constant - arg->locate.size.constant;

      low = arg->locate.slot_offset.constant;

      for (high = low + arg->locate.size.constant; low < high; low++)
        SET_BIT (stored_args_map, low);

  return insn != NULL_RTX;
/* Given that a function returns a value of mode MODE at the most
   significant end of hard register VALUE, shift VALUE left or right
   as specified by LEFT_P.  Return true if some action was needed.  */

shift_return_value (enum machine_mode mode, bool left_p, rtx value)

  HOST_WIDE_INT shift;

  gcc_assert (REG_P (value) && HARD_REGISTER_P (value));
  shift = GET_MODE_BITSIZE (GET_MODE (value)) - GET_MODE_BITSIZE (mode);
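  /* Illustrative example (not from the original sources): returning an
     SImode value at the most significant end of a 64-bit DImode register
     gives shift = 64 - 32 = 32 bits.  */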
1868 /* Use ashr rather than lshr for right shifts. This is for the benefit
1869 of the MIPS port, which requires SImode values to be sign-extended
1870 when stored in 64-bit registers. */
1871 if (!force_expand_binop (GET_MODE (value
), left_p
? ashl_optab
: ashr_optab
,
1872 value
, GEN_INT (shift
), value
, 1, OPTAB_WIDEN
))
/* If X is a likely-spilled register value, copy it to a pseudo
   register and return that register.  Return X otherwise.  */

static rtx
avoid_likely_spilled_reg (rtx x)
{
  rtx new_rtx;

  if (REG_P (x)
      && HARD_REGISTER_P (x)
      && CLASS_LIKELY_SPILLED_P (REGNO_REG_CLASS (REGNO (x))))
    {
      /* Make sure that we generate a REG rather than a CONCAT.
	 Moves into CONCATs can need nontrivial instructions,
	 and the whole point of this function is to avoid
	 using the hard register directly in such a situation.  */
      generating_concat_p = 0;
      new_rtx = gen_reg_rtx (GET_MODE (x));
      generating_concat_p = 1;
      emit_move_insn (new_rtx, x);
      return new_rtx;
    }
  return x;
}
/* Generate all the code for a CALL_EXPR exp
   and return an rtx for its value.
   Store the value in TARGET (specified as an rtx) if convenient.
   If the value is stored in TARGET then TARGET is returned.
   If IGNORE is nonzero, then we ignore the value of the function call.  */

rtx
expand_call (tree exp, rtx target, int ignore)
{
  /* Nonzero if we are currently expanding a call.  */
  static int currently_expanding_call = 0;

  /* RTX for the function to be called.  */
  rtx funexp;
  /* Sequence of insns to perform a normal "call".  */
  rtx normal_call_insns = NULL_RTX;
  /* Sequence of insns to perform a tail "call".  */
  rtx tail_call_insns = NULL_RTX;
  /* Data type of the function.  */
  tree funtype;
  tree type_arg_types;
  tree rettype;
  /* Declaration of the function being called,
     or 0 if the function is computed (not known by name).  */
  tree fndecl = 0;
  /* The type of the function being called.  */
  tree fntype;
  bool try_tail_call = CALL_EXPR_TAILCALL (exp);
  int pass;

  /* Register in which non-BLKmode value will be returned,
     or 0 if no value or if value is BLKmode.  */
  rtx valreg;
  /* Address where we should return a BLKmode value;
     0 if value not BLKmode.  */
  rtx structure_value_addr = 0;
  /* Nonzero if that address is being passed by treating it as
     an extra, implicit first parameter.  Otherwise,
     it is passed by being copied directly into struct_value_rtx.  */
  int structure_value_addr_parm = 0;
  /* Holds the value of implicit argument for the struct value.  */
  tree structure_value_addr_value = NULL_TREE;
  /* Size of aggregate value wanted, or zero if none wanted
     or if we are using the non-reentrant PCC calling convention
     or expecting the value in registers.  */
  HOST_WIDE_INT struct_value_size = 0;
  /* Nonzero if called function returns an aggregate in memory PCC style,
     by returning the address of where to find it.  */
  int pcc_struct_value = 0;
  rtx struct_value = 0;

  /* Number of actual parameters in this call, including struct value addr.  */
  int num_actuals;
  /* Number of named args.  Args after this are anonymous ones
     and they must all go on the stack.  */
  int n_named_args;
  /* Number of complex actual arguments that need to be split.  */
  int num_complex_actuals = 0;

  /* Vector of information about each argument.
     Arguments are numbered in the order they will be pushed,
     not the order they are written.  */
  struct arg_data *args;

  /* Total size in bytes of all the stack-parms scanned so far.  */
  struct args_size args_size;
  struct args_size adjusted_args_size;
  /* Size of arguments before any adjustments (such as rounding).  */
  int unadjusted_args_size;
  /* Data on reg parms scanned so far.  */
  CUMULATIVE_ARGS args_so_far;
  /* Nonzero if a reg parm has been scanned.  */
  int reg_parm_seen;
  /* Nonzero if this is an indirect function call.  */

  /* Nonzero if we must avoid push-insns in the args for this call.
     If stack space is allocated for register parameters, but not by the
     caller, then it is preallocated in the fixed part of the stack frame.
     So the entire argument block must then be preallocated (i.e., we
     ignore PUSH_ROUNDING in that case).  */
  int must_preallocate = !PUSH_ARGS;

  /* Size of the stack reserved for parameter registers.  */
  int reg_parm_stack_space = 0;

  /* Address of space preallocated for stack parms
     (on machines that lack push insns), or 0 if space not preallocated.  */
  rtx argblock = 0;

  /* Mask of ECF_ flags.  */
  int flags = 0;
#ifdef REG_PARM_STACK_SPACE
  /* Define the boundary of the register parm stack space that needs to be
     saved, if any.  */
  int low_to_save, high_to_save;
  rtx save_area = 0;		/* Place that it is saved */
#endif

  int initial_highest_arg_in_use = highest_outgoing_arg_in_use;
  char *initial_stack_usage_map = stack_usage_map;
  char *stack_usage_map_buf = NULL;

  int old_stack_allocated;

  /* State variables to track stack modifications.  */
  rtx old_stack_level = 0;
  int old_stack_arg_under_construction = 0;
  int old_pending_adj = 0;
  int old_inhibit_defer_pop = inhibit_defer_pop;

  /* Some stack pointer alterations we make are performed via
     allocate_dynamic_stack_space.  This modifies the stack_pointer_delta,
     which we then also need to save/restore along the way.  */
  int old_stack_pointer_delta = 0;

  rtx call_fusage;
  tree addr = CALL_EXPR_FN (exp);
  int i;
  /* The alignment of the stack, in bits.  */
  unsigned HOST_WIDE_INT preferred_stack_boundary;
  /* The alignment of the stack, in bytes.  */
  unsigned HOST_WIDE_INT preferred_unit_stack_boundary;
  /* The static chain value to use for this call.  */
  rtx static_chain_value;
  /* See if this is "nothrow" function call.  */
  if (TREE_NOTHROW (exp))
    flags |= ECF_NOTHROW;

  /* See if we can find a DECL-node for the actual function, and get the
     function attributes (flags) from the function decl or type node.  */
  fndecl = get_callee_fndecl (exp);
  if (fndecl)
    {
      fntype = TREE_TYPE (fndecl);
      flags |= flags_from_decl_or_type (fndecl);
    }
  else
    {
      fntype = TREE_TYPE (TREE_TYPE (addr));
      flags |= flags_from_decl_or_type (fntype);
    }
  rettype = TREE_TYPE (exp);

  struct_value = targetm.calls.struct_value_rtx (fntype, 0);

  /* Warn if this value is an aggregate type,
     regardless of which calling convention we are using for it.  */
  if (AGGREGATE_TYPE_P (rettype))
    warning (OPT_Waggregate_return, "function call has aggregate value");
  /* If the result of a non looping pure or const function call is
     ignored (or void), and none of its arguments are volatile, we can
     avoid expanding the call and just evaluate the arguments for
     side-effects.  */
  if ((flags & (ECF_CONST | ECF_PURE))
      && (!(flags & ECF_LOOPING_CONST_OR_PURE))
      && (ignore || target == const0_rtx
	  || TYPE_MODE (rettype) == VOIDmode))
    {
      bool volatilep = false;
      tree arg;
      call_expr_arg_iterator iter;

      FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
	if (TREE_THIS_VOLATILE (arg))
	  {
	    volatilep = true;
	    break;
	  }

      if (! volatilep)
	{
	  FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
	    expand_expr (arg, const0_rtx, VOIDmode, EXPAND_NORMAL);
	  return const0_rtx;
	}
    }
#ifdef REG_PARM_STACK_SPACE
  reg_parm_stack_space = REG_PARM_STACK_SPACE (!fndecl ? fntype : fndecl);
#endif

  if (! OUTGOING_REG_PARM_STACK_SPACE ((!fndecl ? fntype : TREE_TYPE (fndecl)))
      && reg_parm_stack_space > 0 && PUSH_ARGS)
    must_preallocate = 1;
  /* Set up a place to return a structure.  */

  /* Cater to broken compilers.  */
  if (aggregate_value_p (exp, (!fndecl ? fntype : fndecl)))
    {
      /* This call returns a big structure.  */
      flags &= ~(ECF_CONST | ECF_PURE | ECF_LOOPING_CONST_OR_PURE);

#ifdef PCC_STATIC_STRUCT_RETURN
      {
	pcc_struct_value = 1;
      }
#else /* not PCC_STATIC_STRUCT_RETURN */
      {
	struct_value_size = int_size_in_bytes (rettype);

	if (target && MEM_P (target) && CALL_EXPR_RETURN_SLOT_OPT (exp))
	  structure_value_addr = XEXP (target, 0);
	else
	  {
	    /* For variable-sized objects, we must be called with a target
	       specified.  If we were to allocate space on the stack here,
	       we would have no way of knowing when to free it.  */
	    rtx d = assign_temp (rettype, 0, 1, 1);

	    mark_temp_addr_taken (d);
	    structure_value_addr = XEXP (d, 0);
	    target = 0;
	  }
      }
#endif /* not PCC_STATIC_STRUCT_RETURN */
    }
  /* Figure out the amount to which the stack should be aligned.  */
  preferred_stack_boundary = PREFERRED_STACK_BOUNDARY;
  if (fndecl)
    {
      struct cgraph_rtl_info *i = cgraph_rtl_info (fndecl);
      /* Without automatic stack alignment, we can't increase preferred
	 stack boundary.  With automatic stack alignment, it is
	 unnecessary since unless we can guarantee that all callers will
	 align the outgoing stack properly, callee has to align its
	 stack anyway.  */
      if (i
	  && i->preferred_incoming_stack_boundary
	  && i->preferred_incoming_stack_boundary < preferred_stack_boundary)
	preferred_stack_boundary = i->preferred_incoming_stack_boundary;
    }
  /* Operand 0 is a pointer-to-function; get the type of the function.  */
  funtype = TREE_TYPE (addr);
  gcc_assert (POINTER_TYPE_P (funtype));
  funtype = TREE_TYPE (funtype);

  /* Count whether there are actual complex arguments that need to be split
     into their real and imaginary parts.  Munge the type_arg_types
     appropriately here as well.  */
  if (targetm.calls.split_complex_arg)
    {
      call_expr_arg_iterator iter;
      tree arg;
      FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
	{
	  tree type = TREE_TYPE (arg);
	  if (type && TREE_CODE (type) == COMPLEX_TYPE
	      && targetm.calls.split_complex_arg (type))
	    num_complex_actuals++;
	}
      type_arg_types = split_complex_types (TYPE_ARG_TYPES (funtype));
    }
  else
    type_arg_types = TYPE_ARG_TYPES (funtype);

  if (flags & ECF_MAY_BE_ALLOCA)
    cfun->calls_alloca = 1;
  /* If struct_value_rtx is 0, it means pass the address
     as if it were an extra parameter.  Put the argument expression
     in structure_value_addr_value.  */
  if (structure_value_addr && struct_value == 0)
    {
      /* If structure_value_addr is a REG other than
	 virtual_outgoing_args_rtx, we can use always use it.  If it
	 is not a REG, we must always copy it into a register.
	 If it is virtual_outgoing_args_rtx, we must copy it to another
	 register in some cases.  */
      rtx temp = (!REG_P (structure_value_addr)
		  || (ACCUMULATE_OUTGOING_ARGS
		      && stack_arg_under_construction
		      && structure_value_addr == virtual_outgoing_args_rtx)
		  ? copy_addr_to_reg (convert_memory_address
				      (Pmode, structure_value_addr))
		  : structure_value_addr);

      structure_value_addr_value =
	make_tree (build_pointer_type (TREE_TYPE (funtype)), temp);
      structure_value_addr_parm = 1;
    }

  /* Count the arguments and set NUM_ACTUALS.  */
  num_actuals =
    call_expr_nargs (exp) + num_complex_actuals + structure_value_addr_parm;
  /* Compute number of named args.
     First, do a raw count of the args for INIT_CUMULATIVE_ARGS.  */

  if (type_arg_types != 0)
    n_named_args
      = (list_length (type_arg_types)
	 /* Count the struct value address, if it is passed as a parm.  */
	 + structure_value_addr_parm);
  else
    /* If we know nothing, treat all args as named.  */
    n_named_args = num_actuals;

  /* Start updating where the next arg would go.

     On some machines (such as the PA) indirect calls have a different
     calling convention than normal calls.  The fourth argument in
     INIT_CUMULATIVE_ARGS tells the backend if this is an indirect call
     or not.  */
  INIT_CUMULATIVE_ARGS (args_so_far, funtype, NULL_RTX, fndecl, n_named_args);

  /* Now possibly adjust the number of named args.
     Normally, don't include the last named arg if anonymous args follow.
     We do include the last named arg if
     targetm.calls.strict_argument_naming() returns nonzero.
     (If no anonymous args follow, the result of list_length is actually
     one too large.  This is harmless.)

     If targetm.calls.pretend_outgoing_varargs_named() returns
     nonzero, and targetm.calls.strict_argument_naming() returns zero,
     this machine will be able to place unnamed args that were passed
     in registers into the stack.  So treat all args as named.  This
     allows the insns emitting for a specific argument list to be
     independent of the function declaration.

     If targetm.calls.pretend_outgoing_varargs_named() returns zero,
     we do not have any reliable way to pass unnamed args in
     registers, so we must force them into memory.  */

  if (type_arg_types != 0
      && targetm.calls.strict_argument_naming (&args_so_far))
    ;
  else if (type_arg_types != 0
	   && ! targetm.calls.pretend_outgoing_varargs_named (&args_so_far))
    /* Don't include the last named arg.  */
    --n_named_args;
  else
    /* Treat all args as named.  */
    n_named_args = num_actuals;
  /* Make a vector to hold all the information about each arg.  */
  args = XALLOCAVEC (struct arg_data, num_actuals);
  memset (args, 0, num_actuals * sizeof (struct arg_data));
  /* Build up entries in the ARGS array, compute the size of the
     arguments into ARGS_SIZE, etc.  */
  initialize_argument_information (num_actuals, args, &args_size,
				   n_named_args, exp,
				   structure_value_addr_value, fndecl, fntype,
				   &args_so_far, reg_parm_stack_space,
				   &old_stack_level, &old_pending_adj,
				   &must_preallocate, &flags,
				   &try_tail_call, CALL_FROM_THUNK_P (exp));

  if (args_size.var)
    must_preallocate = 1;

  /* Now make final decision about preallocating stack space.  */
  must_preallocate = finalize_must_preallocate (must_preallocate,
						num_actuals, args,
						&args_size);
  /* If the structure value address will reference the stack pointer, we
     must stabilize it.  We don't need to do this if we know that we are
     not going to adjust the stack pointer in processing this call.  */

  if (structure_value_addr
      && (reg_mentioned_p (virtual_stack_dynamic_rtx, structure_value_addr)
	  || reg_mentioned_p (virtual_outgoing_args_rtx,
			      structure_value_addr))
      && (args_size.var
	  || (!ACCUMULATE_OUTGOING_ARGS && args_size.constant)))
    structure_value_addr = copy_to_reg (structure_value_addr);
  /* Tail calls can make things harder to debug, and we've traditionally
     pushed these optimizations into -O2.  Don't try if we're already
     expanding a call, as that means we're an argument.  Don't try if
     there's cleanups, as we know there's code to follow the call.  */

  if (currently_expanding_call++ != 0
      || !flag_optimize_sibling_calls
      || args_size.var
      || dbg_cnt (tail_call) == false)
    try_tail_call = 0;
  /*  Rest of purposes for tail call optimizations to fail.  */
  if (
#ifdef HAVE_sibcall_epilogue
      !HAVE_sibcall_epilogue
#else
      1
#endif
      || !try_tail_call
      /* Doing sibling call optimization needs some work, since
	 structure_value_addr can be allocated on the stack.
	 It does not seem worth the effort since few optimizable
	 sibling calls will return a structure.  */
      || structure_value_addr != NULL_RTX
#ifdef REG_PARM_STACK_SPACE
      /* If outgoing reg parm stack space changes, we can not do sibcall.  */
      || (OUTGOING_REG_PARM_STACK_SPACE (funtype)
	  != OUTGOING_REG_PARM_STACK_SPACE (TREE_TYPE (current_function_decl)))
      || (reg_parm_stack_space != REG_PARM_STACK_SPACE (fndecl))
#endif
      /* Check whether the target is able to optimize the call
	 into a sibcall.  */
      || !targetm.function_ok_for_sibcall (fndecl, exp)
      /* Functions that do not return exactly once may not be sibcall
	 optimized.  */
      || (flags & (ECF_RETURNS_TWICE | ECF_NORETURN))
      || TYPE_VOLATILE (TREE_TYPE (TREE_TYPE (addr)))
      /* If the called function is nested in the current one, it might access
	 some of the caller's arguments, but could clobber them beforehand if
	 the argument areas are shared.  */
      || (fndecl && decl_function_context (fndecl) == current_function_decl)
      /* If this function requires more stack slots than the current
	 function, we cannot change it into a sibling call.
	 crtl->args.pretend_args_size is not part of the
	 stack allocated by our caller.  */
      || args_size.constant > (crtl->args.size
			       - crtl->args.pretend_args_size)
      /* If the callee pops its own arguments, then it must pop exactly
	 the same number of arguments as the current function.  */
      || (RETURN_POPS_ARGS (fndecl, funtype, args_size.constant)
	  != RETURN_POPS_ARGS (current_function_decl,
			       TREE_TYPE (current_function_decl),
			       crtl->args.size))
      || !lang_hooks.decls.ok_for_sibcall (fndecl))
    try_tail_call = 0;
  /* Check if caller and callee disagree in promotion of function
     return value.  */
  if (try_tail_call)
    {
      enum machine_mode caller_mode, caller_promoted_mode;
      enum machine_mode callee_mode, callee_promoted_mode;
      int caller_unsignedp, callee_unsignedp;
      tree caller_res = DECL_RESULT (current_function_decl);

      caller_unsignedp = TYPE_UNSIGNED (TREE_TYPE (caller_res));
      caller_mode = DECL_MODE (caller_res);
      callee_unsignedp = TYPE_UNSIGNED (TREE_TYPE (funtype));
      callee_mode = TYPE_MODE (TREE_TYPE (funtype));
      caller_promoted_mode
	= promote_function_mode (TREE_TYPE (caller_res), caller_mode,
				 &caller_unsignedp,
				 TREE_TYPE (current_function_decl), 1);
      callee_promoted_mode
	= promote_function_mode (TREE_TYPE (funtype), callee_mode,
				 &callee_unsignedp,
				 funtype, 1);
      if (caller_mode != VOIDmode
	  && (caller_promoted_mode != callee_promoted_mode
	      || ((caller_mode != caller_promoted_mode
		   || callee_mode != callee_promoted_mode)
		  && (caller_unsignedp != callee_unsignedp
		      || GET_MODE_BITSIZE (caller_mode)
			 < GET_MODE_BITSIZE (callee_mode)))))
	try_tail_call = 0;
    }
  /* Ensure current function's preferred stack boundary is at least
     what we need.  Stack alignment may also increase preferred stack
     boundary.  */
  if (crtl->preferred_stack_boundary < preferred_stack_boundary)
    crtl->preferred_stack_boundary = preferred_stack_boundary;
  else
    preferred_stack_boundary = crtl->preferred_stack_boundary;

  preferred_unit_stack_boundary = preferred_stack_boundary / BITS_PER_UNIT;
  /* We want to make two insn chains; one for a sibling call, the other
     for a normal call.  We will select one of the two chains after
     initial RTL generation is complete.  */
  for (pass = try_tail_call ? 0 : 1; pass < 2; pass++)
    {
      int sibcall_failure = 0;
      /* We want to emit any pending stack adjustments before the tail
	 recursion "call".  That way we know any adjustment after the tail
	 recursion call can be ignored if we indeed use the tail
	 call expansion.  */
      int save_pending_stack_adjust = 0;
      int save_stack_pointer_delta = 0;
      rtx insns;
      rtx before_call, next_arg_reg, after_args;

      if (pass == 0)
	{
	  /* State variables we need to save and restore between
	     iterations.  */
	  save_pending_stack_adjust = pending_stack_adjust;
	  save_stack_pointer_delta = stack_pointer_delta;
	}
      if (pass)
	flags &= ~ECF_SIBCALL;
      else
	flags |= ECF_SIBCALL;

      /* Other state variables that we must reinitialize each time
	 through the loop (that are not initialized by the loop itself).  */
      argblock = 0;
      call_fusage = 0;

      /* Start a new sequence for the normal call case.

	 From this point on, if the sibling call fails, we want to set
	 sibcall_failure instead of continuing the loop.  */
      start_sequence ();
      /* Don't let pending stack adjusts add up to too much.
	 Also, do all pending adjustments now if there is any chance
	 this might be a call to alloca or if we are expanding a sibling
	 call sequence.
	 Also do the adjustments before a throwing call, otherwise
	 exception handling can fail; PR 19225.  */
      if (pending_stack_adjust >= 32
	  || (pending_stack_adjust > 0
	      && (flags & ECF_MAY_BE_ALLOCA))
	  || (pending_stack_adjust > 0
	      && flag_exceptions && !(flags & ECF_NOTHROW))
	  || pass == 0)
	do_pending_stack_adjust ();

      /* Precompute any arguments as needed.  */
      if (pass)
	precompute_arguments (num_actuals, args);

      /* Now we are about to start emitting insns that can be deleted
	 if a libcall is deleted.  */
      if (pass && (flags & ECF_MALLOC))
	start_sequence ();

      if (pass == 0 && crtl->stack_protect_guard)
	stack_protect_epilogue ();

      adjusted_args_size = args_size;
      /* Compute the actual size of the argument block required.  The variable
	 and constant sizes must be combined, the size may have to be rounded,
	 and there may be a minimum required size.  When generating a sibcall
	 pattern, do not round up, since we'll be re-using whatever space our
	 caller provided.  */
      unadjusted_args_size
	= compute_argument_block_size (reg_parm_stack_space,
				       &adjusted_args_size,
				       fndecl, fntype,
				       (pass == 0 ? 0
					: preferred_stack_boundary));

      old_stack_allocated = stack_pointer_delta - pending_stack_adjust;
      /* The argument block when performing a sibling call is the
	 incoming argument block.  */
      if (pass == 0)
	{
	  argblock = crtl->args.internal_arg_pointer;
	  argblock
#ifdef STACK_GROWS_DOWNWARD
	    = plus_constant (argblock, crtl->args.pretend_args_size);
#else
	    = plus_constant (argblock, -crtl->args.pretend_args_size);
#endif
	  stored_args_map = sbitmap_alloc (args_size.constant);
	  sbitmap_zero (stored_args_map);
	}
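      /* STORED_ARGS_MAP parallels STACK_USAGE_MAP, but for the incoming
	 argument area reused by a sibling call: one bit per byte of
	 ARGS_SIZE.CONSTANT, set by check_sibcall_argument_overlap as each
	 argument is stored.  */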
      /* If we have no actual push instructions, or shouldn't use them,
	 make space for all args right now.  */
      else if (adjusted_args_size.var != 0)
	{
	  if (old_stack_level == 0)
	    {
	      emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);
	      old_stack_pointer_delta = stack_pointer_delta;
	      old_pending_adj = pending_stack_adjust;
	      pending_stack_adjust = 0;
	      /* stack_arg_under_construction says whether a stack arg is
		 being constructed at the old stack level.  Pushing the stack
		 gets a clean outgoing argument block.  */
	      old_stack_arg_under_construction = stack_arg_under_construction;
	      stack_arg_under_construction = 0;
	    }
	  argblock = push_block (ARGS_SIZE_RTX (adjusted_args_size), 0, 0);
	}
      else
	{
	  /* Note that we must go through the motions of allocating an argument
	     block even if the size is zero because we may be storing args
	     in the area reserved for register arguments, which may be part of
	     the stack frame.  */

	  int needed = adjusted_args_size.constant;

	  /* Store the maximum argument space used.  It will be pushed by
	     the prologue (if ACCUMULATE_OUTGOING_ARGS, or stack overflow
	     checking).  */

	  if (needed > crtl->outgoing_args_size)
	    crtl->outgoing_args_size = needed;
	  if (must_preallocate)
	    {
	      if (ACCUMULATE_OUTGOING_ARGS)
		{
		  /* Since the stack pointer will never be pushed, it is
		     possible for the evaluation of a parm to clobber
		     something we have already written to the stack.
		     Since most function calls on RISC machines do not use
		     the stack, this is uncommon, but must work correctly.

		     Therefore, we save any area of the stack that was already
		     written and that we are using.  Here we set up to do this
		     by making a new stack usage map from the old one.  The
		     actual save will be done by store_one_arg.

		     Another approach might be to try to reorder the argument
		     evaluations to avoid this conflicting stack usage.  */

		  /* Since we will be writing into the entire argument area,
		     the map must be allocated for its entire size, not just
		     the part that is the responsibility of the caller.  */
		  if (! OUTGOING_REG_PARM_STACK_SPACE ((!fndecl ? fntype : TREE_TYPE (fndecl))))
		    needed += reg_parm_stack_space;

#ifdef ARGS_GROW_DOWNWARD
		  highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
						     needed + 1);
#else
		  highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
						     needed);
#endif
		  if (stack_usage_map_buf)
		    free (stack_usage_map_buf);
		  stack_usage_map_buf = XNEWVEC (char, highest_outgoing_arg_in_use);
		  stack_usage_map = stack_usage_map_buf;

		  if (initial_highest_arg_in_use)
		    memcpy (stack_usage_map, initial_stack_usage_map,
			    initial_highest_arg_in_use);

		  if (initial_highest_arg_in_use != highest_outgoing_arg_in_use)
		    memset (&stack_usage_map[initial_highest_arg_in_use], 0,
			    (highest_outgoing_arg_in_use
			     - initial_highest_arg_in_use));

		  /* The address of the outgoing argument list must not be
		     copied to a register here, because argblock would be left
		     pointing to the wrong place after the call to
		     allocate_dynamic_stack_space below.  */

		  argblock = virtual_outgoing_args_rtx;
		}
	      else
		{
		  if (inhibit_defer_pop == 0)
		    {
		      /* Try to reuse some or all of the pending_stack_adjust
			 to get this space.  */
		      needed
			= (combine_pending_stack_adjustment_and_call
			   (unadjusted_args_size,
			    &adjusted_args_size,
			    preferred_unit_stack_boundary));

		      /* combine_pending_stack_adjustment_and_call computes
			 an adjustment before the arguments are allocated.
			 Account for them and see whether or not the stack
			 needs to go up or down.  */
		      needed = unadjusted_args_size - needed;

		      if (needed < 0)
			{
			  /* We're releasing stack space.  */
			  /* ??? We can avoid any adjustment at all if we're
			     already aligned.  FIXME.  */
			  pending_stack_adjust = -needed;
			  do_pending_stack_adjust ();
			  needed = 0;
			}
		      else
			/* We need to allocate space.  We'll do that in
			   push_block below.  */
			pending_stack_adjust = 0;
		    }

		  /* Special case this because overhead of `push_block' in
		     this case is non-trivial.  */
		  if (needed == 0)
		    argblock = virtual_outgoing_args_rtx;
		  else
		    {
		      argblock = push_block (GEN_INT (needed), 0, 0);
#ifdef ARGS_GROW_DOWNWARD
		      argblock = plus_constant (argblock, needed);
#endif
		    }

		  /* We only really need to call `copy_to_reg' in the case
		     where push insns are going to be used to pass ARGBLOCK
		     to a function call in ARGS.  In that case, the stack
		     pointer changes value from the allocation point to the
		     call point, and hence the value of
		     VIRTUAL_OUTGOING_ARGS_RTX changes as well.  But might
		     as well always do it.  */
		  argblock = copy_to_reg (argblock);
		}
	    }
	}
      if (ACCUMULATE_OUTGOING_ARGS)
	{
	  /* The save/restore code in store_one_arg handles all
	     cases except one: a constructor call (including a C
	     function returning a BLKmode struct) to initialize
	     an argument.  */
	  if (stack_arg_under_construction)
	    {
	      rtx push_size
		= GEN_INT (adjusted_args_size.constant
			   + (OUTGOING_REG_PARM_STACK_SPACE ((!fndecl ? fntype
							      : TREE_TYPE (fndecl))) ? 0
			      : reg_parm_stack_space));
	      if (old_stack_level == 0)
		{
		  emit_stack_save (SAVE_BLOCK, &old_stack_level,
				   NULL_RTX);
		  old_stack_pointer_delta = stack_pointer_delta;
		  old_pending_adj = pending_stack_adjust;
		  pending_stack_adjust = 0;
		  /* stack_arg_under_construction says whether a stack
		     arg is being constructed at the old stack level.
		     Pushing the stack gets a clean outgoing argument
		     block.  */
		  old_stack_arg_under_construction
		    = stack_arg_under_construction;
		  stack_arg_under_construction = 0;
		  /* Make a new map for the new argument list.  */
		  if (stack_usage_map_buf)
		    free (stack_usage_map_buf);
		  stack_usage_map_buf = XCNEWVEC (char, highest_outgoing_arg_in_use);
		  stack_usage_map = stack_usage_map_buf;
		  highest_outgoing_arg_in_use = 0;
		}
	      allocate_dynamic_stack_space (push_size, NULL_RTX,
					    BITS_PER_UNIT);
	    }

	  /* If argument evaluation might modify the stack pointer,
	     copy the address of the argument list to a register.  */
	  for (i = 0; i < num_actuals; i++)
	    if (args[i].pass_on_stack)
	      {
		argblock = copy_addr_to_reg (argblock);
		break;
	      }
	}

      compute_argument_addresses (args, argblock, num_actuals);
      /* If we push args individually in reverse order, perform stack alignment
	 before the first push (the last arg).  */
      if (PUSH_ARGS_REVERSED && argblock == 0
	  && adjusted_args_size.constant != unadjusted_args_size)
	{
	  /* When the stack adjustment is pending, we get better code
	     by combining the adjustments.  */
	  if (pending_stack_adjust
	      && ! inhibit_defer_pop)
	    {
	      pending_stack_adjust
		= (combine_pending_stack_adjustment_and_call
		   (unadjusted_args_size,
		    &adjusted_args_size,
		    preferred_unit_stack_boundary));
	      do_pending_stack_adjust ();
	    }
	  else if (argblock == 0)
	    anti_adjust_stack (GEN_INT (adjusted_args_size.constant
					- unadjusted_args_size));
	}
      /* Now that the stack is properly aligned, pops can't safely
	 be deferred during the evaluation of the arguments.  */
      NO_DEFER_POP;

      funexp = rtx_for_function_call (fndecl, addr);
      /* Figure out the register where the value, if any, will come back.  */
      valreg = 0;
      if (TYPE_MODE (rettype) != VOIDmode
	  && ! structure_value_addr)
	{
	  if (pcc_struct_value)
	    valreg = hard_function_value (build_pointer_type (rettype),
					  fndecl, NULL, (pass == 0));
	  else
	    valreg = hard_function_value (rettype, fndecl, fntype,
					  (pass == 0));

	  /* If VALREG is a PARALLEL whose first member has a zero
	     offset, use that.  This is for targets such as m68k that
	     return the same value in multiple places.  */
	  if (GET_CODE (valreg) == PARALLEL)
	    {
	      rtx elem = XVECEXP (valreg, 0, 0);
	      rtx where = XEXP (elem, 0);
	      rtx offset = XEXP (elem, 1);
	      if (offset == const0_rtx
		  && GET_MODE (where) == GET_MODE (valreg))
		valreg = where;
	    }
	}
      /* Precompute all register parameters.  It isn't safe to compute anything
	 once we have started filling any specific hard regs.  */
      precompute_register_parameters (num_actuals, args, &reg_parm_seen);

      if (CALL_EXPR_STATIC_CHAIN (exp))
	static_chain_value = expand_normal (CALL_EXPR_STATIC_CHAIN (exp));
      else
	static_chain_value = 0;
#ifdef REG_PARM_STACK_SPACE
      /* Save the fixed argument area if it's part of the caller's frame and
	 is clobbered by argument setup for this call.  */
      if (ACCUMULATE_OUTGOING_ARGS && pass)
	save_area = save_fixed_argument_area (reg_parm_stack_space, argblock,
					      &low_to_save, &high_to_save);
#endif
      /* Now store (and compute if necessary) all non-register parms.
	 These come before register parms, since they can require block-moves,
	 which could clobber the registers used for register parms.
	 Parms which have partial registers are not stored here,
	 but we do preallocate space here if they want that.  */

      for (i = 0; i < num_actuals; i++)
	{
	  if (args[i].reg == 0 || args[i].pass_on_stack)
	    {
	      rtx before_arg = get_last_insn ();

	      if (store_one_arg (&args[i], argblock, flags,
				 adjusted_args_size.var != 0,
				 reg_parm_stack_space)
		  || (pass == 0
		      && check_sibcall_argument_overlap (before_arg,
							 &args[i], 1)))
		sibcall_failure = 1;
	    }

	  if (((flags & ECF_CONST)
	       || ((flags & ECF_PURE) && ACCUMULATE_OUTGOING_ARGS))
	      && args[i].stack)
	    call_fusage = gen_rtx_EXPR_LIST (VOIDmode,
					     gen_rtx_USE (VOIDmode,
							  args[i].stack),
					     call_fusage);
	}
      /* If we have a parm that is passed in registers but not in memory
	 and whose alignment does not permit a direct copy into registers,
	 make a group of pseudos that correspond to each register that we
	 will later fill.  */
      if (STRICT_ALIGNMENT)
	store_unaligned_arguments_into_pseudos (args, num_actuals);
      /* Now store any partially-in-registers parm.
	 This is the last place a block-move can happen.  */
      if (reg_parm_seen)
	for (i = 0; i < num_actuals; i++)
	  if (args[i].partial != 0 && ! args[i].pass_on_stack)
	    {
	      rtx before_arg = get_last_insn ();

	      if (store_one_arg (&args[i], argblock, flags,
				 adjusted_args_size.var != 0,
				 reg_parm_stack_space)
		  || (pass == 0
		      && check_sibcall_argument_overlap (before_arg,
							 &args[i], 1)))
		sibcall_failure = 1;
	    }
      /* If we pushed args in forward order, perform stack alignment
	 after pushing the last arg.  */
      if (!PUSH_ARGS_REVERSED && argblock == 0)
	anti_adjust_stack (GEN_INT (adjusted_args_size.constant
				    - unadjusted_args_size));

      /* If register arguments require space on the stack and stack space
	 was not preallocated, allocate stack space here for arguments
	 passed in registers.  */
      if (OUTGOING_REG_PARM_STACK_SPACE ((!fndecl ? fntype : TREE_TYPE (fndecl)))
	  && !ACCUMULATE_OUTGOING_ARGS
	  && must_preallocate == 0 && reg_parm_stack_space > 0)
	anti_adjust_stack (GEN_INT (reg_parm_stack_space));
      /* Pass the function the address in which to return a
	 structure value.  */
      if (pass != 0 && structure_value_addr && ! structure_value_addr_parm)
	{
	  structure_value_addr
	    = convert_memory_address (Pmode, structure_value_addr);
	  emit_move_insn (struct_value,
			  force_reg (Pmode,
				     force_operand (structure_value_addr,
						    NULL_RTX)));

	  if (REG_P (struct_value))
	    use_reg (&call_fusage, struct_value);
	}
      after_args = get_last_insn ();
      funexp = prepare_call_address (fndecl, funexp, static_chain_value,
				     &call_fusage, reg_parm_seen, pass == 0);

      load_register_parameters (args, num_actuals, &call_fusage, flags,
				pass == 0, &sibcall_failure);
      /* Save a pointer to the last insn before the call, so that we can
	 later safely search backwards to find the CALL_INSN.  */
      before_call = get_last_insn ();

      /* Set up next argument register.  For sibling calls on machines
	 with register windows this should be the incoming register.  */
#ifdef FUNCTION_INCOMING_ARG
      if (pass == 0)
	next_arg_reg = FUNCTION_INCOMING_ARG (args_so_far, VOIDmode,
					      void_type_node, 1);
      else
#endif
	next_arg_reg = FUNCTION_ARG (args_so_far, VOIDmode,
				     void_type_node, 1);

      /* All arguments and registers used for the call must be set up by
	 now!  */

      /* Stack must be properly aligned now.  */
      gcc_assert (!pass
		  || !(stack_pointer_delta % preferred_unit_stack_boundary));
      /* Generate the actual call instruction.  */
      emit_call_1 (funexp, exp, fndecl, funtype, unadjusted_args_size,
		   adjusted_args_size.constant, struct_value_size,
		   next_arg_reg, valreg, old_inhibit_defer_pop, call_fusage,
		   flags, & args_so_far);
      /* If the call setup or the call itself overlaps with anything
	 of the argument setup we probably clobbered our call address.
	 In that case we can't do sibcalls.  */
      if (pass == 0
	  && check_sibcall_argument_overlap (after_args, 0, 0))
	sibcall_failure = 1;
      /* If a non-BLKmode value is returned at the most significant end
	 of a register, shift the register right by the appropriate amount
	 and update VALREG accordingly.  BLKmode values are handled by the
	 group load/store machinery below.  */
      if (!structure_value_addr
	  && !pcc_struct_value
	  && TYPE_MODE (rettype) != BLKmode
	  && targetm.calls.return_in_msb (rettype))
	{
	  if (shift_return_value (TYPE_MODE (rettype), false, valreg))
	    sibcall_failure = 1;
	  valreg = gen_rtx_REG (TYPE_MODE (rettype), REGNO (valreg));
	}
      if (pass && (flags & ECF_MALLOC))
	{
	  rtx temp = gen_reg_rtx (GET_MODE (valreg));
	  rtx last, insns;

	  /* The return value from a malloc-like function is a pointer.  */
	  if (TREE_CODE (rettype) == POINTER_TYPE)
	    mark_reg_pointer (temp, BIGGEST_ALIGNMENT);

	  emit_move_insn (temp, valreg);

	  /* The return value from a malloc-like function can not alias
	     anything else.  */
	  last = get_last_insn ();
	  add_reg_note (last, REG_NOALIAS, temp);

	  /* Write out the sequence.  */
	  insns = get_insns ();
	  end_sequence ();
	  emit_insn (insns);
	  valreg = temp;
	}
      /* For calls to `setjmp', etc., inform
	 function.c:setjmp_warnings that it should complain if
	 nonvolatile values are live.  For functions that cannot
	 return, inform flow that control does not fall through.  */

      if ((flags & ECF_NORETURN) || pass == 0)
	{
	  /* The barrier must be emitted
	     immediately after the CALL_INSN.  Some ports emit more
	     than just a CALL_INSN above, so we must search for it here.  */

	  rtx last = get_last_insn ();
	  while (!CALL_P (last))
	    {
	      last = PREV_INSN (last);
	      /* There was no CALL_INSN?  */
	      gcc_assert (last != before_call);
	    }

	  emit_barrier_after (last);

	  /* Stack adjustments after a noreturn call are dead code.
	     However when NO_DEFER_POP is in effect, we must preserve
	     stack_pointer_delta.  */
	  if (inhibit_defer_pop == 0)
	    {
	      stack_pointer_delta = old_stack_allocated;
	      pending_stack_adjust = 0;
	    }
	}
      /* If value type not void, return an rtx for the value.  */

      if (TYPE_MODE (rettype) == VOIDmode
	  || ignore)
	target = const0_rtx;
      else if (structure_value_addr)
	{
	  if (target == 0 || !MEM_P (target))
	    {
	      target
		= gen_rtx_MEM (TYPE_MODE (rettype),
			       memory_address (TYPE_MODE (rettype),
					       structure_value_addr));
	      set_mem_attributes (target, rettype, 1);
	    }
	}
      else if (pcc_struct_value)
	{
	  /* This is the special C++ case where we need to
	     know what the true target was.  We take care to
	     never use this value more than once in one expression.  */
	  target = gen_rtx_MEM (TYPE_MODE (rettype),
				copy_to_reg (valreg));
	  set_mem_attributes (target, rettype, 1);
	}
      /* Handle calls that return values in multiple non-contiguous locations.
	 The Irix 6 ABI has examples of this.  */
      else if (GET_CODE (valreg) == PARALLEL)
	{
	  if (target == 0)
	    {
	      /* This will only be assigned once, so it can be readonly.  */
	      tree nt = build_qualified_type (rettype,
					      (TYPE_QUALS (rettype)
					       | TYPE_QUAL_CONST));

	      target = assign_temp (nt, 0, 1, 1);
	    }

	  if (! rtx_equal_p (target, valreg))
	    emit_group_store (target, valreg, rettype,
			      int_size_in_bytes (rettype));

	  /* We can not support sibling calls for this case.  */
	  sibcall_failure = 1;
	}
      else if (target
	       && GET_MODE (target) == TYPE_MODE (rettype)
	       && GET_MODE (target) == GET_MODE (valreg))
	{
	  bool may_overlap = false;

	  /* We have to copy a return value in a CLASS_LIKELY_SPILLED hard
	     reg to a plain register.  */
	  if (!REG_P (target) || HARD_REGISTER_P (target))
	    valreg = avoid_likely_spilled_reg (valreg);

	  /* If TARGET is a MEM in the argument area, and we have
	     saved part of the argument area, then we can't store
	     directly into TARGET as it may get overwritten when we
	     restore the argument save area below.  Don't work too
	     hard though and simply force TARGET to a register if it
	     is a MEM; the optimizer is quite likely to sort it out.  */
	  if (ACCUMULATE_OUTGOING_ARGS && pass && MEM_P (target))
	    for (i = 0; i < num_actuals; i++)
	      if (args[i].save_area)
		{
		  may_overlap = true;
		  break;
		}

	  if (may_overlap)
	    target = copy_to_reg (valreg);
	  else
	    {
	      /* TARGET and VALREG cannot be equal at this point
		 because the latter would not have
		 REG_FUNCTION_VALUE_P true, while the former would if
		 it were referring to the same register.

		 If they refer to the same register, this move will be
		 a no-op, except when function inlining is being
		 done.  */
	      emit_move_insn (target, valreg);

	      /* If we are setting a MEM, this code must be executed.
		 Since it is emitted after the call insn, sibcall
		 optimization cannot be performed in that case.  */
	      if (MEM_P (target))
		sibcall_failure = 1;
	    }
	}
      else if (TYPE_MODE (rettype) == BLKmode)
	{
	  target = copy_blkmode_from_reg (target, valreg, rettype);

	  /* We can not support sibling calls for this case.  */
	  sibcall_failure = 1;
	}
      else
	target = copy_to_reg (avoid_likely_spilled_reg (valreg));
      /* If we promoted this return value, make the proper SUBREG.
	 TARGET might be const0_rtx here, so be careful.  */
      if (REG_P (target)
	  && TYPE_MODE (rettype) != BLKmode
	  && GET_MODE (target) != TYPE_MODE (rettype))
	{
	  tree type = rettype;
	  int unsignedp = TYPE_UNSIGNED (type);
	  int offset = 0;
	  enum machine_mode pmode;

	  /* Ensure we promote as expected, and get the new unsignedness.  */
	  pmode = promote_function_mode (type, TYPE_MODE (type), &unsignedp,
					 funtype, 1);
	  gcc_assert (GET_MODE (target) == pmode);

	  if ((WORDS_BIG_ENDIAN || BYTES_BIG_ENDIAN)
	      && (GET_MODE_SIZE (GET_MODE (target))
		  > GET_MODE_SIZE (TYPE_MODE (type))))
	    {
	      offset = GET_MODE_SIZE (GET_MODE (target))
		- GET_MODE_SIZE (TYPE_MODE (type));
	      if (! BYTES_BIG_ENDIAN)
		offset = (offset / UNITS_PER_WORD) * UNITS_PER_WORD;
	      else if (! WORDS_BIG_ENDIAN)
		offset %= UNITS_PER_WORD;
	    }

	  target = gen_rtx_SUBREG (TYPE_MODE (type), target, offset);
	  SUBREG_PROMOTED_VAR_P (target) = 1;
	  SUBREG_PROMOTED_UNSIGNED_SET (target, unsignedp);
	}
      /* If size of args is variable or this was a constructor call for a stack
	 argument, restore saved stack-pointer value.  */

      if (old_stack_level)
	{
	  emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX);
	  stack_pointer_delta = old_stack_pointer_delta;
	  pending_stack_adjust = old_pending_adj;
	  old_stack_allocated = stack_pointer_delta - pending_stack_adjust;
	  stack_arg_under_construction = old_stack_arg_under_construction;
	  highest_outgoing_arg_in_use = initial_highest_arg_in_use;
	  stack_usage_map = initial_stack_usage_map;
	  sibcall_failure = 1;
	}
      else if (ACCUMULATE_OUTGOING_ARGS && pass)
	{
#ifdef REG_PARM_STACK_SPACE
	  if (save_area)
	    restore_fixed_argument_area (save_area, argblock,
					 high_to_save, low_to_save);
#endif

	  /* If we saved any argument areas, restore them.  */
	  for (i = 0; i < num_actuals; i++)
	    if (args[i].save_area)
	      {
		enum machine_mode save_mode = GET_MODE (args[i].save_area);
		rtx stack_area
		  = gen_rtx_MEM (save_mode,
				 memory_address (save_mode,
						 XEXP (args[i].stack_slot, 0)));

		if (save_mode != BLKmode)
		  emit_move_insn (stack_area, args[i].save_area);
		else
		  emit_block_move (stack_area, args[i].save_area,
				   GEN_INT (args[i].locate.size.constant),
				   BLOCK_OP_CALL_PARM);
	      }

	  highest_outgoing_arg_in_use = initial_highest_arg_in_use;
	  stack_usage_map = initial_stack_usage_map;
	}
      /* If this was alloca, record the new stack level for nonlocal gotos.
	 Check for the handler slots since we might not have a save area
	 for non-local gotos.  */

      if ((flags & ECF_MAY_BE_ALLOCA) && cfun->nonlocal_goto_save_area != 0)
	update_nonlocal_goto_save_area ();

      /* Free up storage we no longer need.  */
      for (i = 0; i < num_actuals; ++i)
	if (args[i].aligned_regs)
	  free (args[i].aligned_regs);
      insns = get_insns ();
      end_sequence ();

      if (pass == 0)
	{
	  tail_call_insns = insns;

	  /* Restore the pending stack adjustment now that we have
	     finished generating the sibling call sequence.  */

	  pending_stack_adjust = save_pending_stack_adjust;
	  stack_pointer_delta = save_stack_pointer_delta;

	  /* Prepare arg structure for next iteration.  */
	  for (i = 0; i < num_actuals; i++)
	    {
	      args[i].value = 0;
	      args[i].aligned_regs = 0;
	      args[i].stack = 0;
	    }

	  sbitmap_free (stored_args_map);
	}
      else
	{
	  normal_call_insns = insns;

	  /* Verify that we've deallocated all the stack we used.  */
	  gcc_assert ((flags & ECF_NORETURN)
		      || (old_stack_allocated
			  == stack_pointer_delta - pending_stack_adjust));
	}

      /* If something prevents making this a sibling call,
	 zero out the sequence.  */
      if (sibcall_failure)
	tail_call_insns = NULL_RTX;
      else
	break;
    }
  /* If tail call production succeeded, we need to remove REG_EQUIV notes on
     arguments too, as argument area is now clobbered by the call.  */
  if (tail_call_insns)
    {
      emit_insn (tail_call_insns);
      crtl->tail_call_emit = true;
    }
  else
    emit_insn (normal_call_insns);

  currently_expanding_call--;

  if (stack_usage_map_buf)
    free (stack_usage_map_buf);

  return target;
}
/* A sibling call sequence invalidates any REG_EQUIV notes made for
   this function's incoming arguments.

   At the start of RTL generation we know the only REG_EQUIV notes
   in the rtl chain are those for incoming arguments, so we can look
   for REG_EQUIV notes between the start of the function and the
   NOTE_INSN_FUNCTION_BEG.

   This is (slight) overkill.  We could keep track of the highest
   argument we clobber and be more selective in removing notes, but it
   does not seem to be worth the effort.  */

void
fixup_tail_calls (void)
{
  rtx insn;

  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
    {
      rtx note;

      /* There are never REG_EQUIV notes for the incoming arguments
	 after the NOTE_INSN_FUNCTION_BEG note, so stop if we see it.  */
      if (NOTE_P (insn)
	  && NOTE_KIND (insn) == NOTE_INSN_FUNCTION_BEG)
	break;

      note = find_reg_note (insn, REG_EQUIV, 0);
      if (note)
	remove_note (insn, note);
      note = find_reg_note (insn, REG_EQUIV, 0);
      gcc_assert (!note);
    }
}
/* Traverse a list of TYPES and expand all complex types into their
   components.  */
static tree
split_complex_types (tree types)
{
  tree p;

  /* Before allocating memory, check for the common case of no complex.  */
  for (p = types; p; p = TREE_CHAIN (p))
    {
      tree type = TREE_VALUE (p);
      if (TREE_CODE (type) == COMPLEX_TYPE
	  && targetm.calls.split_complex_arg (type))
	goto found;
    }
  return types;

 found:
  types = copy_list (types);

  for (p = types; p; p = TREE_CHAIN (p))
    {
      tree complex_type = TREE_VALUE (p);

      if (TREE_CODE (complex_type) == COMPLEX_TYPE
	  && targetm.calls.split_complex_arg (complex_type))
	{
	  tree next, imag;

	  /* Rewrite complex type with component type.  */
	  TREE_VALUE (p) = TREE_TYPE (complex_type);
	  next = TREE_CHAIN (p);

	  /* Add another component type for the imaginary part.  */
	  imag = build_tree_list (NULL_TREE, TREE_VALUE (p));
	  TREE_CHAIN (p) = imag;
	  TREE_CHAIN (imag) = next;

	  /* Skip the newly created node.  */
	  p = TREE_CHAIN (p);
	}
    }

  return types;
}
/* Output a library call to function FUN (a SYMBOL_REF rtx).
   The RETVAL parameter specifies whether return value needs to be saved, other
   parameters are documented in the emit_library_call function below.  */

static rtx
emit_library_call_value_1 (int retval, rtx orgfun, rtx value,
			   enum libcall_type fn_type,
			   enum machine_mode outmode, int nargs, va_list p)
{
  /* Total size in bytes of all the stack-parms scanned so far.  */
  struct args_size args_size;
  /* Size of arguments before any adjustments (such as rounding).  */
  struct args_size original_args_size;
  int argnum;
  rtx fun;
  /* Todo, choose the correct decl type of orgfun. Sadly this information
     isn't present here, so we default to native calling abi here.  */
  tree fndecl ATTRIBUTE_UNUSED = NULL_TREE; /* library calls default to host calling abi ?  */
  tree fntype ATTRIBUTE_UNUSED = NULL_TREE; /* library calls default to host calling abi ?  */
  int inc;
  int count;
  rtx argblock = 0;
  CUMULATIVE_ARGS args_so_far;
  struct arg
  {
    rtx value;
    enum machine_mode mode;
    rtx reg;
    int partial;
    struct locate_and_pad_arg_data locate;
    rtx save_area;
  };
  struct arg *argvec;
  int old_inhibit_defer_pop = inhibit_defer_pop;
  rtx call_fusage = 0;
  rtx mem_value = 0;
  rtx valreg;
  int pcc_struct_value = 0;
  int struct_value_size = 0;
  int flags;
  int reg_parm_stack_space = 0;
  int needed;
  rtx before_call;
  tree tfom;			/* type_for_mode (outmode, 0) */

#ifdef REG_PARM_STACK_SPACE
  /* Define the boundary of the register parm stack space that needs to be
     saved, if any.  */
  int low_to_save = 0, high_to_save = 0;
  rtx save_area = 0;		/* Place that it is saved.  */
#endif

  /* Size of the stack reserved for parameter registers.  */
  int initial_highest_arg_in_use = highest_outgoing_arg_in_use;
  char *initial_stack_usage_map = stack_usage_map;
  char *stack_usage_map_buf = NULL;

  rtx struct_value = targetm.calls.struct_value_rtx (0, 0);

#ifdef REG_PARM_STACK_SPACE
  reg_parm_stack_space = REG_PARM_STACK_SPACE ((tree) 0);
#endif
  /* By default, library functions can not throw.  */
  flags = ECF_NOTHROW;

  switch (fn_type)
    {
    case LCT_NORMAL:
      break;
    case LCT_CONST:
      flags |= ECF_CONST;
      break;
    case LCT_PURE:
      flags |= ECF_PURE;
      break;
    case LCT_NORETURN:
      flags |= ECF_NORETURN;
      break;
    case LCT_THROW:
      flags = ECF_NORETURN;
      break;
    case LCT_RETURNS_TWICE:
      flags = ECF_RETURNS_TWICE;
      break;
    }
  fun = orgfun;

  /* Ensure current function's preferred stack boundary is at least
     what we need.  */
  if (crtl->preferred_stack_boundary < PREFERRED_STACK_BOUNDARY)
    crtl->preferred_stack_boundary = PREFERRED_STACK_BOUNDARY;
  /* If this kind of value comes back in memory,
     decide where in memory it should come back.  */
  if (outmode != VOIDmode)
    {
      tfom = lang_hooks.types.type_for_mode (outmode, 0);
      if (aggregate_value_p (tfom, 0))
	{
#ifdef PCC_STATIC_STRUCT_RETURN
	  rtx pointer_reg
	    = hard_function_value (build_pointer_type (tfom), 0, 0, 0);
	  mem_value = gen_rtx_MEM (outmode, pointer_reg);
	  pcc_struct_value = 1;
	  if (value == 0)
	    value = gen_reg_rtx (outmode);
#else /* not PCC_STATIC_STRUCT_RETURN */
	  struct_value_size = GET_MODE_SIZE (outmode);
	  if (value != 0 && MEM_P (value))
	    mem_value = value;
	  else
	    mem_value = assign_temp (tfom, 0, 1, 1);
#endif
	  /* This call returns a big structure.  */
	  flags &= ~(ECF_CONST | ECF_PURE | ECF_LOOPING_CONST_OR_PURE);
	}
    }
  else
    tfom = void_type_node;
  /* ??? Unfinished: must pass the memory address as an argument.  */

  /* Copy all the libcall-arguments out of the varargs data
     and into a vector ARGVEC.

     Compute how to pass each argument.  We only support a very small subset
     of the full argument passing conventions to limit complexity here since
     library functions shouldn't have many args.  */

  argvec = XALLOCAVEC (struct arg, nargs + 1);
  memset (argvec, 0, (nargs + 1) * sizeof (struct arg));

#ifdef INIT_CUMULATIVE_LIBCALL_ARGS
  INIT_CUMULATIVE_LIBCALL_ARGS (args_so_far, outmode, fun);
#else
  INIT_CUMULATIVE_ARGS (args_so_far, NULL_TREE, fun, 0, nargs);
#endif

  args_size.constant = 0;
  args_size.var = 0;

  count = 0;

  push_temp_slots ();
  /* If there's a structure value address to be passed,
     either pass it in the special place, or pass it as an extra argument.  */
  if (mem_value && struct_value == 0 && ! pcc_struct_value)
    {
      rtx addr = XEXP (mem_value, 0);

      nargs++;

      /* Make sure it is a reasonable operand for a move or push insn.  */
      if (!REG_P (addr) && !MEM_P (addr)
	  && ! (CONSTANT_P (addr) && LEGITIMATE_CONSTANT_P (addr)))
	addr = force_operand (addr, NULL_RTX);

      argvec[count].value = addr;
      argvec[count].mode = Pmode;
      argvec[count].partial = 0;

      argvec[count].reg = FUNCTION_ARG (args_so_far, Pmode, NULL_TREE, 1);
      gcc_assert (targetm.calls.arg_partial_bytes (&args_so_far, Pmode,
						   NULL_TREE, 1) == 0);

      locate_and_pad_parm (Pmode, NULL_TREE,
#ifdef STACK_PARMS_IN_REG_PARM_AREA
			   1,
#else
			   argvec[count].reg != 0,
#endif
			   0, NULL_TREE, &args_size, &argvec[count].locate);

      if (argvec[count].reg == 0 || argvec[count].partial != 0
	  || reg_parm_stack_space > 0)
	args_size.constant += argvec[count].locate.size.constant;

      FUNCTION_ARG_ADVANCE (args_so_far, Pmode, (tree) 0, 1);

      count++;
    }
  for (; count < nargs; count++)
    {
      rtx val = va_arg (p, rtx);
      enum machine_mode mode = (enum machine_mode) va_arg (p, int);

      /* We cannot convert the arg value to the mode the library wants here;
	 must do it earlier where we know the signedness of the arg.  */
      gcc_assert (mode != BLKmode
		  && (GET_MODE (val) == mode || GET_MODE (val) == VOIDmode));

      /* Make sure it is a reasonable operand for a move or push insn.  */
      if (!REG_P (val) && !MEM_P (val)
	  && ! (CONSTANT_P (val) && LEGITIMATE_CONSTANT_P (val)))
	val = force_operand (val, NULL_RTX);

      if (pass_by_reference (&args_so_far, mode, NULL_TREE, 1))
	{
	  rtx slot;
	  int must_copy
	    = !reference_callee_copied (&args_so_far, mode, NULL_TREE, 1);

	  /* If this was a CONST function, it is now PURE since it now
	     reads memory.  */
	  if (flags & ECF_CONST)
	    {
	      flags &= ~ECF_CONST;
	      flags |= ECF_PURE;
	    }

	  if (MEM_P (val) && !must_copy)
	    slot = val;
	  else
	    {
	      slot = assign_temp (lang_hooks.types.type_for_mode (mode, 0),
				  0, 1, 1);
	      emit_move_insn (slot, val);
	    }

	  call_fusage = gen_rtx_EXPR_LIST (VOIDmode,
					   gen_rtx_USE (VOIDmode, slot),
					   call_fusage);
	  if (must_copy)
	    call_fusage = gen_rtx_EXPR_LIST (VOIDmode,
					     gen_rtx_CLOBBER (VOIDmode,
							      slot),
					     call_fusage);

	  mode = Pmode;
	  val = force_operand (XEXP (slot, 0), NULL_RTX);
	}

      argvec[count].value = val;
      argvec[count].mode = mode;

      argvec[count].reg = FUNCTION_ARG (args_so_far, mode, NULL_TREE, 1);

      argvec[count].partial
	= targetm.calls.arg_partial_bytes (&args_so_far, mode, NULL_TREE, 1);

      locate_and_pad_parm (mode, NULL_TREE,
#ifdef STACK_PARMS_IN_REG_PARM_AREA
			   1,
#else
			   argvec[count].reg != 0,
#endif
			   argvec[count].partial,
			   NULL_TREE, &args_size, &argvec[count].locate);

      gcc_assert (!argvec[count].locate.size.var);

      if (argvec[count].reg == 0 || argvec[count].partial != 0
	  || reg_parm_stack_space > 0)
	args_size.constant += argvec[count].locate.size.constant;

      FUNCTION_ARG_ADVANCE (args_so_far, mode, (tree) 0, 1);
    }
  /* If this machine requires an external definition for library
     functions, write one out.  */
  assemble_external_libcall (fun);

  original_args_size = args_size;
  args_size.constant = (((args_size.constant
			  + stack_pointer_delta
			  + STACK_BYTES - 1)
			 / STACK_BYTES
			 * STACK_BYTES)
			- stack_pointer_delta);

  args_size.constant = MAX (args_size.constant,
			    reg_parm_stack_space);

  if (! OUTGOING_REG_PARM_STACK_SPACE ((!fndecl ? fntype : TREE_TYPE (fndecl))))
    args_size.constant -= reg_parm_stack_space;

  if (args_size.constant > crtl->outgoing_args_size)
    crtl->outgoing_args_size = args_size.constant;
  if (ACCUMULATE_OUTGOING_ARGS)
    {
      /* Since the stack pointer will never be pushed, it is possible for
	 the evaluation of a parm to clobber something we have already
	 written to the stack.  Since most function calls on RISC machines
	 do not use the stack, this is uncommon, but must work correctly.

	 Therefore, we save any area of the stack that was already written
	 and that we are using.  Here we set up to do this by making a new
	 stack usage map from the old one.

	 Another approach might be to try to reorder the argument
	 evaluations to avoid this conflicting stack usage.  */

      needed = args_size.constant;

      /* Since we will be writing into the entire argument area, the
	 map must be allocated for its entire size, not just the part that
	 is the responsibility of the caller.  */
      if (! OUTGOING_REG_PARM_STACK_SPACE ((!fndecl ? fntype : TREE_TYPE (fndecl))))
	needed += reg_parm_stack_space;

#ifdef ARGS_GROW_DOWNWARD
      highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
					 needed + 1);
#else
      highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
					 needed);
#endif
      stack_usage_map_buf = XNEWVEC (char, highest_outgoing_arg_in_use);
      stack_usage_map = stack_usage_map_buf;

      if (initial_highest_arg_in_use)
	memcpy (stack_usage_map, initial_stack_usage_map,
		initial_highest_arg_in_use);

      if (initial_highest_arg_in_use != highest_outgoing_arg_in_use)
	memset (&stack_usage_map[initial_highest_arg_in_use], 0,
		highest_outgoing_arg_in_use - initial_highest_arg_in_use);

      /* We must be careful to use virtual regs before they're instantiated,
	 and real regs afterwards.  Loop optimization, for example, can create
	 new libcalls after we've instantiated the virtual regs, and if we
	 use virtuals anyway, they won't match the rtl patterns.  */

      if (virtuals_instantiated)
	argblock = plus_constant (stack_pointer_rtx, STACK_POINTER_OFFSET);
      else
	argblock = virtual_outgoing_args_rtx;
    }
  else
    {
      if (!PUSH_ARGS)
	argblock = push_block (GEN_INT (args_size.constant), 0, 0);
    }
  /* If we push args individually in reverse order, perform stack alignment
     before the first push (the last arg).  */
  if (argblock == 0 && PUSH_ARGS_REVERSED)
    anti_adjust_stack (GEN_INT (args_size.constant
				- original_args_size.constant));

  if (PUSH_ARGS_REVERSED)
    {
      inc = -1;
      argnum = nargs - 1;
    }
  else
    {
      inc = 1;
      argnum = 0;
    }
#ifdef REG_PARM_STACK_SPACE
  if (ACCUMULATE_OUTGOING_ARGS)
    {
      /* The argument list is the property of the called routine and it
	 may clobber it.  If the fixed area has been used for previous
	 parameters, we must save and restore it.  */
      save_area = save_fixed_argument_area (reg_parm_stack_space, argblock,
					    &low_to_save, &high_to_save);
    }
#endif
  /* Push the args that need to be pushed.  */

  /* ARGNUM indexes the ARGVEC array in the order in which the arguments
     are to be pushed.  */
  for (count = 0; count < nargs; count++, argnum += inc)
    {
      enum machine_mode mode = argvec[argnum].mode;
      rtx val = argvec[argnum].value;
      rtx reg = argvec[argnum].reg;
      int partial = argvec[argnum].partial;
      unsigned int parm_align = argvec[argnum].locate.boundary;
      int lower_bound = 0, upper_bound = 0, i;

      if (! (reg != 0 && partial == 0))
	{
	  if (ACCUMULATE_OUTGOING_ARGS)
	    {
	      /* If this is being stored into a pre-allocated, fixed-size,
		 stack area, save any previous data at that location.  */

#ifdef ARGS_GROW_DOWNWARD
	      /* stack_slot is negative, but we want to index stack_usage_map
		 with positive values.  */
	      upper_bound = -argvec[argnum].locate.slot_offset.constant + 1;
	      lower_bound = upper_bound - argvec[argnum].locate.size.constant;
#else
	      lower_bound = argvec[argnum].locate.slot_offset.constant;
	      upper_bound = lower_bound + argvec[argnum].locate.size.constant;
#endif

	      i = lower_bound;
	      /* Don't worry about things in the fixed argument area;
		 it has already been saved.  */
	      if (i < reg_parm_stack_space)
		i = reg_parm_stack_space;
	      while (i < upper_bound && stack_usage_map[i] == 0)
		i++;

	      if (i < upper_bound)
		{
		  /* We need to make a save area.  */
		  unsigned int size
		    = argvec[argnum].locate.size.constant * BITS_PER_UNIT;
		  enum machine_mode save_mode
		    = mode_for_size (size, MODE_INT, 1);
		  rtx adr
		    = plus_constant (argblock,
				     argvec[argnum].locate.offset.constant);
		  rtx stack_area
		    = gen_rtx_MEM (save_mode, memory_address (save_mode, adr));

		  if (save_mode == BLKmode)
		    {
		      argvec[argnum].save_area
			= assign_stack_temp (BLKmode,
					     argvec[argnum].locate.size.constant,
					     0);

		      emit_block_move (validize_mem (argvec[argnum].save_area),
				       stack_area,
				       GEN_INT (argvec[argnum].locate.size.constant),
				       BLOCK_OP_CALL_PARM);
		    }
		  else
		    {
		      argvec[argnum].save_area = gen_reg_rtx (save_mode);

		      emit_move_insn (argvec[argnum].save_area, stack_area);
		    }
		}
	    }

	  emit_push_insn (val, mode, NULL_TREE, NULL_RTX, parm_align,
			  partial, reg, 0, argblock,
			  GEN_INT (argvec[argnum].locate.offset.constant),
			  reg_parm_stack_space,
			  ARGS_SIZE_RTX (argvec[argnum].locate.alignment_pad));

	  /* Now mark the segment we just used.  */
	  if (ACCUMULATE_OUTGOING_ARGS)
	    for (i = lower_bound; i < upper_bound; i++)
	      stack_usage_map[i] = 1;

	  NO_DEFER_POP;

	  if ((flags & ECF_CONST)
	      || ((flags & ECF_PURE) && ACCUMULATE_OUTGOING_ARGS))
	    {
	      rtx use;

	      /* Indicate argument access so that alias.c knows that these
		 values are live.  */
	      if (argblock)
		use = plus_constant (argblock,
				     argvec[argnum].locate.offset.constant);
	      else
		/* When arguments are pushed, trying to tell alias.c where
		   exactly this argument is won't work, because the
		   auto-increment causes confusion.  So we merely indicate
		   that we access something with a known mode somewhere on
		   the stack.  */
		use = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
				    gen_rtx_SCRATCH (Pmode));
	      use = gen_rtx_MEM (argvec[argnum].mode, use);
	      use = gen_rtx_USE (VOIDmode, use);
	      call_fusage = gen_rtx_EXPR_LIST (VOIDmode, use, call_fusage);
	    }
	}
    }
  /* If we pushed args in forward order, perform stack alignment
     after pushing the last arg.  */
  if (argblock == 0 && !PUSH_ARGS_REVERSED)
    anti_adjust_stack (GEN_INT (args_size.constant
                                - original_args_size.constant));

  if (PUSH_ARGS_REVERSED)
    argnum = nargs - 1;
  else
    argnum = 0;

  fun = prepare_call_address (NULL, fun, NULL, &call_fusage, 0, 0);
  /* Now load any reg parms into their regs.  */

  /* ARGNUM indexes the ARGVEC array in the order in which the arguments
     are to be pushed.  */
  for (count = 0; count < nargs; count++, argnum += inc)
    {
      enum machine_mode mode = argvec[argnum].mode;
      rtx val = argvec[argnum].value;
      rtx reg = argvec[argnum].reg;
      int partial = argvec[argnum].partial;

      /* Handle calls that pass values in multiple non-contiguous
         locations.  The PA64 has examples of this for library calls.  */
      if (reg != 0 && GET_CODE (reg) == PARALLEL)
        emit_group_load (reg, val, NULL_TREE, GET_MODE_SIZE (mode));
      else if (reg != 0 && partial == 0)
        emit_move_insn (reg, val);

      NO_DEFER_POP;
    }
  /* Any regs containing parms remain in use through the call.  */
  for (count = 0; count < nargs; count++)
    {
      rtx reg = argvec[count].reg;
      if (reg != 0 && GET_CODE (reg) == PARALLEL)
        use_group_regs (&call_fusage, reg);
      else if (reg != 0)
        {
          int partial = argvec[count].partial;
          if (partial)
            {
              int nregs;

              gcc_assert (partial % UNITS_PER_WORD == 0);
              nregs = partial / UNITS_PER_WORD;
              use_regs (&call_fusage, REGNO (reg), nregs);
            }
          else
            use_reg (&call_fusage, reg);
        }
    }
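  /* Worked example (hypothetical numbers, not from this file): in the loop
     above, an argument with partial == 8 on a target where
     UNITS_PER_WORD == 4 yields nregs == 2, so the two consecutive hard
     registers starting at REGNO (reg) are recorded in CALL_FUSAGE as live
     across the call.  */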
  /* Pass the function the address in which to return a structure value.  */
  if (mem_value != 0 && struct_value != 0 && ! pcc_struct_value)
    {
      emit_move_insn (struct_value,
                      force_reg (Pmode,
                                 force_operand (XEXP (mem_value, 0),
                                                NULL_RTX)));
      if (REG_P (struct_value))
        use_reg (&call_fusage, struct_value);
    }
  /* Don't allow popping to be deferred, since then
     cse'ing of library calls could delete a call and leave the pop.  */
  NO_DEFER_POP;
  valreg = (mem_value == 0 && outmode != VOIDmode
            ? hard_libcall_value (outmode, orgfun) : NULL_RTX);

  /* Stack must be properly aligned now.  */
  gcc_assert (!(stack_pointer_delta
                & (PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT - 1)));

  before_call = get_last_insn ();
  /* We pass the old value of inhibit_defer_pop + 1 to emit_call_1, which
     will set inhibit_defer_pop to that value.  */
  /* The return type is needed to decide how many bytes the function pops.
     Signedness plays no role in that, so for simplicity, we pretend it's
     always signed.  We also assume that the list of arguments passed has
     no impact, so we pretend it is unknown.  */

  emit_call_1 (fun, NULL,
               get_identifier (XSTR (orgfun, 0)),
               build_function_type (tfom, NULL_TREE),
               original_args_size.constant, args_size.constant,
               struct_value_size,
               FUNCTION_ARG (args_so_far, VOIDmode, void_type_node, 1),
               valreg,
               old_inhibit_defer_pop + 1, call_fusage, flags, & args_so_far);
  /* For calls to `setjmp', etc., inform function.c:setjmp_warnings
     that it should complain if nonvolatile values are live.  For
     functions that cannot return, inform flow that control does not
     fall through.  */

  if (flags & ECF_NORETURN)
    {
      /* The barrier note must be emitted
         immediately after the CALL_INSN.  Some ports emit more than
         just a CALL_INSN above, so we must search for it here.  */

      rtx last = get_last_insn ();
      while (!CALL_P (last))
        {
          last = PREV_INSN (last);
          /* There was no CALL_INSN?  */
          gcc_assert (last != before_call);
        }

      emit_barrier_after (last);
    }
  /* Now restore inhibit_defer_pop to its actual original value.  */
  OK_DEFER_POP;
  /* Copy the value to the right place.  */
  if (outmode != VOIDmode && retval)
    {
      if (mem_value)
        {
          if (value == 0)
            value = mem_value;
          if (value != mem_value)
            emit_move_insn (value, mem_value);
        }
      else if (GET_CODE (valreg) == PARALLEL)
        {
          if (value == 0)
            value = gen_reg_rtx (outmode);
          emit_group_store (value, valreg, NULL_TREE, GET_MODE_SIZE (outmode));
        }
      else
        {
          /* Convert to the proper mode if a promotion has been active.  */
          if (GET_MODE (valreg) != outmode)
            {
              int unsignedp = TYPE_UNSIGNED (tfom);

              gcc_assert (promote_function_mode (tfom, outmode, &unsignedp,
                                                 fndecl ? TREE_TYPE (fndecl) : fntype, 1)
                          == GET_MODE (valreg));
              valreg = convert_modes (outmode, GET_MODE (valreg), valreg, 0);
            }

          if (value != 0)
            emit_move_insn (value, valreg);
          else
            value = valreg;
        }
    }
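  /* Illustrative example for the promotion handling above (hypothetical
     target, not part of this file's logic): on a machine whose ABI promotes
     sub-word return values, a libcall declared with OUTMODE == QImode may
     hand VALREG back in SImode.  promote_function_mode confirms that SImode
     is indeed the promoted mode, and convert_modes then narrows the value
     back to the QImode result the caller asked for.  */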
  if (ACCUMULATE_OUTGOING_ARGS)
    {
#ifdef REG_PARM_STACK_SPACE
      if (save_area)
        restore_fixed_argument_area (save_area, argblock,
                                     high_to_save, low_to_save);
#endif

      /* If we saved any argument areas, restore them.  */
      for (count = 0; count < nargs; count++)
        if (argvec[count].save_area)
          {
            enum machine_mode save_mode = GET_MODE (argvec[count].save_area);
            rtx adr = plus_constant (argblock,
                                     argvec[count].locate.offset.constant);
            rtx stack_area = gen_rtx_MEM (save_mode,
                                          memory_address (save_mode, adr));

            if (save_mode == BLKmode)
              emit_block_move (stack_area,
                               validize_mem (argvec[count].save_area),
                               GEN_INT (argvec[count].locate.size.constant),
                               BLOCK_OP_CALL_PARM);
            else
              emit_move_insn (stack_area, argvec[count].save_area);
          }
      highest_outgoing_arg_in_use = initial_highest_arg_in_use;
      stack_usage_map = initial_stack_usage_map;
    }

  if (stack_usage_map_buf)
    free (stack_usage_map_buf);

  return value;
}
/* Output a library call to function FUN (a SYMBOL_REF rtx)
   (emitting the queue unless NO_QUEUE is nonzero),
   for a value of mode OUTMODE,
   with NARGS different arguments, passed as alternating rtx values
   and machine_modes to convert them to.

   FN_TYPE should be LCT_NORMAL for `normal' calls, LCT_CONST for
   `const' calls, LCT_PURE for `pure' calls, or other LCT_ value for
   other types of library calls.  */

void
emit_library_call (rtx orgfun, enum libcall_type fn_type,
                   enum machine_mode outmode, int nargs, ...)
{
  va_list p;

  va_start (p, nargs);
  emit_library_call_value_1 (0, orgfun, NULL_RTX, fn_type, outmode, nargs, p);
  va_end (p);
}
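/* Illustrative usage sketch for emit_library_call (the helper name and the
   operands OP0/OP1 are hypothetical, not defined in this file): a caller
   emitting a two-operand SImode helper with no interesting return value
   would typically write something like

     emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "__helper_si3"),
                        LCT_NORMAL, VOIDmode, 2,
                        op0, SImode, op1, SImode);

   i.e. NARGS pairs of an rtx value followed by the machine_mode it should
   be converted to, as the comment above describes.  */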
/* Like emit_library_call except that an extra argument, VALUE,
   comes second and says where to store the result.
   (If VALUE is zero, this function chooses a convenient way
   to return the value.)

   This function returns an rtx for where the value is to be found.
   If VALUE is nonzero, VALUE is returned.  */

rtx
emit_library_call_value (rtx orgfun, rtx value,
                         enum libcall_type fn_type,
                         enum machine_mode outmode, int nargs, ...)
{
  rtx result;
  va_list p;

  va_start (p, nargs);
  result = emit_library_call_value_1 (1, orgfun, value, fn_type, outmode,
                                      nargs, p);
  va_end (p);

  return result;
}
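/* Illustrative usage sketch for emit_library_call_value (the operands NUM
   and DEN are hypothetical, not defined in this file): obtaining a DImode
   quotient from the libgcc division helper might look roughly like

     rtx quot
       = emit_library_call_value (gen_rtx_SYMBOL_REF (Pmode, "__divdi3"),
                                  NULL_RTX, LCT_CONST, DImode, 2,
                                  num, DImode, den, DImode);

   Passing NULL_RTX for VALUE lets the function pick a convenient result
   location; the returned rtx says where the quotient ended up.  */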
/* Store a single argument for a function call
   into the register or memory area where it must be passed.
   *ARG describes the argument value and where to pass it.

   ARGBLOCK is the address of the stack-block for all the arguments,
   or 0 on a machine where arguments are pushed individually.

   MAY_BE_ALLOCA nonzero says this could be a call to `alloca'
   so must be careful about how the stack is used.

   VARIABLE_SIZE nonzero says that this was a variable-sized outgoing
   argument stack.  This is used if ACCUMULATE_OUTGOING_ARGS to indicate
   that we need not worry about saving and restoring the stack.

   FNDECL is the declaration of the function we are calling.

   Return nonzero if this arg should cause sibcall failure,
   zero otherwise.  */

static int
store_one_arg (struct arg_data *arg, rtx argblock, int flags,
               int variable_size ATTRIBUTE_UNUSED, int reg_parm_stack_space)
{
  tree pval = arg->tree_value;
  rtx reg = 0;
  int partial = 0;
  int used = 0;
  int i, lower_bound = 0, upper_bound = 0;
  int sibcall_failure = 0;

  if (TREE_CODE (pval) == ERROR_MARK)
    return 1;

  /* Push a new temporary level for any temporaries we make for
     this argument.  */
  push_temp_slots ();

  if (ACCUMULATE_OUTGOING_ARGS && !(flags & ECF_SIBCALL))
    {
      /* If this is being stored into a pre-allocated, fixed-size, stack area,
         save any previous data at that location.  */
      if (argblock && ! variable_size && arg->stack)
        {
#ifdef ARGS_GROW_DOWNWARD
          /* stack_slot is negative, but we want to index stack_usage_map
             with positive values.  */
          if (GET_CODE (XEXP (arg->stack_slot, 0)) == PLUS)
            upper_bound = -INTVAL (XEXP (XEXP (arg->stack_slot, 0), 1)) + 1;
          else
            upper_bound = 0;

          lower_bound = upper_bound - arg->locate.size.constant;
#else
          if (GET_CODE (XEXP (arg->stack_slot, 0)) == PLUS)
            lower_bound = INTVAL (XEXP (XEXP (arg->stack_slot, 0), 1));
          else
            lower_bound = 0;

          upper_bound = lower_bound + arg->locate.size.constant;
#endif
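          /* Worked example (hypothetical numbers, not from this file): with
             ARGS_GROW_DOWNWARD and a stack_slot at offset -24 holding an
             8-byte argument, upper_bound == -(-24) + 1 == 25 and
             lower_bound == 25 - 8 == 17, so the slot occupies entries
             17..24 of stack_usage_map.  */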
          i = lower_bound;
          /* Don't worry about things in the fixed argument area;
             it has already been saved.  */
          if (i < reg_parm_stack_space)
            i = reg_parm_stack_space;
          while (i < upper_bound && stack_usage_map[i] == 0)
            i++;

          if (i < upper_bound)
            {
              /* We need to make a save area.  */
              unsigned int size = arg->locate.size.constant * BITS_PER_UNIT;
              enum machine_mode save_mode = mode_for_size (size, MODE_INT, 1);
              rtx adr = memory_address (save_mode, XEXP (arg->stack_slot, 0));
              rtx stack_area = gen_rtx_MEM (save_mode, adr);

              if (save_mode == BLKmode)
                {
                  tree ot = TREE_TYPE (arg->tree_value);
                  tree nt = build_qualified_type (ot, (TYPE_QUALS (ot)
                                                       | TYPE_QUAL_CONST));

                  arg->save_area = assign_temp (nt, 0, 1, 1);
                  preserve_temp_slots (arg->save_area);
                  emit_block_move (validize_mem (arg->save_area), stack_area,
                                   GEN_INT (arg->locate.size.constant),
                                   BLOCK_OP_CALL_PARM);
                }
              else
                {
                  arg->save_area = gen_reg_rtx (save_mode);
                  emit_move_insn (arg->save_area, stack_area);
                }
            }
        }
    }
  /* If this isn't going to be placed on both the stack and in registers,
     set up the register and number of words.  */
  if (! arg->pass_on_stack)
    {
      if (flags & ECF_SIBCALL)
        reg = arg->tail_call_reg;
      else
        reg = arg->reg;
      partial = arg->partial;
    }

  /* Being passed entirely in a register.  We shouldn't be called in
     this case.  */
  gcc_assert (reg == 0 || partial != 0);

  /* If this arg needs special alignment, don't load the registers
     here.  */
  if (arg->n_aligned_regs != 0)
    reg = 0;
  /* If this is being passed partially in a register, we can't evaluate
     it directly into its stack slot.  Otherwise, we can.  */
  if (arg->value == 0)
    {
      /* stack_arg_under_construction is nonzero if a function argument is
         being evaluated directly into the outgoing argument list and
         expand_call must take special action to preserve the argument list
         if it is called recursively.

         For scalar function arguments stack_usage_map is sufficient to
         determine which stack slots must be saved and restored.  Scalar
         arguments in general have pass_on_stack == 0.

         If this argument is initialized by a function which takes the
         address of the argument (a C++ constructor or a C function
         returning a BLKmode structure), then stack_usage_map is
         insufficient and expand_call must push the stack around the
         function call.  Such arguments have pass_on_stack == 1.

         Note that it is always safe to set stack_arg_under_construction,
         but this generates suboptimal code if set when not needed.  */

      if (arg->pass_on_stack)
        stack_arg_under_construction++;

      arg->value = expand_expr (pval,
                                (partial
                                 || TYPE_MODE (TREE_TYPE (pval)) != arg->mode)
                                ? NULL_RTX : arg->stack,
                                VOIDmode, EXPAND_STACK_PARM);
      /* If we are promoting the object (or if for any other reason the mode
         doesn't agree), convert the mode.  */

      if (arg->mode != TYPE_MODE (TREE_TYPE (pval)))
        arg->value = convert_modes (arg->mode, TYPE_MODE (TREE_TYPE (pval)),
                                    arg->value, arg->unsignedp);

      if (arg->pass_on_stack)
        stack_arg_under_construction--;
    }
  /* Check for overlap with already clobbered argument area.  */
  if ((flags & ECF_SIBCALL)
      && MEM_P (arg->value)
      && mem_overlaps_already_clobbered_arg_p (XEXP (arg->value, 0),
                                               arg->locate.size.constant))
    sibcall_failure = 1;

  /* Don't allow anything left on stack from computation
     of argument to alloca.  */
  if (flags & ECF_MAY_BE_ALLOCA)
    do_pending_stack_adjust ();

  if (arg->value == arg->stack)
    /* If the value is already in the stack slot, we are done.  */
    ;
  else if (arg->mode != BLKmode)
    {
      int size;
      unsigned int parm_align;
      /* Argument is a scalar, not entirely passed in registers.
         (If part is passed in registers, arg->partial says how much
         and emit_push_insn will take care of putting it there.)

         Push it, and if its size is less than the
         amount of space allocated to it,
         also bump stack pointer by the additional space.
         Note that in C the default argument promotions
         will prevent such mismatches.  */

      size = GET_MODE_SIZE (arg->mode);
      /* Compute how much space the push instruction will push.
         On many machines, pushing a byte will advance the stack
         pointer by a halfword.  */
#ifdef PUSH_ROUNDING
      size = PUSH_ROUNDING (size);
#endif
      used = size;

      /* Compute how much space the argument should get:
         round up to a multiple of the alignment for arguments.  */
      if (none != FUNCTION_ARG_PADDING (arg->mode, TREE_TYPE (pval)))
        used = (((size + PARM_BOUNDARY / BITS_PER_UNIT - 1)
                 / (PARM_BOUNDARY / BITS_PER_UNIT))
                * (PARM_BOUNDARY / BITS_PER_UNIT));
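      /* Worked example (hypothetical numbers, not from this file): with
         size == 1 byte and PARM_BOUNDARY == 32 bits, the rounding above
         gives used == ((1 + 4 - 1) / 4) * 4 == 4, so the byte occupies a
         full 4-byte parameter slot.  */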
      /* Compute the alignment of the pushed argument.  */
      parm_align = arg->locate.boundary;
      if (FUNCTION_ARG_PADDING (arg->mode, TREE_TYPE (pval)) == downward)
        {
          int pad = used - size;
          if (pad)
            {
              unsigned int pad_align = (pad & -pad) * BITS_PER_UNIT;
              parm_align = MIN (parm_align, pad_align);
            }
        }
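      /* Worked example (hypothetical numbers): continuing the case above
         with used == 4 and size == 1, pad == 3, pad & -pad == 1, so
         pad_align == 8 bits and the argument's known alignment is capped
         at one byte.  */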
      /* This isn't already where we want it on the stack, so put it there.
         This can either be done with push or copy insns.  */
      emit_push_insn (arg->value, arg->mode, TREE_TYPE (pval), NULL_RTX,
                      parm_align, partial, reg, used - size, argblock,
                      ARGS_SIZE_RTX (arg->locate.offset), reg_parm_stack_space,
                      ARGS_SIZE_RTX (arg->locate.alignment_pad));

      /* Unless this is a partially-in-register argument, the argument is now
         in the stack.  */
      if (partial == 0)
        arg->value = arg->stack;
    }
  else
    {
      /* BLKmode, at least partly to be pushed.  */

      unsigned int parm_align;
      int excess;
      rtx size_rtx;

      /* Pushing a nonscalar.
         If part is passed in registers, PARTIAL says how much
         and emit_push_insn will take care of putting it there.  */

      /* Round its size up to a multiple
         of the allocation unit for arguments.  */

      if (arg->locate.size.var != 0)
        {
          excess = 0;
          size_rtx = ARGS_SIZE_RTX (arg->locate.size);
        }
      else
        {
          /* PUSH_ROUNDING has no effect on us, because emit_push_insn
             for BLKmode is careful to avoid it.  */
          excess = (arg->locate.size.constant
                    - int_size_in_bytes (TREE_TYPE (pval))
                    + partial);
          size_rtx = expand_expr (size_in_bytes (TREE_TYPE (pval)),
                                  NULL_RTX, TYPE_MODE (sizetype),
                                  EXPAND_NORMAL);
        }

      parm_align = arg->locate.boundary;
      /* When an argument is padded down, the block is aligned to
         PARM_BOUNDARY, but the actual argument isn't.  */
      if (FUNCTION_ARG_PADDING (arg->mode, TREE_TYPE (pval)) == downward)
        {
          if (arg->locate.size.var)
            parm_align = BITS_PER_UNIT;
          else if (excess)
            {
              unsigned int excess_align = (excess & -excess) * BITS_PER_UNIT;
              parm_align = MIN (parm_align, excess_align);
            }
        }
      if ((flags & ECF_SIBCALL) && MEM_P (arg->value))
        {
          /* emit_push_insn might not work properly if arg->value and
             argblock + arg->locate.offset areas overlap.  */
          rtx x = arg->value;
          int i = 0;

          if (XEXP (x, 0) == crtl->args.internal_arg_pointer
              || (GET_CODE (XEXP (x, 0)) == PLUS
                  && XEXP (XEXP (x, 0), 0) == crtl->args.internal_arg_pointer
                  && CONST_INT_P (XEXP (XEXP (x, 0), 1))))
            {
              if (XEXP (x, 0) != crtl->args.internal_arg_pointer)
                i = INTVAL (XEXP (XEXP (x, 0), 1));

              /* expand_call should ensure this.  */
              gcc_assert (!arg->locate.offset.var
                          && arg->locate.size.var == 0
                          && CONST_INT_P (size_rtx));

              if (arg->locate.offset.constant > i)
                {
                  if (arg->locate.offset.constant < i + INTVAL (size_rtx))
                    sibcall_failure = 1;
                }
              else if (arg->locate.offset.constant < i)
                {
                  /* Use arg->locate.size.constant instead of size_rtx
                     because we only care about the part of the argument
                     on the stack.  */
                  if (i < (arg->locate.offset.constant
                           + arg->locate.size.constant))
                    sibcall_failure = 1;
                }
              else
                {
                  /* Even though they appear to be at the same location,
                     if part of the outgoing argument is in registers,
                     they aren't really at the same location.  Check for
                     this by making sure that the incoming size is the
                     same as the outgoing size.  */
                  if (arg->locate.size.constant != INTVAL (size_rtx))
                    sibcall_failure = 1;
                }
            }
        }
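      /* Worked example for the overlap check above (hypothetical numbers):
         if the incoming copy of the argument starts at i == 8 with
         INTVAL (size_rtx) == 16, and the outgoing slot starts at
         arg->locate.offset.constant == 16, then 16 > 8 but 16 < 8 + 16, so
         the two areas overlap and the sibling call is abandoned.  */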
      emit_push_insn (arg->value, arg->mode, TREE_TYPE (pval), size_rtx,
                      parm_align, partial, reg, excess, argblock,
                      ARGS_SIZE_RTX (arg->locate.offset), reg_parm_stack_space,
                      ARGS_SIZE_RTX (arg->locate.alignment_pad));

      /* Unless this is a partially-in-register argument, the argument is now
         in the stack.

         ??? Unlike the case above, in which we want the actual
         address of the data, so that we can load it directly into a
         register, here we want the address of the stack slot, so that
         it's properly aligned for word-by-word copying or something
         like that.  It's not clear that this is always correct.  */
      if (partial == 0)
        arg->value = arg->stack_slot;
    }
  if (arg->reg && GET_CODE (arg->reg) == PARALLEL)
    {
      tree type = TREE_TYPE (arg->tree_value);
      arg->parallel_value
        = emit_group_load_into_temps (arg->reg, arg->value, type,
                                      int_size_in_bytes (type));
    }
  /* Mark all slots this store used.  */
  if (ACCUMULATE_OUTGOING_ARGS && !(flags & ECF_SIBCALL)
      && argblock && ! variable_size && arg->stack)
    for (i = lower_bound; i < upper_bound; i++)
      stack_usage_map[i] = 1;

  /* Once we have pushed something, pops can't safely
     be deferred during the rest of the arguments.  */
  NO_DEFER_POP;

  /* Free any temporary slots made in processing this argument.  Show
     that we might have taken the address of something and pushed that
     as an operand.  */
  preserve_temp_slots (NULL_RTX);
  free_temp_slots ();
  pop_temp_slots ();

  return sibcall_failure;
}
/* Nonzero if we do not know how to pass TYPE solely in registers.  */

bool
must_pass_in_stack_var_size (enum machine_mode mode ATTRIBUTE_UNUSED,
                             const_tree type)
{
  if (!type)
    return false;

  /* If the type has variable size...  */
  if (TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
    return true;

  /* If the type is marked as addressable (it is required
     to be constructed into the stack)...  */
  if (TREE_ADDRESSABLE (type))
    return true;

  return false;
}
/* Another version of the TARGET_MUST_PASS_IN_STACK hook.  This one
   takes trailing padding of a structure into account.  */
/* ??? Should be able to merge these two by examining BLOCK_REG_PADDING.  */

bool
must_pass_in_stack_var_size_or_pad (enum machine_mode mode, const_tree type)
{
  if (!type)
    return false;

  /* If the type has variable size...  */
  if (TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
    return true;

  /* If the type is marked as addressable (it is required
     to be constructed into the stack)...  */
  if (TREE_ADDRESSABLE (type))
    return true;

  /* If the padding and mode of the type is such that a copy into
     a register would put it into the wrong part of the register.  */
  if (mode == BLKmode
      && int_size_in_bytes (type) % (PARM_BOUNDARY / BITS_PER_UNIT)
      && (FUNCTION_ARG_PADDING (mode, type)
          == (BYTES_BIG_ENDIAN ? upward : downward)))