1 /* Convert function calls to rtl insns, for GNU C compiler.
2 Copyright (C) 1989-2017 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
20 #include "config.h"
21 #include "system.h"
22 #include "coretypes.h"
23 #include "backend.h"
24 #include "target.h"
25 #include "rtl.h"
26 #include "tree.h"
27 #include "gimple.h"
28 #include "predict.h"
29 #include "memmodel.h"
30 #include "tm_p.h"
31 #include "stringpool.h"
32 #include "expmed.h"
33 #include "optabs.h"
34 #include "emit-rtl.h"
35 #include "cgraph.h"
36 #include "diagnostic-core.h"
37 #include "fold-const.h"
38 #include "stor-layout.h"
39 #include "varasm.h"
40 #include "internal-fn.h"
41 #include "dojump.h"
42 #include "explow.h"
43 #include "calls.h"
44 #include "expr.h"
45 #include "output.h"
46 #include "langhooks.h"
47 #include "except.h"
48 #include "dbgcnt.h"
49 #include "rtl-iter.h"
50 #include "tree-chkp.h"
51 #include "tree-vrp.h"
52 #include "tree-ssanames.h"
53 #include "rtl-chkp.h"
54 #include "intl.h"
56 /* Like PREFERRED_STACK_BOUNDARY but in units of bytes, not bits. */
57 #define STACK_BYTES (PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT)
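/* A minimal sketch of the rounding STACK_BYTES is used for: argument
   block sizes are rounded up to the preferred stack boundary.  This
   assumes STACK_BYTES is a power of two; the helper name is
   hypothetical and the block is illustrative only, hence the #if 0.  */
#if 0
static unsigned HOST_WIDE_INT
round_to_stack_bytes (unsigned HOST_WIDE_INT size)
{
  /* With STACK_BYTES == 16: sizes 1..16 map to 16, 17..32 to 32.  */
  return (size + STACK_BYTES - 1) & ~((unsigned HOST_WIDE_INT) STACK_BYTES - 1);
}
#endif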
59 /* Data structure and subroutines used within expand_call. */
61 struct arg_data
62 {
63 /* Tree node for this argument. */
64 tree tree_value;
65 /* Mode for value; TYPE_MODE unless promoted. */
66 machine_mode mode;
67 /* Current RTL value for argument, or 0 if it isn't precomputed. */
68 rtx value;
69 /* Initially-computed RTL value for argument; only for const functions. */
70 rtx initial_value;
71 /* Register to pass this argument in, 0 if passed on stack, or a
72 PARALLEL if the arg is to be copied into multiple non-contiguous
73 registers. */
74 rtx reg;
75 /* Register to pass this argument in when generating tail call sequence.
76 This is not the same register as for normal calls on machines with
77 register windows. */
78 rtx tail_call_reg;
79 /* If REG is a PARALLEL, this is a copy of VALUE pulled into the correct
80 form for emit_group_move. */
81 rtx parallel_value;
82 /* If value is passed in neither reg nor stack, this field holds a number
83 of a special slot to be used. */
84 rtx special_slot;
85 /* For pointer bounds, holds the index of the parm the bounds are
86 bound to; -1 if there is no such pointer. */
87 int pointer_arg;
88 /* If pointer_arg refers to a structure, then pointer_offset holds an offset
89 of a pointer in this structure. */
90 int pointer_offset;
91 /* If REG was promoted from the actual mode of the argument expression,
92 indicates whether the promotion is sign- or zero-extended. */
93 int unsignedp;
94 /* Number of bytes to put in registers. 0 means put the whole arg
95 in registers. Also 0 if not passed in registers. */
96 int partial;
97 /* Nonzero if argument must be passed on stack.
98 Note that some arguments may be passed on the stack
99 even though pass_on_stack is zero, just because FUNCTION_ARG says so.
100 pass_on_stack identifies arguments that *cannot* go in registers. */
101 int pass_on_stack;
102 /* Some fields packaged up for locate_and_pad_parm. */
103 struct locate_and_pad_arg_data locate;
104 /* Location on the stack at which parameter should be stored. The store
105 has already been done if STACK == VALUE. */
106 rtx stack;
107 /* Location on the stack of the start of this argument slot. This can
108 differ from STACK if this arg pads downward. This location is known
109 to be aligned to TARGET_FUNCTION_ARG_BOUNDARY. */
110 rtx stack_slot;
111 /* Place that this stack area has been saved, if needed. */
112 rtx save_area;
113 /* If an argument's alignment does not permit direct copying into registers,
114 copy in smaller-sized pieces into pseudos. These are stored in a
115 block pointed to by this field. The next field says how many
116 word-sized pseudos we made. */
117 rtx *aligned_regs;
118 int n_aligned_regs;
119 };
121 /* A vector of one char per byte of stack space. A byte is nonzero if
122 the corresponding stack location has been used.
123 This vector is used to prevent a function call within an argument from
124 clobbering any stack already set up. */
125 static char *stack_usage_map;
127 /* Size of STACK_USAGE_MAP. */
128 static int highest_outgoing_arg_in_use;
130 /* A bitmap of virtual-incoming stack space. A bit is set if the
131 corresponding stack location's tail call argument has already been
132 stored into the stack. This bitmap is used to prevent sibling call
133 optimization if the function tries to use its parent's incoming
134 argument slots after they have been overwritten with tail call arguments. */
135 static sbitmap stored_args_map;
137 /* stack_arg_under_construction is nonzero when an argument may be
138 initialized with a constructor call (including a C function that
139 returns a BLKmode struct) and expand_call must take special action
140 to make sure the object being constructed does not overlap the
141 argument list for the constructor call. */
142 static int stack_arg_under_construction;
144 static void emit_call_1 (rtx, tree, tree, tree, HOST_WIDE_INT, HOST_WIDE_INT,
145 HOST_WIDE_INT, rtx, rtx, int, rtx, int,
146 cumulative_args_t);
147 static void precompute_register_parameters (int, struct arg_data *, int *);
148 static void store_bounds (struct arg_data *, struct arg_data *);
149 static int store_one_arg (struct arg_data *, rtx, int, int, int);
150 static void store_unaligned_arguments_into_pseudos (struct arg_data *, int);
151 static int finalize_must_preallocate (int, int, struct arg_data *,
152 struct args_size *);
153 static void precompute_arguments (int, struct arg_data *);
154 static int compute_argument_block_size (int, struct args_size *, tree, tree, int);
155 static void initialize_argument_information (int, struct arg_data *,
156 struct args_size *, int,
157 tree, tree,
158 tree, tree, cumulative_args_t, int,
159 rtx *, int *, int *, int *,
160 bool *, bool);
161 static void compute_argument_addresses (struct arg_data *, rtx, int);
162 static rtx rtx_for_function_call (tree, tree);
163 static void load_register_parameters (struct arg_data *, int, rtx *, int,
164 int, int *);
165 static rtx emit_library_call_value_1 (int, rtx, rtx, enum libcall_type,
166 machine_mode, int, va_list);
167 static int special_function_p (const_tree, int);
168 static int check_sibcall_argument_overlap_1 (rtx);
169 static int check_sibcall_argument_overlap (rtx_insn *, struct arg_data *, int);
171 static int combine_pending_stack_adjustment_and_call (int, struct args_size *,
172 unsigned int);
173 static tree split_complex_types (tree);
175 #ifdef REG_PARM_STACK_SPACE
176 static rtx save_fixed_argument_area (int, rtx, int *, int *);
177 static void restore_fixed_argument_area (rtx, rtx, int, int);
178 #endif
180 /* Force FUNEXP into a form suitable for the address of a CALL,
181 and return that as an rtx. Also load the static chain register
182 if FNDECL is a nested function.
184 CALL_FUSAGE points to a variable holding the prospective
185 CALL_INSN_FUNCTION_USAGE information. */
187 rtx
188 prepare_call_address (tree fndecl_or_type, rtx funexp, rtx static_chain_value,
189 rtx *call_fusage, int reg_parm_seen, int flags)
191 /* Make a valid memory address and copy constants through pseudo-regs,
192 but not for a constant address if -fno-function-cse. */
193 if (GET_CODE (funexp) != SYMBOL_REF)
195 /* If it's an indirect call by descriptor, generate code to perform
196 runtime identification of the pointer and load the descriptor. */
197 if ((flags & ECF_BY_DESCRIPTOR) && !flag_trampolines)
199 const int bit_val = targetm.calls.custom_function_descriptors;
200 rtx call_lab = gen_label_rtx ();
202 gcc_assert (fndecl_or_type && TYPE_P (fndecl_or_type));
203 fndecl_or_type
204 = build_decl (UNKNOWN_LOCATION, FUNCTION_DECL, NULL_TREE,
205 fndecl_or_type);
206 DECL_STATIC_CHAIN (fndecl_or_type) = 1;
207 rtx chain = targetm.calls.static_chain (fndecl_or_type, false);
209 /* Avoid long live ranges around function calls. */
210 funexp = copy_to_mode_reg (Pmode, funexp);
212 if (REG_P (chain))
213 emit_insn (gen_rtx_CLOBBER (VOIDmode, chain));
215 /* Emit the runtime identification pattern. */
216 rtx mask = gen_rtx_AND (Pmode, funexp, GEN_INT (bit_val));
217 emit_cmp_and_jump_insns (mask, const0_rtx, EQ, NULL_RTX, Pmode, 1,
218 call_lab);
220 /* Statically predict the branch as very likely taken. */
221 rtx_insn *insn = get_last_insn ();
222 if (JUMP_P (insn))
223 predict_insn_def (insn, PRED_BUILTIN_EXPECT, TAKEN);
225 /* Load the descriptor. */
226 rtx mem = gen_rtx_MEM (ptr_mode,
227 plus_constant (Pmode, funexp, - bit_val));
228 MEM_NOTRAP_P (mem) = 1;
229 mem = convert_memory_address (Pmode, mem);
230 emit_move_insn (chain, mem);
232 mem = gen_rtx_MEM (ptr_mode,
233 plus_constant (Pmode, funexp,
234 POINTER_SIZE / BITS_PER_UNIT
235 - bit_val));
236 MEM_NOTRAP_P (mem) = 1;
237 mem = convert_memory_address (Pmode, mem);
238 emit_move_insn (funexp, mem);
240 emit_label (call_lab);
242 if (REG_P (chain))
244 use_reg (call_fusage, chain);
245 STATIC_CHAIN_REG_P (chain) = 1;
248 /* Make sure we're not going to be overwritten below. */
249 gcc_assert (!static_chain_value);
252 /* If we are using registers for parameters, force the
253 function address into a register now. */
254 funexp = ((reg_parm_seen
255 && targetm.small_register_classes_for_mode_p (FUNCTION_MODE))
256 ? force_not_mem (memory_address (FUNCTION_MODE, funexp))
257 : memory_address (FUNCTION_MODE, funexp));
259 else
261 /* funexp could be a SYMBOL_REF that represents a function pointer of
262 ptr_mode. In this case, it should be converted into address mode
263 to be a valid address for the memory rtx pattern. See PR 64971. */
264 if (GET_MODE (funexp) != Pmode)
265 funexp = convert_memory_address (Pmode, funexp);
267 if (!(flags & ECF_SIBCALL))
269 if (!NO_FUNCTION_CSE && optimize && ! flag_no_function_cse)
270 funexp = force_reg (Pmode, funexp);
274 if (static_chain_value != 0
275 && (TREE_CODE (fndecl_or_type) != FUNCTION_DECL
276 || DECL_STATIC_CHAIN (fndecl_or_type)))
278 rtx chain;
280 chain = targetm.calls.static_chain (fndecl_or_type, false);
281 static_chain_value = convert_memory_address (Pmode, static_chain_value);
283 emit_move_insn (chain, static_chain_value);
284 if (REG_P (chain))
286 use_reg (call_fusage, chain);
287 STATIC_CHAIN_REG_P (chain) = 1;
291 return funexp;
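/* An illustrative C model of the by-descriptor test emitted above,
   under the assumption that the target tags descriptor "pointers" in
   their low bit(s) (targetm.calls.custom_function_descriptors).  The
   struct layout mirrors the two loads above: static chain first, real
   code address second.  Names are hypothetical; needs <stdint.h>.  */
#if 0
struct fn_descriptor { void *static_chain; void (*code) (void); };

static void
call_maybe_descriptor (void *funexp, int bit_val)
{
  void *chain = 0;
  if (((uintptr_t) funexp & bit_val) != 0)	/* tagged: a descriptor */
    {
      struct fn_descriptor *d
	= (struct fn_descriptor *) ((char *) funexp - bit_val);
      chain = d->static_chain;			/* first load above */
      funexp = (void *) d->code;		/* second load above */
    }
  /* ... call FUNEXP with CHAIN in the static chain register ...  */
}
#endif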
294 /* Generate instructions to call function FUNEXP,
295 and optionally pop the results.
296 The CALL_INSN is the first insn generated.
298 FNDECL is the declaration node of the function. This is given to the
299 hook TARGET_RETURN_POPS_ARGS to determine whether this function pops
300 its own args.
302 FUNTYPE is the data type of the function. This is given to the hook
303 TARGET_RETURN_POPS_ARGS to determine whether this function pops its
304 own args. We used to allow an identifier for library functions, but
305 that doesn't work when the return type is an aggregate type and the
306 calling convention says that the pointer to this aggregate is to be
307 popped by the callee.
309 STACK_SIZE is the number of bytes of arguments on the stack,
310 ROUNDED_STACK_SIZE is that number rounded up to
311 PREFERRED_STACK_BOUNDARY; zero if the size is variable. This is
312 both to put into the call insn and to generate explicit popping
313 code if necessary.
315 STRUCT_VALUE_SIZE is the number of bytes wanted in a structure value.
316 It is zero if this call doesn't want a structure value.
318 NEXT_ARG_REG is the rtx that results from executing
319 targetm.calls.function_arg (&args_so_far, VOIDmode, void_type_node, true)
320 just after all the args have had their registers assigned.
321 This could be whatever you like, but normally it is the first
322 arg-register beyond those used for args in this call,
323 or 0 if all the arg-registers are used in this call.
324 It is passed on to `gen_call' so you can put this info in the call insn.
326 VALREG is a hard register in which a value is returned,
327 or 0 if the call does not return a value.
329 OLD_INHIBIT_DEFER_POP is the value that `inhibit_defer_pop' had before
330 the args to this call were processed.
331 We restore `inhibit_defer_pop' to that value.
333 CALL_FUSAGE is either empty or an EXPR_LIST of USE expressions that
334 denote registers used by the called function. */
336 static void
337 emit_call_1 (rtx funexp, tree fntree ATTRIBUTE_UNUSED, tree fndecl ATTRIBUTE_UNUSED,
338 tree funtype ATTRIBUTE_UNUSED,
339 HOST_WIDE_INT stack_size ATTRIBUTE_UNUSED,
340 HOST_WIDE_INT rounded_stack_size,
341 HOST_WIDE_INT struct_value_size ATTRIBUTE_UNUSED,
342 rtx next_arg_reg ATTRIBUTE_UNUSED, rtx valreg,
343 int old_inhibit_defer_pop, rtx call_fusage, int ecf_flags,
344 cumulative_args_t args_so_far ATTRIBUTE_UNUSED)
346 rtx rounded_stack_size_rtx = GEN_INT (rounded_stack_size);
347 rtx call, funmem, pat;
348 int already_popped = 0;
349 HOST_WIDE_INT n_popped = 0;
351 /* Sibling call patterns never pop arguments (no sibcall(_value)_pop
352 patterns exist). Any popping that the callee does on return will
353 be from our caller's frame rather than ours. */
354 if (!(ecf_flags & ECF_SIBCALL))
356 n_popped += targetm.calls.return_pops_args (fndecl, funtype, stack_size);
358 #ifdef CALL_POPS_ARGS
359 n_popped += CALL_POPS_ARGS (*get_cumulative_args (args_so_far));
360 #endif
363 /* Ensure address is valid. SYMBOL_REF is already valid, so no need,
364 and we don't want to load it into a register as an optimization,
365 because prepare_call_address already did it if it should be done. */
366 if (GET_CODE (funexp) != SYMBOL_REF)
367 funexp = memory_address (FUNCTION_MODE, funexp);
369 funmem = gen_rtx_MEM (FUNCTION_MODE, funexp);
370 if (fndecl && TREE_CODE (fndecl) == FUNCTION_DECL)
372 tree t = fndecl;
374 /* Although a built-in FUNCTION_DECL and its non-__builtin
375 counterpart compare equal and get a shared mem_attrs, they
376 produce different dump output in compare-debug compilations:
377 if an entry gets garbage collected in one compilation and a
378 different (but equivalent) entry is then added, while the other
379 compilation doesn't run the garbage collector at the same spot, it
380 still shares the mem_attr with the equivalent entry. */
381 if (DECL_BUILT_IN_CLASS (t) == BUILT_IN_NORMAL)
383 tree t2 = builtin_decl_explicit (DECL_FUNCTION_CODE (t));
384 if (t2)
385 t = t2;
388 set_mem_expr (funmem, t);
390 else if (fntree)
391 set_mem_expr (funmem, build_simple_mem_ref (CALL_EXPR_FN (fntree)));
393 if (ecf_flags & ECF_SIBCALL)
395 if (valreg)
396 pat = targetm.gen_sibcall_value (valreg, funmem,
397 rounded_stack_size_rtx,
398 next_arg_reg, NULL_RTX);
399 else
400 pat = targetm.gen_sibcall (funmem, rounded_stack_size_rtx,
401 next_arg_reg, GEN_INT (struct_value_size));
403 /* If the target has "call" or "call_value" insns, then prefer them
404 if no arguments are actually popped. If the target does not have
405 "call" or "call_value" insns, then we must use the popping versions
406 even if the call has no arguments to pop. */
407 else if (n_popped > 0
408 || !(valreg
409 ? targetm.have_call_value ()
410 : targetm.have_call ()))
412 rtx n_pop = GEN_INT (n_popped);
414 /* If this subroutine pops its own args, record that in the call insn
415 if possible, for the sake of frame pointer elimination. */
417 if (valreg)
418 pat = targetm.gen_call_value_pop (valreg, funmem,
419 rounded_stack_size_rtx,
420 next_arg_reg, n_pop);
421 else
422 pat = targetm.gen_call_pop (funmem, rounded_stack_size_rtx,
423 next_arg_reg, n_pop);
425 already_popped = 1;
427 else
429 if (valreg)
430 pat = targetm.gen_call_value (valreg, funmem, rounded_stack_size_rtx,
431 next_arg_reg, NULL_RTX);
432 else
433 pat = targetm.gen_call (funmem, rounded_stack_size_rtx, next_arg_reg,
434 GEN_INT (struct_value_size));
436 emit_insn (pat);
438 /* Find the call we just emitted. */
439 rtx_call_insn *call_insn = last_call_insn ();
441 /* Some targets create a fresh MEM instead of reusing the one provided
442 above. Set its MEM_EXPR. */
443 call = get_call_rtx_from (call_insn);
444 if (call
445 && MEM_EXPR (XEXP (call, 0)) == NULL_TREE
446 && MEM_EXPR (funmem) != NULL_TREE)
447 set_mem_expr (XEXP (call, 0), MEM_EXPR (funmem));
449 /* Mark instrumented calls. */
450 if (call && fntree)
451 CALL_EXPR_WITH_BOUNDS_P (call) = CALL_WITH_BOUNDS_P (fntree);
453 /* Put the register usage information there. */
454 add_function_usage_to (call_insn, call_fusage);
456 /* If this is a const call, then set the insn's unchanging bit. */
457 if (ecf_flags & ECF_CONST)
458 RTL_CONST_CALL_P (call_insn) = 1;
460 /* If this is a pure call, then set the insn's unchanging bit. */
461 if (ecf_flags & ECF_PURE)
462 RTL_PURE_CALL_P (call_insn) = 1;
464 /* If this is a looping const or pure call, then set the insn's bit. */
465 if (ecf_flags & ECF_LOOPING_CONST_OR_PURE)
466 RTL_LOOPING_CONST_OR_PURE_CALL_P (call_insn) = 1;
468 /* Create a nothrow REG_EH_REGION note, if needed. */
469 make_reg_eh_region_note (call_insn, ecf_flags, 0);
471 if (ecf_flags & ECF_NORETURN)
472 add_reg_note (call_insn, REG_NORETURN, const0_rtx);
474 if (ecf_flags & ECF_RETURNS_TWICE)
476 add_reg_note (call_insn, REG_SETJMP, const0_rtx);
477 cfun->calls_setjmp = 1;
480 SIBLING_CALL_P (call_insn) = ((ecf_flags & ECF_SIBCALL) != 0);
482 /* Restore this now, so that we do defer pops for this call's args
483 if the context of the call as a whole permits. */
484 inhibit_defer_pop = old_inhibit_defer_pop;
486 if (n_popped > 0)
488 if (!already_popped)
489 CALL_INSN_FUNCTION_USAGE (call_insn)
490 = gen_rtx_EXPR_LIST (VOIDmode,
491 gen_rtx_CLOBBER (VOIDmode, stack_pointer_rtx),
492 CALL_INSN_FUNCTION_USAGE (call_insn));
493 rounded_stack_size -= n_popped;
494 rounded_stack_size_rtx = GEN_INT (rounded_stack_size);
495 stack_pointer_delta -= n_popped;
497 add_reg_note (call_insn, REG_ARGS_SIZE, GEN_INT (stack_pointer_delta));
499 /* If popping is needed, stack realignment must use DRAP. */
500 if (SUPPORTS_STACK_ALIGNMENT)
501 crtl->need_drap = true;
503 /* For noreturn calls when not accumulating outgoing args, force a
504 REG_ARGS_SIZE note to prevent crossjumping of calls with different
505 arg sizes. */
506 else if (!ACCUMULATE_OUTGOING_ARGS && (ecf_flags & ECF_NORETURN) != 0)
507 add_reg_note (call_insn, REG_ARGS_SIZE, GEN_INT (stack_pointer_delta));
509 if (!ACCUMULATE_OUTGOING_ARGS)
511 /* If returning from the subroutine does not automatically pop the args,
512 we need an instruction to pop them sooner or later.
513 Perhaps do it now; perhaps just record how much space to pop later.
515 If returning from the subroutine does pop the args, indicate that the
516 stack pointer will be changed. */
518 if (rounded_stack_size != 0)
520 if (ecf_flags & ECF_NORETURN)
521 /* Just pretend we did the pop. */
522 stack_pointer_delta -= rounded_stack_size;
523 else if (flag_defer_pop && inhibit_defer_pop == 0
524 && ! (ecf_flags & (ECF_CONST | ECF_PURE)))
525 pending_stack_adjust += rounded_stack_size;
526 else
527 adjust_stack (rounded_stack_size_rtx);
530 /* When we accumulate outgoing args, we must avoid any stack manipulations.
531 Restore the stack pointer to its original value now. Usually
532 ACCUMULATE_OUTGOING_ARGS targets don't get here, but there are exceptions.
533 On i386 ACCUMULATE_OUTGOING_ARGS can be enabled on demand, and
534 popping variants of functions exist as well.
536 ??? We may optimize similarly to defer_pop above, but it is
537 probably not worthwhile.
539 ??? It will be worthwhile to enable combine_stack_adjustments even for
540 such machines. */
541 else if (n_popped)
542 anti_adjust_stack (GEN_INT (n_popped));
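/* A small sketch of what deferring pops (the pending_stack_adjust path
   above) buys: two back-to-back calls that each pushed 16 bytes of
   arguments yield one stack adjustment instead of two.  The counter
   below plays the role of pending_stack_adjust; names hypothetical.  */
#if 0
static int pending_adjust;		/* bytes of pops not yet emitted */

static void
after_call (int rounded_stack_size)
{
  /* Instead of emitting "sp += rounded_stack_size" after every call...  */
  pending_adjust += rounded_stack_size;
}

static void
flush_pending_pops (void)
{
  /* ...a single combined "sp += pending_adjust" is emitted once the
     context no longer permits deferring.  */
  pending_adjust = 0;
}
#endif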
545 /* Determine if the function identified by FNDECL is one with
546 special properties we wish to know about. Modify FLAGS accordingly.
548 For example, if the function might return more than one time (setjmp), then
549 set ECF_RETURNS_TWICE.
551 Set ECF_MAY_BE_ALLOCA for any memory allocation function that might allocate
552 space from the stack such as alloca. */
554 static int
555 special_function_p (const_tree fndecl, int flags)
557 tree name_decl = DECL_NAME (fndecl);
559 /* For instrumentation clones we want to derive flags
560 from the original name. */
561 if (cgraph_node::get (fndecl)
562 && cgraph_node::get (fndecl)->instrumentation_clone)
563 name_decl = DECL_NAME (cgraph_node::get (fndecl)->orig_decl);
565 if (fndecl && name_decl
566 && IDENTIFIER_LENGTH (name_decl) <= 11
567 /* Exclude functions not at the file scope, or not `extern',
568 since they are not the magic functions we would otherwise
569 think they are.
570 FIXME: this should be handled with attributes, not with this
571 hacky imitation of DECL_ASSEMBLER_NAME. It's (also) wrong
572 because you can declare fork() inside a function if you
573 wish. */
574 && (DECL_CONTEXT (fndecl) == NULL_TREE
575 || TREE_CODE (DECL_CONTEXT (fndecl)) == TRANSLATION_UNIT_DECL)
576 && TREE_PUBLIC (fndecl))
578 const char *name = IDENTIFIER_POINTER (name_decl);
579 const char *tname = name;
581 /* We assume that alloca will always be called by name. It
582 makes no sense to pass it as a pointer-to-function to
583 anything that does not understand its behavior. */
584 if (IDENTIFIER_LENGTH (name_decl) == 6
585 && name[0] == 'a'
586 && ! strcmp (name, "alloca"))
587 flags |= ECF_MAY_BE_ALLOCA;
589 /* Disregard prefix _ or __. */
590 if (name[0] == '_')
592 if (name[1] == '_')
593 tname += 2;
594 else
595 tname += 1;
598 /* ECF_RETURNS_TWICE is safe even for -ffreestanding. */
599 if (! strcmp (tname, "setjmp")
600 || ! strcmp (tname, "sigsetjmp")
601 || ! strcmp (name, "savectx")
602 || ! strcmp (name, "vfork")
603 || ! strcmp (name, "getcontext"))
604 flags |= ECF_RETURNS_TWICE;
607 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
608 switch (DECL_FUNCTION_CODE (fndecl))
610 case BUILT_IN_ALLOCA:
611 case BUILT_IN_ALLOCA_WITH_ALIGN:
612 flags |= ECF_MAY_BE_ALLOCA;
613 break;
614 default:
615 break;
618 return flags;
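/* A quick illustration of the name matching above: after the prefix
   strip, "setjmp", "_setjmp" and "__setjmp" all compare equal to
   "setjmp" and get ECF_RETURNS_TWICE, while "alloca" is matched only
   by its exact unprefixed name.  Hypothetical driver, assumes
   <string.h>.  */
#if 0
static int
returns_twice_by_name (const char *name)
{
  const char *tname = name;
  if (name[0] == '_')			/* disregard prefix _ or __ */
    tname += (name[1] == '_') ? 2 : 1;
  return !strcmp (tname, "setjmp") || !strcmp (tname, "sigsetjmp");
}
#endif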
621 /* Similar to special_function_p; return a set of ERF_ flags for the
622 function FNDECL. */
623 static int
624 decl_return_flags (tree fndecl)
626 tree attr;
627 tree type = TREE_TYPE (fndecl);
628 if (!type)
629 return 0;
631 attr = lookup_attribute ("fn spec", TYPE_ATTRIBUTES (type));
632 if (!attr)
633 return 0;
635 attr = TREE_VALUE (TREE_VALUE (attr));
636 if (!attr || TREE_STRING_LENGTH (attr) < 1)
637 return 0;
639 switch (TREE_STRING_POINTER (attr)[0])
641 case '1':
642 case '2':
643 case '3':
644 case '4':
645 return ERF_RETURNS_ARG | (TREE_STRING_POINTER (attr)[0] - '1');
647 case 'm':
648 return ERF_NOALIAS;
650 case '.':
651 default:
652 return 0;
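/* A sketch of the "fn spec" decoding above.  The first character of
   the spec string describes the return value: '1'..'4' mean the
   function returns that argument (memcpy, for instance, returns its
   first argument), and 'm' means the result is fresh memory that
   aliases nothing.  Illustrative only; the ERF_* values are the flags
   returned above.  */
#if 0
static int
decode_fnspec_return (const char *spec)
{
  switch (spec[0])
    {
    case '1': case '2': case '3': case '4':
      return ERF_RETURNS_ARG | (spec[0] - '1');	/* returns argument N */
    case 'm':
      return ERF_NOALIAS;			/* malloc-like result */
    default:
      return 0;
    }
}
#endif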
656 /* Return nonzero when FNDECL represents a call to setjmp. */
658 int
659 setjmp_call_p (const_tree fndecl)
661 if (DECL_IS_RETURNS_TWICE (fndecl))
662 return ECF_RETURNS_TWICE;
663 return special_function_p (fndecl, 0) & ECF_RETURNS_TWICE;
667 /* Return true if STMT may be an alloca call. */
669 bool
670 gimple_maybe_alloca_call_p (const gimple *stmt)
672 tree fndecl;
674 if (!is_gimple_call (stmt))
675 return false;
677 fndecl = gimple_call_fndecl (stmt);
678 if (fndecl && (special_function_p (fndecl, 0) & ECF_MAY_BE_ALLOCA))
679 return true;
681 return false;
684 /* Return true if STMT is a builtin alloca call. */
686 bool
687 gimple_alloca_call_p (const gimple *stmt)
689 tree fndecl;
691 if (!is_gimple_call (stmt))
692 return false;
694 fndecl = gimple_call_fndecl (stmt);
695 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
696 switch (DECL_FUNCTION_CODE (fndecl))
698 case BUILT_IN_ALLOCA:
699 case BUILT_IN_ALLOCA_WITH_ALIGN:
700 return true;
701 default:
702 break;
705 return false;
708 /* Return true when EXP is a builtin alloca call. */
710 bool
711 alloca_call_p (const_tree exp)
713 tree fndecl;
714 if (TREE_CODE (exp) == CALL_EXPR
715 && (fndecl = get_callee_fndecl (exp))
716 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
717 switch (DECL_FUNCTION_CODE (fndecl))
719 case BUILT_IN_ALLOCA:
720 case BUILT_IN_ALLOCA_WITH_ALIGN:
721 return true;
722 default:
723 break;
726 return false;
729 /* Return TRUE if FNDECL is either a TM builtin or a TM cloned
730 function. Return FALSE otherwise. */
732 static bool
733 is_tm_builtin (const_tree fndecl)
735 if (fndecl == NULL)
736 return false;
738 if (decl_is_tm_clone (fndecl))
739 return true;
741 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
743 switch (DECL_FUNCTION_CODE (fndecl))
745 case BUILT_IN_TM_COMMIT:
746 case BUILT_IN_TM_COMMIT_EH:
747 case BUILT_IN_TM_ABORT:
748 case BUILT_IN_TM_IRREVOCABLE:
749 case BUILT_IN_TM_GETTMCLONE_IRR:
750 case BUILT_IN_TM_MEMCPY:
751 case BUILT_IN_TM_MEMMOVE:
752 case BUILT_IN_TM_MEMSET:
753 CASE_BUILT_IN_TM_STORE (1):
754 CASE_BUILT_IN_TM_STORE (2):
755 CASE_BUILT_IN_TM_STORE (4):
756 CASE_BUILT_IN_TM_STORE (8):
757 CASE_BUILT_IN_TM_STORE (FLOAT):
758 CASE_BUILT_IN_TM_STORE (DOUBLE):
759 CASE_BUILT_IN_TM_STORE (LDOUBLE):
760 CASE_BUILT_IN_TM_STORE (M64):
761 CASE_BUILT_IN_TM_STORE (M128):
762 CASE_BUILT_IN_TM_STORE (M256):
763 CASE_BUILT_IN_TM_LOAD (1):
764 CASE_BUILT_IN_TM_LOAD (2):
765 CASE_BUILT_IN_TM_LOAD (4):
766 CASE_BUILT_IN_TM_LOAD (8):
767 CASE_BUILT_IN_TM_LOAD (FLOAT):
768 CASE_BUILT_IN_TM_LOAD (DOUBLE):
769 CASE_BUILT_IN_TM_LOAD (LDOUBLE):
770 CASE_BUILT_IN_TM_LOAD (M64):
771 CASE_BUILT_IN_TM_LOAD (M128):
772 CASE_BUILT_IN_TM_LOAD (M256):
773 case BUILT_IN_TM_LOG:
774 case BUILT_IN_TM_LOG_1:
775 case BUILT_IN_TM_LOG_2:
776 case BUILT_IN_TM_LOG_4:
777 case BUILT_IN_TM_LOG_8:
778 case BUILT_IN_TM_LOG_FLOAT:
779 case BUILT_IN_TM_LOG_DOUBLE:
780 case BUILT_IN_TM_LOG_LDOUBLE:
781 case BUILT_IN_TM_LOG_M64:
782 case BUILT_IN_TM_LOG_M128:
783 case BUILT_IN_TM_LOG_M256:
784 return true;
785 default:
786 break;
789 return false;
792 /* Detect flags (function attributes) from the function decl or type node. */
794 int
795 flags_from_decl_or_type (const_tree exp)
797 int flags = 0;
799 if (DECL_P (exp))
801 /* The function exp may have the `malloc' attribute. */
802 if (DECL_IS_MALLOC (exp))
803 flags |= ECF_MALLOC;
805 /* The function exp may have the `returns_twice' attribute. */
806 if (DECL_IS_RETURNS_TWICE (exp))
807 flags |= ECF_RETURNS_TWICE;
809 /* Process the pure and const attributes. */
810 if (TREE_READONLY (exp))
811 flags |= ECF_CONST;
812 if (DECL_PURE_P (exp))
813 flags |= ECF_PURE;
814 if (DECL_LOOPING_CONST_OR_PURE_P (exp))
815 flags |= ECF_LOOPING_CONST_OR_PURE;
817 if (DECL_IS_NOVOPS (exp))
818 flags |= ECF_NOVOPS;
819 if (lookup_attribute ("leaf", DECL_ATTRIBUTES (exp)))
820 flags |= ECF_LEAF;
822 if (TREE_NOTHROW (exp))
823 flags |= ECF_NOTHROW;
825 if (flag_tm)
827 if (is_tm_builtin (exp))
828 flags |= ECF_TM_BUILTIN;
829 else if ((flags & (ECF_CONST|ECF_NOVOPS)) != 0
830 || lookup_attribute ("transaction_pure",
831 TYPE_ATTRIBUTES (TREE_TYPE (exp))))
832 flags |= ECF_TM_PURE;
835 flags = special_function_p (exp, flags);
837 else if (TYPE_P (exp))
839 if (TYPE_READONLY (exp))
840 flags |= ECF_CONST;
842 if (flag_tm
843 && ((flags & ECF_CONST) != 0
844 || lookup_attribute ("transaction_pure", TYPE_ATTRIBUTES (exp))))
845 flags |= ECF_TM_PURE;
847 else
848 gcc_unreachable ();
850 if (TREE_THIS_VOLATILE (exp))
852 flags |= ECF_NORETURN;
853 if (flags & (ECF_CONST|ECF_PURE))
854 flags |= ECF_LOOPING_CONST_OR_PURE;
857 return flags;
860 /* Detect flags from a CALL_EXPR. */
862 int
863 call_expr_flags (const_tree t)
865 int flags;
866 tree decl = get_callee_fndecl (t);
868 if (decl)
869 flags = flags_from_decl_or_type (decl);
870 else if (CALL_EXPR_FN (t) == NULL_TREE)
871 flags = internal_fn_flags (CALL_EXPR_IFN (t));
872 else
874 tree type = TREE_TYPE (CALL_EXPR_FN (t));
875 if (type && TREE_CODE (type) == POINTER_TYPE)
876 flags = flags_from_decl_or_type (TREE_TYPE (type));
877 else
878 flags = 0;
879 if (CALL_EXPR_BY_DESCRIPTOR (t))
880 flags |= ECF_BY_DESCRIPTOR;
883 return flags;
886 /* Return true if TYPE should be passed by invisible reference. */
888 bool
889 pass_by_reference (CUMULATIVE_ARGS *ca, machine_mode mode,
890 tree type, bool named_arg)
892 if (type)
894 /* If this type contains non-trivial constructors, then it is
895 forbidden for the middle-end to create any new copies. */
896 if (TREE_ADDRESSABLE (type))
897 return true;
899 /* GCC post 3.4 passes *all* variable sized types by reference. */
900 if (!TYPE_SIZE (type) || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
901 return true;
903 /* If a record type should be passed the same as its first (and only)
904 member, use the type and mode of that member. */
905 if (TREE_CODE (type) == RECORD_TYPE && TYPE_TRANSPARENT_AGGR (type))
907 type = TREE_TYPE (first_field (type));
908 mode = TYPE_MODE (type);
912 return targetm.calls.pass_by_reference (pack_cumulative_args (ca), mode,
913 type, named_arg);
916 /* Return true if TYPE, which is passed by reference, should be callee
917 copied instead of caller copied. */
919 bool
920 reference_callee_copied (CUMULATIVE_ARGS *ca, machine_mode mode,
921 tree type, bool named_arg)
923 if (type && TREE_ADDRESSABLE (type))
924 return false;
925 return targetm.calls.callee_copies (pack_cumulative_args (ca), mode, type,
926 named_arg);
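/* A source-level picture of invisible reference with a caller-made
   copy, for a type the ABI refuses to pass by value directly.  The
   hypothetical lowering of "callee (*p)" makes the copy in the caller
   and passes only its address; with callee copies the temporary would
   be materialized on the callee's side instead.  */
#if 0
struct big { char bytes[4096]; };
extern void callee_by_ref (struct big *);

static void
caller (struct big *p)
{
  struct big tmp = *p;		/* the caller's hidden copy */
  callee_by_ref (&tmp);		/* only the address travels */
}
#endif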
930 /* Precompute all register parameters as described by ARGS, storing values
931 into fields within the ARGS array.
933 NUM_ACTUALS indicates the total number of elements in the ARGS array.
935 Set REG_PARM_SEEN if we encounter a register parameter. */
937 static void
938 precompute_register_parameters (int num_actuals, struct arg_data *args,
939 int *reg_parm_seen)
941 int i;
943 *reg_parm_seen = 0;
945 for (i = 0; i < num_actuals; i++)
946 if (args[i].reg != 0 && ! args[i].pass_on_stack)
948 *reg_parm_seen = 1;
950 if (args[i].value == 0)
952 push_temp_slots ();
953 args[i].value = expand_normal (args[i].tree_value);
954 preserve_temp_slots (args[i].value);
955 pop_temp_slots ();
958 /* If we are to promote the function arg to a wider mode,
959 do it now. */
961 if (args[i].mode != TYPE_MODE (TREE_TYPE (args[i].tree_value)))
962 args[i].value
963 = convert_modes (args[i].mode,
964 TYPE_MODE (TREE_TYPE (args[i].tree_value)),
965 args[i].value, args[i].unsignedp);
967 /* If the value is a non-legitimate constant, force it into a
968 pseudo now. TLS symbols sometimes need a call to resolve. */
969 if (CONSTANT_P (args[i].value)
970 && !targetm.legitimate_constant_p (args[i].mode, args[i].value))
971 args[i].value = force_reg (args[i].mode, args[i].value);
973 /* If we're going to have to load the value by parts, pull the
974 parts into pseudos. The part extraction process can involve
975 non-trivial computation. */
976 if (GET_CODE (args[i].reg) == PARALLEL)
978 tree type = TREE_TYPE (args[i].tree_value);
979 args[i].parallel_value
980 = emit_group_load_into_temps (args[i].reg, args[i].value,
981 type, int_size_in_bytes (type));
984 /* If the value is expensive, and we are inside an appropriately
985 short loop, put the value into a pseudo and then put the pseudo
986 into the hard reg.
988 For small register classes, also do this if this call uses
989 register parameters. This is to avoid reload conflicts while
990 loading the parameter registers. */
992 else if ((! (REG_P (args[i].value)
993 || (GET_CODE (args[i].value) == SUBREG
994 && REG_P (SUBREG_REG (args[i].value)))))
995 && args[i].mode != BLKmode
996 && (set_src_cost (args[i].value, args[i].mode,
997 optimize_insn_for_speed_p ())
998 > COSTS_N_INSNS (1))
999 && ((*reg_parm_seen
1000 && targetm.small_register_classes_for_mode_p (args[i].mode))
1001 || optimize))
1002 args[i].value = copy_to_mode_reg (args[i].mode, args[i].value);
1006 #ifdef REG_PARM_STACK_SPACE
1008 /* The argument list is the property of the called routine and it
1009 may clobber it. If the fixed area has been used for previous
1010 parameters, we must save and restore it. */
1012 static rtx
1013 save_fixed_argument_area (int reg_parm_stack_space, rtx argblock, int *low_to_save, int *high_to_save)
1015 int low;
1016 int high;
1018 /* Compute the boundary of the area that needs to be saved, if any. */
1019 high = reg_parm_stack_space;
1020 if (ARGS_GROW_DOWNWARD)
1021 high += 1;
1023 if (high > highest_outgoing_arg_in_use)
1024 high = highest_outgoing_arg_in_use;
1026 for (low = 0; low < high; low++)
1027 if (stack_usage_map[low] != 0)
1029 int num_to_save;
1030 machine_mode save_mode;
1031 int delta;
1032 rtx addr;
1033 rtx stack_area;
1034 rtx save_area;
1036 while (stack_usage_map[--high] == 0)
1037 ;
1039 *low_to_save = low;
1040 *high_to_save = high;
1042 num_to_save = high - low + 1;
1043 save_mode = mode_for_size (num_to_save * BITS_PER_UNIT, MODE_INT, 1);
1045 /* If we don't have the required alignment, we must do this
1046 in BLKmode. */
1047 if ((low & (MIN (GET_MODE_SIZE (save_mode),
1048 BIGGEST_ALIGNMENT / UNITS_PER_WORD) - 1)))
1049 save_mode = BLKmode;
1051 if (ARGS_GROW_DOWNWARD)
1052 delta = -high;
1053 else
1054 delta = low;
1056 addr = plus_constant (Pmode, argblock, delta);
1057 stack_area = gen_rtx_MEM (save_mode, memory_address (save_mode, addr));
1059 set_mem_align (stack_area, PARM_BOUNDARY);
1060 if (save_mode == BLKmode)
1062 save_area = assign_stack_temp (BLKmode, num_to_save);
1063 emit_block_move (validize_mem (save_area), stack_area,
1064 GEN_INT (num_to_save), BLOCK_OP_CALL_PARM);
1066 else
1068 save_area = gen_reg_rtx (save_mode);
1069 emit_move_insn (save_area, stack_area);
1072 return save_area;
1075 return NULL_RTX;
1078 static void
1079 restore_fixed_argument_area (rtx save_area, rtx argblock, int high_to_save, int low_to_save)
1081 machine_mode save_mode = GET_MODE (save_area);
1082 int delta;
1083 rtx addr, stack_area;
1085 if (ARGS_GROW_DOWNWARD)
1086 delta = -high_to_save;
1087 else
1088 delta = low_to_save;
1090 addr = plus_constant (Pmode, argblock, delta);
1091 stack_area = gen_rtx_MEM (save_mode, memory_address (save_mode, addr));
1092 set_mem_align (stack_area, PARM_BOUNDARY);
1094 if (save_mode != BLKmode)
1095 emit_move_insn (stack_area, save_area);
1096 else
1097 emit_block_move (stack_area, validize_mem (save_area),
1098 GEN_INT (high_to_save - low_to_save + 1),
1099 BLOCK_OP_CALL_PARM);
1101 #endif /* REG_PARM_STACK_SPACE */
1103 /* If any elements in ARGS refer to parameters that are to be passed in
1104 registers, but not in memory, and whose alignment does not permit a
1105 direct copy into registers, copy the values into a group of pseudos
1106 which we will later copy into the appropriate hard registers.
1108 Pseudos for each unaligned argument will be stored into the array
1109 args[argnum].aligned_regs. The caller is responsible for deallocating
1110 the aligned_regs array if it is nonzero. */
1112 static void
1113 store_unaligned_arguments_into_pseudos (struct arg_data *args, int num_actuals)
1115 int i, j;
1117 for (i = 0; i < num_actuals; i++)
1118 if (args[i].reg != 0 && ! args[i].pass_on_stack
1119 && GET_CODE (args[i].reg) != PARALLEL
1120 && args[i].mode == BLKmode
1121 && MEM_P (args[i].value)
1122 && (MEM_ALIGN (args[i].value)
1123 < (unsigned int) MIN (BIGGEST_ALIGNMENT, BITS_PER_WORD)))
1125 int bytes = int_size_in_bytes (TREE_TYPE (args[i].tree_value));
1126 int endian_correction = 0;
1128 if (args[i].partial)
1130 gcc_assert (args[i].partial % UNITS_PER_WORD == 0);
1131 args[i].n_aligned_regs = args[i].partial / UNITS_PER_WORD;
1133 else
1135 args[i].n_aligned_regs
1136 = (bytes + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
1139 args[i].aligned_regs = XNEWVEC (rtx, args[i].n_aligned_regs);
1141 /* Structures smaller than a word are normally aligned to the
1142 least significant byte. On a BYTES_BIG_ENDIAN machine,
1143 this means we must skip the empty high order bytes when
1144 calculating the bit offset. */
1145 if (bytes < UNITS_PER_WORD
1146 #ifdef BLOCK_REG_PADDING
1147 && (BLOCK_REG_PADDING (args[i].mode,
1148 TREE_TYPE (args[i].tree_value), 1)
1149 == downward)
1150 #else
1151 && BYTES_BIG_ENDIAN
1152 #endif
1154 endian_correction = BITS_PER_WORD - bytes * BITS_PER_UNIT;
1156 for (j = 0; j < args[i].n_aligned_regs; j++)
1158 rtx reg = gen_reg_rtx (word_mode);
1159 rtx word = operand_subword_force (args[i].value, j, BLKmode);
1160 int bitsize = MIN (bytes * BITS_PER_UNIT, BITS_PER_WORD);
1162 args[i].aligned_regs[j] = reg;
1163 word = extract_bit_field (word, bitsize, 0, 1, NULL_RTX,
1164 word_mode, word_mode, false);
1166 /* There is no need to restrict this code to loading items
1167 in TYPE_ALIGN sized hunks. The bitfield instructions can
1168 load up entire word sized registers efficiently.
1170 ??? This may not be needed anymore.
1171 We used to emit a clobber here, but that doesn't let later
1172 passes optimize the instructions we emit. By storing 0 into
1173 the register, later passes know the first AND to zero out the
1174 bitfield being set in the register is unnecessary. The store
1175 of 0 will be deleted, as will at least the first AND. */
1177 emit_move_insn (reg, const0_rtx);
1179 bytes -= bitsize / BITS_PER_UNIT;
1180 store_bit_field (reg, bitsize, endian_correction, 0, 0,
1181 word_mode, word, false);
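/* The endian_correction arithmetic above, worked through for a 3-byte
   struct on a 32-bit-word target: little-endian leaves the data in
   bits 0..23 of the word (offset 0), while big-endian padding
   downward places it in bits 8..31, i.e. shifted up by
   BITS_PER_WORD - bytes * BITS_PER_UNIT = 32 - 24 = 8.  Hypothetical
   helper, for exposition only.  */
#if 0
static int
block_reg_bit_offset (int bytes, int pads_downward)
{
  int bitsize = bytes * 8;	/* 24 for a 3-byte struct */
  return pads_downward ? 32 - bitsize : 0;
}
#endif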
1186 /* The limit set by -Walloc-size-larger-than=. */
1187 static GTY(()) tree alloc_object_size_limit;
1189 /* Initialize ALLOC_OBJECT_SIZE_LIMIT based on the -Walloc-size-larger-than=
1190 setting if the option is specified, or to the maximum object size if it
1191 is not. Return the initialized value. */
1193 static tree
1194 alloc_max_size (void)
1196 if (!alloc_object_size_limit)
1198 alloc_object_size_limit = TYPE_MAX_VALUE (ssizetype);
1200 unsigned HOST_WIDE_INT unit = 1;
1202 char *end;
1203 errno = 0;
1204 unsigned HOST_WIDE_INT limit
1205 = warn_alloc_size_limit ? strtoull (warn_alloc_size_limit, &end, 10) : 0;
1207 if (limit && !errno)
1209 if (end && *end)
1211 /* Numeric option arguments are at most INT_MAX. Make it
1212 possible to specify a larger value by accepting common
1213 suffixes. */
1214 if (!strcmp (end, "kB"))
1215 unit = 1000;
1216 else if (!strcasecmp (end, "KiB") || !strcmp (end, "KB"))
1217 unit = 1024;
1218 else if (!strcmp (end, "MB"))
1219 unit = 1000LU * 1000;
1220 else if (!strcasecmp (end, "MiB"))
1221 unit = 1024LU * 1024;
1222 else if (!strcasecmp (end, "GB"))
1223 unit = 1000LU * 1000 * 1000;
1224 else if (!strcasecmp (end, "GiB"))
1225 unit = 1024LU * 1024 * 1024;
1226 else if (!strcasecmp (end, "TB"))
1227 unit = 1000LU * 1000 * 1000 * 1000;
1228 else if (!strcasecmp (end, "TiB"))
1229 unit = 1024LU * 1024 * 1024 * 1024;
1230 else if (!strcasecmp (end, "PB"))
1231 unit = 1000LU * 1000 * 1000 * 1000 * 1000;
1232 else if (!strcasecmp (end, "PiB"))
1233 unit = 1024LU * 1024 * 1024 * 1024 * 1024;
1234 else if (!strcasecmp (end, "EB"))
1235 unit = 1000LU * 1000 * 1000 * 1000 * 1000 * 1000;
1236 else if (!strcasecmp (end, "EiB"))
1237 unit = 1024LU * 1024 * 1024 * 1024 * 1024 * 1024;
1238 else
1239 unit = 0;
1242 if (unit)
1243 alloc_object_size_limit = build_int_cst (ssizetype, limit * unit);
1246 return alloc_object_size_limit;
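/* How the suffix handling above plays out: for
   -Walloc-size-larger-than=1MiB, strtoull stops at 'M', so LIMIT is 1
   and END is "MiB", giving UNIT = 1024 * 1024 and a byte limit of
   1048576; "1MB" would use UNIT = 1000 * 1000 instead.  A minimal
   standalone sketch, assuming <stdlib.h>, <string.h> and POSIX
   strcasecmp from <strings.h>.  */
#if 0
static unsigned long long
parse_size_with_suffix (const char *arg)
{
  char *end;
  unsigned long long limit = strtoull (arg, &end, 10);
  unsigned long long unit = 1;
  if (*end)
    {
      if (!strcmp (end, "kB"))
	unit = 1000;
      else if (!strcasecmp (end, "KiB") || !strcmp (end, "KB"))
	unit = 1024;
      else if (!strcmp (end, "MB"))
	unit = 1000ULL * 1000;
      else if (!strcasecmp (end, "MiB"))
	unit = 1024ULL * 1024;
      /* ... and so on up through EiB ...  */
      else
	unit = 0;		/* unrecognized suffix */
    }
  return limit * unit;
}
#endif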
1249 /* Return true if the type of OP is signed, looking through any casts
1250 to an unsigned type. */
1252 static bool
1253 operand_signed_p (tree op)
1255 if (TREE_CODE (op) == SSA_NAME)
1257 gimple *def = SSA_NAME_DEF_STMT (op);
1258 if (is_gimple_assign (def))
1260 /* In an assignment involving a cast, ignore the type
1261 of the cast and consider the type of its operand. */
1262 tree_code code = gimple_assign_rhs_code (def);
1263 if (code == NOP_EXPR)
1264 op = gimple_assign_rhs1 (def);
1266 else if (gimple_code (def) == GIMPLE_PHI)
1268 /* In a phi, a constant argument may be unsigned even
1269 if in the source it's signed and negative. Ignore
1270 those and consider the result of a phi signed if
1271 all its non-constant operands are. */
1272 unsigned nargs = gimple_phi_num_args (def);
1273 for (unsigned i = 0; i != nargs; ++i)
1275 tree op = gimple_phi_arg_def (def, i);
1276 if (TREE_CODE (op) != INTEGER_CST
1277 && !operand_signed_p (op))
1278 return false;
1281 return true;
1285 return !TYPE_UNSIGNED (TREE_TYPE (op));
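/* Why operand_signed_p peels casts: in code like the hypothetical
   example below, the size reaches the allocator as an unsigned value,
   but it originates from a signed, possibly negative variable, and
   looking through the NOP_EXPR recovers that fact.  Assumes
   <stdlib.h> and <stddef.h>.  */
#if 0
void *
example (int n)			/* N may be negative */
{
  size_t sz = (size_t) n;	/* unsigned cast: huge value if n < 0 */
  return malloc (sz);		/* the anti-range logic below treats
				   SZ's origin as signed */
}
#endif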
1288 /* Diagnose a call EXP to function FN decorated with attribute alloc_size
1289 whose argument numbers given by IDX with values given by ARGS exceed
1290 the maximum object size or cause an unsigned overflow (wrapping) when
1291 multiplied. When ARGS[0] is null the function does nothing. ARGS[1]
1292 may be null for functions like malloc, and non-null for those like
1293 calloc that are decorated with a two-argument attribute alloc_size. */
1295 void
1296 maybe_warn_alloc_args_overflow (tree fn, tree exp, tree args[2], int idx[2])
1298 /* The range each of the (up to) two arguments is known to be in. */
1299 tree argrange[2][2] = { { NULL_TREE, NULL_TREE }, { NULL_TREE, NULL_TREE } };
1301 /* Maximum object size set by -Walloc-size-larger-than= or SIZE_MAX / 2. */
1302 tree maxobjsize = alloc_max_size ();
1304 location_t loc = EXPR_LOCATION (exp);
1306 bool warned = false;
1308 /* Validate each argument individually. */
1309 for (unsigned i = 0; i != 2 && args[i]; ++i)
1311 if (TREE_CODE (args[i]) == INTEGER_CST)
1313 argrange[i][0] = args[i];
1314 argrange[i][1] = args[i];
1316 if (tree_int_cst_lt (args[i], integer_zero_node))
1318 warned = warning_at (loc, OPT_Walloc_size_larger_than_,
1319 "argument %i value %qE is negative",
1320 idx[i] + 1, args[i]);
1322 else if (integer_zerop (args[i]))
1324 /* Avoid issuing -Walloc-zero for allocation functions other
1325 than __builtin_alloca that are declared with attribute
1326 returns_nonnull because there's no portability risk. This
1327 avoids warning for such calls to libiberty's xmalloc and
1328 friends.
1329 Also avoid issuing the warning for calls to a function named
1330 "alloca". */
1331 if ((DECL_FUNCTION_CODE (fn) == BUILT_IN_ALLOCA
1332 && IDENTIFIER_LENGTH (DECL_NAME (fn)) != 6)
1333 || (DECL_FUNCTION_CODE (fn) != BUILT_IN_ALLOCA
1334 && !lookup_attribute ("returns_nonnull",
1335 TYPE_ATTRIBUTES (TREE_TYPE (fn)))))
1336 warned = warning_at (loc, OPT_Walloc_zero,
1337 "argument %i value is zero",
1338 idx[i] + 1);
1340 else if (tree_int_cst_lt (maxobjsize, args[i]))
1342 /* G++ emits calls to ::operator new[](SIZE_MAX) in C++98
1343 mode and with -fno-exceptions as a way to indicate array
1344 size overflow. There's no good way to detect C++98 here
1345 so avoid diagnosing these calls for all C++ modes. */
1346 if (i == 0
1347 && !args[1]
1348 && lang_GNU_CXX ()
1349 && DECL_IS_OPERATOR_NEW (fn)
1350 && integer_all_onesp (args[i]))
1351 continue;
1353 warned = warning_at (loc, OPT_Walloc_size_larger_than_,
1354 "argument %i value %qE exceeds "
1355 "maximum object size %E",
1356 idx[i] + 1, args[i], maxobjsize);
1359 else if (TREE_CODE (args[i]) == SSA_NAME)
1361 tree type = TREE_TYPE (args[i]);
1363 wide_int min, max;
1364 value_range_type range_type = get_range_info (args[i], &min, &max);
1365 if (range_type == VR_RANGE)
1367 argrange[i][0] = wide_int_to_tree (type, min);
1368 argrange[i][1] = wide_int_to_tree (type, max);
1370 else if (range_type == VR_ANTI_RANGE)
1372 /* For an anti-range, if the type of the formal argument
1373 is unsigned and the bounds of the range are of opposite
1374 signs when interpreted as signed, check to see if the
1375 type of the actual argument is signed. If so, the lower
1376 bound must be taken to be zero (rather than a large
1377 positive value corresponding to the actual lower bound
1378 interpreted as unsigned) and there is nothing else that
1379 can be inferred from it. */
1380 --min;
1381 ++max;
1382 wide_int zero = wi::uhwi (0, TYPE_PRECISION (type));
1383 if (TYPE_UNSIGNED (type)
1384 && wi::lts_p (zero, min) && wi::lts_p (max, zero)
1385 && operand_signed_p (args[i]))
1386 continue;
1388 argrange[i][0] = wide_int_to_tree (type, max);
1389 argrange[i][1] = wide_int_to_tree (type, min);
1391 /* Verify that the anti-range doesn't make all arguments
1392 invalid (treat the anti-range ~[0, 0] as invalid). */
1393 if (tree_int_cst_lt (maxobjsize, argrange[i][0])
1394 && tree_int_cst_le (argrange[i][1], integer_zero_node))
1396 warned
1397 = warning_at (loc, OPT_Walloc_size_larger_than_,
1398 (TYPE_UNSIGNED (type)
1399 ? G_("argument %i range [%E, %E] exceeds "
1400 "maximum object size %E")
1401 : G_("argument %i range [%E, %E] is both "
1402 "negative and exceeds maximum object "
1403 "size %E")),
1404 idx[i] + 1, argrange[i][0],
1405 argrange[i][1], maxobjsize);
1407 continue;
1409 else
1410 continue;
1412 /* Verify that the argument's range is not negative (including
1413 upper bound of zero). */
1414 if (tree_int_cst_lt (argrange[i][0], integer_zero_node)
1415 && tree_int_cst_le (argrange[i][1], integer_zero_node))
1417 warned = warning_at (loc, OPT_Walloc_size_larger_than_,
1418 "argument %i range [%E, %E] is negative",
1419 idx[i] + 1, argrange[i][0], argrange[i][1]);
1421 else if (tree_int_cst_lt (maxobjsize, argrange[i][0]))
1423 warned = warning_at (loc, OPT_Walloc_size_larger_than_,
1424 "argument %i range [%E, %E] exceeds "
1425 "maximum object size %E",
1426 idx[i] + 1, argrange[i][0], argrange[i][1],
1427 maxobjsize);
1432 if (!argrange[0][0])
1433 return;
1435 /* For a two-argument alloc_size, validate the product of the two
1436 arguments if both of their values or ranges are known. */
1437 if (!warned && tree_fits_uhwi_p (argrange[0][0])
1438 && argrange[1][0] && tree_fits_uhwi_p (argrange[1][0])
1439 && !integer_onep (argrange[0][0])
1440 && !integer_onep (argrange[1][0]))
1442 /* Check for overflow in the product of a function decorated with
1443 attribute alloc_size (X, Y). */
1444 unsigned szprec = TYPE_PRECISION (size_type_node);
1445 wide_int x = wi::to_wide (argrange[0][0], szprec);
1446 wide_int y = wi::to_wide (argrange[1][0], szprec);
1448 bool vflow;
1449 wide_int prod = wi::umul (x, y, &vflow);
1451 if (vflow)
1452 warned = warning_at (loc, OPT_Walloc_size_larger_than_,
1453 "product %<%E * %E%> of arguments %i and %i "
1454 "exceeds %<SIZE_MAX%>",
1455 argrange[0][0], argrange[1][0],
1456 idx[0] + 1, idx[1] + 1);
1457 else if (wi::ltu_p (wi::to_wide (maxobjsize, szprec), prod))
1458 warned = warning_at (loc, OPT_Walloc_size_larger_than_,
1459 "product %<%E * %E%> of arguments %i and %i "
1460 "exceeds maximum object size %E",
1461 argrange[0][0], argrange[1][0],
1462 idx[0] + 1, idx[1] + 1,
1463 maxobjsize);
1465 if (warned)
1467 /* Print the full range of each of the two arguments to make
1468 it clear when it is, in fact, in a range and not constant. */
1469 if (argrange[0][0] != argrange [0][1])
1470 inform (loc, "argument %i in the range [%E, %E]",
1471 idx[0] + 1, argrange[0][0], argrange[0][1]);
1472 if (argrange[1][0] != argrange [1][1])
1473 inform (loc, "argument %i in the range [%E, %E]",
1474 idx[1] + 1, argrange[1][0], argrange[1][1]);
1478 if (warned)
1480 location_t fnloc = DECL_SOURCE_LOCATION (fn);
1482 if (DECL_IS_BUILTIN (fn))
1483 inform (loc,
1484 "in a call to built-in allocation function %qD", fn);
1485 else
1486 inform (fnloc,
1487 "in a call to allocation function %qD declared here", fn);
1491 /* Issue an error if CALL_EXPR was flagged as requiring
1492 tail-call optimization. */
1494 static void
1495 maybe_complain_about_tail_call (tree call_expr, const char *reason)
1497 gcc_assert (TREE_CODE (call_expr) == CALL_EXPR);
1498 if (!CALL_EXPR_MUST_TAIL_CALL (call_expr))
1499 return;
1501 error_at (EXPR_LOCATION (call_expr), "cannot tail-call: %s", reason);
1504 /* Fill in ARGS_SIZE and ARGS array based on the parameters found in
1505 CALL_EXPR EXP.
1507 NUM_ACTUALS is the total number of parameters.
1509 N_NAMED_ARGS is the total number of named arguments.
1511 STRUCT_VALUE_ADDR_VALUE is the implicit argument for a struct return
1512 value, or null.
1514 FNDECL is the tree code for the target of this call (if known)
1516 ARGS_SO_FAR holds state needed by the target to know where to place
1517 the next argument.
1519 REG_PARM_STACK_SPACE is the number of bytes of stack space reserved
1520 for arguments which are passed in registers.
1522 OLD_STACK_LEVEL is a pointer to an rtx which holds the old stack level
1523 and may be modified by this routine.
1525 OLD_PENDING_ADJ, MUST_PREALLOCATE and FLAGS are pointers to integer
1526 flags which may be modified by this routine.
1528 MAY_TAILCALL is cleared if we encounter an invisible pass-by-reference
1529 that requires allocation of stack space.
1531 CALL_FROM_THUNK_P is true if this call is the jump from a thunk to
1532 the thunked-to function. */
1534 static void
1535 initialize_argument_information (int num_actuals ATTRIBUTE_UNUSED,
1536 struct arg_data *args,
1537 struct args_size *args_size,
1538 int n_named_args ATTRIBUTE_UNUSED,
1539 tree exp, tree struct_value_addr_value,
1540 tree fndecl, tree fntype,
1541 cumulative_args_t args_so_far,
1542 int reg_parm_stack_space,
1543 rtx *old_stack_level, int *old_pending_adj,
1544 int *must_preallocate, int *ecf_flags,
1545 bool *may_tailcall, bool call_from_thunk_p)
1547 CUMULATIVE_ARGS *args_so_far_pnt = get_cumulative_args (args_so_far);
1548 location_t loc = EXPR_LOCATION (exp);
1550 /* Count arg position in the order args appear. */
1551 int argpos;
1553 int i;
1555 args_size->constant = 0;
1556 args_size->var = 0;
1558 bitmap_obstack_initialize (NULL);
1560 /* In this loop, we consider args in the order they are written.
1561 We fill up ARGS from the back. */
1563 i = num_actuals - 1;
1565 int j = i, ptr_arg = -1;
1566 call_expr_arg_iterator iter;
1567 tree arg;
1568 bitmap slots = NULL;
1570 if (struct_value_addr_value)
1572 args[j].tree_value = struct_value_addr_value;
1573 j--;
1575 /* If we pass a structure address then we need to
1576 create bounds for it. Since the created bounds are
1577 a call statement, we expand it right here to avoid
1578 fixing all other places where it may be expanded. */
1579 if (CALL_WITH_BOUNDS_P (exp))
1581 args[j].value = gen_reg_rtx (targetm.chkp_bound_mode ());
1582 args[j].tree_value
1583 = chkp_make_bounds_for_struct_addr (struct_value_addr_value);
1584 expand_expr_real (args[j].tree_value, args[j].value, VOIDmode,
1585 EXPAND_NORMAL, 0, false);
1586 args[j].pointer_arg = j + 1;
1587 j--;
1590 argpos = 0;
1591 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
1593 tree argtype = TREE_TYPE (arg);
1595 /* Remember the last param with a pointer and associate it
1596 with the following pointer bounds. */
1597 if (CALL_WITH_BOUNDS_P (exp)
1598 && chkp_type_has_pointer (argtype))
1600 if (slots)
1601 BITMAP_FREE (slots);
1602 ptr_arg = j;
1603 if (!BOUNDED_TYPE_P (argtype))
1605 slots = BITMAP_ALLOC (NULL);
1606 chkp_find_bound_slots (argtype, slots);
1609 else if (CALL_WITH_BOUNDS_P (exp)
1610 && pass_by_reference (NULL, TYPE_MODE (argtype), argtype,
1611 argpos < n_named_args))
1613 if (slots)
1614 BITMAP_FREE (slots);
1615 ptr_arg = j;
1617 else if (POINTER_BOUNDS_TYPE_P (argtype))
1619 /* We expect bounds in instrumented calls only.
1620 Otherwise it is a sign we lost the flag due to some optimization
1621 and may emit call args incorrectly. */
1622 gcc_assert (CALL_WITH_BOUNDS_P (exp));
1624 /* For structures look for the next available pointer. */
1625 if (ptr_arg != -1 && slots)
1627 unsigned bnd_no = bitmap_first_set_bit (slots);
1628 args[j].pointer_offset =
1629 bnd_no * POINTER_SIZE / BITS_PER_UNIT;
1631 bitmap_clear_bit (slots, bnd_no);
1633 /* Check we have no more pointers in the structure. */
1634 if (bitmap_empty_p (slots))
1635 BITMAP_FREE (slots);
1637 args[j].pointer_arg = ptr_arg;
1639 /* Check we covered all pointers in the previous
1640 non-bounds arg. */
1641 if (!slots)
1642 ptr_arg = -1;
1644 else
1645 ptr_arg = -1;
1647 if (targetm.calls.split_complex_arg
1648 && argtype
1649 && TREE_CODE (argtype) == COMPLEX_TYPE
1650 && targetm.calls.split_complex_arg (argtype))
1652 tree subtype = TREE_TYPE (argtype);
1653 args[j].tree_value = build1 (REALPART_EXPR, subtype, arg);
1654 j--;
1655 args[j].tree_value = build1 (IMAGPART_EXPR, subtype, arg);
1657 else
1658 args[j].tree_value = arg;
1659 j--;
1660 argpos++;
1663 if (slots)
1664 BITMAP_FREE (slots);
1667 bitmap_obstack_release (NULL);
1669 /* Extract attribute alloc_size and if set, store the indices of
1670 the corresponding arguments in ALLOC_IDX, and then the actual
1671 argument(s) at those indices in ALLOC_ARGS. */
1672 int alloc_idx[2] = { -1, -1 };
1673 if (tree alloc_size
1674 = (fndecl ? lookup_attribute ("alloc_size",
1675 TYPE_ATTRIBUTES (TREE_TYPE (fndecl)))
1676 : NULL_TREE))
1678 tree args = TREE_VALUE (alloc_size);
1679 alloc_idx[0] = TREE_INT_CST_LOW (TREE_VALUE (args)) - 1;
1680 if (TREE_CHAIN (args))
1681 alloc_idx[1] = TREE_INT_CST_LOW (TREE_VALUE (TREE_CHAIN (args))) - 1;
1684 /* Array for up to the two attribute alloc_size arguments. */
1685 tree alloc_args[] = { NULL_TREE, NULL_TREE };
1687 /* I counts args in order (to be) pushed; ARGPOS counts in order written. */
1688 for (argpos = 0; argpos < num_actuals; i--, argpos++)
1690 tree type = TREE_TYPE (args[i].tree_value);
1691 int unsignedp;
1692 machine_mode mode;
1694 /* Replace erroneous argument with constant zero. */
1695 if (type == error_mark_node || !COMPLETE_TYPE_P (type))
1696 args[i].tree_value = integer_zero_node, type = integer_type_node;
1698 /* If TYPE is a transparent union or record, pass things the way
1699 we would pass the first field of the union or record. We have
1700 already verified that the modes are the same. */
1701 if ((TREE_CODE (type) == UNION_TYPE || TREE_CODE (type) == RECORD_TYPE)
1702 && TYPE_TRANSPARENT_AGGR (type))
1703 type = TREE_TYPE (first_field (type));
1705 /* Decide where to pass this arg.
1707 args[i].reg is nonzero if all or part is passed in registers.
1709 args[i].partial is nonzero if part but not all is passed in registers,
1710 and the exact value says how many bytes are passed in registers.
1712 args[i].pass_on_stack is nonzero if the argument must at least be
1713 computed on the stack. It may then be loaded back into registers
1714 if args[i].reg is nonzero.
1716 These decisions are driven by the FUNCTION_... macros and must agree
1717 with those made by function.c. */
1719 /* See if this argument should be passed by invisible reference. */
1720 if (pass_by_reference (args_so_far_pnt, TYPE_MODE (type),
1721 type, argpos < n_named_args))
1723 bool callee_copies;
1724 tree base = NULL_TREE;
1726 callee_copies
1727 = reference_callee_copied (args_so_far_pnt, TYPE_MODE (type),
1728 type, argpos < n_named_args);
1730 /* If we're compiling a thunk, pass through invisible references
1731 instead of making a copy. */
1732 if (call_from_thunk_p
1733 || (callee_copies
1734 && !TREE_ADDRESSABLE (type)
1735 && (base = get_base_address (args[i].tree_value))
1736 && TREE_CODE (base) != SSA_NAME
1737 && (!DECL_P (base) || MEM_P (DECL_RTL (base)))))
1739 /* We may have turned the parameter value into an SSA name.
1740 Go back to the original parameter so we can take the
1741 address. */
1742 if (TREE_CODE (args[i].tree_value) == SSA_NAME)
1744 gcc_assert (SSA_NAME_IS_DEFAULT_DEF (args[i].tree_value));
1745 args[i].tree_value = SSA_NAME_VAR (args[i].tree_value);
1746 gcc_assert (TREE_CODE (args[i].tree_value) == PARM_DECL);
1748 /* Argument setup code may have copied the value to a register. We
1749 revert that optimization now because the tail call code must
1750 use the original location. */
1751 if (TREE_CODE (args[i].tree_value) == PARM_DECL
1752 && !MEM_P (DECL_RTL (args[i].tree_value))
1753 && DECL_INCOMING_RTL (args[i].tree_value)
1754 && MEM_P (DECL_INCOMING_RTL (args[i].tree_value)))
1755 set_decl_rtl (args[i].tree_value,
1756 DECL_INCOMING_RTL (args[i].tree_value));
1758 mark_addressable (args[i].tree_value);
1760 /* We can't use sibcalls if a callee-copied argument is
1761 stored in the current function's frame. */
1762 if (!call_from_thunk_p && DECL_P (base) && !TREE_STATIC (base))
1764 *may_tailcall = false;
1765 maybe_complain_about_tail_call (exp,
1766 "a callee-copied argument is"
1767 " stored in the current"
1768 " function's frame");
1771 args[i].tree_value = build_fold_addr_expr_loc (loc,
1772 args[i].tree_value);
1773 type = TREE_TYPE (args[i].tree_value);
1775 if (*ecf_flags & ECF_CONST)
1776 *ecf_flags &= ~(ECF_CONST | ECF_LOOPING_CONST_OR_PURE);
1778 else
1780 /* We make a copy of the object and pass the address to the
1781 function being called. */
1782 rtx copy;
1784 if (!COMPLETE_TYPE_P (type)
1785 || TREE_CODE (TYPE_SIZE_UNIT (type)) != INTEGER_CST
1786 || (flag_stack_check == GENERIC_STACK_CHECK
1787 && compare_tree_int (TYPE_SIZE_UNIT (type),
1788 STACK_CHECK_MAX_VAR_SIZE) > 0))
1790 /* This is a variable-sized object. Make space on the stack
1791 for it. */
1792 rtx size_rtx = expr_size (args[i].tree_value);
1794 if (*old_stack_level == 0)
1796 emit_stack_save (SAVE_BLOCK, old_stack_level);
1797 *old_pending_adj = pending_stack_adjust;
1798 pending_stack_adjust = 0;
1801 /* We can pass TRUE as the 4th argument because we just
1802 saved the stack pointer and will restore it right after
1803 the call. */
1804 copy = allocate_dynamic_stack_space (size_rtx,
1805 TYPE_ALIGN (type),
1806 TYPE_ALIGN (type),
1807 true);
1808 copy = gen_rtx_MEM (BLKmode, copy);
1809 set_mem_attributes (copy, type, 1);
1811 else
1812 copy = assign_temp (type, 1, 0);
1814 store_expr (args[i].tree_value, copy, 0, false, false);
1816 /* Just change the const function to pure and then let
1817 the next test clear the pure based on
1818 callee_copies. */
1819 if (*ecf_flags & ECF_CONST)
1821 *ecf_flags &= ~ECF_CONST;
1822 *ecf_flags |= ECF_PURE;
1825 if (!callee_copies && *ecf_flags & ECF_PURE)
1826 *ecf_flags &= ~(ECF_PURE | ECF_LOOPING_CONST_OR_PURE);
1828 args[i].tree_value
1829 = build_fold_addr_expr_loc (loc, make_tree (type, copy));
1830 type = TREE_TYPE (args[i].tree_value);
1831 *may_tailcall = false;
1832 maybe_complain_about_tail_call (exp,
1833 "argument must be passed"
1834 " by copying");
1838 unsignedp = TYPE_UNSIGNED (type);
1839 mode = promote_function_mode (type, TYPE_MODE (type), &unsignedp,
1840 fndecl ? TREE_TYPE (fndecl) : fntype, 0);
1842 args[i].unsignedp = unsignedp;
1843 args[i].mode = mode;
1845 args[i].reg = targetm.calls.function_arg (args_so_far, mode, type,
1846 argpos < n_named_args);
1848 if (args[i].reg && CONST_INT_P (args[i].reg))
1850 args[i].special_slot = args[i].reg;
1851 args[i].reg = NULL;
1854 /* If this is a sibling call and the machine has register windows, the
1855 register window has to be unwound before calling the routine, so
1856 arguments have to go into the incoming registers. */
1857 if (targetm.calls.function_incoming_arg != targetm.calls.function_arg)
1858 args[i].tail_call_reg
1859 = targetm.calls.function_incoming_arg (args_so_far, mode, type,
1860 argpos < n_named_args);
1861 else
1862 args[i].tail_call_reg = args[i].reg;
1864 if (args[i].reg)
1865 args[i].partial
1866 = targetm.calls.arg_partial_bytes (args_so_far, mode, type,
1867 argpos < n_named_args);
1869 args[i].pass_on_stack = targetm.calls.must_pass_in_stack (mode, type);
1871 /* If FUNCTION_ARG returned a (parallel [(expr_list (nil) ...) ...]),
1872 it means that we are to pass this arg in the register(s) designated
1873 by the PARALLEL, but also to pass it in the stack. */
1874 if (args[i].reg && GET_CODE (args[i].reg) == PARALLEL
1875 && XEXP (XVECEXP (args[i].reg, 0, 0), 0) == 0)
1876 args[i].pass_on_stack = 1;
1878 /* If this is an addressable type, we must preallocate the stack
1879 since we must evaluate the object into its final location.
1881 If this is to be passed in both registers and the stack, it is simpler
1882 to preallocate. */
1883 if (TREE_ADDRESSABLE (type)
1884 || (args[i].pass_on_stack && args[i].reg != 0))
1885 *must_preallocate = 1;
1887 /* No stack allocation or padding for bounds. */
1888 if (POINTER_BOUNDS_P (args[i].tree_value))
1890 /* Compute the stack-size of this argument. */
1891 else if (args[i].reg == 0 || args[i].partial != 0
1892 || reg_parm_stack_space > 0
1893 || args[i].pass_on_stack)
1894 locate_and_pad_parm (mode, type,
1895 #ifdef STACK_PARMS_IN_REG_PARM_AREA
1897 #else
1898 args[i].reg != 0,
1899 #endif
1900 reg_parm_stack_space,
1901 args[i].pass_on_stack ? 0 : args[i].partial,
1902 fndecl, args_size, &args[i].locate);
1903 #ifdef BLOCK_REG_PADDING
1904 else
1905 /* The argument is passed entirely in registers. Determine at
1906 which end it should be padded. */
1907 args[i].locate.where_pad =
1908 BLOCK_REG_PADDING (mode, type,
1909 int_size_in_bytes (type) <= UNITS_PER_WORD);
1910 #endif
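/* Illustrative example: a 3-byte BLKmode argument passed in a single
   register on a target defining BLOCK_REG_PADDING may be marked as
   padded downward, and the shifts in load_register_parameters then
   reposition the value within the word accordingly.  */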
1912 /* Update ARGS_SIZE, the total stack space for args so far. */
1914 args_size->constant += args[i].locate.size.constant;
1915 if (args[i].locate.size.var)
1916 ADD_PARM_SIZE (*args_size, args[i].locate.size.var);
1918 /* Increment ARGS_SO_FAR, which has info about which arg-registers
1919 have been used, etc. */
1921 targetm.calls.function_arg_advance (args_so_far, TYPE_MODE (type),
1922 type, argpos < n_named_args);
1924 /* Store argument values for functions decorated with attribute
1925 alloc_size. */
1926 if (argpos == alloc_idx[0])
1927 alloc_args[0] = args[i].tree_value;
1928 else if (argpos == alloc_idx[1])
1929 alloc_args[1] = args[i].tree_value;
1932 if (alloc_args[0])
1934 /* Check the arguments of functions decorated with attribute
1935 alloc_size. */
1936 maybe_warn_alloc_args_overflow (fndecl, exp, alloc_args, alloc_idx);
1940 /* Update ARGS_SIZE to contain the total size for the argument block.
1941 Return the original constant component of the argument block's size.
1943 REG_PARM_STACK_SPACE holds the number of bytes of stack space reserved
1944 for arguments passed in registers. */
1946 static int
1947 compute_argument_block_size (int reg_parm_stack_space,
1948 struct args_size *args_size,
1949 tree fndecl ATTRIBUTE_UNUSED,
1950 tree fntype ATTRIBUTE_UNUSED,
1951 int preferred_stack_boundary ATTRIBUTE_UNUSED)
1953 int unadjusted_args_size = args_size->constant;
1955 /* For accumulate outgoing args mode we don't need to align, since the frame
1956 will already be aligned. Align to STACK_BOUNDARY in order to prevent
1957 backends from generating misaligned frame sizes. */
1958 if (ACCUMULATE_OUTGOING_ARGS && preferred_stack_boundary > STACK_BOUNDARY)
1959 preferred_stack_boundary = STACK_BOUNDARY;
1961 /* Compute the actual size of the argument block required. The variable
1962 and constant sizes must be combined, the size may have to be rounded,
1963 and there may be a minimum required size. */
1965 if (args_size->var)
1967 args_size->var = ARGS_SIZE_TREE (*args_size);
1968 args_size->constant = 0;
1970 preferred_stack_boundary /= BITS_PER_UNIT;
1971 if (preferred_stack_boundary > 1)
1973 /* We don't handle this case yet. To handle it correctly we have
1974 to add the delta, round and subtract the delta.
1975 Currently no machine description requires this support. */
1976 gcc_assert (!(stack_pointer_delta & (preferred_stack_boundary - 1)));
1977 args_size->var = round_up (args_size->var, preferred_stack_boundary);
1980 if (reg_parm_stack_space > 0)
1982 args_size->var
1983 = size_binop (MAX_EXPR, args_size->var,
1984 ssize_int (reg_parm_stack_space));
1986 /* The area corresponding to register parameters does not count
1987 toward the size of the block we need. So make the adjustment. */
1988 if (! OUTGOING_REG_PARM_STACK_SPACE ((!fndecl ? fntype : TREE_TYPE (fndecl))))
1989 args_size->var
1990 = size_binop (MINUS_EXPR, args_size->var,
1991 ssize_int (reg_parm_stack_space));
1994 else
1996 preferred_stack_boundary /= BITS_PER_UNIT;
1997 if (preferred_stack_boundary < 1)
1998 preferred_stack_boundary = 1;
1999 args_size->constant = (((args_size->constant
2000 + stack_pointer_delta
2001 + preferred_stack_boundary - 1)
2002 / preferred_stack_boundary
2003 * preferred_stack_boundary)
2004 - stack_pointer_delta);
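/* Worked example of the rounding above (illustrative numbers): with
   args_size->constant == 20, stack_pointer_delta == 4 and a 16-byte
   preferred boundary, (20 + 4 + 15) / 16 * 16 - 4 == 28, so after
   pushing 28 bytes of arguments the stack pointer sits at delta 32,
   a multiple of the boundary.  */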
2006 args_size->constant = MAX (args_size->constant,
2007 reg_parm_stack_space);
2009 if (! OUTGOING_REG_PARM_STACK_SPACE ((!fndecl ? fntype : TREE_TYPE (fndecl))))
2010 args_size->constant -= reg_parm_stack_space;
2012 return unadjusted_args_size;
2015 /* Precompute parameters as needed for a function call.
2017 FLAGS is mask of ECF_* constants.
2019 NUM_ACTUALS is the number of arguments.
2021 ARGS is an array containing information for each argument; this
2022 routine fills in the INITIAL_VALUE and VALUE fields for each
2023 precomputed argument. */
2025 static void
2026 precompute_arguments (int num_actuals, struct arg_data *args)
2028 int i;
2030 /* If this is a libcall, then precompute all arguments so that we do not
2031 get extraneous instructions emitted as part of the libcall sequence. */
2033 /* If we preallocated the stack space, and some arguments must be passed
2034 on the stack, then we must precompute any parameter which contains a
2035 function call which will store arguments on the stack.
2036 Otherwise, evaluating the parameter may clobber previous parameters
2037 which have already been stored into the stack. (We have code to avoid
2038 such a case by saving the outgoing stack arguments, but it results in
2039 worse code.) */
2040 if (!ACCUMULATE_OUTGOING_ARGS)
2041 return;
2043 for (i = 0; i < num_actuals; i++)
2045 tree type;
2046 machine_mode mode;
2048 if (TREE_CODE (args[i].tree_value) != CALL_EXPR)
2049 continue;
2051 /* If this is an addressable type, we cannot pre-evaluate it. */
2052 type = TREE_TYPE (args[i].tree_value);
2053 gcc_assert (!TREE_ADDRESSABLE (type));
2055 args[i].initial_value = args[i].value
2056 = expand_normal (args[i].tree_value);
2058 mode = TYPE_MODE (type);
2059 if (mode != args[i].mode)
2061 int unsignedp = args[i].unsignedp;
2062 args[i].value
2063 = convert_modes (args[i].mode, mode,
2064 args[i].value, args[i].unsignedp);
2066 /* CSE will replace this only if it contains the args[i].value
2067 pseudo, so convert it down to the declared mode using
2068 a SUBREG. */
2069 if (REG_P (args[i].value)
2070 && GET_MODE_CLASS (args[i].mode) == MODE_INT
2071 && promote_mode (type, mode, &unsignedp) != args[i].mode)
2073 args[i].initial_value
2074 = gen_lowpart_SUBREG (mode, args[i].value);
2075 SUBREG_PROMOTED_VAR_P (args[i].initial_value) = 1;
2076 SUBREG_PROMOTED_SET (args[i].initial_value, args[i].unsignedp);
2082 /* Given the current state of MUST_PREALLOCATE and information about
2083 arguments to a function call in NUM_ACTUALS, ARGS and ARGS_SIZE,
2084 compute and return the final value for MUST_PREALLOCATE. */
2086 static int
2087 finalize_must_preallocate (int must_preallocate, int num_actuals,
2088 struct arg_data *args, struct args_size *args_size)
2090 /* See if we have or want to preallocate stack space.
2092 If we would have to push a partially-in-regs parm
2093 before other stack parms, preallocate stack space instead.
2095 If the size of some parm is not a multiple of the required stack
2096 alignment, we must preallocate.
2098 If the total size of arguments that would otherwise create a copy in
2099 a temporary (such as a CALL) is more than half the total argument list
2100 size, preallocation is faster.
2102 Another reason to preallocate is if we have a machine (like the m88k)
2103 where stack alignment is required to be maintained between every
2104 pair of insns, not just when the call is made. However, we assume here
2105 that such machines either do not have push insns (and hence preallocation
2106 would occur anyway) or the problem is taken care of with
2107 PUSH_ROUNDING. */
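/* Illustrative instance of the size heuristic below: if a single 16-byte
   BLKmode argument is the result of a CALL_EXPR and the whole argument
   list occupies 24 bytes, then copy_to_evaluate_size * 2 == 32 >= 24,
   so we preallocate.  */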
2109 if (! must_preallocate)
2111 int partial_seen = 0;
2112 int copy_to_evaluate_size = 0;
2113 int i;
2115 for (i = 0; i < num_actuals && ! must_preallocate; i++)
2117 if (args[i].partial > 0 && ! args[i].pass_on_stack)
2118 partial_seen = 1;
2119 else if (partial_seen && args[i].reg == 0)
2120 must_preallocate = 1;
2121 /* We preallocate in case there are bounds passed
2122 in the bounds table, so that we have a precomputed
2123 address for bounds association. */
2124 else if (POINTER_BOUNDS_P (args[i].tree_value)
2125 && !args[i].reg)
2126 must_preallocate = 1;
2128 if (TYPE_MODE (TREE_TYPE (args[i].tree_value)) == BLKmode
2129 && (TREE_CODE (args[i].tree_value) == CALL_EXPR
2130 || TREE_CODE (args[i].tree_value) == TARGET_EXPR
2131 || TREE_CODE (args[i].tree_value) == COND_EXPR
2132 || TREE_ADDRESSABLE (TREE_TYPE (args[i].tree_value))))
2133 copy_to_evaluate_size
2134 += int_size_in_bytes (TREE_TYPE (args[i].tree_value));
2137 if (copy_to_evaluate_size * 2 >= args_size->constant
2138 && args_size->constant > 0)
2139 must_preallocate = 1;
2141 return must_preallocate;
2144 /* If we preallocated stack space, compute the address of each argument
2145 and store it into the ARGS array.
2147 We need not ensure it is a valid memory address here; it will be
2148 validized when it is used.
2150 ARGBLOCK is an rtx for the address of the outgoing arguments. */
2152 static void
2153 compute_argument_addresses (struct arg_data *args, rtx argblock, int num_actuals)
2155 if (argblock)
2157 rtx arg_reg = argblock;
2158 int i, arg_offset = 0;
2160 if (GET_CODE (argblock) == PLUS)
2161 arg_reg = XEXP (argblock, 0), arg_offset = INTVAL (XEXP (argblock, 1));
2163 for (i = 0; i < num_actuals; i++)
2165 rtx offset = ARGS_SIZE_RTX (args[i].locate.offset);
2166 rtx slot_offset = ARGS_SIZE_RTX (args[i].locate.slot_offset);
2167 rtx addr;
2168 unsigned int align, boundary;
2169 unsigned int units_on_stack = 0;
2170 machine_mode partial_mode = VOIDmode;
2172 /* Skip this parm if it will not be passed on the stack. */
2173 if (! args[i].pass_on_stack
2174 && args[i].reg != 0
2175 && args[i].partial == 0)
2176 continue;
2178 /* Pointer Bounds are never passed on the stack. */
2179 if (POINTER_BOUNDS_P (args[i].tree_value))
2180 continue;
2182 if (CONST_INT_P (offset))
2183 addr = plus_constant (Pmode, arg_reg, INTVAL (offset));
2184 else
2185 addr = gen_rtx_PLUS (Pmode, arg_reg, offset);
2187 addr = plus_constant (Pmode, addr, arg_offset);
2189 if (args[i].partial != 0)
2191 /* Only part of the parameter is being passed on the stack.
2192 Generate a simple memory reference of the correct size. */
2193 units_on_stack = args[i].locate.size.constant;
2194 partial_mode = mode_for_size (units_on_stack * BITS_PER_UNIT,
2195 MODE_INT, 1);
2196 args[i].stack = gen_rtx_MEM (partial_mode, addr);
2197 set_mem_size (args[i].stack, units_on_stack);
2199 else
2201 args[i].stack = gen_rtx_MEM (args[i].mode, addr);
2202 set_mem_attributes (args[i].stack,
2203 TREE_TYPE (args[i].tree_value), 1);
2205 align = BITS_PER_UNIT;
2206 boundary = args[i].locate.boundary;
2207 if (args[i].locate.where_pad != downward)
2208 align = boundary;
2209 else if (CONST_INT_P (offset))
2211 align = INTVAL (offset) * BITS_PER_UNIT | boundary;
2212 align = least_bit_hwi (align);
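/* Illustrative numbers: offset == 4 and boundary == 64 give
   align == (32 | 64) == 96, whose least significant set bit is 32,
   i.e. the slot is only known to be 32-bit aligned.  */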
2214 set_mem_align (args[i].stack, align);
2216 if (CONST_INT_P (slot_offset))
2217 addr = plus_constant (Pmode, arg_reg, INTVAL (slot_offset));
2218 else
2219 addr = gen_rtx_PLUS (Pmode, arg_reg, slot_offset);
2221 addr = plus_constant (Pmode, addr, arg_offset);
2223 if (args[i].partial != 0)
2225 /* Only part of the parameter is being passed on the stack.
2226 Generate a simple memory reference of the correct size. */
2228 args[i].stack_slot = gen_rtx_MEM (partial_mode, addr);
2229 set_mem_size (args[i].stack_slot, units_on_stack);
2231 else
2233 args[i].stack_slot = gen_rtx_MEM (args[i].mode, addr);
2234 set_mem_attributes (args[i].stack_slot,
2235 TREE_TYPE (args[i].tree_value), 1);
2237 set_mem_align (args[i].stack_slot, args[i].locate.boundary);
2239 /* Function incoming arguments may overlap with sibling call
2240 outgoing arguments and we cannot allow reordering of reads
2241 from function arguments with stores to outgoing arguments
2242 of sibling calls. */
2243 set_mem_alias_set (args[i].stack, 0);
2244 set_mem_alias_set (args[i].stack_slot, 0);
2249 /* Given a FNDECL and EXP, return an rtx suitable for use as a target address
2250 in a call instruction.
2252 FNDECL is the tree node for the target function. For an indirect call
2253 FNDECL will be NULL_TREE.
2255 ADDR is the operand 0 of CALL_EXPR for this call. */
2257 static rtx
2258 rtx_for_function_call (tree fndecl, tree addr)
2260 rtx funexp;
2262 /* Get the function to call, in the form of RTL. */
2263 if (fndecl)
2265 if (!TREE_USED (fndecl) && fndecl != current_function_decl)
2266 TREE_USED (fndecl) = 1;
2268 /* Get a SYMBOL_REF rtx for the function address. */
2269 funexp = XEXP (DECL_RTL (fndecl), 0);
2271 else
2272 /* Generate an rtx (probably a pseudo-register) for the address. */
2274 push_temp_slots ();
2275 funexp = expand_normal (addr);
2276 pop_temp_slots (); /* FUNEXP can't be BLKmode. */
2278 return funexp;
2281 /* Internal state for internal_arg_pointer_based_exp and its helpers. */
2282 static struct
2284 /* Last insn that has been scanned by internal_arg_pointer_based_exp_scan,
2285 or NULL_RTX if none has been scanned yet. */
2286 rtx_insn *scan_start;
2287 /* Vector indexed by REGNO - FIRST_PSEUDO_REGISTER, recording if a pseudo is
2288 based on crtl->args.internal_arg_pointer. The element is NULL_RTX if the
2289 pseudo isn't based on it, a CONST_INT offset if the pseudo is based on it
2290 with a fixed offset, or PC if the offset is variable or unknown. */
2291 vec<rtx> cache;
2292 } internal_arg_pointer_exp_state;
2294 static rtx internal_arg_pointer_based_exp (const_rtx, bool);
2296 /* Helper function for internal_arg_pointer_based_exp. Scan insns in
2297 the tail call sequence, starting with the first insn that hasn't been
2298 scanned yet, and note for each pseudo on the LHS whether it is based
2299 on crtl->args.internal_arg_pointer or not, and what offset from
2300 that pointer it has. */
2302 static void
2303 internal_arg_pointer_based_exp_scan (void)
2305 rtx_insn *insn, *scan_start = internal_arg_pointer_exp_state.scan_start;
2307 if (scan_start == NULL_RTX)
2308 insn = get_insns ();
2309 else
2310 insn = NEXT_INSN (scan_start);
2312 while (insn)
2314 rtx set = single_set (insn);
2315 if (set && REG_P (SET_DEST (set)) && !HARD_REGISTER_P (SET_DEST (set)))
2317 rtx val = NULL_RTX;
2318 unsigned int idx = REGNO (SET_DEST (set)) - FIRST_PSEUDO_REGISTER;
2319 /* Punt on pseudos set multiple times. */
2320 if (idx < internal_arg_pointer_exp_state.cache.length ()
2321 && (internal_arg_pointer_exp_state.cache[idx]
2322 != NULL_RTX))
2323 val = pc_rtx;
2324 else
2325 val = internal_arg_pointer_based_exp (SET_SRC (set), false);
2326 if (val != NULL_RTX)
2328 if (idx >= internal_arg_pointer_exp_state.cache.length ())
2329 internal_arg_pointer_exp_state.cache
2330 .safe_grow_cleared (idx + 1);
2331 internal_arg_pointer_exp_state.cache[idx] = val;
2334 if (NEXT_INSN (insn) == NULL_RTX)
2335 scan_start = insn;
2336 insn = NEXT_INSN (insn);
2339 internal_arg_pointer_exp_state.scan_start = scan_start;
2342 /* Compute whether RTL is based on crtl->args.internal_arg_pointer. Return
2343 NULL_RTX if RTL isn't based on it, a CONST_INT offset if RTL is based on
2344 it with a fixed offset, or PC if the offset is variable or unknown.
2345 TOPLEVEL is true if the function is invoked at the topmost level. */
2347 static rtx
2348 internal_arg_pointer_based_exp (const_rtx rtl, bool toplevel)
2350 if (CONSTANT_P (rtl))
2351 return NULL_RTX;
2353 if (rtl == crtl->args.internal_arg_pointer)
2354 return const0_rtx;
2356 if (REG_P (rtl) && HARD_REGISTER_P (rtl))
2357 return NULL_RTX;
2359 if (GET_CODE (rtl) == PLUS && CONST_INT_P (XEXP (rtl, 1)))
2361 rtx val = internal_arg_pointer_based_exp (XEXP (rtl, 0), toplevel);
2362 if (val == NULL_RTX || val == pc_rtx)
2363 return val;
2364 return plus_constant (Pmode, val, INTVAL (XEXP (rtl, 1)));
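/* E.g. (illustrative): if XEXP (rtl, 0) is based on the internal arg
   pointer at offset 16 and XEXP (rtl, 1) is (const_int 8), the result
   is (const_int 24).  */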
2367 /* When called at the topmost level, scan pseudo assignments in between the
2368 last scanned instruction in the tail call sequence and the latest insn
2369 in that sequence. */
2370 if (toplevel)
2371 internal_arg_pointer_based_exp_scan ();
2373 if (REG_P (rtl))
2375 unsigned int idx = REGNO (rtl) - FIRST_PSEUDO_REGISTER;
2376 if (idx < internal_arg_pointer_exp_state.cache.length ())
2377 return internal_arg_pointer_exp_state.cache[idx];
2379 return NULL_RTX;
2382 subrtx_iterator::array_type array;
2383 FOR_EACH_SUBRTX (iter, array, rtl, NONCONST)
2385 const_rtx x = *iter;
2386 if (REG_P (x) && internal_arg_pointer_based_exp (x, false) != NULL_RTX)
2387 return pc_rtx;
2388 if (MEM_P (x))
2389 iter.skip_subrtxes ();
2392 return NULL_RTX;
2395 /* Return true if and only if SIZE storage units (usually bytes)
2396 starting from address ADDR overlap with the already-clobbered
2397 argument area. This function is used to determine if we should
2398 give up a sibcall. */
2400 static bool
2401 mem_overlaps_already_clobbered_arg_p (rtx addr, unsigned HOST_WIDE_INT size)
2403 HOST_WIDE_INT i;
2404 rtx val;
2406 if (bitmap_empty_p (stored_args_map))
2407 return false;
2408 val = internal_arg_pointer_based_exp (addr, true);
2409 if (val == NULL_RTX)
2410 return false;
2411 else if (val == pc_rtx)
2412 return true;
2413 else
2414 i = INTVAL (val);
2416 if (STACK_GROWS_DOWNWARD)
2417 i -= crtl->args.pretend_args_size;
2418 else
2419 i += crtl->args.pretend_args_size;
2422 if (ARGS_GROW_DOWNWARD)
2423 i = -i - size;
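/* Illustrative example: with ARGS_GROW_DOWNWARD, a slot at offset -16
   of size 8 is normalized to i == 8, so the loop below tests bits
   8..15 of stored_args_map.  */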
2425 if (size > 0)
2427 unsigned HOST_WIDE_INT k;
2429 for (k = 0; k < size; k++)
2430 if (i + k < SBITMAP_SIZE (stored_args_map)
2431 && bitmap_bit_p (stored_args_map, i + k))
2432 return true;
2435 return false;
2438 /* Do the register loads required for any wholly-register parms or any
2439 parms which are passed both on the stack and in a register. Their
2440 expressions were already evaluated.
2442 Mark all register-parms as living through the call, putting these USE
2443 insns in the CALL_INSN_FUNCTION_USAGE field.
2445 When IS_SIBCALL, perform the check_sibcall_argument_overlap
2446 checking, setting *SIBCALL_FAILURE if appropriate. */
2448 static void
2449 load_register_parameters (struct arg_data *args, int num_actuals,
2450 rtx *call_fusage, int flags, int is_sibcall,
2451 int *sibcall_failure)
2453 int i, j;
2455 for (i = 0; i < num_actuals; i++)
2457 rtx reg = ((flags & ECF_SIBCALL)
2458 ? args[i].tail_call_reg : args[i].reg);
2459 if (reg)
2461 int partial = args[i].partial;
2462 int nregs;
2463 int size = 0;
2464 rtx_insn *before_arg = get_last_insn ();
2465 /* Set non-negative if we must move a word at a time, even if
2466 just one word (e.g., partial == 4 && mode == DFmode). Set
2467 to -1 if we just use a normal move insn. This value can be
2468 zero if the argument is a zero-size structure. */
2469 nregs = -1;
2470 if (GET_CODE (reg) == PARALLEL)
2472 else if (partial)
2474 gcc_assert (partial % UNITS_PER_WORD == 0);
2475 nregs = partial / UNITS_PER_WORD;
2477 else if (TYPE_MODE (TREE_TYPE (args[i].tree_value)) == BLKmode)
2479 size = int_size_in_bytes (TREE_TYPE (args[i].tree_value));
2480 nregs = (size + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
2482 else
2483 size = GET_MODE_SIZE (args[i].mode);
2485 /* Handle calls that pass values in multiple non-contiguous
2486 locations. The Irix 6 ABI has examples of this. */
2488 if (GET_CODE (reg) == PARALLEL)
2489 emit_group_move (reg, args[i].parallel_value);
2491 /* If simple case, just do move. If normal partial, store_one_arg
2492 has already loaded the register for us. In all other cases,
2493 load the register(s) from memory. */
2495 else if (nregs == -1)
2497 emit_move_insn (reg, args[i].value);
2498 #ifdef BLOCK_REG_PADDING
2499 /* Handle the case where we have a value that needs shifting
2500 up to the msb, e.g. a QImode value being padded
2501 upward on a BYTES_BIG_ENDIAN machine. */
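/* E.g. (illustrative): size == 1 with 8-byte words gives a left shift
   of (8 - 1) * 8 == 56 bits, moving the byte to the msb.  */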
2502 if (size < UNITS_PER_WORD
2503 && (args[i].locate.where_pad
2504 == (BYTES_BIG_ENDIAN ? upward : downward)))
2506 rtx x;
2507 int shift = (UNITS_PER_WORD - size) * BITS_PER_UNIT;
2509 /* Assigning REG here rather than a temp makes CALL_FUSAGE
2510 report the whole reg as used. Strictly speaking, the
2511 call only uses SIZE bytes at the msb end, but it doesn't
2512 seem worth generating rtl to say that. */
2513 reg = gen_rtx_REG (word_mode, REGNO (reg));
2514 x = expand_shift (LSHIFT_EXPR, word_mode, reg, shift, reg, 1);
2515 if (x != reg)
2516 emit_move_insn (reg, x);
2518 #endif
2521 /* If we have pre-computed the values to put in the registers in
2522 the case of non-aligned structures, copy them in now. */
2524 else if (args[i].n_aligned_regs != 0)
2525 for (j = 0; j < args[i].n_aligned_regs; j++)
2526 emit_move_insn (gen_rtx_REG (word_mode, REGNO (reg) + j),
2527 args[i].aligned_regs[j]);
2529 else if (partial == 0 || args[i].pass_on_stack)
2531 rtx mem = validize_mem (copy_rtx (args[i].value));
2533 /* Check for overlap with the already-clobbered argument area,
2534 provided that this argument has a non-zero size. */
2535 if (is_sibcall
2536 && size != 0
2537 && (mem_overlaps_already_clobbered_arg_p
2538 (XEXP (args[i].value, 0), size)))
2539 *sibcall_failure = 1;
2541 if (size % UNITS_PER_WORD == 0
2542 || MEM_ALIGN (mem) % BITS_PER_WORD == 0)
2543 move_block_to_reg (REGNO (reg), mem, nregs, args[i].mode);
2544 else
2546 if (nregs > 1)
2547 move_block_to_reg (REGNO (reg), mem, nregs - 1,
2548 args[i].mode);
2549 rtx dest = gen_rtx_REG (word_mode, REGNO (reg) + nregs - 1);
2550 unsigned int bitoff = (nregs - 1) * BITS_PER_WORD;
2551 unsigned int bitsize = size * BITS_PER_UNIT - bitoff;
2552 rtx x = extract_bit_field (mem, bitsize, bitoff, 1, dest,
2553 word_mode, word_mode, false);
2554 if (BYTES_BIG_ENDIAN)
2555 x = expand_shift (LSHIFT_EXPR, word_mode, x,
2556 BITS_PER_WORD - bitsize, dest, 1);
2557 if (x != dest)
2558 emit_move_insn (dest, x);
2561 /* Handle a BLKmode that needs shifting. */
2562 if (nregs == 1 && size < UNITS_PER_WORD
2563 #ifdef BLOCK_REG_PADDING
2564 && args[i].locate.where_pad == downward
2565 #else
2566 && BYTES_BIG_ENDIAN
2567 #endif
2570 rtx dest = gen_rtx_REG (word_mode, REGNO (reg));
2571 int shift = (UNITS_PER_WORD - size) * BITS_PER_UNIT;
2572 enum tree_code dir = (BYTES_BIG_ENDIAN
2573 ? RSHIFT_EXPR : LSHIFT_EXPR);
2574 rtx x;
2576 x = expand_shift (dir, word_mode, dest, shift, dest, 1);
2577 if (x != dest)
2578 emit_move_insn (dest, x);
2582 /* When a parameter is a block, and perhaps in other cases, it is
2583 possible that it did a load from an argument slot that was
2584 already clobbered. */
2585 if (is_sibcall
2586 && check_sibcall_argument_overlap (before_arg, &args[i], 0))
2587 *sibcall_failure = 1;
2589 /* Handle calls that pass values in multiple non-contiguous
2590 locations. The Irix 6 ABI has examples of this. */
2591 if (GET_CODE (reg) == PARALLEL)
2592 use_group_regs (call_fusage, reg);
2593 else if (nregs == -1)
2594 use_reg_mode (call_fusage, reg,
2595 TYPE_MODE (TREE_TYPE (args[i].tree_value)));
2596 else if (nregs > 0)
2597 use_regs (call_fusage, REGNO (reg), nregs);
2602 /* We need to pop PENDING_STACK_ADJUST bytes. But, if the arguments
2603 wouldn't fill up an even multiple of PREFERRED_UNIT_STACK_BOUNDARY
2604 bytes, then we would need to push some additional bytes to pad the
2605 arguments. So, we compute an adjustment to the stack pointer for an
2606 amount that will leave the stack under-aligned by UNADJUSTED_ARGS_SIZE
2607 bytes. Then, when the arguments are pushed, the stack will be perfectly
2608 aligned. ARGS_SIZE->CONSTANT is set to the number of bytes that should
2609 be popped after the call. Returns the adjustment. */
2611 static int
2612 combine_pending_stack_adjustment_and_call (int unadjusted_args_size,
2613 struct args_size *args_size,
2614 unsigned int preferred_unit_stack_boundary)
2616 /* The number of bytes to pop so that the stack will be
2617 under-aligned by UNADJUSTED_ARGS_SIZE bytes. */
2618 HOST_WIDE_INT adjustment;
2619 /* The alignment of the stack after the arguments are pushed, if we
2620 just pushed the arguments without adjusting the stack here. */
2621 unsigned HOST_WIDE_INT unadjusted_alignment;
2623 unadjusted_alignment
2624 = ((stack_pointer_delta + unadjusted_args_size)
2625 % preferred_unit_stack_boundary);
2627 /* We want to get rid of as many of the PENDING_STACK_ADJUST bytes
2628 as possible -- leaving just enough left to cancel out the
2629 UNADJUSTED_ALIGNMENT. In other words, we want to ensure that the
2630 PENDING_STACK_ADJUST is non-negative, and congruent to
2631 -UNADJUSTED_ALIGNMENT modulo the PREFERRED_UNIT_STACK_BOUNDARY. */
2633 /* Begin by trying to pop all the bytes. */
2634 unadjusted_alignment
2635 = (unadjusted_alignment
2636 - (pending_stack_adjust % preferred_unit_stack_boundary));
2637 adjustment = pending_stack_adjust;
2638 /* Push enough additional bytes that the stack will be aligned
2639 after the arguments are pushed. */
2640 if (preferred_unit_stack_boundary > 1)
2642 if (unadjusted_alignment > 0)
2643 adjustment -= preferred_unit_stack_boundary - unadjusted_alignment;
2644 else
2645 adjustment += unadjusted_alignment;
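/* Worked example (illustrative numbers): with pending_stack_adjust == 64,
   unadjusted_args_size == 4, stack_pointer_delta == 0 and a 16-byte
   boundary, unadjusted_alignment == 4 and adjustment == 64 - 12 == 52;
   popping 52 bytes leaves the stack under-aligned by exactly the 4
   bytes of arguments about to be pushed, and ARGS_SIZE->CONSTANT
   becomes 64 - 52 + 4 == 16.  */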
2648 /* Now set ARGS_SIZE->CONSTANT so that we pop the right number of
2649 bytes after the call. The right number is the entire
2650 PENDING_STACK_ADJUST less our ADJUSTMENT plus the amount required
2651 by the arguments in the first place. */
2652 args_size->constant
2653 = pending_stack_adjust - adjustment + unadjusted_args_size;
2655 return adjustment;
2658 /* Scan expression X to check whether it dereferences any argument
2659 slots we have already clobbered with tail call arguments (as noted
2660 in the stored_args_map bitmap).
2661 Return nonzero if X dereferences such an argument slot,
2662 zero otherwise. */
2664 static int
2665 check_sibcall_argument_overlap_1 (rtx x)
2667 RTX_CODE code;
2668 int i, j;
2669 const char *fmt;
2671 if (x == NULL_RTX)
2672 return 0;
2674 code = GET_CODE (x);
2676 /* We need not check the operands of the CALL expression itself. */
2677 if (code == CALL)
2678 return 0;
2680 if (code == MEM)
2681 return mem_overlaps_already_clobbered_arg_p (XEXP (x, 0),
2682 GET_MODE_SIZE (GET_MODE (x)));
2684 /* Scan all subexpressions. */
2685 fmt = GET_RTX_FORMAT (code);
2686 for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
2688 if (*fmt == 'e')
2690 if (check_sibcall_argument_overlap_1 (XEXP (x, i)))
2691 return 1;
2693 else if (*fmt == 'E')
2695 for (j = 0; j < XVECLEN (x, i); j++)
2696 if (check_sibcall_argument_overlap_1 (XVECEXP (x, i, j)))
2697 return 1;
2700 return 0;
2703 /* Scan the sequence after INSN to check whether it dereferences any
2704 argument slots we have already clobbered with tail call arguments (as
2705 noted in the stored_args_map bitmap). If MARK_STORED_ARGS_MAP, add the
2706 stack slots for ARG to the stored_args_map bitmap afterwards (when ARG
2707 is a register, MARK_STORED_ARGS_MAP should be 0). Return nonzero if the
2708 sequence after INSN dereferences such argument slots, zero otherwise. */
2710 static int
2711 check_sibcall_argument_overlap (rtx_insn *insn, struct arg_data *arg,
2712 int mark_stored_args_map)
2714 int low, high;
2716 if (insn == NULL_RTX)
2717 insn = get_insns ();
2718 else
2719 insn = NEXT_INSN (insn);
2721 for (; insn; insn = NEXT_INSN (insn))
2722 if (INSN_P (insn)
2723 && check_sibcall_argument_overlap_1 (PATTERN (insn)))
2724 break;
2726 if (mark_stored_args_map)
2728 if (ARGS_GROW_DOWNWARD)
2729 low = -arg->locate.slot_offset.constant - arg->locate.size.constant;
2730 else
2731 low = arg->locate.slot_offset.constant;
2733 for (high = low + arg->locate.size.constant; low < high; low++)
2734 bitmap_set_bit (stored_args_map, low);
2736 return insn != NULL_RTX;
2739 /* Given that a function returns a value of mode MODE at the most
2740 significant end of hard register VALUE, shift VALUE left or right
2741 as specified by LEFT_P. Return true if some action was needed. */
2743 bool
2744 shift_return_value (machine_mode mode, bool left_p, rtx value)
2746 HOST_WIDE_INT shift;
2748 gcc_assert (REG_P (value) && HARD_REGISTER_P (value));
2749 shift = GET_MODE_BITSIZE (GET_MODE (value)) - GET_MODE_BITSIZE (mode);
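/* E.g. (illustrative): an SImode value held at the most significant
   end of a 64-bit register gives shift == 32.  */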
2750 if (shift == 0)
2751 return false;
2753 /* Use ashr rather than lshr for right shifts. This is for the benefit
2754 of the MIPS port, which requires SImode values to be sign-extended
2755 when stored in 64-bit registers. */
2756 if (!force_expand_binop (GET_MODE (value), left_p ? ashl_optab : ashr_optab,
2757 value, GEN_INT (shift), value, 1, OPTAB_WIDEN))
2758 gcc_unreachable ();
2759 return true;
2762 /* If X is a likely-spilled register value, copy it to a pseudo
2763 register and return that register. Return X otherwise. */
2765 static rtx
2766 avoid_likely_spilled_reg (rtx x)
2768 rtx new_rtx;
2770 if (REG_P (x)
2771 && HARD_REGISTER_P (x)
2772 && targetm.class_likely_spilled_p (REGNO_REG_CLASS (REGNO (x))))
2774 /* Make sure that we generate a REG rather than a CONCAT.
2775 Moves into CONCATs can need nontrivial instructions,
2776 and the whole point of this function is to avoid
2777 using the hard register directly in such a situation. */
2778 generating_concat_p = 0;
2779 new_rtx = gen_reg_rtx (GET_MODE (x));
2780 generating_concat_p = 1;
2781 emit_move_insn (new_rtx, x);
2782 return new_rtx;
2784 return x;
2787 /* Helper function for expand_call.
2788 Return false if EXP is not implementable as a sibling call. */
2790 static bool
2791 can_implement_as_sibling_call_p (tree exp,
2792 rtx structure_value_addr,
2793 tree funtype,
2794 int reg_parm_stack_space ATTRIBUTE_UNUSED,
2795 tree fndecl,
2796 int flags,
2797 tree addr,
2798 const args_size &args_size)
2800 if (!targetm.have_sibcall_epilogue ())
2802 maybe_complain_about_tail_call
2803 (exp,
2804 "machine description does not have"
2805 " a sibcall_epilogue instruction pattern");
2806 return false;
2809 /* Doing sibling call optimization needs some work, since
2810 structure_value_addr can be allocated on the stack.
2811 It does not seem worth the effort since few optimizable
2812 sibling calls will return a structure. */
2813 if (structure_value_addr != NULL_RTX)
2815 maybe_complain_about_tail_call (exp, "callee returns a structure");
2816 return false;
2819 #ifdef REG_PARM_STACK_SPACE
2820 /* If outgoing reg parm stack space changes, we cannot do a sibcall. */
2821 if (OUTGOING_REG_PARM_STACK_SPACE (funtype)
2822 != OUTGOING_REG_PARM_STACK_SPACE (TREE_TYPE (current_function_decl))
2823 || (reg_parm_stack_space != REG_PARM_STACK_SPACE (current_function_decl)))
2825 maybe_complain_about_tail_call (exp,
2826 "inconsistent size of stack space"
2827 " allocated for arguments which are"
2828 " passed in registers");
2829 return false;
2831 #endif
2833 /* Check whether the target is able to optimize the call
2834 into a sibcall. */
2835 if (!targetm.function_ok_for_sibcall (fndecl, exp))
2837 maybe_complain_about_tail_call (exp,
2838 "target is not able to optimize the"
2839 " call into a sibling call");
2840 return false;
2843 /* Functions that do not return exactly once may not be sibcall
2844 optimized. */
2845 if (flags & ECF_RETURNS_TWICE)
2847 maybe_complain_about_tail_call (exp, "callee returns twice");
2848 return false;
2850 if (flags & ECF_NORETURN)
2852 maybe_complain_about_tail_call (exp, "callee does not return");
2853 return false;
2856 if (TYPE_VOLATILE (TREE_TYPE (TREE_TYPE (addr))))
2858 maybe_complain_about_tail_call (exp, "volatile function type");
2859 return false;
2862 /* If the called function is nested in the current one, it might access
2863 some of the caller's arguments, but could clobber them beforehand if
2864 the argument areas are shared. */
2865 if (fndecl && decl_function_context (fndecl) == current_function_decl)
2867 maybe_complain_about_tail_call (exp, "nested function");
2868 return false;
2871 /* If this function requires more stack slots than the current
2872 function, we cannot change it into a sibling call.
2873 crtl->args.pretend_args_size is not part of the
2874 stack allocated by our caller. */
2875 if (args_size.constant > (crtl->args.size - crtl->args.pretend_args_size))
2877 maybe_complain_about_tail_call (exp,
2878 "callee required more stack slots"
2879 " than the caller");
2880 return false;
2883 /* If the callee pops its own arguments, then it must pop exactly
2884 the same number of arguments as the current function. */
2885 if (targetm.calls.return_pops_args (fndecl, funtype, args_size.constant)
2886 != targetm.calls.return_pops_args (current_function_decl,
2887 TREE_TYPE (current_function_decl),
2888 crtl->args.size))
2890 maybe_complain_about_tail_call (exp,
2891 "inconsistent number of"
2892 " popped arguments");
2893 return false;
2896 if (!lang_hooks.decls.ok_for_sibcall (fndecl))
2898 maybe_complain_about_tail_call (exp, "frontend does not support"
2899 " sibling call");
2900 return false;
2903 /* All checks passed. */
2904 return true;
2907 /* Generate all the code for a CALL_EXPR exp
2908 and return an rtx for its value.
2909 Store the value in TARGET (specified as an rtx) if convenient.
2910 If the value is stored in TARGET then TARGET is returned.
2911 If IGNORE is nonzero, then we ignore the value of the function call. */
2913 rtx
2914 expand_call (tree exp, rtx target, int ignore)
2916 /* Nonzero if we are currently expanding a call. */
2917 static int currently_expanding_call = 0;
2919 /* RTX for the function to be called. */
2920 rtx funexp;
2921 /* Sequence of insns to perform a normal "call". */
2922 rtx_insn *normal_call_insns = NULL;
2923 /* Sequence of insns to perform a tail "call". */
2924 rtx_insn *tail_call_insns = NULL;
2925 /* Data type of the function. */
2926 tree funtype;
2927 tree type_arg_types;
2928 tree rettype;
2929 /* Declaration of the function being called,
2930 or 0 if the function is computed (not known by name). */
2931 tree fndecl = 0;
2932 /* The type of the function being called. */
2933 tree fntype;
2934 bool try_tail_call = CALL_EXPR_TAILCALL (exp);
2935 bool must_tail_call = CALL_EXPR_MUST_TAIL_CALL (exp);
2936 int pass;
2938 /* Register in which non-BLKmode value will be returned,
2939 or 0 if no value or if value is BLKmode. */
2940 rtx valreg;
2941 /* Register(s) in which bounds are returned. */
2942 rtx valbnd = NULL;
2943 /* Address where we should return a BLKmode value;
2944 0 if value not BLKmode. */
2945 rtx structure_value_addr = 0;
2946 /* Nonzero if that address is being passed by treating it as
2947 an extra, implicit first parameter. Otherwise,
2948 it is passed by being copied directly into struct_value_rtx. */
2949 int structure_value_addr_parm = 0;
2950 /* Holds the value of implicit argument for the struct value. */
2951 tree structure_value_addr_value = NULL_TREE;
2952 /* Size of aggregate value wanted, or zero if none wanted
2953 or if we are using the non-reentrant PCC calling convention
2954 or expecting the value in registers. */
2955 HOST_WIDE_INT struct_value_size = 0;
2956 /* Nonzero if called function returns an aggregate in memory PCC style,
2957 by returning the address of where to find it. */
2958 int pcc_struct_value = 0;
2959 rtx struct_value = 0;
2961 /* Number of actual parameters in this call, including struct value addr. */
2962 int num_actuals;
2963 /* Number of named args. Args after this are anonymous ones
2964 and they must all go on the stack. */
2965 int n_named_args;
2966 /* Number of complex actual arguments that need to be split. */
2967 int num_complex_actuals = 0;
2969 /* Vector of information about each argument.
2970 Arguments are numbered in the order they will be pushed,
2971 not the order they are written. */
2972 struct arg_data *args;
2974 /* Total size in bytes of all the stack-parms scanned so far. */
2975 struct args_size args_size;
2976 struct args_size adjusted_args_size;
2977 /* Size of arguments before any adjustments (such as rounding). */
2978 int unadjusted_args_size;
2979 /* Data on reg parms scanned so far. */
2980 CUMULATIVE_ARGS args_so_far_v;
2981 cumulative_args_t args_so_far;
2982 /* Nonzero if a reg parm has been scanned. */
2983 int reg_parm_seen;
2984 /* Nonzero if this is an indirect function call. */
2986 /* Nonzero if we must avoid push-insns in the args for this call.
2987 If stack space is allocated for register parameters, but not by the
2988 caller, then it is preallocated in the fixed part of the stack frame.
2989 So the entire argument block must then be preallocated (i.e., we
2990 ignore PUSH_ROUNDING in that case). */
2992 int must_preallocate = !PUSH_ARGS;
2994 /* Size of the stack reserved for parameter registers. */
2995 int reg_parm_stack_space = 0;
2997 /* Address of space preallocated for stack parms
2998 (on machines that lack push insns), or 0 if space not preallocated. */
2999 rtx argblock = 0;
3001 /* Mask of ECF_ and ERF_ flags. */
3002 int flags = 0;
3003 int return_flags = 0;
3004 #ifdef REG_PARM_STACK_SPACE
3005 /* Define the boundary of the register parm stack space that needs to be
3006 saved, if any. */
3007 int low_to_save, high_to_save;
3008 rtx save_area = 0; /* Place that it is saved */
3009 #endif
3011 int initial_highest_arg_in_use = highest_outgoing_arg_in_use;
3012 char *initial_stack_usage_map = stack_usage_map;
3013 char *stack_usage_map_buf = NULL;
3015 int old_stack_allocated;
3017 /* State variables to track stack modifications. */
3018 rtx old_stack_level = 0;
3019 int old_stack_arg_under_construction = 0;
3020 int old_pending_adj = 0;
3021 int old_inhibit_defer_pop = inhibit_defer_pop;
3023 /* Some stack pointer alterations we make are performed via
3024 allocate_dynamic_stack_space. This modifies the stack_pointer_delta,
3025 which we then also need to save/restore along the way. */
3026 int old_stack_pointer_delta = 0;
3028 rtx call_fusage;
3029 tree addr = CALL_EXPR_FN (exp);
3030 int i;
3031 /* The alignment of the stack, in bits. */
3032 unsigned HOST_WIDE_INT preferred_stack_boundary;
3033 /* The alignment of the stack, in bytes. */
3034 unsigned HOST_WIDE_INT preferred_unit_stack_boundary;
3035 /* The static chain value to use for this call. */
3036 rtx static_chain_value;
3037 /* See if this is a "nothrow" function call. */
3038 if (TREE_NOTHROW (exp))
3039 flags |= ECF_NOTHROW;
3041 /* See if we can find a DECL-node for the actual function, and get the
3042 function attributes (flags) from the function decl or type node. */
3043 fndecl = get_callee_fndecl (exp);
3044 if (fndecl)
3046 fntype = TREE_TYPE (fndecl);
3047 flags |= flags_from_decl_or_type (fndecl);
3048 return_flags |= decl_return_flags (fndecl);
3050 else
3052 fntype = TREE_TYPE (TREE_TYPE (addr));
3053 flags |= flags_from_decl_or_type (fntype);
3054 if (CALL_EXPR_BY_DESCRIPTOR (exp))
3055 flags |= ECF_BY_DESCRIPTOR;
3057 rettype = TREE_TYPE (exp);
3059 struct_value = targetm.calls.struct_value_rtx (fntype, 0);
3061 /* Warn if this value is an aggregate type,
3062 regardless of which calling convention we are using for it. */
3063 if (AGGREGATE_TYPE_P (rettype))
3064 warning (OPT_Waggregate_return, "function call has aggregate value");
3066 /* If the result of a non looping pure or const function call is
3067 ignored (or void), and none of its arguments are volatile, we can
3068 avoid expanding the call and just evaluate the arguments for
3069 side-effects. */
3070 if ((flags & (ECF_CONST | ECF_PURE))
3071 && (!(flags & ECF_LOOPING_CONST_OR_PURE))
3072 && (ignore || target == const0_rtx
3073 || TYPE_MODE (rettype) == VOIDmode))
3075 bool volatilep = false;
3076 tree arg;
3077 call_expr_arg_iterator iter;
3079 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
3080 if (TREE_THIS_VOLATILE (arg))
3082 volatilep = true;
3083 break;
3086 if (! volatilep)
3088 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
3089 expand_expr (arg, const0_rtx, VOIDmode, EXPAND_NORMAL);
3090 return const0_rtx;
3094 #ifdef REG_PARM_STACK_SPACE
3095 reg_parm_stack_space = REG_PARM_STACK_SPACE (!fndecl ? fntype : fndecl);
3096 #endif
3098 if (! OUTGOING_REG_PARM_STACK_SPACE ((!fndecl ? fntype : TREE_TYPE (fndecl)))
3099 && reg_parm_stack_space > 0 && PUSH_ARGS)
3100 must_preallocate = 1;
3102 /* Set up a place to return a structure. */
3104 /* Cater to broken compilers. */
3105 if (aggregate_value_p (exp, fntype))
3107 /* This call returns a big structure. */
3108 flags &= ~(ECF_CONST | ECF_PURE | ECF_LOOPING_CONST_OR_PURE);
3110 #ifdef PCC_STATIC_STRUCT_RETURN
3112 pcc_struct_value = 1;
3114 #else /* not PCC_STATIC_STRUCT_RETURN */
3116 struct_value_size = int_size_in_bytes (rettype);
3118 /* Even if it is semantically safe to use the target as the return
3119 slot, it may not be sufficiently aligned for the return type. */
3120 if (CALL_EXPR_RETURN_SLOT_OPT (exp)
3121 && target
3122 && MEM_P (target)
3123 && !(MEM_ALIGN (target) < TYPE_ALIGN (rettype)
3124 && SLOW_UNALIGNED_ACCESS (TYPE_MODE (rettype),
3125 MEM_ALIGN (target))))
3126 structure_value_addr = XEXP (target, 0);
3127 else
3129 /* For variable-sized objects, we must be called with a target
3130 specified. If we were to allocate space on the stack here,
3131 we would have no way of knowing when to free it. */
3132 rtx d = assign_temp (rettype, 1, 1);
3133 structure_value_addr = XEXP (d, 0);
3134 target = 0;
3137 #endif /* not PCC_STATIC_STRUCT_RETURN */
3140 /* Figure out the amount to which the stack should be aligned. */
3141 preferred_stack_boundary = PREFERRED_STACK_BOUNDARY;
3142 if (fndecl)
3144 struct cgraph_rtl_info *i = cgraph_node::rtl_info (fndecl);
3145 /* Without automatic stack alignment, we can't increase preferred
3146 stack boundary. With automatic stack alignment, it is
3147 unnecessary: unless we can guarantee that all callers will
3148 align the outgoing stack properly, the callee has to align its
3149 stack anyway. */
3150 if (i
3151 && i->preferred_incoming_stack_boundary
3152 && i->preferred_incoming_stack_boundary < preferred_stack_boundary)
3153 preferred_stack_boundary = i->preferred_incoming_stack_boundary;
3156 /* Operand 0 is a pointer-to-function; get the type of the function. */
3157 funtype = TREE_TYPE (addr);
3158 gcc_assert (POINTER_TYPE_P (funtype));
3159 funtype = TREE_TYPE (funtype);
3161 /* Count whether there are actual complex arguments that need to be split
3162 into their real and imaginary parts. Munge the type_arg_types
3163 appropriately here as well. */
3164 if (targetm.calls.split_complex_arg)
3166 call_expr_arg_iterator iter;
3167 tree arg;
3168 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
3170 tree type = TREE_TYPE (arg);
3171 if (type && TREE_CODE (type) == COMPLEX_TYPE
3172 && targetm.calls.split_complex_arg (type))
3173 num_complex_actuals++;
3175 type_arg_types = split_complex_types (TYPE_ARG_TYPES (funtype));
3177 else
3178 type_arg_types = TYPE_ARG_TYPES (funtype);
3180 if (flags & ECF_MAY_BE_ALLOCA)
3181 cfun->calls_alloca = 1;
3183 /* If struct_value_rtx is 0, it means pass the address
3184 as if it were an extra parameter. Put the argument expression
3185 in structure_value_addr_value. */
3186 if (structure_value_addr && struct_value == 0)
3188 /* If structure_value_addr is a REG other than
3189 virtual_outgoing_args_rtx, we can always use it. If it
3190 is not a REG, we must always copy it into a register.
3191 If it is virtual_outgoing_args_rtx, we must copy it to another
3192 register in some cases. */
3193 rtx temp = (!REG_P (structure_value_addr)
3194 || (ACCUMULATE_OUTGOING_ARGS
3195 && stack_arg_under_construction
3196 && structure_value_addr == virtual_outgoing_args_rtx)
3197 ? copy_addr_to_reg (convert_memory_address
3198 (Pmode, structure_value_addr))
3199 : structure_value_addr);
3201 structure_value_addr_value =
3202 make_tree (build_pointer_type (TREE_TYPE (funtype)), temp);
3203 structure_value_addr_parm = CALL_WITH_BOUNDS_P (exp) ? 2 : 1;
3206 /* Count the arguments and set NUM_ACTUALS. */
3207 num_actuals =
3208 call_expr_nargs (exp) + num_complex_actuals + structure_value_addr_parm;
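/* Illustrative count: a call f (z) with one complex argument that the
   target splits, plus a structure return address passed as a hidden
   parameter, has call_expr_nargs == 1, num_complex_actuals == 1 and
   structure_value_addr_parm == 1, so num_actuals == 3.  */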
3210 /* Compute number of named args.
3211 First, do a raw count of the args for INIT_CUMULATIVE_ARGS. */
3213 if (type_arg_types != 0)
3214 n_named_args
3215 = (list_length (type_arg_types)
3216 /* Count the struct value address, if it is passed as a parm. */
3217 + structure_value_addr_parm);
3218 else
3219 /* If we know nothing, treat all args as named. */
3220 n_named_args = num_actuals;
3222 /* Start updating where the next arg would go.
3224 On some machines (such as the PA) indirect calls have a different
3225 calling convention than normal calls. The fourth argument in
3226 INIT_CUMULATIVE_ARGS tells the backend if this is an indirect call
3227 or not. */
3228 INIT_CUMULATIVE_ARGS (args_so_far_v, funtype, NULL_RTX, fndecl, n_named_args);
3229 args_so_far = pack_cumulative_args (&args_so_far_v);
3231 /* Now possibly adjust the number of named args.
3232 Normally, don't include the last named arg if anonymous args follow.
3233 We do include the last named arg if
3234 targetm.calls.strict_argument_naming() returns nonzero.
3235 (If no anonymous args follow, the result of list_length is actually
3236 one too large. This is harmless.)
3238 If targetm.calls.pretend_outgoing_varargs_named() returns
3239 nonzero, and targetm.calls.strict_argument_naming() returns zero,
3240 this machine will be able to place unnamed args that were passed
3241 in registers into the stack. So treat all args as named. This
3242 allows the insns emitted for a specific argument list to be
3243 independent of the function declaration.
3245 If targetm.calls.pretend_outgoing_varargs_named() returns zero,
3246 we do not have any reliable way to pass unnamed args in
3247 registers, so we must force them into memory. */
3249 if (type_arg_types != 0
3250 && targetm.calls.strict_argument_naming (args_so_far))
3252 else if (type_arg_types != 0
3253 && ! targetm.calls.pretend_outgoing_varargs_named (args_so_far))
3254 /* Don't include the last named arg. */
3255 --n_named_args;
3256 else
3257 /* Treat all args as named. */
3258 n_named_args = num_actuals;
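/* Illustrative example: for int f (int, ...) called as f (1, 2.5),
   the raw count above gives n_named_args == 1; with strict argument
   naming it stays 1, without pretend_outgoing_varargs_named it drops
   to 0, and otherwise every actual is treated as named
   (n_named_args == 2 here).  */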
3260 /* Make a vector to hold all the information about each arg. */
3261 args = XALLOCAVEC (struct arg_data, num_actuals);
3262 memset (args, 0, num_actuals * sizeof (struct arg_data));
3264 /* Build up entries in the ARGS array, compute the size of the
3265 arguments into ARGS_SIZE, etc. */
3266 initialize_argument_information (num_actuals, args, &args_size,
3267 n_named_args, exp,
3268 structure_value_addr_value, fndecl, fntype,
3269 args_so_far, reg_parm_stack_space,
3270 &old_stack_level, &old_pending_adj,
3271 &must_preallocate, &flags,
3272 &try_tail_call, CALL_FROM_THUNK_P (exp));
3274 if (args_size.var)
3275 must_preallocate = 1;
3277 /* Now make final decision about preallocating stack space. */
3278 must_preallocate = finalize_must_preallocate (must_preallocate,
3279 num_actuals, args,
3280 &args_size);
3282 /* If the structure value address will reference the stack pointer, we
3283 must stabilize it. We don't need to do this if we know that we are
3284 not going to adjust the stack pointer in processing this call. */
3286 if (structure_value_addr
3287 && (reg_mentioned_p (virtual_stack_dynamic_rtx, structure_value_addr)
3288 || reg_mentioned_p (virtual_outgoing_args_rtx,
3289 structure_value_addr))
3290 && (args_size.var
3291 || (!ACCUMULATE_OUTGOING_ARGS && args_size.constant)))
3292 structure_value_addr = copy_to_reg (structure_value_addr);
3294 /* Tail calls can make things harder to debug, and we've traditionally
3295 pushed these optimizations into -O2. Don't try if we're already
3296 expanding a call, as that means we're an argument. Don't try if
3297 there are cleanups, as we know there's code to follow the call. */
3299 if (currently_expanding_call++ != 0
3300 || !flag_optimize_sibling_calls
3301 || args_size.var
3302 || dbg_cnt (tail_call) == false)
3303 try_tail_call = 0;
3305 /* If the user has marked the function as requiring tail-call
3306 optimization, attempt it. */
3307 if (must_tail_call)
3308 try_tail_call = 1;
3310 /* Check the remaining reasons for tail call optimizations to fail. */
3311 if (try_tail_call)
3312 try_tail_call = can_implement_as_sibling_call_p (exp,
3313 structure_value_addr,
3314 funtype,
3315 reg_parm_stack_space,
3316 fndecl,
3317 flags, addr, args_size);
3319 /* Check if caller and callee disagree in promotion of function
3320 return value. */
3321 if (try_tail_call)
3323 machine_mode caller_mode, caller_promoted_mode;
3324 machine_mode callee_mode, callee_promoted_mode;
3325 int caller_unsignedp, callee_unsignedp;
3326 tree caller_res = DECL_RESULT (current_function_decl);
3328 caller_unsignedp = TYPE_UNSIGNED (TREE_TYPE (caller_res));
3329 caller_mode = DECL_MODE (caller_res);
3330 callee_unsignedp = TYPE_UNSIGNED (TREE_TYPE (funtype));
3331 callee_mode = TYPE_MODE (TREE_TYPE (funtype));
3332 caller_promoted_mode
3333 = promote_function_mode (TREE_TYPE (caller_res), caller_mode,
3334 &caller_unsignedp,
3335 TREE_TYPE (current_function_decl), 1);
3336 callee_promoted_mode
3337 = promote_function_mode (TREE_TYPE (funtype), callee_mode,
3338 &callee_unsignedp,
3339 funtype, 1);
3340 if (caller_mode != VOIDmode
3341 && (caller_promoted_mode != callee_promoted_mode
3342 || ((caller_mode != caller_promoted_mode
3343 || callee_mode != callee_promoted_mode)
3344 && (caller_unsignedp != callee_unsignedp
3345 || GET_MODE_BITSIZE (caller_mode)
3346 < GET_MODE_BITSIZE (callee_mode)))))
3348 try_tail_call = 0;
3349 maybe_complain_about_tail_call (exp,
3350 "caller and callee disagree in"
3351 " promotion of function"
3352 " return value");
3356 /* Ensure current function's preferred stack boundary is at least
3357 what we need. Stack alignment may also increase preferred stack
3358 boundary. */
3359 if (crtl->preferred_stack_boundary < preferred_stack_boundary)
3360 crtl->preferred_stack_boundary = preferred_stack_boundary;
3361 else
3362 preferred_stack_boundary = crtl->preferred_stack_boundary;
3364 preferred_unit_stack_boundary = preferred_stack_boundary / BITS_PER_UNIT;
3366 /* We want to make two insn chains; one for a sibling call, the other
3367 for a normal call. We will select one of the two chains after
3368 initial RTL generation is complete. */
3369 for (pass = try_tail_call ? 0 : 1; pass < 2; pass++)
3371 int sibcall_failure = 0;
3372 /* We want to emit any pending stack adjustments before the tail
3373 recursion "call". That way we know any adjustment after the tail
3374 recursion call can be ignored if we indeed use the tail
3375 call expansion. */
3376 saved_pending_stack_adjust save;
3377 rtx_insn *insns, *before_call, *after_args;
3378 rtx next_arg_reg;
3380 if (pass == 0)
3382 /* State variables we need to save and restore between
3383 iterations. */
3384 save_pending_stack_adjust (&save);
3386 if (pass)
3387 flags &= ~ECF_SIBCALL;
3388 else
3389 flags |= ECF_SIBCALL;
3391 /* Other state variables that we must reinitialize each time
3392 through the loop (that are not initialized by the loop itself). */
3393 argblock = 0;
3394 call_fusage = 0;
3396 /* Start a new sequence for the normal call case.
3398 From this point on, if the sibling call fails, we want to set
3399 sibcall_failure instead of continuing the loop. */
3400 start_sequence ();
3402 /* Don't let pending stack adjusts add up to too much.
3403 Also, do all pending adjustments now if there is any chance
3404 this might be a call to alloca or if we are expanding a sibling
3405 call sequence.
3406 Also do the adjustments before a throwing call, otherwise
3407 exception handling can fail; PR 19225. */
3408 if (pending_stack_adjust >= 32
3409 || (pending_stack_adjust > 0
3410 && (flags & ECF_MAY_BE_ALLOCA))
3411 || (pending_stack_adjust > 0
3412 && flag_exceptions && !(flags & ECF_NOTHROW))
3413 || pass == 0)
3414 do_pending_stack_adjust ();
3416 /* Precompute any arguments as needed. */
3417 if (pass)
3418 precompute_arguments (num_actuals, args);
3420 /* Now we are about to start emitting insns that can be deleted
3421 if a libcall is deleted. */
3422 if (pass && (flags & ECF_MALLOC))
3423 start_sequence ();
3425 if (pass == 0
3426 && crtl->stack_protect_guard
3427 && targetm.stack_protect_runtime_enabled_p ())
3428 stack_protect_epilogue ();
3430 adjusted_args_size = args_size;
3431 /* Compute the actual size of the argument block required. The variable
3432 and constant sizes must be combined, the size may have to be rounded,
3433 and there may be a minimum required size. When generating a sibcall
3434 pattern, do not round up, since we'll be re-using whatever space our
3435 caller provided. */
3436 unadjusted_args_size
3437 = compute_argument_block_size (reg_parm_stack_space,
3438 &adjusted_args_size,
3439 fndecl, fntype,
3440 (pass == 0 ? 0
3441 : preferred_stack_boundary));
3443 old_stack_allocated = stack_pointer_delta - pending_stack_adjust;
3445 /* The argument block when performing a sibling call is the
3446 incoming argument block. */
3447 if (pass == 0)
3449 argblock = crtl->args.internal_arg_pointer;
3450 if (STACK_GROWS_DOWNWARD)
3451 argblock
3452 = plus_constant (Pmode, argblock, crtl->args.pretend_args_size);
3453 else
3454 argblock
3455 = plus_constant (Pmode, argblock, -crtl->args.pretend_args_size);
3457 stored_args_map = sbitmap_alloc (args_size.constant);
3458 bitmap_clear (stored_args_map);
3461 /* If we have no actual push instructions, or shouldn't use them,
3462 make space for all args right now. */
3463 else if (adjusted_args_size.var != 0)
3465 if (old_stack_level == 0)
3467 emit_stack_save (SAVE_BLOCK, &old_stack_level);
3468 old_stack_pointer_delta = stack_pointer_delta;
3469 old_pending_adj = pending_stack_adjust;
3470 pending_stack_adjust = 0;
3471 /* stack_arg_under_construction says whether a stack arg is
3472 being constructed at the old stack level. Pushing the stack
3473 gets a clean outgoing argument block. */
3474 old_stack_arg_under_construction = stack_arg_under_construction;
3475 stack_arg_under_construction = 0;
3477 argblock = push_block (ARGS_SIZE_RTX (adjusted_args_size), 0, 0);
3478 if (flag_stack_usage_info)
3479 current_function_has_unbounded_dynamic_stack_size = 1;
3481 else
3483 /* Note that we must go through the motions of allocating an argument
3484 block even if the size is zero because we may be storing args
3485 in the area reserved for register arguments, which may be part of
3486 the stack frame. */
3488 int needed = adjusted_args_size.constant;
3490 /* Store the maximum argument space used. It will be pushed by
3491 the prologue (if ACCUMULATE_OUTGOING_ARGS, or stack overflow
3492 checking). */
3494 if (needed > crtl->outgoing_args_size)
3495 crtl->outgoing_args_size = needed;
3497 if (must_preallocate)
3499 if (ACCUMULATE_OUTGOING_ARGS)
3501 /* Since the stack pointer will never be pushed, it is
3502 possible for the evaluation of a parm to clobber
3503 something we have already written to the stack.
3504 Since most function calls on RISC machines do not use
3505 the stack, this is uncommon, but must work correctly.
3507 Therefore, we save any area of the stack that was already
3508 written and that we are using. Here we set up to do this
3509 by making a new stack usage map from the old one. The
3510 actual save will be done by store_one_arg.
3512 Another approach might be to try to reorder the argument
3513 evaluations to avoid this conflicting stack usage. */
3515 /* Since we will be writing into the entire argument area,
3516 the map must be allocated for its entire size, not just
3517 the part that is the responsibility of the caller. */
3518 if (! OUTGOING_REG_PARM_STACK_SPACE ((!fndecl ? fntype : TREE_TYPE (fndecl))))
3519 needed += reg_parm_stack_space;
3521 if (ARGS_GROW_DOWNWARD)
3522 highest_outgoing_arg_in_use
3523 = MAX (initial_highest_arg_in_use, needed + 1);
3524 else
3525 highest_outgoing_arg_in_use
3526 = MAX (initial_highest_arg_in_use, needed);
3528 free (stack_usage_map_buf);
3529 stack_usage_map_buf = XNEWVEC (char, highest_outgoing_arg_in_use);
3530 stack_usage_map = stack_usage_map_buf;
3532 if (initial_highest_arg_in_use)
3533 memcpy (stack_usage_map, initial_stack_usage_map,
3534 initial_highest_arg_in_use);
3536 if (initial_highest_arg_in_use != highest_outgoing_arg_in_use)
3537 memset (&stack_usage_map[initial_highest_arg_in_use], 0,
3538 (highest_outgoing_arg_in_use
3539 - initial_highest_arg_in_use));
3540 needed = 0;
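/* Worked example (illustrative numbers): if 24 map bytes were already
   in use (initial_highest_arg_in_use == 24) and this call needs 40,
   bytes 0..23 are copied from the old map and bytes 24..39 are
   cleared; store_one_arg later saves any slot whose map byte is
   already nonzero before reusing it.  */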
3542 /* The address of the outgoing argument list must not be
3543 copied to a register here, because argblock would be left
3544 pointing to the wrong place after the call to
3545 allocate_dynamic_stack_space below. */
3547 argblock = virtual_outgoing_args_rtx;
3549 else
3551 if (inhibit_defer_pop == 0)
3553 /* Try to reuse some or all of the pending_stack_adjust
3554 to get this space. */
3555 needed
3556 = (combine_pending_stack_adjustment_and_call
3557 (unadjusted_args_size,
3558 &adjusted_args_size,
3559 preferred_unit_stack_boundary));
3561 /* combine_pending_stack_adjustment_and_call computes
3562 an adjustment before the arguments are allocated.
3563 Account for them and see whether or not the stack
3564 needs to go up or down. */
3565 needed = unadjusted_args_size - needed;
3567 if (needed < 0)
3569 /* We're releasing stack space. */
3570 /* ??? We can avoid any adjustment at all if we're
3571 already aligned. FIXME. */
3572 pending_stack_adjust = -needed;
3573 do_pending_stack_adjust ();
3574 needed = 0;
3576 else
3577 /* We need to allocate space. We'll do that in
3578 push_block below. */
3579 pending_stack_adjust = 0;
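/* Worked example (illustrative numbers): with unadjusted_args_size
   == 24, if combine_pending_stack_adjustment_and_call returns 32 then
   needed == 24 - 32 == -8, i.e. 8 excess bytes are released via
   pending_stack_adjust == 8; had it returned 16, needed == 8 and
   push_block below allocates those 8 bytes.  */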
3582 /* Special case this because overhead of `push_block' in
3583 this case is non-trivial. */
3584 if (needed == 0)
3585 argblock = virtual_outgoing_args_rtx;
3586 else
3588 argblock = push_block (GEN_INT (needed), 0, 0);
3589 if (ARGS_GROW_DOWNWARD)
3590 argblock = plus_constant (Pmode, argblock, needed);
3593 /* We only really need to call `copy_to_reg' in the case
3594 where push insns are going to be used to pass ARGBLOCK
3595 to a function call in ARGS. In that case, the stack
3596 pointer changes value from the allocation point to the
3597 call point, and hence the value of
3598 VIRTUAL_OUTGOING_ARGS_RTX changes as well. But might
3599 as well always do it. */
3600 argblock = copy_to_reg (argblock);
3605 if (ACCUMULATE_OUTGOING_ARGS)
3607 /* The save/restore code in store_one_arg handles all
3608 cases except one: a constructor call (including a C
3609 function returning a BLKmode struct) to initialize
3610 an argument. */
3611 if (stack_arg_under_construction)
3613 rtx push_size
3614 = GEN_INT (adjusted_args_size.constant
3615 + (OUTGOING_REG_PARM_STACK_SPACE ((!fndecl ? fntype
3616 : TREE_TYPE (fndecl))) ? 0
3617 : reg_parm_stack_space));
3618 if (old_stack_level == 0)
3620 emit_stack_save (SAVE_BLOCK, &old_stack_level);
3621 old_stack_pointer_delta = stack_pointer_delta;
3622 old_pending_adj = pending_stack_adjust;
3623 pending_stack_adjust = 0;
3624 /* stack_arg_under_construction says whether a stack
3625 arg is being constructed at the old stack level.
3626 Pushing the stack gets a clean outgoing argument
3627 block. */
3628 old_stack_arg_under_construction
3629 = stack_arg_under_construction;
3630 stack_arg_under_construction = 0;
3631 /* Make a new map for the new argument list. */
3632 free (stack_usage_map_buf);
3633 stack_usage_map_buf = XCNEWVEC (char, highest_outgoing_arg_in_use);
3634 stack_usage_map = stack_usage_map_buf;
3635 highest_outgoing_arg_in_use = 0;
3637 /* We can pass TRUE as the 4th argument because we just
3638 saved the stack pointer and will restore it right after
3639 the call. */
3640 allocate_dynamic_stack_space (push_size, 0,
3641 BIGGEST_ALIGNMENT, true);
3644 /* If argument evaluation might modify the stack pointer,
3645 copy the address of the argument list to a register. */
3646 for (i = 0; i < num_actuals; i++)
3647 if (args[i].pass_on_stack)
3649 argblock = copy_addr_to_reg (argblock);
3650 break;
3654 compute_argument_addresses (args, argblock, num_actuals);
3656 /* Stack is properly aligned, pops can't safely be deferred during
3657 the evaluation of the arguments. */
3658 NO_DEFER_POP;
3660 /* Precompute all register parameters. It isn't safe to compute
3661 anything once we have started filling any specific hard regs.
3662 TLS symbols sometimes need a call to resolve. Precompute
3663 register parameters before any stack pointer manipulation
3664 to avoid unaligned stack in the called function. */
3665 precompute_register_parameters (num_actuals, args, &reg_parm_seen);
3667 OK_DEFER_POP;
3669 /* Perform stack alignment before the first push (the last arg). */
3670 if (argblock == 0
3671 && adjusted_args_size.constant > reg_parm_stack_space
3672 && adjusted_args_size.constant != unadjusted_args_size)
3674 /* When the stack adjustment is pending, we get better code
3675 by combining the adjustments. */
3676 if (pending_stack_adjust
3677 && ! inhibit_defer_pop)
3679 pending_stack_adjust
3680 = (combine_pending_stack_adjustment_and_call
3681 (unadjusted_args_size,
3682 &adjusted_args_size,
3683 preferred_unit_stack_boundary));
3684 do_pending_stack_adjust ();
3686 else if (argblock == 0)
3687 anti_adjust_stack (GEN_INT (adjusted_args_size.constant
3688 - unadjusted_args_size));
3690 /* Now that the stack is properly aligned, pops can't safely
3691 be deferred during the evaluation of the arguments. */
3692 NO_DEFER_POP;
3694 /* Record the maximum pushed stack space size. We need to delay
3695 doing it this far to take into account the optimization done
3696 by combine_pending_stack_adjustment_and_call. */
3697 if (flag_stack_usage_info
3698 && !ACCUMULATE_OUTGOING_ARGS
3699 && pass
3700 && adjusted_args_size.var == 0)
3702 int pushed = adjusted_args_size.constant + pending_stack_adjust;
3703 if (pushed > current_function_pushed_stack_size)
3704 current_function_pushed_stack_size = pushed;
3707 funexp = rtx_for_function_call (fndecl, addr);
3709 if (CALL_EXPR_STATIC_CHAIN (exp))
3710 static_chain_value = expand_normal (CALL_EXPR_STATIC_CHAIN (exp));
3711 else
3712 static_chain_value = 0;
3714 #ifdef REG_PARM_STACK_SPACE
3715 /* Save the fixed argument area if it's part of the caller's frame and
3716 is clobbered by argument setup for this call. */
3717 if (ACCUMULATE_OUTGOING_ARGS && pass)
3718 save_area = save_fixed_argument_area (reg_parm_stack_space, argblock,
3719 &low_to_save, &high_to_save);
3720 #endif
3722 /* Now store (and compute if necessary) all non-register parms.
3723 These come before register parms, since they can require block-moves,
3724 which could clobber the registers used for register parms.
3725 Parms which have partial registers are not stored here,
3726 but we do preallocate space here if they want that. */
3728 for (i = 0; i < num_actuals; i++)
3730 /* Delay bounds until all other args are stored. */
3731 if (POINTER_BOUNDS_P (args[i].tree_value))
3732 continue;
3733 else if (args[i].reg == 0 || args[i].pass_on_stack)
3735 rtx_insn *before_arg = get_last_insn ();
3737 /* We don't allow passing huge (> 2^30 B) arguments
3738 by value. It would cause an overflow later on. */
3739 if (adjusted_args_size.constant
3740 >= (1 << (HOST_BITS_PER_INT - 2)))
3742 sorry ("passing too large argument on stack");
3743 continue;
3746 if (store_one_arg (&args[i], argblock, flags,
3747 adjusted_args_size.var != 0,
3748 reg_parm_stack_space)
3749 || (pass == 0
3750 && check_sibcall_argument_overlap (before_arg,
3751 &args[i], 1)))
3752 sibcall_failure = 1;
3755 if (args[i].stack)
3756 call_fusage
3757 = gen_rtx_EXPR_LIST (TYPE_MODE (TREE_TYPE (args[i].tree_value)),
3758 gen_rtx_USE (VOIDmode, args[i].stack),
3759 call_fusage);
3762 /* If we have a parm that is passed in registers but not in memory
3763 and whose alignment does not permit a direct copy into registers,
3764 make a group of pseudos that correspond to each register that we
3765 will later fill. */
3766 if (STRICT_ALIGNMENT)
3767 store_unaligned_arguments_into_pseudos (args, num_actuals);
3769 /* Now store any partially-in-registers parm.
3770 This is the last place a block-move can happen. */
3771 if (reg_parm_seen)
3772 for (i = 0; i < num_actuals; i++)
3773 if (args[i].partial != 0 && ! args[i].pass_on_stack)
3775 rtx_insn *before_arg = get_last_insn ();
3777 /* On targets with weird calling conventions (e.g. PA) it's
3778 hard to ensure that all cases of argument overlap between
3779 stack and registers work. Play it safe and bail out. */
3780 if (ARGS_GROW_DOWNWARD && !STACK_GROWS_DOWNWARD)
3782 sibcall_failure = 1;
3783 break;
3786 if (store_one_arg (&args[i], argblock, flags,
3787 adjusted_args_size.var != 0,
3788 reg_parm_stack_space)
3789 || (pass == 0
3790 && check_sibcall_argument_overlap (before_arg,
3791 &args[i], 1)))
3792 sibcall_failure = 1;
3795 bool any_regs = false;
3796 for (i = 0; i < num_actuals; i++)
3797 if (args[i].reg != NULL_RTX)
3799 any_regs = true;
3800 targetm.calls.call_args (args[i].reg, funtype);
3802 if (!any_regs)
3803 targetm.calls.call_args (pc_rtx, funtype);
3805 /* Figure out the register where the value, if any, will come back. */
3806 valreg = 0;
3807 valbnd = 0;
3808 if (TYPE_MODE (rettype) != VOIDmode
3809 && ! structure_value_addr)
3811 if (pcc_struct_value)
3813 valreg = hard_function_value (build_pointer_type (rettype),
3814 fndecl, NULL, (pass == 0));
3815 if (CALL_WITH_BOUNDS_P (exp))
3816 valbnd = targetm.calls.
3817 chkp_function_value_bounds (build_pointer_type (rettype),
3818 fndecl, (pass == 0));
3820 else
3822 valreg = hard_function_value (rettype, fndecl, fntype,
3823 (pass == 0));
3824 if (CALL_WITH_BOUNDS_P (exp))
3825 valbnd = targetm.calls.chkp_function_value_bounds (rettype,
3826 fndecl,
3827 (pass == 0));
3830 /* If VALREG is a PARALLEL whose first member has a zero
3831 offset, use that. This is for targets such as m68k that
3832 return the same value in multiple places. */
3833 if (GET_CODE (valreg) == PARALLEL)
3835 rtx elem = XVECEXP (valreg, 0, 0);
3836 rtx where = XEXP (elem, 0);
3837 rtx offset = XEXP (elem, 1);
3838 if (offset == const0_rtx
3839 && GET_MODE (where) == GET_MODE (valreg))
3840 valreg = where;
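/* Hypothetical RTL sketch (m68k-style, not from the source): a pointer
   returned in both %d0 and %a0 might be represented as

     (parallel:SI [(expr_list (reg:SI 0 %d0) (const_int 0))
                   (expr_list (reg:SI 8 %a0) (const_int 0))])

   whose first element has offset 0 and matching SImode, so %d0 is used
   directly as VALREG.  */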
3844 /* Store all bounds not passed in registers. */
3845 for (i = 0; i < num_actuals; i++)
3847 if (POINTER_BOUNDS_P (args[i].tree_value)
3848 && !args[i].reg)
3849 store_bounds (&args[i],
3850 args[i].pointer_arg == -1
3851 ? NULL
3852 : &args[args[i].pointer_arg]);
3855 /* If register arguments require space on the stack and stack space
3856 was not preallocated, allocate stack space here for arguments
3857 passed in registers. */
3858 if (OUTGOING_REG_PARM_STACK_SPACE ((!fndecl ? fntype : TREE_TYPE (fndecl)))
3859 && !ACCUMULATE_OUTGOING_ARGS
3860 && must_preallocate == 0 && reg_parm_stack_space > 0)
3861 anti_adjust_stack (GEN_INT (reg_parm_stack_space));
3863 /* Pass the function the address in which to return a
3864 structure value. */
3865 if (pass != 0 && structure_value_addr && ! structure_value_addr_parm)
3867 structure_value_addr
3868 = convert_memory_address (Pmode, structure_value_addr);
3869 emit_move_insn (struct_value,
3870 force_reg (Pmode,
3871 force_operand (structure_value_addr,
3872 NULL_RTX)));
3874 if (REG_P (struct_value))
3875 use_reg (&call_fusage, struct_value);
3878 after_args = get_last_insn ();
3879 funexp = prepare_call_address (fndecl ? fndecl : fntype, funexp,
3880 static_chain_value, &call_fusage,
3881 reg_parm_seen, flags);
3883 load_register_parameters (args, num_actuals, &call_fusage, flags,
3884 pass == 0, &sibcall_failure);
3886 /* Save a pointer to the last insn before the call, so that we can
3887 later safely search backwards to find the CALL_INSN. */
3888 before_call = get_last_insn ();
3890 /* Set up next argument register. For sibling calls on machines
3891 with register windows this should be the incoming register. */
3892 if (pass == 0)
3893 next_arg_reg = targetm.calls.function_incoming_arg (args_so_far,
3894 VOIDmode,
3895 void_type_node,
3896 true);
3897 else
3898 next_arg_reg = targetm.calls.function_arg (args_so_far,
3899 VOIDmode, void_type_node,
3900 true);
3902 if (pass == 1 && (return_flags & ERF_RETURNS_ARG))
3904 int arg_nr = return_flags & ERF_RETURN_ARG_MASK;
3905 arg_nr = num_actuals - arg_nr - 1;
3906 if (arg_nr >= 0
3907 && arg_nr < num_actuals
3908 && args[arg_nr].reg
3909 && valreg
3910 && REG_P (valreg)
3911 && GET_MODE (args[arg_nr].reg) == GET_MODE (valreg))
3912 call_fusage
3913 = gen_rtx_EXPR_LIST (TYPE_MODE (TREE_TYPE (args[arg_nr].tree_value)),
3914 gen_rtx_SET (valreg, args[arg_nr].reg),
3915 call_fusage);
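/* Illustrative example (assumed flags, not from the source): for a
   memcpy-like callee with return_flags == (ERF_RETURNS_ARG | 0) and
   num_actuals == 3, the returned argument maps to
   args[3 - 0 - 1] == args[2] because ARGS was filled in reverse, and
   a (set valreg args[2].reg) note is recorded in CALL_FUSAGE.  */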
3917 /* All arguments and registers used for the call must be set up by
3918 now! */
3920 /* Stack must be properly aligned now. */
3921 gcc_assert (!pass
3922 || !(stack_pointer_delta % preferred_unit_stack_boundary));
3924 /* Generate the actual call instruction. */
3925 emit_call_1 (funexp, exp, fndecl, funtype, unadjusted_args_size,
3926 adjusted_args_size.constant, struct_value_size,
3927 next_arg_reg, valreg, old_inhibit_defer_pop, call_fusage,
3928 flags, args_so_far);
3930 if (flag_ipa_ra)
3932 rtx_call_insn *last;
3933 rtx datum = NULL_RTX;
3934 if (fndecl != NULL_TREE)
3936 datum = XEXP (DECL_RTL (fndecl), 0);
3937 gcc_assert (datum != NULL_RTX
3938 && GET_CODE (datum) == SYMBOL_REF);
3940 last = last_call_insn ();
3941 add_reg_note (last, REG_CALL_DECL, datum);
3944 /* If the call setup or the call itself overlaps with anything
3945 of the argument setup we probably clobbered our call address.
3946 In that case we can't do sibcalls. */
3947 if (pass == 0
3948 && check_sibcall_argument_overlap (after_args, 0, 0))
3949 sibcall_failure = 1;
3951 /* If a non-BLKmode value is returned at the most significant end
3952 of a register, shift the register right by the appropriate amount
3953 and update VALREG accordingly. BLKmode values are handled by the
3954 group load/store machinery below. */
3955 if (!structure_value_addr
3956 && !pcc_struct_value
3957 && TYPE_MODE (rettype) != VOIDmode
3958 && TYPE_MODE (rettype) != BLKmode
3959 && REG_P (valreg)
3960 && targetm.calls.return_in_msb (rettype))
3962 if (shift_return_value (TYPE_MODE (rettype), false, valreg))
3963 sibcall_failure = 1;
3964 valreg = gen_rtx_REG (TYPE_MODE (rettype), REGNO (valreg));
3967 if (pass && (flags & ECF_MALLOC))
3969 rtx temp = gen_reg_rtx (GET_MODE (valreg));
3970 rtx_insn *last, *insns;
3972 /* The return value from a malloc-like function is a pointer. */
3973 if (TREE_CODE (rettype) == POINTER_TYPE)
3974 mark_reg_pointer (temp, MALLOC_ABI_ALIGNMENT);
3976 emit_move_insn (temp, valreg);
3978 /* The return value from a malloc-like function cannot alias
3979 anything else. */
3980 last = get_last_insn ();
3981 add_reg_note (last, REG_NOALIAS, temp);
3983 /* Write out the sequence. */
3984 insns = get_insns ();
3985 end_sequence ();
3986 emit_insn (insns);
3987 valreg = temp;
3990 /* For calls to `setjmp', etc., inform
3991 function.c:setjmp_warnings that it should complain if
3992 nonvolatile values are live. For functions that cannot
3993 return, inform flow that control does not fall through. */
3995 if ((flags & ECF_NORETURN) || pass == 0)
3997 /* The barrier must be emitted
3998 immediately after the CALL_INSN. Some ports emit more
3999 than just a CALL_INSN above, so we must search for it here. */
4001 rtx_insn *last = get_last_insn ();
4002 while (!CALL_P (last))
4004 last = PREV_INSN (last);
4005 /* There was no CALL_INSN? */
4006 gcc_assert (last != before_call);
4009 emit_barrier_after (last);
4011 /* Stack adjustments after a noreturn call are dead code.
4012 However when NO_DEFER_POP is in effect, we must preserve
4013 stack_pointer_delta. */
4014 if (inhibit_defer_pop == 0)
4016 stack_pointer_delta = old_stack_allocated;
4017 pending_stack_adjust = 0;
4021 /* If value type not void, return an rtx for the value. */
4023 if (TYPE_MODE (rettype) == VOIDmode
4024 || ignore)
4025 target = const0_rtx;
4026 else if (structure_value_addr)
4028 if (target == 0 || !MEM_P (target))
4030 target
4031 = gen_rtx_MEM (TYPE_MODE (rettype),
4032 memory_address (TYPE_MODE (rettype),
4033 structure_value_addr));
4034 set_mem_attributes (target, rettype, 1);
4037 else if (pcc_struct_value)
4039 /* This is the special C++ case where we need to
4040 know what the true target was. We take care to
4041 never use this value more than once in one expression. */
4042 target = gen_rtx_MEM (TYPE_MODE (rettype),
4043 copy_to_reg (valreg));
4044 set_mem_attributes (target, rettype, 1);
4046 /* Handle calls that return values in multiple non-contiguous locations.
4047 The Irix 6 ABI has examples of this. */
4048 else if (GET_CODE (valreg) == PARALLEL)
4050 if (target == 0)
4051 target = emit_group_move_into_temps (valreg);
4052 else if (rtx_equal_p (target, valreg))
4054 else if (GET_CODE (target) == PARALLEL)
4055 /* Handle the result of an emit_group_move_into_temps
4056 call in the previous pass. */
4057 emit_group_move (target, valreg);
4058 else
4059 emit_group_store (target, valreg, rettype,
4060 int_size_in_bytes (rettype));
4062 else if (target
4063 && GET_MODE (target) == TYPE_MODE (rettype)
4064 && GET_MODE (target) == GET_MODE (valreg))
4066 bool may_overlap = false;
4068 /* We have to copy a return value in a CLASS_LIKELY_SPILLED hard
4069 reg to a plain register. */
4070 if (!REG_P (target) || HARD_REGISTER_P (target))
4071 valreg = avoid_likely_spilled_reg (valreg);
4073 /* If TARGET is a MEM in the argument area, and we have
4074 saved part of the argument area, then we can't store
4075 directly into TARGET as it may get overwritten when we
4076 restore the argument save area below. Don't work too
4077 hard though and simply force TARGET to a register if it
4078 is a MEM; the optimizer is quite likely to sort it out. */
4079 if (ACCUMULATE_OUTGOING_ARGS && pass && MEM_P (target))
4080 for (i = 0; i < num_actuals; i++)
4081 if (args[i].save_area)
4083 may_overlap = true;
4084 break;
4087 if (may_overlap)
4088 target = copy_to_reg (valreg);
4089 else
4091 /* TARGET and VALREG cannot be equal at this point
4092 because the latter would not have
4093 REG_FUNCTION_VALUE_P true, while the former would if
4094 it were referring to the same register.
4096 If they refer to the same register, this move will be
4097 a no-op, except when function inlining is being
4098 done. */
4099 emit_move_insn (target, valreg);
4101 /* If we are setting a MEM, this code must be executed.
4102 Since it is emitted after the call insn, sibcall
4103 optimization cannot be performed in that case. */
4104 if (MEM_P (target))
4105 sibcall_failure = 1;
4108 else
4109 target = copy_to_reg (avoid_likely_spilled_reg (valreg));
4111 /* If we promoted this return value, make the proper SUBREG.
4112 TARGET might be const0_rtx here, so be careful. */
4113 if (REG_P (target)
4114 && TYPE_MODE (rettype) != BLKmode
4115 && GET_MODE (target) != TYPE_MODE (rettype))
4117 tree type = rettype;
4118 int unsignedp = TYPE_UNSIGNED (type);
4119 int offset = 0;
4120 machine_mode pmode;
4122 /* Ensure we promote as expected, and get the new unsignedness. */
4123 pmode = promote_function_mode (type, TYPE_MODE (type), &unsignedp,
4124 funtype, 1);
4125 gcc_assert (GET_MODE (target) == pmode);
4127 if ((WORDS_BIG_ENDIAN || BYTES_BIG_ENDIAN)
4128 && (GET_MODE_SIZE (GET_MODE (target))
4129 > GET_MODE_SIZE (TYPE_MODE (type))))
4131 offset = GET_MODE_SIZE (GET_MODE (target))
4132 - GET_MODE_SIZE (TYPE_MODE (type));
4133 if (! BYTES_BIG_ENDIAN)
4134 offset = (offset / UNITS_PER_WORD) * UNITS_PER_WORD;
4135 else if (! WORDS_BIG_ENDIAN)
4136 offset %= UNITS_PER_WORD;
4139 target = gen_rtx_SUBREG (TYPE_MODE (type), target, offset);
4140 SUBREG_PROMOTED_VAR_P (target) = 1;
4141 SUBREG_PROMOTED_SET (target, unsignedp);
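/* Worked example (assumed big-endian 64-bit target): an SImode return
   value promoted to DImode gives offset == 8 - 4 == 4, so the low part
   is accessed as (subreg:SI (reg:DI target) 4), with
   SUBREG_PROMOTED_VAR_P telling later code that the upper bits are
   already extended as promote_function_mode reported.  */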
4144 /* If size of args is variable or this was a constructor call for a stack
4145 argument, restore saved stack-pointer value. */
4147 if (old_stack_level)
4149 rtx_insn *prev = get_last_insn ();
4151 emit_stack_restore (SAVE_BLOCK, old_stack_level);
4152 stack_pointer_delta = old_stack_pointer_delta;
4154 fixup_args_size_notes (prev, get_last_insn (), stack_pointer_delta);
4156 pending_stack_adjust = old_pending_adj;
4157 old_stack_allocated = stack_pointer_delta - pending_stack_adjust;
4158 stack_arg_under_construction = old_stack_arg_under_construction;
4159 highest_outgoing_arg_in_use = initial_highest_arg_in_use;
4160 stack_usage_map = initial_stack_usage_map;
4161 sibcall_failure = 1;
4163 else if (ACCUMULATE_OUTGOING_ARGS && pass)
4165 #ifdef REG_PARM_STACK_SPACE
4166 if (save_area)
4167 restore_fixed_argument_area (save_area, argblock,
4168 high_to_save, low_to_save);
4169 #endif
4171 /* If we saved any argument areas, restore them. */
4172 for (i = 0; i < num_actuals; i++)
4173 if (args[i].save_area)
4175 machine_mode save_mode = GET_MODE (args[i].save_area);
4176 rtx stack_area
4177 = gen_rtx_MEM (save_mode,
4178 memory_address (save_mode,
4179 XEXP (args[i].stack_slot, 0)));
4181 if (save_mode != BLKmode)
4182 emit_move_insn (stack_area, args[i].save_area);
4183 else
4184 emit_block_move (stack_area, args[i].save_area,
4185 GEN_INT (args[i].locate.size.constant),
4186 BLOCK_OP_CALL_PARM);
4189 highest_outgoing_arg_in_use = initial_highest_arg_in_use;
4190 stack_usage_map = initial_stack_usage_map;
4193 /* If this was alloca, record the new stack level. */
4194 if (flags & ECF_MAY_BE_ALLOCA)
4195 record_new_stack_level ();
4197 /* Free up storage we no longer need. */
4198 for (i = 0; i < num_actuals; ++i)
4199 free (args[i].aligned_regs);
4201 targetm.calls.end_call_args ();
4203 insns = get_insns ();
4204 end_sequence ();
4206 if (pass == 0)
4208 tail_call_insns = insns;
4210 /* Restore the pending stack adjustment now that we have
4211 finished generating the sibling call sequence. */
4213 restore_pending_stack_adjust (&save);
4215 /* Prepare arg structure for next iteration. */
4216 for (i = 0; i < num_actuals; i++)
4218 args[i].value = 0;
4219 args[i].aligned_regs = 0;
4220 args[i].stack = 0;
4223 sbitmap_free (stored_args_map);
4224 internal_arg_pointer_exp_state.scan_start = NULL;
4225 internal_arg_pointer_exp_state.cache.release ();
4227 else
4229 normal_call_insns = insns;
4231 /* Verify that we've deallocated all the stack we used. */
4232 gcc_assert ((flags & ECF_NORETURN)
4233 || (old_stack_allocated
4234 == stack_pointer_delta - pending_stack_adjust));
4237 /* If something prevents making this a sibling call,
4238 zero out the sequence. */
4239 if (sibcall_failure)
4240 tail_call_insns = NULL;
4241 else
4242 break;
4245 /* If tail call production succeeded, we need to remove REG_EQUIV notes on
4246 arguments too, as argument area is now clobbered by the call. */
4247 if (tail_call_insns)
4249 emit_insn (tail_call_insns);
4250 crtl->tail_call_emit = true;
4252 else
4254 emit_insn (normal_call_insns);
4255 if (try_tail_call)
4256 /* Ideally we'd emit a message for all of the ways that it could
4257 have failed. */
4258 maybe_complain_about_tail_call (exp, "tail call production failed");
4261 currently_expanding_call--;
4263 free (stack_usage_map_buf);
4265 /* Join result with returned bounds so caller may use them if needed. */
4266 target = chkp_join_splitted_slot (target, valbnd);
4268 return target;
4271 /* A sibling call sequence invalidates any REG_EQUIV notes made for
4272 this function's incoming arguments.
4274 At the start of RTL generation we know the only REG_EQUIV notes
4275 in the rtl chain are those for incoming arguments, so we can look
4276 for REG_EQUIV notes between the start of the function and the
4277 NOTE_INSN_FUNCTION_BEG.
4279 This is (slight) overkill. We could keep track of the highest
4280 argument we clobber and be more selective in removing notes, but it
4281 does not seem to be worth the effort. */
4283 void
4284 fixup_tail_calls (void)
4286 rtx_insn *insn;
4288 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
4290 rtx note;
4292 /* There are never REG_EQUIV notes for the incoming arguments
4293 after the NOTE_INSN_FUNCTION_BEG note, so stop if we see it. */
4294 if (NOTE_P (insn)
4295 && NOTE_KIND (insn) == NOTE_INSN_FUNCTION_BEG)
4296 break;
4298 note = find_reg_note (insn, REG_EQUIV, 0);
4299 if (note)
4300 remove_note (insn, note);
4301 note = find_reg_note (insn, REG_EQUIV, 0);
4302 gcc_assert (!note);
4306 /* Traverse a list of TYPES and expand all complex types into their
4307 components. */
4308 static tree
4309 split_complex_types (tree types)
4311 tree p;
4313 /* Before allocating memory, check for the common case of no complex. */
4314 for (p = types; p; p = TREE_CHAIN (p))
4316 tree type = TREE_VALUE (p);
4317 if (TREE_CODE (type) == COMPLEX_TYPE
4318 && targetm.calls.split_complex_arg (type))
4319 goto found;
4321 return types;
4323 found:
4324 types = copy_list (types);
4326 for (p = types; p; p = TREE_CHAIN (p))
4328 tree complex_type = TREE_VALUE (p);
4330 if (TREE_CODE (complex_type) == COMPLEX_TYPE
4331 && targetm.calls.split_complex_arg (complex_type))
4333 tree next, imag;
4335 /* Rewrite complex type with component type. */
4336 TREE_VALUE (p) = TREE_TYPE (complex_type);
4337 next = TREE_CHAIN (p);
4339 /* Add another component type for the imaginary part. */
4340 imag = build_tree_list (NULL_TREE, TREE_VALUE (p));
4341 TREE_CHAIN (p) = imag;
4342 TREE_CHAIN (imag) = next;
4344 /* Skip the newly created node. */
4345 p = TREE_CHAIN (p);
4349 return types;
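/* Illustrative sketch (hypothetical target hook behavior): if
   split_complex_arg accepts _Complex double, a TYPE_ARG_TYPES chain

     _Complex double -> int

   is rewritten by split_complex_types into

     double -> double -> int

   i.e. the component type replaces the complex node and a fresh list
   node for the imaginary part is spliced in right after it.  */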
4352 /* Output a library call to function FUN (a SYMBOL_REF rtx).
4353 The RETVAL parameter specifies whether the return value needs to be
4354 saved; the other parameters are documented in the emit_library_call function below. */
4356 static rtx
4357 emit_library_call_value_1 (int retval, rtx orgfun, rtx value,
4358 enum libcall_type fn_type,
4359 machine_mode outmode, int nargs, va_list p)
4361 /* Total size in bytes of all the stack-parms scanned so far. */
4362 struct args_size args_size;
4363 /* Size of arguments before any adjustments (such as rounding). */
4364 struct args_size original_args_size;
4365 int argnum;
4366 rtx fun;
4367 /* Todo, choose the correct decl type of orgfun. Sadly this information
4368 isn't present here, so we default to native calling abi here. */
4369 tree fndecl ATTRIBUTE_UNUSED = NULL_TREE; /* library calls default to host calling abi ? */
4370 tree fntype ATTRIBUTE_UNUSED = NULL_TREE; /* library calls default to host calling abi ? */
4371 int count;
4372 rtx argblock = 0;
4373 CUMULATIVE_ARGS args_so_far_v;
4374 cumulative_args_t args_so_far;
4375 struct arg
4377 rtx value;
4378 machine_mode mode;
4379 rtx reg;
4380 int partial;
4381 struct locate_and_pad_arg_data locate;
4382 rtx save_area;
4384 struct arg *argvec;
4385 int old_inhibit_defer_pop = inhibit_defer_pop;
4386 rtx call_fusage = 0;
4387 rtx mem_value = 0;
4388 rtx valreg;
4389 int pcc_struct_value = 0;
4390 int struct_value_size = 0;
4391 int flags;
4392 int reg_parm_stack_space = 0;
4393 int needed;
4394 rtx_insn *before_call;
4395 bool have_push_fusage;
4396 tree tfom; /* type_for_mode (outmode, 0) */
4398 #ifdef REG_PARM_STACK_SPACE
4399 /* Define the boundary of the register parm stack space that needs to be
4400 saved, if any. */
4401 int low_to_save = 0, high_to_save = 0;
4402 rtx save_area = 0; /* Place that it is saved. */
4403 #endif
4405 /* Size of the stack reserved for parameter registers. */
4406 int initial_highest_arg_in_use = highest_outgoing_arg_in_use;
4407 char *initial_stack_usage_map = stack_usage_map;
4408 char *stack_usage_map_buf = NULL;
4410 rtx struct_value = targetm.calls.struct_value_rtx (0, 0);
4412 #ifdef REG_PARM_STACK_SPACE
4413 reg_parm_stack_space = REG_PARM_STACK_SPACE ((tree) 0);
4414 #endif
4416 /* By default, library functions cannot throw. */
4417 flags = ECF_NOTHROW;
4419 switch (fn_type)
4421 case LCT_NORMAL:
4422 break;
4423 case LCT_CONST:
4424 flags |= ECF_CONST;
4425 break;
4426 case LCT_PURE:
4427 flags |= ECF_PURE;
4428 break;
4429 case LCT_NORETURN:
4430 flags |= ECF_NORETURN;
4431 break;
4432 case LCT_THROW:
4433 flags &= ~ECF_NOTHROW;
4434 break;
4435 case LCT_RETURNS_TWICE:
4436 flags = ECF_RETURNS_TWICE;
4437 break;
4439 fun = orgfun;
4441 /* Ensure current function's preferred stack boundary is at least
4442 what we need. */
4443 if (crtl->preferred_stack_boundary < PREFERRED_STACK_BOUNDARY)
4444 crtl->preferred_stack_boundary = PREFERRED_STACK_BOUNDARY;
4446 /* If this kind of value comes back in memory,
4447 decide where in memory it should come back. */
4448 if (outmode != VOIDmode)
4450 tfom = lang_hooks.types.type_for_mode (outmode, 0);
4451 if (aggregate_value_p (tfom, 0))
4453 #ifdef PCC_STATIC_STRUCT_RETURN
4454 rtx pointer_reg
4455 = hard_function_value (build_pointer_type (tfom), 0, 0, 0);
4456 mem_value = gen_rtx_MEM (outmode, pointer_reg);
4457 pcc_struct_value = 1;
4458 if (value == 0)
4459 value = gen_reg_rtx (outmode);
4460 #else /* not PCC_STATIC_STRUCT_RETURN */
4461 struct_value_size = GET_MODE_SIZE (outmode);
4462 if (value != 0 && MEM_P (value))
4463 mem_value = value;
4464 else
4465 mem_value = assign_temp (tfom, 1, 1);
4466 #endif
4467 /* This call returns a big structure. */
4468 flags &= ~(ECF_CONST | ECF_PURE | ECF_LOOPING_CONST_OR_PURE);
4471 else
4472 tfom = void_type_node;
4474 /* ??? Unfinished: must pass the memory address as an argument. */
4476 /* Copy all the libcall-arguments out of the varargs data
4477 and into a vector ARGVEC.
4479 Compute how to pass each argument. We only support a very small subset
4480 of the full argument passing conventions to limit complexity here since
4481 library functions shouldn't have many args. */
4483 argvec = XALLOCAVEC (struct arg, nargs + 1);
4484 memset (argvec, 0, (nargs + 1) * sizeof (struct arg));
4486 #ifdef INIT_CUMULATIVE_LIBCALL_ARGS
4487 INIT_CUMULATIVE_LIBCALL_ARGS (args_so_far_v, outmode, fun);
4488 #else
4489 INIT_CUMULATIVE_ARGS (args_so_far_v, NULL_TREE, fun, 0, nargs);
4490 #endif
4491 args_so_far = pack_cumulative_args (&args_so_far_v);
4493 args_size.constant = 0;
4494 args_size.var = 0;
4496 count = 0;
4498 push_temp_slots ();
4500 /* If there's a structure value address to be passed,
4501 either pass it in the special place, or pass it as an extra argument. */
4502 if (mem_value && struct_value == 0 && ! pcc_struct_value)
4504 rtx addr = XEXP (mem_value, 0);
4506 nargs++;
4508 /* Make sure it is a reasonable operand for a move or push insn. */
4509 if (!REG_P (addr) && !MEM_P (addr)
4510 && !(CONSTANT_P (addr)
4511 && targetm.legitimate_constant_p (Pmode, addr)))
4512 addr = force_operand (addr, NULL_RTX);
4514 argvec[count].value = addr;
4515 argvec[count].mode = Pmode;
4516 argvec[count].partial = 0;
4518 argvec[count].reg = targetm.calls.function_arg (args_so_far,
4519 Pmode, NULL_TREE, true);
4520 gcc_assert (targetm.calls.arg_partial_bytes (args_so_far, Pmode,
4521 NULL_TREE, 1) == 0);
4523 locate_and_pad_parm (Pmode, NULL_TREE,
4524 #ifdef STACK_PARMS_IN_REG_PARM_AREA
4525 1,
4526 #else
4527 argvec[count].reg != 0,
4528 #endif
4529 reg_parm_stack_space, 0,
4530 NULL_TREE, &args_size, &argvec[count].locate);
4532 if (argvec[count].reg == 0 || argvec[count].partial != 0
4533 || reg_parm_stack_space > 0)
4534 args_size.constant += argvec[count].locate.size.constant;
4536 targetm.calls.function_arg_advance (args_so_far, Pmode, (tree) 0, true);
4538 count++;
4541 for (; count < nargs; count++)
4543 rtx val = va_arg (p, rtx);
4544 machine_mode mode = (machine_mode) va_arg (p, int);
4545 int unsigned_p = 0;
4547 /* We cannot convert the arg value to the mode the library wants here;
4548 must do it earlier where we know the signedness of the arg. */
4549 gcc_assert (mode != BLKmode
4550 && (GET_MODE (val) == mode || GET_MODE (val) == VOIDmode));
4552 /* Make sure it is a reasonable operand for a move or push insn. */
4553 if (!REG_P (val) && !MEM_P (val)
4554 && !(CONSTANT_P (val) && targetm.legitimate_constant_p (mode, val)))
4555 val = force_operand (val, NULL_RTX);
4557 if (pass_by_reference (&args_so_far_v, mode, NULL_TREE, 1))
4559 rtx slot;
4560 int must_copy
4561 = !reference_callee_copied (&args_so_far_v, mode, NULL_TREE, 1);
4563 /* If this was a CONST function, it is now PURE since it now
4564 reads memory. */
4565 if (flags & ECF_CONST)
4567 flags &= ~ECF_CONST;
4568 flags |= ECF_PURE;
4571 if (MEM_P (val) && !must_copy)
4573 tree val_expr = MEM_EXPR (val);
4574 if (val_expr)
4575 mark_addressable (val_expr);
4576 slot = val;
4578 else
4580 slot = assign_temp (lang_hooks.types.type_for_mode (mode, 0),
4581 1, 1);
4582 emit_move_insn (slot, val);
4585 call_fusage = gen_rtx_EXPR_LIST (VOIDmode,
4586 gen_rtx_USE (VOIDmode, slot),
4587 call_fusage);
4588 if (must_copy)
4589 call_fusage = gen_rtx_EXPR_LIST (VOIDmode,
4590 gen_rtx_CLOBBER (VOIDmode,
4591 slot),
4592 call_fusage);
4594 mode = Pmode;
4595 val = force_operand (XEXP (slot, 0), NULL_RTX);
4598 mode = promote_function_mode (NULL_TREE, mode, &unsigned_p, NULL_TREE, 0);
4599 argvec[count].mode = mode;
4600 argvec[count].value = convert_modes (mode, GET_MODE (val), val, unsigned_p);
4601 argvec[count].reg = targetm.calls.function_arg (args_so_far, mode,
4602 NULL_TREE, true);
4604 argvec[count].partial
4605 = targetm.calls.arg_partial_bytes (args_so_far, mode, NULL_TREE, 1);
4607 if (argvec[count].reg == 0
4608 || argvec[count].partial != 0
4609 || reg_parm_stack_space > 0)
4611 locate_and_pad_parm (mode, NULL_TREE,
4612 #ifdef STACK_PARMS_IN_REG_PARM_AREA
4613 1,
4614 #else
4615 argvec[count].reg != 0,
4616 #endif
4617 reg_parm_stack_space, argvec[count].partial,
4618 NULL_TREE, &args_size, &argvec[count].locate);
4619 args_size.constant += argvec[count].locate.size.constant;
4620 gcc_assert (!argvec[count].locate.size.var);
4622 #ifdef BLOCK_REG_PADDING
4623 else
4624 /* The argument is passed entirely in registers. See at which
4625 end it should be padded. */
4626 argvec[count].locate.where_pad =
4627 BLOCK_REG_PADDING (mode, NULL_TREE,
4628 GET_MODE_SIZE (mode) <= UNITS_PER_WORD);
4629 #endif
4631 targetm.calls.function_arg_advance (args_so_far, mode, (tree) 0, true);
4634 /* If this machine requires an external definition for library
4635 functions, write one out. */
4636 assemble_external_libcall (fun);
4638 original_args_size = args_size;
4639 args_size.constant = (((args_size.constant
4640 + stack_pointer_delta
4641 + STACK_BYTES - 1)
4642 / STACK_BYTES
4643 * STACK_BYTES)
4644 - stack_pointer_delta);
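/* Worked example (illustrative numbers): with STACK_BYTES == 16,
   args_size.constant == 20 and stack_pointer_delta == 8, the rounded
   size is (20 + 8 + 15) / 16 * 16 - 8 == 24, leaving the stack pointer
   16-byte aligned (8 + 24 == 32) at the call.  */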
4646 args_size.constant = MAX (args_size.constant,
4647 reg_parm_stack_space);
4649 if (! OUTGOING_REG_PARM_STACK_SPACE ((!fndecl ? fntype : TREE_TYPE (fndecl))))
4650 args_size.constant -= reg_parm_stack_space;
4652 if (args_size.constant > crtl->outgoing_args_size)
4653 crtl->outgoing_args_size = args_size.constant;
4655 if (flag_stack_usage_info && !ACCUMULATE_OUTGOING_ARGS)
4657 int pushed = args_size.constant + pending_stack_adjust;
4658 if (pushed > current_function_pushed_stack_size)
4659 current_function_pushed_stack_size = pushed;
4662 if (ACCUMULATE_OUTGOING_ARGS)
4664 /* Since the stack pointer will never be pushed, it is possible for
4665 the evaluation of a parm to clobber something we have already
4666 written to the stack. Since most function calls on RISC machines
4667 do not use the stack, this is uncommon, but must work correctly.
4669 Therefore, we save any area of the stack that was already written
4670 and that we are using. Here we set up to do this by making a new
4671 stack usage map from the old one.
4673 Another approach might be to try to reorder the argument
4674 evaluations to avoid this conflicting stack usage. */
4676 needed = args_size.constant;
4678 /* Since we will be writing into the entire argument area, the
4679 map must be allocated for its entire size, not just the part that
4680 is the responsibility of the caller. */
4681 if (! OUTGOING_REG_PARM_STACK_SPACE ((!fndecl ? fntype : TREE_TYPE (fndecl))))
4682 needed += reg_parm_stack_space;
4684 if (ARGS_GROW_DOWNWARD)
4685 highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
4686 needed + 1);
4687 else
4688 highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use, needed);
4690 stack_usage_map_buf = XNEWVEC (char, highest_outgoing_arg_in_use);
4691 stack_usage_map = stack_usage_map_buf;
4693 if (initial_highest_arg_in_use)
4694 memcpy (stack_usage_map, initial_stack_usage_map,
4695 initial_highest_arg_in_use);
4697 if (initial_highest_arg_in_use != highest_outgoing_arg_in_use)
4698 memset (&stack_usage_map[initial_highest_arg_in_use], 0,
4699 highest_outgoing_arg_in_use - initial_highest_arg_in_use);
4700 needed = 0;
4702 /* We must be careful to use virtual regs before they're instantiated,
4703 and real regs afterwards. Loop optimization, for example, can create
4704 new libcalls after we've instantiated the virtual regs, and if we
4705 use virtuals anyway, they won't match the rtl patterns. */
4707 if (virtuals_instantiated)
4708 argblock = plus_constant (Pmode, stack_pointer_rtx,
4709 STACK_POINTER_OFFSET);
4710 else
4711 argblock = virtual_outgoing_args_rtx;
4713 else
4715 if (!PUSH_ARGS)
4716 argblock = push_block (GEN_INT (args_size.constant), 0, 0);
4719 /* We push args individually in reverse order; perform stack alignment
4720 before the first push (the last arg). */
4721 if (argblock == 0)
4722 anti_adjust_stack (GEN_INT (args_size.constant
4723 - original_args_size.constant));
4725 argnum = nargs - 1;
4727 #ifdef REG_PARM_STACK_SPACE
4728 if (ACCUMULATE_OUTGOING_ARGS)
4730 /* The argument list is the property of the called routine and it
4731 may clobber it. If the fixed area has been used for previous
4732 parameters, we must save and restore it. */
4733 save_area = save_fixed_argument_area (reg_parm_stack_space, argblock,
4734 &low_to_save, &high_to_save);
4736 #endif
4738 /* When expanding a normal call, args are stored in push order,
4739 which is the reverse of what we have here. */
4740 bool any_regs = false;
4741 for (int i = nargs; i-- > 0; )
4742 if (argvec[i].reg != NULL_RTX)
4744 targetm.calls.call_args (argvec[i].reg, NULL_TREE);
4745 any_regs = true;
4747 if (!any_regs)
4748 targetm.calls.call_args (pc_rtx, NULL_TREE);
4750 /* Push the args that need to be pushed. */
4752 have_push_fusage = false;
4754 /* ARGNUM indexes the ARGVEC array in the order in which the arguments
4755 are to be pushed. */
4756 for (count = 0; count < nargs; count++, argnum--)
4758 machine_mode mode = argvec[argnum].mode;
4759 rtx val = argvec[argnum].value;
4760 rtx reg = argvec[argnum].reg;
4761 int partial = argvec[argnum].partial;
4762 unsigned int parm_align = argvec[argnum].locate.boundary;
4763 int lower_bound = 0, upper_bound = 0, i;
4765 if (! (reg != 0 && partial == 0))
4767 rtx use;
4769 if (ACCUMULATE_OUTGOING_ARGS)
4771 /* If this is being stored into a pre-allocated, fixed-size,
4772 stack area, save any previous data at that location. */
4774 if (ARGS_GROW_DOWNWARD)
4776 /* stack_slot is negative, but we want to index stack_usage_map
4777 with positive values. */
4778 upper_bound = -argvec[argnum].locate.slot_offset.constant + 1;
4779 lower_bound = upper_bound - argvec[argnum].locate.size.constant;
4781 else
4783 lower_bound = argvec[argnum].locate.slot_offset.constant;
4784 upper_bound = lower_bound + argvec[argnum].locate.size.constant;
4787 i = lower_bound;
4788 /* Don't worry about things in the fixed argument area;
4789 it has already been saved. */
4790 if (i < reg_parm_stack_space)
4791 i = reg_parm_stack_space;
4792 while (i < upper_bound && stack_usage_map[i] == 0)
4793 i++;
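/* Worked example (illustrative numbers): if this argument occupies
   bytes [16, 24) and reg_parm_stack_space == 20, the scan starts at
   i == 20; if stack_usage_map[20..23] are all zero, nothing live is
   overwritten and no save area is made.  */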
4795 if (i < upper_bound)
4797 /* We need to make a save area. */
4798 unsigned int size
4799 = argvec[argnum].locate.size.constant * BITS_PER_UNIT;
4800 machine_mode save_mode
4801 = mode_for_size (size, MODE_INT, 1);
4802 rtx adr
4803 = plus_constant (Pmode, argblock,
4804 argvec[argnum].locate.offset.constant);
4805 rtx stack_area
4806 = gen_rtx_MEM (save_mode, memory_address (save_mode, adr));
4808 if (save_mode == BLKmode)
4810 argvec[argnum].save_area
4811 = assign_stack_temp (BLKmode,
4812 argvec[argnum].locate.size.constant
4813 );
4815 emit_block_move (validize_mem
4816 (copy_rtx (argvec[argnum].save_area)),
4817 stack_area,
4818 GEN_INT (argvec[argnum].locate.size.constant),
4819 BLOCK_OP_CALL_PARM);
4821 else
4823 argvec[argnum].save_area = gen_reg_rtx (save_mode);
4825 emit_move_insn (argvec[argnum].save_area, stack_area);
4830 emit_push_insn (val, mode, NULL_TREE, NULL_RTX, parm_align,
4831 partial, reg, 0, argblock,
4832 GEN_INT (argvec[argnum].locate.offset.constant),
4833 reg_parm_stack_space,
4834 ARGS_SIZE_RTX (argvec[argnum].locate.alignment_pad), false);
4836 /* Now mark the segment we just used. */
4837 if (ACCUMULATE_OUTGOING_ARGS)
4838 for (i = lower_bound; i < upper_bound; i++)
4839 stack_usage_map[i] = 1;
4841 NO_DEFER_POP;
4843 /* Indicate argument access so that alias.c knows that these
4844 values are live. */
4845 if (argblock)
4846 use = plus_constant (Pmode, argblock,
4847 argvec[argnum].locate.offset.constant);
4848 else if (have_push_fusage)
4849 continue;
4850 else
4852 /* When arguments are pushed, trying to tell alias.c where
4853 exactly this argument is won't work, because the
4854 auto-increment causes confusion. So we merely indicate
4855 that we access something with a known mode somewhere on
4856 the stack. */
4857 use = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
4858 gen_rtx_SCRATCH (Pmode));
4859 have_push_fusage = true;
4861 use = gen_rtx_MEM (argvec[argnum].mode, use);
4862 use = gen_rtx_USE (VOIDmode, use);
4863 call_fusage = gen_rtx_EXPR_LIST (VOIDmode, use, call_fusage);
4867 argnum = nargs - 1;
4869 fun = prepare_call_address (NULL, fun, NULL, &call_fusage, 0, 0);
4871 /* Now load any reg parms into their regs. */
4873 /* ARGNUM indexes the ARGVEC array in the order in which the arguments
4874 are to be pushed. */
4875 for (count = 0; count < nargs; count++, argnum--)
4877 machine_mode mode = argvec[argnum].mode;
4878 rtx val = argvec[argnum].value;
4879 rtx reg = argvec[argnum].reg;
4880 int partial = argvec[argnum].partial;
4881 #ifdef BLOCK_REG_PADDING
4882 int size = 0;
4883 #endif
4885 /* Handle calls that pass values in multiple non-contiguous
4886 locations. The PA64 has examples of this for library calls. */
4887 if (reg != 0 && GET_CODE (reg) == PARALLEL)
4888 emit_group_load (reg, val, NULL_TREE, GET_MODE_SIZE (mode));
4889 else if (reg != 0 && partial == 0)
4891 emit_move_insn (reg, val);
4892 #ifdef BLOCK_REG_PADDING
4893 size = GET_MODE_SIZE (argvec[argnum].mode);
4895 /* Copied from load_register_parameters. */
4897 /* Handle case where we have a value that needs shifting
4898 up to the msb. eg. a QImode value and we're padding
4899 upward on a BYTES_BIG_ENDIAN machine. */
4900 if (size < UNITS_PER_WORD
4901 && (argvec[argnum].locate.where_pad
4902 == (BYTES_BIG_ENDIAN ? upward : downward)))
4904 rtx x;
4905 int shift = (UNITS_PER_WORD - size) * BITS_PER_UNIT;
4907 /* Assigning REG here rather than a temp makes CALL_FUSAGE
4908 report the whole reg as used. Strictly speaking, the
4909 call only uses SIZE bytes at the msb end, but it doesn't
4910 seem worth generating rtl to say that. */
4911 reg = gen_rtx_REG (word_mode, REGNO (reg));
4912 x = expand_shift (LSHIFT_EXPR, word_mode, reg, shift, reg, 1);
4913 if (x != reg)
4914 emit_move_insn (reg, x);
4916 #endif
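/* Worked example (assumed big-endian target): for a QImode argument
   padded upward with UNITS_PER_WORD == 4, size == 1 gives
   shift == (4 - 1) * BITS_PER_UNIT == 24, so

     reg = gen_rtx_REG (word_mode, REGNO (reg));
     x = expand_shift (LSHIFT_EXPR, word_mode, reg, 24, reg, 1);

   moves the byte to the most significant end of the register.  */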
4919 NO_DEFER_POP;
4922 /* Any regs containing parms remain in use through the call. */
4923 for (count = 0; count < nargs; count++)
4925 rtx reg = argvec[count].reg;
4926 if (reg != 0 && GET_CODE (reg) == PARALLEL)
4927 use_group_regs (&call_fusage, reg);
4928 else if (reg != 0)
4930 int partial = argvec[count].partial;
4931 if (partial)
4933 int nregs;
4934 gcc_assert (partial % UNITS_PER_WORD == 0);
4935 nregs = partial / UNITS_PER_WORD;
4936 use_regs (&call_fusage, REGNO (reg), nregs);
4938 else
4939 use_reg (&call_fusage, reg);
4943 /* Pass the function the address in which to return a structure value. */
4944 if (mem_value != 0 && struct_value != 0 && ! pcc_struct_value)
4946 emit_move_insn (struct_value,
4947 force_reg (Pmode,
4948 force_operand (XEXP (mem_value, 0),
4949 NULL_RTX)));
4950 if (REG_P (struct_value))
4951 use_reg (&call_fusage, struct_value);
4954 /* Don't allow popping to be deferred, since then
4955 cse'ing of library calls could delete a call and leave the pop. */
4956 NO_DEFER_POP;
4957 valreg = (mem_value == 0 && outmode != VOIDmode
4958 ? hard_libcall_value (outmode, orgfun) : NULL_RTX);
4960 /* Stack must be properly aligned now. */
4961 gcc_assert (!(stack_pointer_delta
4962 & (PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT - 1)));
4964 before_call = get_last_insn ();
4966 /* We pass the old value of inhibit_defer_pop + 1 to emit_call_1, which
4967 will set inhibit_defer_pop to that value. */
4968 /* The return type is needed to decide how many bytes the function pops.
4969 Signedness plays no role in that, so for simplicity, we pretend it's
4970 always signed. We also assume that the list of arguments passed has
4971 no impact, so we pretend it is unknown. */
4973 emit_call_1 (fun, NULL,
4974 get_identifier (XSTR (orgfun, 0)),
4975 build_function_type (tfom, NULL_TREE),
4976 original_args_size.constant, args_size.constant,
4977 struct_value_size,
4978 targetm.calls.function_arg (args_so_far,
4979 VOIDmode, void_type_node, true),
4980 valreg,
4981 old_inhibit_defer_pop + 1, call_fusage, flags, args_so_far);
4983 if (flag_ipa_ra)
4985 rtx datum = orgfun;
4986 gcc_assert (GET_CODE (datum) == SYMBOL_REF);
4987 rtx_call_insn *last = last_call_insn ();
4988 add_reg_note (last, REG_CALL_DECL, datum);
4991 /* Right-shift returned value if necessary. */
4992 if (!pcc_struct_value
4993 && TYPE_MODE (tfom) != BLKmode
4994 && targetm.calls.return_in_msb (tfom))
4996 shift_return_value (TYPE_MODE (tfom), false, valreg);
4997 valreg = gen_rtx_REG (TYPE_MODE (tfom), REGNO (valreg));
5000 targetm.calls.end_call_args ();
5002 /* For calls to `setjmp', etc., inform function.c:setjmp_warnings
5003 that it should complain if nonvolatile values are live. For
5004 functions that cannot return, inform flow that control does not
5005 fall through. */
5006 if (flags & ECF_NORETURN)
5008 /* The barrier note must be emitted
5009 immediately after the CALL_INSN. Some ports emit more than
5010 just a CALL_INSN above, so we must search for it here. */
5011 rtx_insn *last = get_last_insn ();
5012 while (!CALL_P (last))
5014 last = PREV_INSN (last);
5015 /* There was no CALL_INSN? */
5016 gcc_assert (last != before_call);
5019 emit_barrier_after (last);
5022 /* Consider that "regular" libcalls, i.e. all of them except for LCT_THROW
5023 and LCT_RETURNS_TWICE, cannot perform non-local gotos. */
5024 if (flags & ECF_NOTHROW)
5026 rtx_insn *last = get_last_insn ();
5027 while (!CALL_P (last))
5029 last = PREV_INSN (last);
5030 /* There was no CALL_INSN? */
5031 gcc_assert (last != before_call);
5034 make_reg_eh_region_note_nothrow_nononlocal (last);
5037 /* Now restore inhibit_defer_pop to its actual original value. */
5038 OK_DEFER_POP;
5040 pop_temp_slots ();
5042 /* Copy the value to the right place. */
5043 if (outmode != VOIDmode && retval)
5045 if (mem_value)
5047 if (value == 0)
5048 value = mem_value;
5049 if (value != mem_value)
5050 emit_move_insn (value, mem_value);
5052 else if (GET_CODE (valreg) == PARALLEL)
5054 if (value == 0)
5055 value = gen_reg_rtx (outmode);
5056 emit_group_store (value, valreg, NULL_TREE, GET_MODE_SIZE (outmode));
5058 else
5060 /* Convert to the proper mode if a promotion has been active. */
5061 if (GET_MODE (valreg) != outmode)
5063 int unsignedp = TYPE_UNSIGNED (tfom);
5065 gcc_assert (promote_function_mode (tfom, outmode, &unsignedp,
5066 fndecl ? TREE_TYPE (fndecl) : fntype, 1)
5067 == GET_MODE (valreg));
5068 valreg = convert_modes (outmode, GET_MODE (valreg), valreg, 0);
5071 if (value != 0)
5072 emit_move_insn (value, valreg);
5073 else
5074 value = valreg;
5078 if (ACCUMULATE_OUTGOING_ARGS)
5080 #ifdef REG_PARM_STACK_SPACE
5081 if (save_area)
5082 restore_fixed_argument_area (save_area, argblock,
5083 high_to_save, low_to_save);
5084 #endif
5086 /* If we saved any argument areas, restore them. */
5087 for (count = 0; count < nargs; count++)
5088 if (argvec[count].save_area)
5090 machine_mode save_mode = GET_MODE (argvec[count].save_area);
5091 rtx adr = plus_constant (Pmode, argblock,
5092 argvec[count].locate.offset.constant);
5093 rtx stack_area = gen_rtx_MEM (save_mode,
5094 memory_address (save_mode, adr));
5096 if (save_mode == BLKmode)
5097 emit_block_move (stack_area,
5098 validize_mem
5099 (copy_rtx (argvec[count].save_area)),
5100 GEN_INT (argvec[count].locate.size.constant),
5101 BLOCK_OP_CALL_PARM);
5102 else
5103 emit_move_insn (stack_area, argvec[count].save_area);
5106 highest_outgoing_arg_in_use = initial_highest_arg_in_use;
5107 stack_usage_map = initial_stack_usage_map;
5110 free (stack_usage_map_buf);
5112 return value;
5116 /* Output a library call to function FUN (a SYMBOL_REF rtx)
5118 for a value of mode OUTMODE,
5119 with NARGS different arguments, passed as alternating rtx values
5120 and machine_modes to convert them to.
5122 FN_TYPE should be LCT_NORMAL for `normal' calls, LCT_CONST for
5123 `const' calls, LCT_PURE for `pure' calls, or other LCT_ value for
5124 other types of library calls. */
5126 void
5127 emit_library_call (rtx orgfun, enum libcall_type fn_type,
5128 machine_mode outmode, int nargs, ...)
5130 va_list p;
5132 va_start (p, nargs);
5133 emit_library_call_value_1 (0, orgfun, NULL_RTX, fn_type, outmode, nargs, p);
5134 va_end (p);
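/* Usage sketch (hypothetical libcall "__foo", not from the source): a
   two-argument SImode libcall returning nothing could be emitted as

     emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "__foo"),
                        LCT_NORMAL, VOIDmode, 2,
                        op0, SImode,
                        op1, SImode);

   i.e. NARGS rtx/machine_mode pairs follow the fixed parameters.  */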
5137 /* Like emit_library_call except that an extra argument, VALUE,
5138 comes second and says where to store the result.
5139 (If VALUE is zero, this function chooses a convenient way
5140 to return the value.)
5142 This function returns an rtx for where the value is to be found.
5143 If VALUE is nonzero, VALUE is returned. */
5145 rtx
5146 emit_library_call_value (rtx orgfun, rtx value,
5147 enum libcall_type fn_type,
5148 machine_mode outmode, int nargs, ...)
5150 rtx result;
5151 va_list p;
5153 va_start (p, nargs);
5154 result = emit_library_call_value_1 (1, orgfun, value, fn_type, outmode,
5155 nargs, p);
5156 va_end (p);
5158 return result;
5162 /* Store pointer bounds argument ARG into Bounds Table entry
5163 associated with PARM. */
5164 static void
5165 store_bounds (struct arg_data *arg, struct arg_data *parm)
5167 rtx slot = NULL, ptr = NULL, addr = NULL;
5169 /* We may pass bounds not associated with any pointer. */
5170 if (!parm)
5172 gcc_assert (arg->special_slot);
5173 slot = arg->special_slot;
5174 ptr = const0_rtx;
5176 /* Find pointer associated with bounds and where it is
5177 passed. */
5178 else
5180 if (!parm->reg)
5182 gcc_assert (!arg->special_slot);
5184 addr = adjust_address (parm->stack, Pmode, arg->pointer_offset);
5186 else if (REG_P (parm->reg))
5188 gcc_assert (arg->special_slot);
5189 slot = arg->special_slot;
5191 if (MEM_P (parm->value))
5192 addr = adjust_address (parm->value, Pmode, arg->pointer_offset);
5193 else if (REG_P (parm->value))
5194 ptr = gen_rtx_SUBREG (Pmode, parm->value, arg->pointer_offset);
5195 else
5197 gcc_assert (!arg->pointer_offset);
5198 ptr = parm->value;
5201 else
5203 gcc_assert (GET_CODE (parm->reg) == PARALLEL);
5205 gcc_assert (arg->special_slot);
5206 slot = arg->special_slot;
5208 if (parm->parallel_value)
5209 ptr = chkp_get_value_with_offs (parm->parallel_value,
5210 GEN_INT (arg->pointer_offset));
5211 else
5212 gcc_unreachable ();
5216 /* Expand bounds. */
5217 if (!arg->value)
5218 arg->value = expand_normal (arg->tree_value);
5220 targetm.calls.store_bounds_for_arg (ptr, addr, arg->value, slot);
5223 /* Store a single argument for a function call
5224 into the register or memory area where it must be passed.
5225 *ARG describes the argument value and where to pass it.
5227 ARGBLOCK is the address of the stack-block for all the arguments,
5228 or 0 on a machine where arguments are pushed individually.
5230 MAY_BE_ALLOCA nonzero says this could be a call to `alloca',
5231 so we must be careful about how the stack is used.
5233 VARIABLE_SIZE nonzero says that this was a variable-sized outgoing
5234 argument stack. This is used if ACCUMULATE_OUTGOING_ARGS to indicate
5235 that we need not worry about saving and restoring the stack.
5237 FNDECL is the declaration of the function we are calling.
5239 Return nonzero if this arg should cause sibcall failure,
5240 zero otherwise. */
5242 static int
5243 store_one_arg (struct arg_data *arg, rtx argblock, int flags,
5244 int variable_size ATTRIBUTE_UNUSED, int reg_parm_stack_space)
5246 tree pval = arg->tree_value;
5247 rtx reg = 0;
5248 int partial = 0;
5249 int used = 0;
5250 int i, lower_bound = 0, upper_bound = 0;
5251 int sibcall_failure = 0;
5253 if (TREE_CODE (pval) == ERROR_MARK)
5254 return 1;
5256 /* Push a new temporary level for any temporaries we make for
5257 this argument. */
5258 push_temp_slots ();
5260 if (ACCUMULATE_OUTGOING_ARGS && !(flags & ECF_SIBCALL))
5262 /* If this is being stored into a pre-allocated, fixed-size, stack area,
5263 save any previous data at that location. */
5264 if (argblock && ! variable_size && arg->stack)
5266 if (ARGS_GROW_DOWNWARD)
5268 /* stack_slot is negative, but we want to index stack_usage_map
5269 with positive values. */
5270 if (GET_CODE (XEXP (arg->stack_slot, 0)) == PLUS)
5271 upper_bound = -INTVAL (XEXP (XEXP (arg->stack_slot, 0), 1)) + 1;
5272 else
5273 upper_bound = 0;
5275 lower_bound = upper_bound - arg->locate.size.constant;
5277 else
5279 if (GET_CODE (XEXP (arg->stack_slot, 0)) == PLUS)
5280 lower_bound = INTVAL (XEXP (XEXP (arg->stack_slot, 0), 1));
5281 else
5282 lower_bound = 0;
5284 upper_bound = lower_bound + arg->locate.size.constant;
5287 i = lower_bound;
5288 /* Don't worry about things in the fixed argument area;
5289 it has already been saved. */
5290 if (i < reg_parm_stack_space)
5291 i = reg_parm_stack_space;
5292 while (i < upper_bound && stack_usage_map[i] == 0)
5293 i++;
5295 if (i < upper_bound)
5297 /* We need to make a save area. */
5298 unsigned int size = arg->locate.size.constant * BITS_PER_UNIT;
5299 machine_mode save_mode = mode_for_size (size, MODE_INT, 1);
5300 rtx adr = memory_address (save_mode, XEXP (arg->stack_slot, 0));
5301 rtx stack_area = gen_rtx_MEM (save_mode, adr);
5303 if (save_mode == BLKmode)
5305 arg->save_area
5306 = assign_temp (TREE_TYPE (arg->tree_value), 1, 1);
5307 preserve_temp_slots (arg->save_area);
5308 emit_block_move (validize_mem (copy_rtx (arg->save_area)),
5309 stack_area,
5310 GEN_INT (arg->locate.size.constant),
5311 BLOCK_OP_CALL_PARM);
5313 else
5315 arg->save_area = gen_reg_rtx (save_mode);
5316 emit_move_insn (arg->save_area, stack_area);
5322 /* If this isn't going to be placed on both the stack and in registers,
5323 set up the register and number of words. */
5324 if (! arg->pass_on_stack)
5326 if (flags & ECF_SIBCALL)
5327 reg = arg->tail_call_reg;
5328 else
5329 reg = arg->reg;
5330 partial = arg->partial;
5333 /* Being passed entirely in a register. We shouldn't be called in
5334 this case. */
5335 gcc_assert (reg == 0 || partial != 0);
5337 /* If this arg needs special alignment, don't load the registers
5338 here. */
5339 if (arg->n_aligned_regs != 0)
5340 reg = 0;
5342 /* If this is being passed partially in a register, we can't evaluate
5343 it directly into its stack slot. Otherwise, we can. */
5344 if (arg->value == 0)
5345 {
5346 /* stack_arg_under_construction is nonzero if a function argument is
5347 being evaluated directly into the outgoing argument list and
5348 expand_call must take special action to preserve the argument list
5349 if it is called recursively.
5351 For scalar function arguments stack_usage_map is sufficient to
5352 determine which stack slots must be saved and restored. Scalar
5353 arguments in general have pass_on_stack == 0.
5355 If this argument is initialized by a function which takes the
5356 address of the argument (a C++ constructor or a C function
5357 returning a BLKmode structure), then stack_usage_map is
5358 insufficient and expand_call must push the stack around the
5359 function call. Such arguments have pass_on_stack == 1.
5361 Note that it is always safe to set stack_arg_under_construction,
5362 but this generates suboptimal code if set when not needed. */
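/* An example of the pass_on_stack == 1 situation described above,
   assuming a target that returns large aggregates in memory:

       struct big { char c[64]; };
       struct big make_big (void);
       void take_big (struct big);
       ...
       take_big (make_big ());

   make_big constructs its BLKmode result directly in take_big's
   outgoing argument slot, so expanding the call to make_big must not
   clobber the partially built argument list.  */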
5364 if (arg->pass_on_stack)
5365 stack_arg_under_construction++;
5367 arg->value = expand_expr (pval,
5368 (partial
5369 || TYPE_MODE (TREE_TYPE (pval)) != arg->mode)
5370 ? NULL_RTX : arg->stack,
5371 VOIDmode, EXPAND_STACK_PARM);
5373 /* If arg->mode doesn't agree with the mode of the expression (for
5374 instance because the argument is being promoted), convert the value. */
5376 if (arg->mode != TYPE_MODE (TREE_TYPE (pval)))
5377 arg->value = convert_modes (arg->mode, TYPE_MODE (TREE_TYPE (pval)),
5378 arg->value, arg->unsignedp);
5380 if (arg->pass_on_stack)
5381 stack_arg_under_construction--;
5382 }
5384 /* Check for overlap with already clobbered argument area. */
5385 if ((flags & ECF_SIBCALL)
5386 && MEM_P (arg->value)
5387 && mem_overlaps_already_clobbered_arg_p (XEXP (arg->value, 0),
5388 arg->locate.size.constant))
5389 sibcall_failure = 1;
5391 /* Don't allow anything left on stack from computation
5392 of argument to alloca. */
5393 if (flags & ECF_MAY_BE_ALLOCA)
5394 do_pending_stack_adjust ();
5396 if (arg->value == arg->stack)
5397 /* If the value is already in the stack slot, we are done. */
5398 ;
5399 else if (arg->mode != BLKmode)
5400 {
5401 int size;
5402 unsigned int parm_align;
5404 /* Argument is a scalar, not entirely passed in registers.
5405 (If part is passed in registers, arg->partial says how much
5406 and emit_push_insn will take care of putting it there.)
5408 Push it, and if its size is less than the
5409 amount of space allocated to it,
5410 also bump stack pointer by the additional space.
5411 Note that in C the default argument promotions
5412 will prevent such mismatches. */
5414 size = GET_MODE_SIZE (arg->mode);
5415 /* Compute how much space the push instruction will push.
5416 On many machines, pushing a byte will advance the stack
5417 pointer by a halfword. */
5418 #ifdef PUSH_ROUNDING
5419 size = PUSH_ROUNDING (size);
5420 #endif
5421 used = size;
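/* For instance, a target whose push instructions operate on halfwords
   might define

       #define PUSH_ROUNDING(BYTES) (((BYTES) + 1) & ~1)

   so pushing a 1-byte argument advances the stack pointer by 2 and
   USED starts out as 2 rather than 1.  (Hypothetical definition; see
   the target's own PUSH_ROUNDING.)  */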
5423 /* Compute how much space the argument should get:
5424 round up to a multiple of the alignment for arguments. */
5425 if (none != FUNCTION_ARG_PADDING (arg->mode, TREE_TYPE (pval)))
5426 used = (((size + PARM_BOUNDARY / BITS_PER_UNIT - 1)
5427 / (PARM_BOUNDARY / BITS_PER_UNIT))
5428 * (PARM_BOUNDARY / BITS_PER_UNIT));
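/* With PARM_BOUNDARY == 32 (4 bytes) and SIZE == 5, for example, the
   expression above computes ((5 + 3) / 4) * 4 == 8: the argument gets
   8 bytes of stack space, 3 of them padding.  */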
5430 /* Compute the alignment of the pushed argument. */
5431 parm_align = arg->locate.boundary;
5432 if (FUNCTION_ARG_PADDING (arg->mode, TREE_TYPE (pval)) == downward)
5433 {
5434 int pad = used - size;
5435 if (pad)
5436 {
5437 unsigned int pad_align = least_bit_hwi (pad) * BITS_PER_UNIT;
5438 parm_align = MIN (parm_align, pad_align);
5439 }
5440 }
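/* Continuing the 5-byte example: PAD == 8 - 5 == 3, least_bit_hwi (3)
   == 1, so PAD_ALIGN == 8 bits.  A downward-padded value starts 3
   bytes into its slot and can only be assumed byte-aligned, however
   aligned the slot itself may be.  */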
5442 /* This isn't already where we want it on the stack, so put it there.
5443 This can either be done with push or copy insns. */
5444 if (!emit_push_insn (arg->value, arg->mode, TREE_TYPE (pval), NULL_RTX,
5445 parm_align, partial, reg, used - size, argblock,
5446 ARGS_SIZE_RTX (arg->locate.offset), reg_parm_stack_space,
5447 ARGS_SIZE_RTX (arg->locate.alignment_pad), true))
5448 sibcall_failure = 1;
5450 /* Unless this is a partially-in-register argument, the argument is now
5451 in the stack. */
5452 if (partial == 0)
5453 arg->value = arg->stack;
5454 }
5455 else
5456 {
5457 /* BLKmode, at least partly to be pushed. */
5459 unsigned int parm_align;
5460 int excess;
5461 rtx size_rtx;
5463 /* Pushing a nonscalar.
5464 If part is passed in registers, PARTIAL says how much
5465 and emit_push_insn will take care of putting it there. */
5467 /* Round its size up to a multiple
5468 of the allocation unit for arguments. */
5470 if (arg->locate.size.var != 0)
5471 {
5472 excess = 0;
5473 size_rtx = ARGS_SIZE_RTX (arg->locate.size);
5474 }
5475 else
5476 {
5477 /* PUSH_ROUNDING has no effect on us, because emit_push_insn
5478 for BLKmode is careful to avoid it. */
5479 excess = (arg->locate.size.constant
5480 - int_size_in_bytes (TREE_TYPE (pval))
5481 + partial);
5482 size_rtx = expand_expr (size_in_bytes (TREE_TYPE (pval)),
5483 NULL_RTX, TYPE_MODE (sizetype),
5484 EXPAND_NORMAL);
5485 }
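/* For example, a 10-byte struct whose slot was rounded up to
   arg->locate.size.constant == 12, with PARTIAL == 0, gives
   EXCESS == 12 - 10 + 0 == 2, the padding emit_push_insn must account
   for.  */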
5487 parm_align = arg->locate.boundary;
5489 /* When an argument is padded down, the block is aligned to
5490 PARM_BOUNDARY, but the actual argument isn't. */
5491 if (FUNCTION_ARG_PADDING (arg->mode, TREE_TYPE (pval)) == downward)
5492 {
5493 if (arg->locate.size.var)
5494 parm_align = BITS_PER_UNIT;
5495 else if (excess)
5496 {
5497 unsigned int excess_align = least_bit_hwi (excess) * BITS_PER_UNIT;
5498 parm_align = MIN (parm_align, excess_align);
5499 }
5500 }
5502 if ((flags & ECF_SIBCALL) && MEM_P (arg->value))
5503 {
5504 /* emit_push_insn might not work properly if arg->value and
5505 argblock + arg->locate.offset areas overlap. */
5506 rtx x = arg->value;
5507 int i = 0;
5509 if (XEXP (x, 0) == crtl->args.internal_arg_pointer
5510 || (GET_CODE (XEXP (x, 0)) == PLUS
5511 && XEXP (XEXP (x, 0), 0) ==
5512 crtl->args.internal_arg_pointer
5513 && CONST_INT_P (XEXP (XEXP (x, 0), 1))))
5514 {
5515 if (XEXP (x, 0) != crtl->args.internal_arg_pointer)
5516 i = INTVAL (XEXP (XEXP (x, 0), 1));
5518 /* arg->locate doesn't contain the pretend_args_size offset;
5519 it's part of argblock. Ensure we don't count it in I. */
5520 if (STACK_GROWS_DOWNWARD)
5521 i -= crtl->args.pretend_args_size;
5522 else
5523 i += crtl->args.pretend_args_size;
5525 /* expand_call should ensure this. */
5526 gcc_assert (!arg->locate.offset.var
5527 && arg->locate.size.var == 0
5528 && CONST_INT_P (size_rtx));
5530 if (arg->locate.offset.constant > i)
5531 {
5532 if (arg->locate.offset.constant < i + INTVAL (size_rtx))
5533 sibcall_failure = 1;
5534 }
5535 else if (arg->locate.offset.constant < i)
5536 {
5537 /* Use arg->locate.size.constant instead of size_rtx
5538 because we only care about the part of the argument
5539 on the stack. */
5540 if (i < (arg->locate.offset.constant
5541 + arg->locate.size.constant))
5542 sibcall_failure = 1;
5543 }
5544 else
5545 {
5546 /* Even though they appear to be at the same location,
5547 if part of the outgoing argument is in registers,
5548 they aren't really at the same location. Check for
5549 this by making sure that the incoming size is the
5550 same as the outgoing size. */
5551 if (arg->locate.size.constant != INTVAL (size_rtx))
5552 sibcall_failure = 1;
5553 }
5554 }
5555 }
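/* A worked overlap check, with invented offsets: if the outgoing slot
   starts at argblock offset 16 with size 8, and ARG->VALUE is the
   caller's own incoming argument at internal_arg_pointer + 12, so that
   I == 12 and INTVAL (size_rtx) == 8, then 16 > 12 but 16 < 12 + 8:
   the source and destination overlap and the sibcall is abandoned.  */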
5557 emit_push_insn (arg->value, arg->mode, TREE_TYPE (pval), size_rtx,
5558 parm_align, partial, reg, excess, argblock,
5559 ARGS_SIZE_RTX (arg->locate.offset), reg_parm_stack_space,
5560 ARGS_SIZE_RTX (arg->locate.alignment_pad), false);
5562 /* Unless this is a partially-in-register argument, the argument is now
5563 in the stack.
5565 ??? Unlike the case above, in which we want the actual
5566 address of the data, so that we can load it directly into a
5567 register, here we want the address of the stack slot, so that
5568 it's properly aligned for word-by-word copying or something
5569 like that. It's not clear that this is always correct. */
5570 if (partial == 0)
5571 arg->value = arg->stack_slot;
5572 }
5574 if (arg->reg && GET_CODE (arg->reg) == PARALLEL)
5575 {
5576 tree type = TREE_TYPE (arg->tree_value);
5577 arg->parallel_value
5578 = emit_group_load_into_temps (arg->reg, arg->value, type,
5579 int_size_in_bytes (type));
5580 }
5582 /* Mark all slots this store used. */
5583 if (ACCUMULATE_OUTGOING_ARGS && !(flags & ECF_SIBCALL)
5584 && argblock && ! variable_size && arg->stack)
5585 for (i = lower_bound; i < upper_bound; i++)
5586 stack_usage_map[i] = 1;
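/* stack_usage_map is a byte map over the outgoing-argument area;
   flagging bytes [lower_bound, upper_bound) here is what triggers the
   save-area logic at the top of this function for any later argument
   whose slot overlaps this one.  */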
5588 /* Once we have pushed something, pops can't safely
5589 be deferred during the rest of the arguments. */
5590 NO_DEFER_POP;
5592 /* Free any temporary slots made in processing this argument. */
5593 pop_temp_slots ();
5595 return sibcall_failure;
5596 }
5598 /* Nonzero if we do not know how to pass TYPE solely in registers. */
5600 bool
5601 must_pass_in_stack_var_size (machine_mode mode ATTRIBUTE_UNUSED,
5602 const_tree type)
5603 {
5604 if (!type)
5605 return false;
5607 /* If the type has variable size... */
5608 if (TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
5609 return true;
5611 /* If the type is marked as addressable (it is required
5612 to be constructed into the stack)... */
5613 if (TREE_ADDRESSABLE (type))
5614 return true;
5616 return false;
5617 }
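/* For example, a GNU C variable-length array type such as char[n] has
   a TYPE_SIZE that is not an INTEGER_CST, and a C++ class with a
   nontrivial copy constructor is TREE_ADDRESSABLE; both are therefore
   forced onto the stack by this predicate.  */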
5619 /* Another version of the TARGET_MUST_PASS_IN_STACK hook. This one
5620 takes trailing padding of a structure into account. */
5621 /* ??? Should be able to merge these two by examining BLOCK_REG_PADDING. */
5623 bool
5624 must_pass_in_stack_var_size_or_pad (machine_mode mode, const_tree type)
5625 {
5626 if (!type)
5627 return false;
5629 /* If the type has variable size... */
5630 if (TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
5631 return true;
5633 /* If the type is marked as addressable (it is required
5634 to be constructed into the stack)... */
5635 if (TREE_ADDRESSABLE (type))
5636 return true;
5638 /* If the padding and mode of the type is such that a copy into
5639 a register would put it into the wrong part of the register. */
5640 if (mode == BLKmode
5641 && int_size_in_bytes (type) % (PARM_BOUNDARY / BITS_PER_UNIT)
5642 && (FUNCTION_ARG_PADDING (mode, type)
5643 == (BYTES_BIG_ENDIAN ? upward : downward)))
5644 return true;
5646 return false;
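/* For example, a 6-byte BLKmode struct with PARM_BOUNDARY == 32 leaves
   6 % 4 == 2, so it does not fill its final word.  If the target pads
   it upward on a big-endian machine (or downward on a little-endian
   one), a copy into a register would land in the wrong end of the
   register, hence the argument must go on the stack.  */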