gcc/calls.c

/* Convert function calls to rtl insns, for GNU C compiler.
   Copyright (C) 1989-2017 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "rtl.h"
#include "tree.h"
#include "gimple.h"
#include "predict.h"
#include "memmodel.h"
#include "tm_p.h"
#include "stringpool.h"
#include "expmed.h"
#include "optabs.h"
#include "emit-rtl.h"
#include "cgraph.h"
#include "diagnostic-core.h"
#include "fold-const.h"
#include "stor-layout.h"
#include "varasm.h"
#include "internal-fn.h"
#include "dojump.h"
#include "explow.h"
#include "calls.h"
#include "expr.h"
#include "output.h"
#include "langhooks.h"
#include "except.h"
#include "dbgcnt.h"
#include "rtl-iter.h"
#include "tree-chkp.h"
#include "tree-vrp.h"
#include "tree-ssanames.h"
#include "rtl-chkp.h"
#include "intl.h"
#include "attribs.h"
#include "builtins.h"

/* Like PREFERRED_STACK_BOUNDARY but in units of bytes, not bits.  */
#define STACK_BYTES (PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT)

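/* For example, with a preferred stack boundary of 128 bits, STACK_BYTES
   is 16, and argument block sizes computed below are rounded up to
   multiples of 16 bytes.  */
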
/* Data structure and subroutines used within expand_call.  */

struct arg_data
{
  /* Tree node for this argument.  */
  tree tree_value;
  /* Mode for value; TYPE_MODE unless promoted.  */
  machine_mode mode;
  /* Current RTL value for argument, or 0 if it isn't precomputed.  */
  rtx value;
  /* Initially-computed RTL value for argument; only for const functions.  */
  rtx initial_value;
  /* Register to pass this argument in, 0 if passed on stack, or a
     PARALLEL if the arg is to be copied into multiple non-contiguous
     registers.  */
  rtx reg;
  /* Register to pass this argument in when generating tail call sequence.
     This is not the same register as for normal calls on machines with
     register windows.  */
  rtx tail_call_reg;
  /* If REG is a PARALLEL, this is a copy of VALUE pulled into the correct
     form for emit_group_move.  */
  rtx parallel_value;
  /* If value is passed in neither reg nor stack, this field holds the
     number of a special slot to be used.  */
  rtx special_slot;
  /* For pointer bounds, holds the index of the parm the bounds are bound
     to.  -1 if there is no such pointer.  */
  int pointer_arg;
  /* If pointer_arg refers to a structure, then pointer_offset holds the
     offset of the pointer within this structure.  */
  int pointer_offset;
  /* If REG was promoted from the actual mode of the argument expression,
     indicates whether the promotion is sign- or zero-extended.  */
  int unsignedp;
  /* Number of bytes to put in registers.  0 means put the whole arg
     in registers.  Also 0 if not passed in registers.  */
  int partial;
  /* Nonzero if argument must be passed on stack.
     Note that some arguments may be passed on the stack
     even though pass_on_stack is zero, just because FUNCTION_ARG says so.
     pass_on_stack identifies arguments that *cannot* go in registers.  */
  int pass_on_stack;
  /* Some fields packaged up for locate_and_pad_parm.  */
  struct locate_and_pad_arg_data locate;
  /* Location on the stack at which parameter should be stored.  The store
     has already been done if STACK == VALUE.  */
  rtx stack;
  /* Location on the stack of the start of this argument slot.  This can
     differ from STACK if this arg pads downward.  This location is known
     to be aligned to TARGET_FUNCTION_ARG_BOUNDARY.  */
  rtx stack_slot;
  /* Place that this stack area has been saved, if needed.  */
  rtx save_area;
  /* If an argument's alignment does not permit direct copying into registers,
     copy in smaller-sized pieces into pseudos.  These are stored in a
     block pointed to by this field.  The next field says how many
     word-sized pseudos we made.  */
  rtx *aligned_regs;
  int n_aligned_regs;
};

/* A vector of one char per byte of stack space.  A byte is nonzero if
   the corresponding stack location has been used.
   This vector is used to prevent a function call within an argument from
   clobbering any stack already set up.  */
static char *stack_usage_map;

/* Size of STACK_USAGE_MAP.  */
static int highest_outgoing_arg_in_use;

/* A bitmap of virtual-incoming stack space.  A bit is set if the
   corresponding stack location's tail call argument has already been
   stored into the stack.  This bitmap is used to prevent sibling call
   optimization if the function tries to use its parent's incoming
   argument slots when they have already been overwritten with tail call
   arguments.  */
static sbitmap stored_args_map;

/* stack_arg_under_construction is nonzero when an argument may be
   initialized with a constructor call (including a C function that
   returns a BLKmode struct) and expand_call must take special action
   to make sure the object being constructed does not overlap the
   argument list for the constructor call.  */
static int stack_arg_under_construction;

static void emit_call_1 (rtx, tree, tree, tree, HOST_WIDE_INT, HOST_WIDE_INT,
			 HOST_WIDE_INT, rtx, rtx, int, rtx, int,
			 cumulative_args_t);
static void precompute_register_parameters (int, struct arg_data *, int *);
static void store_bounds (struct arg_data *, struct arg_data *);
static int store_one_arg (struct arg_data *, rtx, int, int, int);
static void store_unaligned_arguments_into_pseudos (struct arg_data *, int);
static int finalize_must_preallocate (int, int, struct arg_data *,
				      struct args_size *);
static void precompute_arguments (int, struct arg_data *);
static int compute_argument_block_size (int, struct args_size *, tree, tree, int);
static void initialize_argument_information (int, struct arg_data *,
					     struct args_size *, int,
					     tree, tree,
					     tree, tree, cumulative_args_t, int,
					     rtx *, int *, int *, int *,
					     bool *, bool);
static void compute_argument_addresses (struct arg_data *, rtx, int);
static rtx rtx_for_function_call (tree, tree);
static void load_register_parameters (struct arg_data *, int, rtx *, int,
				      int, int *);
static int special_function_p (const_tree, int);
static int check_sibcall_argument_overlap_1 (rtx);
static int check_sibcall_argument_overlap (rtx_insn *, struct arg_data *, int);

static int combine_pending_stack_adjustment_and_call (int, struct args_size *,
						      unsigned int);
static tree split_complex_types (tree);

#ifdef REG_PARM_STACK_SPACE
static rtx save_fixed_argument_area (int, rtx, int *, int *);
static void restore_fixed_argument_area (rtx, rtx, int, int);
#endif

/* Force FUNEXP into a form suitable for the address of a CALL,
   and return that as an rtx.  Also load the static chain register
   if FNDECL is a nested function.

   CALL_FUSAGE points to a variable holding the prospective
   CALL_INSN_FUNCTION_USAGE information.  */

rtx
prepare_call_address (tree fndecl_or_type, rtx funexp, rtx static_chain_value,
		      rtx *call_fusage, int reg_parm_seen, int flags)
{
  /* Make a valid memory address and copy constants through pseudo-regs,
     but not for a constant address if -fno-function-cse.  */
  if (GET_CODE (funexp) != SYMBOL_REF)
    {
      /* If it's an indirect call by descriptor, generate code to perform
	 runtime identification of the pointer and load the descriptor.  */
      if ((flags & ECF_BY_DESCRIPTOR) && !flag_trampolines)
	{
	  const int bit_val = targetm.calls.custom_function_descriptors;
	  rtx call_lab = gen_label_rtx ();

	  gcc_assert (fndecl_or_type && TYPE_P (fndecl_or_type));
	  fndecl_or_type
	    = build_decl (UNKNOWN_LOCATION, FUNCTION_DECL, NULL_TREE,
			  fndecl_or_type);
	  DECL_STATIC_CHAIN (fndecl_or_type) = 1;
	  rtx chain = targetm.calls.static_chain (fndecl_or_type, false);

	  if (GET_MODE (funexp) != Pmode)
	    funexp = convert_memory_address (Pmode, funexp);

	  /* Avoid long live ranges around function calls.  */
	  funexp = copy_to_mode_reg (Pmode, funexp);

	  if (REG_P (chain))
	    emit_insn (gen_rtx_CLOBBER (VOIDmode, chain));

	  /* Emit the runtime identification pattern.  */
	  rtx mask = gen_rtx_AND (Pmode, funexp, GEN_INT (bit_val));
	  emit_cmp_and_jump_insns (mask, const0_rtx, EQ, NULL_RTX, Pmode, 1,
				   call_lab);

	  /* Statically predict the branch to very likely taken.  */
	  rtx_insn *insn = get_last_insn ();
	  if (JUMP_P (insn))
	    predict_insn_def (insn, PRED_BUILTIN_EXPECT, TAKEN);

	  /* Load the descriptor.  */
	  rtx mem = gen_rtx_MEM (ptr_mode,
				 plus_constant (Pmode, funexp, - bit_val));
	  MEM_NOTRAP_P (mem) = 1;
	  mem = convert_memory_address (Pmode, mem);
	  emit_move_insn (chain, mem);

	  mem = gen_rtx_MEM (ptr_mode,
			     plus_constant (Pmode, funexp,
					    POINTER_SIZE / BITS_PER_UNIT
					    - bit_val));
	  MEM_NOTRAP_P (mem) = 1;
	  mem = convert_memory_address (Pmode, mem);
	  emit_move_insn (funexp, mem);

	  emit_label (call_lab);

	  if (REG_P (chain))
	    {
	      use_reg (call_fusage, chain);
	      STATIC_CHAIN_REG_P (chain) = 1;
	    }

	  /* Make sure we're not going to be overwritten below.  */
	  gcc_assert (!static_chain_value);
	}

      /* If we are using registers for parameters, force the
	 function address into a register now.  */
      funexp = ((reg_parm_seen
		 && targetm.small_register_classes_for_mode_p (FUNCTION_MODE))
		? force_not_mem (memory_address (FUNCTION_MODE, funexp))
		: memory_address (FUNCTION_MODE, funexp));
    }
  else
    {
      /* funexp could be a SYMBOL_REF that represents a function pointer of
	 ptr_mode.  In this case, it should be converted into address mode
	 to be a valid address for a memory rtx pattern.  See PR 64971.  */
      if (GET_MODE (funexp) != Pmode)
	funexp = convert_memory_address (Pmode, funexp);

      if (!(flags & ECF_SIBCALL))
	{
	  if (!NO_FUNCTION_CSE && optimize && ! flag_no_function_cse)
	    funexp = force_reg (Pmode, funexp);
	}
    }

  if (static_chain_value != 0
      && (TREE_CODE (fndecl_or_type) != FUNCTION_DECL
	  || DECL_STATIC_CHAIN (fndecl_or_type)))
    {
      rtx chain;

      chain = targetm.calls.static_chain (fndecl_or_type, false);
      static_chain_value = convert_memory_address (Pmode, static_chain_value);

      emit_move_insn (chain, static_chain_value);
      if (REG_P (chain))
	{
	  use_reg (call_fusage, chain);
	  STATIC_CHAIN_REG_P (chain) = 1;
	}
    }

  return funexp;
}

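/* A sketch of the descriptor scheme handled above, assuming the common
   case where targetm.calls.custom_function_descriptors is 1: a
   descriptor for a nested function is a two-word object,

       word 0: the static chain value
       word 1: the address of the function's code,

   and a "pointer" to such a function is the descriptor's address with
   the low bit set.  The AND/compare emitted above tests that bit at run
   time to tell an ordinary code address from a descriptor, and the two
   loads at FUNEXP - bit_val recover the chain and code address.  */
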
/* Generate instructions to call function FUNEXP,
   and optionally pop the results.
   The CALL_INSN is the first insn generated.

   FNDECL is the declaration node of the function.  This is given to the
   hook TARGET_RETURN_POPS_ARGS to determine whether this function pops
   its own args.

   FUNTYPE is the data type of the function.  This is given to the hook
   TARGET_RETURN_POPS_ARGS to determine whether this function pops its
   own args.  We used to allow an identifier for library functions, but
   that doesn't work when the return type is an aggregate type and the
   calling convention says that the pointer to this aggregate is to be
   popped by the callee.

   STACK_SIZE is the number of bytes of arguments on the stack,
   ROUNDED_STACK_SIZE is that number rounded up to
   PREFERRED_STACK_BOUNDARY; zero if the size is variable.  This is
   both to put into the call insn and to generate explicit popping
   code if necessary.

   STRUCT_VALUE_SIZE is the number of bytes wanted in a structure value.
   It is zero if this call doesn't want a structure value.

   NEXT_ARG_REG is the rtx that results from executing
     targetm.calls.function_arg (&args_so_far, VOIDmode, void_type_node, true)
   just after all the args have had their registers assigned.
   This could be whatever you like, but normally it is the first
   arg-register beyond those used for args in this call,
   or 0 if all the arg-registers are used in this call.
   It is passed on to `gen_call' so you can put this info in the call insn.

   VALREG is a hard register in which a value is returned,
   or 0 if the call does not return a value.

   OLD_INHIBIT_DEFER_POP is the value that `inhibit_defer_pop' had before
   the args to this call were processed.
   We restore `inhibit_defer_pop' to that value.

   CALL_FUSAGE is either empty or an EXPR_LIST of USE expressions that
   denote registers used by the called function.  */

static void
emit_call_1 (rtx funexp, tree fntree ATTRIBUTE_UNUSED, tree fndecl ATTRIBUTE_UNUSED,
	     tree funtype ATTRIBUTE_UNUSED,
	     HOST_WIDE_INT stack_size ATTRIBUTE_UNUSED,
	     HOST_WIDE_INT rounded_stack_size,
	     HOST_WIDE_INT struct_value_size ATTRIBUTE_UNUSED,
	     rtx next_arg_reg ATTRIBUTE_UNUSED, rtx valreg,
	     int old_inhibit_defer_pop, rtx call_fusage, int ecf_flags,
	     cumulative_args_t args_so_far ATTRIBUTE_UNUSED)
{
  rtx rounded_stack_size_rtx = GEN_INT (rounded_stack_size);
  rtx call, funmem, pat;
  int already_popped = 0;
  HOST_WIDE_INT n_popped = 0;

  /* Sibling call patterns never pop arguments (no sibcall(_value)_pop
     patterns exist).  Any popping that the callee does on return will
     be from our caller's frame rather than ours.  */
  if (!(ecf_flags & ECF_SIBCALL))
    {
      n_popped += targetm.calls.return_pops_args (fndecl, funtype, stack_size);

#ifdef CALL_POPS_ARGS
      n_popped += CALL_POPS_ARGS (*get_cumulative_args (args_so_far));
#endif
    }

  /* Ensure address is valid.  SYMBOL_REF is already valid, so no need,
     and we don't want to load it into a register as an optimization,
     because prepare_call_address already did it if it should be done.  */
  if (GET_CODE (funexp) != SYMBOL_REF)
    funexp = memory_address (FUNCTION_MODE, funexp);

  funmem = gen_rtx_MEM (FUNCTION_MODE, funexp);
  if (fndecl && TREE_CODE (fndecl) == FUNCTION_DECL)
    {
      tree t = fndecl;

      /* Although a built-in FUNCTION_DECL and its non-__builtin
	 counterpart compare equal and get a shared mem_attrs, they
	 produce different dump output in compare-debug compilations,
	 if an entry gets garbage collected in one compilation, then
	 adds a different (but equivalent) entry, while the other
	 doesn't run the garbage collector at the same spot and then
	 shares the mem_attr with the equivalent entry.  */
      if (DECL_BUILT_IN_CLASS (t) == BUILT_IN_NORMAL)
	{
	  tree t2 = builtin_decl_explicit (DECL_FUNCTION_CODE (t));
	  if (t2)
	    t = t2;
	}

      set_mem_expr (funmem, t);
    }
  else if (fntree)
    set_mem_expr (funmem, build_simple_mem_ref (CALL_EXPR_FN (fntree)));

  if (ecf_flags & ECF_SIBCALL)
    {
      if (valreg)
	pat = targetm.gen_sibcall_value (valreg, funmem,
					 rounded_stack_size_rtx,
					 next_arg_reg, NULL_RTX);
      else
	pat = targetm.gen_sibcall (funmem, rounded_stack_size_rtx,
				   next_arg_reg, GEN_INT (struct_value_size));
    }
  /* If the target has "call" or "call_value" insns, then prefer them
     if no arguments are actually popped.  If the target does not have
     "call" or "call_value" insns, then we must use the popping versions
     even if the call has no arguments to pop.  */
  else if (n_popped > 0
	   || !(valreg
		? targetm.have_call_value ()
		: targetm.have_call ()))
    {
      rtx n_pop = GEN_INT (n_popped);

      /* If this subroutine pops its own args, record that in the call insn
	 if possible, for the sake of frame pointer elimination.  */

      if (valreg)
	pat = targetm.gen_call_value_pop (valreg, funmem,
					  rounded_stack_size_rtx,
					  next_arg_reg, n_pop);
      else
	pat = targetm.gen_call_pop (funmem, rounded_stack_size_rtx,
				    next_arg_reg, n_pop);

      already_popped = 1;
    }
  else
    {
      if (valreg)
	pat = targetm.gen_call_value (valreg, funmem, rounded_stack_size_rtx,
				      next_arg_reg, NULL_RTX);
      else
	pat = targetm.gen_call (funmem, rounded_stack_size_rtx, next_arg_reg,
				GEN_INT (struct_value_size));
    }
  emit_insn (pat);

  /* Find the call we just emitted.  */
  rtx_call_insn *call_insn = last_call_insn ();

  /* Some targets create a fresh MEM instead of reusing the one provided
     above.  Set its MEM_EXPR.  */
  call = get_call_rtx_from (call_insn);
  if (call
      && MEM_EXPR (XEXP (call, 0)) == NULL_TREE
      && MEM_EXPR (funmem) != NULL_TREE)
    set_mem_expr (XEXP (call, 0), MEM_EXPR (funmem));

  /* Mark instrumented calls.  */
  if (call && fntree)
    CALL_EXPR_WITH_BOUNDS_P (call) = CALL_WITH_BOUNDS_P (fntree);

  /* Put the register usage information there.  */
  add_function_usage_to (call_insn, call_fusage);

  /* If this is a const call, then set the insn's unchanging bit.  */
  if (ecf_flags & ECF_CONST)
    RTL_CONST_CALL_P (call_insn) = 1;

  /* If this is a pure call, then set the insn's unchanging bit.  */
  if (ecf_flags & ECF_PURE)
    RTL_PURE_CALL_P (call_insn) = 1;

  /* If this is a const or pure call that may loop, set the insn's bit
     for that.  */
  if (ecf_flags & ECF_LOOPING_CONST_OR_PURE)
    RTL_LOOPING_CONST_OR_PURE_CALL_P (call_insn) = 1;

  /* Create a nothrow REG_EH_REGION note, if needed.  */
  make_reg_eh_region_note (call_insn, ecf_flags, 0);

  if (ecf_flags & ECF_NORETURN)
    add_reg_note (call_insn, REG_NORETURN, const0_rtx);

  if (ecf_flags & ECF_RETURNS_TWICE)
    {
      add_reg_note (call_insn, REG_SETJMP, const0_rtx);
      cfun->calls_setjmp = 1;
    }

  SIBLING_CALL_P (call_insn) = ((ecf_flags & ECF_SIBCALL) != 0);

  /* Restore this now, so that we do defer pops for this call's args
     if the context of the call as a whole permits.  */
  inhibit_defer_pop = old_inhibit_defer_pop;

  if (n_popped > 0)
    {
      if (!already_popped)
	CALL_INSN_FUNCTION_USAGE (call_insn)
	  = gen_rtx_EXPR_LIST (VOIDmode,
			       gen_rtx_CLOBBER (VOIDmode, stack_pointer_rtx),
			       CALL_INSN_FUNCTION_USAGE (call_insn));
      rounded_stack_size -= n_popped;
      rounded_stack_size_rtx = GEN_INT (rounded_stack_size);
      stack_pointer_delta -= n_popped;

      add_reg_note (call_insn, REG_ARGS_SIZE, GEN_INT (stack_pointer_delta));

      /* If popping is needed, stack realign must use DRAP.  */
      if (SUPPORTS_STACK_ALIGNMENT)
	crtl->need_drap = true;
    }
  /* For noreturn calls when not accumulating outgoing args force
     REG_ARGS_SIZE note to prevent crossjumping of calls with different
     args sizes.  */
  else if (!ACCUMULATE_OUTGOING_ARGS && (ecf_flags & ECF_NORETURN) != 0)
    add_reg_note (call_insn, REG_ARGS_SIZE, GEN_INT (stack_pointer_delta));

  if (!ACCUMULATE_OUTGOING_ARGS)
    {
      /* If returning from the subroutine does not automatically pop the args,
	 we need an instruction to pop them sooner or later.
	 Perhaps do it now; perhaps just record how much space to pop later.

	 If returning from the subroutine does pop the args, indicate that the
	 stack pointer will be changed.  */

      if (rounded_stack_size != 0)
	{
	  if (ecf_flags & ECF_NORETURN)
	    /* Just pretend we did the pop.  */
	    stack_pointer_delta -= rounded_stack_size;
	  else if (flag_defer_pop && inhibit_defer_pop == 0
		   && ! (ecf_flags & (ECF_CONST | ECF_PURE)))
	    pending_stack_adjust += rounded_stack_size;
	  else
	    adjust_stack (rounded_stack_size_rtx);
	}
    }
  /* When we accumulate outgoing args, we must avoid any stack manipulations.
     Restore the stack pointer to its original value now.  Usually
     ACCUMULATE_OUTGOING_ARGS targets don't get here, but there are exceptions.
     On i386 ACCUMULATE_OUTGOING_ARGS can be enabled on demand, and
     popping variants of functions exist as well.

     ??? We may optimize similar to defer_pop above, but it is
     probably not worthwhile.

     ??? It will be worthwhile to enable combine_stack_adjustments even for
     such machines.  */
  else if (n_popped)
    anti_adjust_stack (GEN_INT (n_popped));
}

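/* As a concrete illustration of the popping logic above: for a callee
   that pops its own arguments (e.g. stdcall on 32-bit x86),
   return_pops_args yields the argument block size, a call_pop or
   call_value_pop pattern is used when available, and the caller-side
   adjustment shrinks accordingly; when n_popped is zero, the pop is
   either emitted with adjust_stack or merely recorded in
   pending_stack_adjust so -fdefer-pop can coalesce it with later
   adjustments.  */
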
/* Determine if the function identified by FNDECL is one with
   special properties we wish to know about.  Modify FLAGS accordingly.

   For example, if the function might return more than one time (setjmp), then
   set ECF_RETURNS_TWICE.

   Set ECF_MAY_BE_ALLOCA for any memory allocation function that might allocate
   space from the stack such as alloca.  */

static int
special_function_p (const_tree fndecl, int flags)
{
  tree name_decl = DECL_NAME (fndecl);

  /* For instrumentation clones we want to derive flags
     from the original name.  */
  if (cgraph_node::get (fndecl)
      && cgraph_node::get (fndecl)->instrumentation_clone)
    name_decl = DECL_NAME (cgraph_node::get (fndecl)->orig_decl);

  if (fndecl && name_decl
      && IDENTIFIER_LENGTH (name_decl) <= 11
      /* Exclude functions not at the file scope, or not `extern',
	 since they are not the magic functions we would otherwise
	 think they are.
	 FIXME: this should be handled with attributes, not with this
	 hacky imitation of DECL_ASSEMBLER_NAME.  It's (also) wrong
	 because you can declare fork() inside a function if you
	 wish.  */
      && (DECL_CONTEXT (fndecl) == NULL_TREE
	  || TREE_CODE (DECL_CONTEXT (fndecl)) == TRANSLATION_UNIT_DECL)
      && TREE_PUBLIC (fndecl))
    {
      const char *name = IDENTIFIER_POINTER (name_decl);
      const char *tname = name;

      /* We assume that alloca will always be called by name.  It
	 makes no sense to pass it as a pointer-to-function to
	 anything that does not understand its behavior.  */
      if (IDENTIFIER_LENGTH (name_decl) == 6
	  && name[0] == 'a'
	  && ! strcmp (name, "alloca"))
	flags |= ECF_MAY_BE_ALLOCA;

      /* Disregard prefix _ or __.  */
      if (name[0] == '_')
	{
	  if (name[1] == '_')
	    tname += 2;
	  else
	    tname += 1;
	}

      /* ECF_RETURNS_TWICE is safe even for -ffreestanding.  */
      if (! strcmp (tname, "setjmp")
	  || ! strcmp (tname, "sigsetjmp")
	  || ! strcmp (name, "savectx")
	  || ! strcmp (name, "vfork")
	  || ! strcmp (name, "getcontext"))
	flags |= ECF_RETURNS_TWICE;
    }

  if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
      && ALLOCA_FUNCTION_CODE_P (DECL_FUNCTION_CODE (fndecl)))
    flags |= ECF_MAY_BE_ALLOCA;

  return flags;
}

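/* For instance, because the prefix check above skips "_" or "__",
   declarations of _setjmp and __sigsetjmp are flagged ECF_RETURNS_TWICE
   just like their unprefixed counterparts, provided they are public,
   declared at file scope, and no longer than 11 characters.  */
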
/* Similar to special_function_p; return a set of ERF_ flags for the
   function FNDECL.  */
static int
decl_return_flags (tree fndecl)
{
  tree attr;
  tree type = TREE_TYPE (fndecl);
  if (!type)
    return 0;

  attr = lookup_attribute ("fn spec", TYPE_ATTRIBUTES (type));
  if (!attr)
    return 0;

  attr = TREE_VALUE (TREE_VALUE (attr));
  if (!attr || TREE_STRING_LENGTH (attr) < 1)
    return 0;

  switch (TREE_STRING_POINTER (attr)[0])
    {
    case '1':
    case '2':
    case '3':
    case '4':
      return ERF_RETURNS_ARG | (TREE_STRING_POINTER (attr)[0] - '1');

    case 'm':
      return ERF_NOALIAS;

    case '.':
    default:
      return 0;
    }
}

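/* As an illustration of the "fn spec" encoding decoded above: a first
   character of '1' through '4' says the function returns its first
   through fourth argument (ERF_RETURNS_ARG together with the zero-based
   argument index), and 'm' says it returns fresh, non-aliased memory
   (ERF_NOALIAS, malloc-like).  */
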
/* Return nonzero when FNDECL represents a call to setjmp.  */

int
setjmp_call_p (const_tree fndecl)
{
  if (DECL_IS_RETURNS_TWICE (fndecl))
    return ECF_RETURNS_TWICE;
  return special_function_p (fndecl, 0) & ECF_RETURNS_TWICE;
}

/* Return true if STMT may be an alloca call.  */

bool
gimple_maybe_alloca_call_p (const gimple *stmt)
{
  tree fndecl;

  if (!is_gimple_call (stmt))
    return false;

  fndecl = gimple_call_fndecl (stmt);
  if (fndecl && (special_function_p (fndecl, 0) & ECF_MAY_BE_ALLOCA))
    return true;

  return false;
}

/* Return true if STMT is a builtin alloca call.  */

bool
gimple_alloca_call_p (const gimple *stmt)
{
  tree fndecl;

  if (!is_gimple_call (stmt))
    return false;

  fndecl = gimple_call_fndecl (stmt);
  if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
    switch (DECL_FUNCTION_CODE (fndecl))
      {
      CASE_BUILT_IN_ALLOCA:
	return true;
      default:
	break;
      }

  return false;
}

/* Return true when exp contains a builtin alloca call.  */

bool
alloca_call_p (const_tree exp)
{
  tree fndecl;
  if (TREE_CODE (exp) == CALL_EXPR
      && (fndecl = get_callee_fndecl (exp))
      && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
    switch (DECL_FUNCTION_CODE (fndecl))
      {
      CASE_BUILT_IN_ALLOCA:
	return true;
      default:
	break;
      }

  return false;
}

/* Return TRUE if FNDECL is either a TM builtin or a TM cloned
   function.  Return FALSE otherwise.  */

static bool
is_tm_builtin (const_tree fndecl)
{
  if (fndecl == NULL)
    return false;

  if (decl_is_tm_clone (fndecl))
    return true;

  if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
    {
      switch (DECL_FUNCTION_CODE (fndecl))
	{
	case BUILT_IN_TM_COMMIT:
	case BUILT_IN_TM_COMMIT_EH:
	case BUILT_IN_TM_ABORT:
	case BUILT_IN_TM_IRREVOCABLE:
	case BUILT_IN_TM_GETTMCLONE_IRR:
	case BUILT_IN_TM_MEMCPY:
	case BUILT_IN_TM_MEMMOVE:
	case BUILT_IN_TM_MEMSET:
	CASE_BUILT_IN_TM_STORE (1):
	CASE_BUILT_IN_TM_STORE (2):
	CASE_BUILT_IN_TM_STORE (4):
	CASE_BUILT_IN_TM_STORE (8):
	CASE_BUILT_IN_TM_STORE (FLOAT):
	CASE_BUILT_IN_TM_STORE (DOUBLE):
	CASE_BUILT_IN_TM_STORE (LDOUBLE):
	CASE_BUILT_IN_TM_STORE (M64):
	CASE_BUILT_IN_TM_STORE (M128):
	CASE_BUILT_IN_TM_STORE (M256):
	CASE_BUILT_IN_TM_LOAD (1):
	CASE_BUILT_IN_TM_LOAD (2):
	CASE_BUILT_IN_TM_LOAD (4):
	CASE_BUILT_IN_TM_LOAD (8):
	CASE_BUILT_IN_TM_LOAD (FLOAT):
	CASE_BUILT_IN_TM_LOAD (DOUBLE):
	CASE_BUILT_IN_TM_LOAD (LDOUBLE):
	CASE_BUILT_IN_TM_LOAD (M64):
	CASE_BUILT_IN_TM_LOAD (M128):
	CASE_BUILT_IN_TM_LOAD (M256):
	case BUILT_IN_TM_LOG:
	case BUILT_IN_TM_LOG_1:
	case BUILT_IN_TM_LOG_2:
	case BUILT_IN_TM_LOG_4:
	case BUILT_IN_TM_LOG_8:
	case BUILT_IN_TM_LOG_FLOAT:
	case BUILT_IN_TM_LOG_DOUBLE:
	case BUILT_IN_TM_LOG_LDOUBLE:
	case BUILT_IN_TM_LOG_M64:
	case BUILT_IN_TM_LOG_M128:
	case BUILT_IN_TM_LOG_M256:
	  return true;
	default:
	  break;
	}
    }
  return false;
}

/* Detect flags (function attributes) from the function decl or type node.  */

int
flags_from_decl_or_type (const_tree exp)
{
  int flags = 0;

  if (DECL_P (exp))
    {
      /* The function exp may have the `malloc' attribute.  */
      if (DECL_IS_MALLOC (exp))
	flags |= ECF_MALLOC;

      /* The function exp may have the `returns_twice' attribute.  */
      if (DECL_IS_RETURNS_TWICE (exp))
	flags |= ECF_RETURNS_TWICE;

      /* Process the pure and const attributes.  */
      if (TREE_READONLY (exp))
	flags |= ECF_CONST;
      if (DECL_PURE_P (exp))
	flags |= ECF_PURE;
      if (DECL_LOOPING_CONST_OR_PURE_P (exp))
	flags |= ECF_LOOPING_CONST_OR_PURE;

      if (DECL_IS_NOVOPS (exp))
	flags |= ECF_NOVOPS;
      if (lookup_attribute ("leaf", DECL_ATTRIBUTES (exp)))
	flags |= ECF_LEAF;
      if (lookup_attribute ("cold", DECL_ATTRIBUTES (exp)))
	flags |= ECF_COLD;

      if (TREE_NOTHROW (exp))
	flags |= ECF_NOTHROW;

      if (flag_tm)
	{
	  if (is_tm_builtin (exp))
	    flags |= ECF_TM_BUILTIN;
	  else if ((flags & (ECF_CONST|ECF_NOVOPS)) != 0
		   || lookup_attribute ("transaction_pure",
					TYPE_ATTRIBUTES (TREE_TYPE (exp))))
	    flags |= ECF_TM_PURE;
	}

      flags = special_function_p (exp, flags);
    }
  else if (TYPE_P (exp))
    {
      if (TYPE_READONLY (exp))
	flags |= ECF_CONST;

      if (flag_tm
	  && ((flags & ECF_CONST) != 0
	      || lookup_attribute ("transaction_pure", TYPE_ATTRIBUTES (exp))))
	flags |= ECF_TM_PURE;
    }
  else
    gcc_unreachable ();

  if (TREE_THIS_VOLATILE (exp))
    {
      flags |= ECF_NORETURN;
      if (flags & (ECF_CONST|ECF_PURE))
	flags |= ECF_LOOPING_CONST_OR_PURE;
    }

  return flags;
}

/* Detect flags from a CALL_EXPR.  */

int
call_expr_flags (const_tree t)
{
  int flags;
  tree decl = get_callee_fndecl (t);

  if (decl)
    flags = flags_from_decl_or_type (decl);
  else if (CALL_EXPR_FN (t) == NULL_TREE)
    flags = internal_fn_flags (CALL_EXPR_IFN (t));
  else
    {
      tree type = TREE_TYPE (CALL_EXPR_FN (t));
      if (type && TREE_CODE (type) == POINTER_TYPE)
	flags = flags_from_decl_or_type (TREE_TYPE (type));
      else
	flags = 0;
      if (CALL_EXPR_BY_DESCRIPTOR (t))
	flags |= ECF_BY_DESCRIPTOR;
    }

  return flags;
}

/* Return true if TYPE should be passed by invisible reference.  */

bool
pass_by_reference (CUMULATIVE_ARGS *ca, machine_mode mode,
		   tree type, bool named_arg)
{
  if (type)
    {
      /* If this type contains non-trivial constructors, then it is
	 forbidden for the middle-end to create any new copies.  */
      if (TREE_ADDRESSABLE (type))
	return true;

      /* GCC post 3.4 passes *all* variable sized types by reference.  */
      if (!TYPE_SIZE (type) || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
	return true;

      /* If a record type should be passed the same as its first (and only)
	 member, use the type and mode of that member.  */
      if (TREE_CODE (type) == RECORD_TYPE && TYPE_TRANSPARENT_AGGR (type))
	{
	  type = TREE_TYPE (first_field (type));
	  mode = TYPE_MODE (type);
	}
    }

  return targetm.calls.pass_by_reference (pack_cumulative_args (ca), mode,
					  type, named_arg);
}

/* Return true if TYPE, which is passed by reference, should be callee
   copied instead of caller copied.  */

bool
reference_callee_copied (CUMULATIVE_ARGS *ca, machine_mode mode,
			 tree type, bool named_arg)
{
  if (type && TREE_ADDRESSABLE (type))
    return false;
  return targetm.calls.callee_copies (pack_cumulative_args (ca), mode, type,
				      named_arg);
}

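/* Two common cases decided above, independent of the target hook: a C++
   class with a non-trivial copy constructor or destructor is
   TREE_ADDRESSABLE and must be passed by reference, and any
   variable-sized type (e.g. a structure containing a flexible or
   variably-sized member) is passed by reference because its size is not
   an INTEGER_CST.  */
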
/* Precompute all register parameters as described by ARGS, storing values
   into fields within the ARGS array.

   NUM_ACTUALS indicates the total number of elements in the ARGS array.

   Set REG_PARM_SEEN if we encounter a register parameter.  */

static void
precompute_register_parameters (int num_actuals, struct arg_data *args,
				int *reg_parm_seen)
{
  int i;

  *reg_parm_seen = 0;

  for (i = 0; i < num_actuals; i++)
    if (args[i].reg != 0 && ! args[i].pass_on_stack)
      {
	*reg_parm_seen = 1;

	if (args[i].value == 0)
	  {
	    push_temp_slots ();
	    args[i].value = expand_normal (args[i].tree_value);
	    preserve_temp_slots (args[i].value);
	    pop_temp_slots ();
	  }

	/* If we are to promote the function arg to a wider mode,
	   do it now.  */

	if (args[i].mode != TYPE_MODE (TREE_TYPE (args[i].tree_value)))
	  args[i].value
	    = convert_modes (args[i].mode,
			     TYPE_MODE (TREE_TYPE (args[i].tree_value)),
			     args[i].value, args[i].unsignedp);

	/* If the value is a non-legitimate constant, force it into a
	   pseudo now.  TLS symbols sometimes need a call to resolve.  */
	if (CONSTANT_P (args[i].value)
	    && !targetm.legitimate_constant_p (args[i].mode, args[i].value))
	  args[i].value = force_reg (args[i].mode, args[i].value);

	/* If we're going to have to load the value by parts, pull the
	   parts into pseudos.  The part extraction process can involve
	   non-trivial computation.  */
	if (GET_CODE (args[i].reg) == PARALLEL)
	  {
	    tree type = TREE_TYPE (args[i].tree_value);
	    args[i].parallel_value
	      = emit_group_load_into_temps (args[i].reg, args[i].value,
					    type, int_size_in_bytes (type));
	  }

	/* If the value is expensive, and we are inside an appropriately
	   short loop, put the value into a pseudo and then put the pseudo
	   into the hard reg.

	   For small register classes, also do this if this call uses
	   register parameters.  This is to avoid reload conflicts while
	   loading the parameters registers.  */

	else if ((! (REG_P (args[i].value)
		     || (GET_CODE (args[i].value) == SUBREG
			 && REG_P (SUBREG_REG (args[i].value)))))
		 && args[i].mode != BLKmode
		 && (set_src_cost (args[i].value, args[i].mode,
				   optimize_insn_for_speed_p ())
		     > COSTS_N_INSNS (1))
		 && ((*reg_parm_seen
		      && targetm.small_register_classes_for_mode_p (args[i].mode))
		     || optimize))
	  args[i].value = copy_to_mode_reg (args[i].mode, args[i].value);
      }
}

#ifdef REG_PARM_STACK_SPACE

/* The argument list is the property of the called routine and it
   may clobber it.  If the fixed area has been used for previous
   parameters, we must save and restore it.  */

static rtx
save_fixed_argument_area (int reg_parm_stack_space, rtx argblock, int *low_to_save, int *high_to_save)
{
  int low;
  int high;

  /* Compute the boundary of the area that needs to be saved, if any.  */
  high = reg_parm_stack_space;
  if (ARGS_GROW_DOWNWARD)
    high += 1;

  if (high > highest_outgoing_arg_in_use)
    high = highest_outgoing_arg_in_use;

  for (low = 0; low < high; low++)
    if (stack_usage_map[low] != 0)
      {
	int num_to_save;
	machine_mode save_mode;
	int delta;
	rtx addr;
	rtx stack_area;
	rtx save_area;

	while (stack_usage_map[--high] == 0)
	  ;

	*low_to_save = low;
	*high_to_save = high;

	num_to_save = high - low + 1;

	/* If we don't have the required alignment, must do this
	   in BLKmode.  */
	scalar_int_mode imode;
	if (int_mode_for_size (num_to_save * BITS_PER_UNIT, 1).exists (&imode)
	    && (low & (MIN (GET_MODE_SIZE (imode),
			    BIGGEST_ALIGNMENT / UNITS_PER_WORD) - 1)) == 0)
	  save_mode = imode;
	else
	  save_mode = BLKmode;

	if (ARGS_GROW_DOWNWARD)
	  delta = -high;
	else
	  delta = low;

	addr = plus_constant (Pmode, argblock, delta);
	stack_area = gen_rtx_MEM (save_mode, memory_address (save_mode, addr));

	set_mem_align (stack_area, PARM_BOUNDARY);
	if (save_mode == BLKmode)
	  {
	    save_area = assign_stack_temp (BLKmode, num_to_save);
	    emit_block_move (validize_mem (save_area), stack_area,
			     GEN_INT (num_to_save), BLOCK_OP_CALL_PARM);
	  }
	else
	  {
	    save_area = gen_reg_rtx (save_mode);
	    emit_move_insn (save_area, stack_area);
	  }

	return save_area;
      }

  return NULL_RTX;
}

static void
restore_fixed_argument_area (rtx save_area, rtx argblock, int high_to_save, int low_to_save)
{
  machine_mode save_mode = GET_MODE (save_area);
  int delta;
  rtx addr, stack_area;

  if (ARGS_GROW_DOWNWARD)
    delta = -high_to_save;
  else
    delta = low_to_save;

  addr = plus_constant (Pmode, argblock, delta);
  stack_area = gen_rtx_MEM (save_mode, memory_address (save_mode, addr));
  set_mem_align (stack_area, PARM_BOUNDARY);

  if (save_mode != BLKmode)
    emit_move_insn (stack_area, save_area);
  else
    emit_block_move (stack_area, validize_mem (save_area),
		     GEN_INT (high_to_save - low_to_save + 1),
		     BLOCK_OP_CALL_PARM);
}
#endif /* REG_PARM_STACK_SPACE */

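/* These two helpers matter on ABIs where REG_PARM_STACK_SPACE reserves
   stack slots even for register-passed parameters (for example the
   32-byte home area of the x86-64 Microsoft ABI): a nested call emitted
   while the outer call's arguments are being set up could otherwise
   clobber that fixed area, so the used portion is saved around the
   nested call and restored afterwards.  */
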
/* If any elements in ARGS refer to parameters that are to be passed in
   registers, but not in memory, and whose alignment does not permit a
   direct copy into registers, copy the values into a group of pseudos
   which we will later copy into the appropriate hard registers.

   Pseudos for each unaligned argument will be stored into the array
   args[argnum].aligned_regs.  The caller is responsible for deallocating
   the aligned_regs array if it is nonzero.  */

static void
store_unaligned_arguments_into_pseudos (struct arg_data *args, int num_actuals)
{
  int i, j;

  for (i = 0; i < num_actuals; i++)
    if (args[i].reg != 0 && ! args[i].pass_on_stack
	&& GET_CODE (args[i].reg) != PARALLEL
	&& args[i].mode == BLKmode
	&& MEM_P (args[i].value)
	&& (MEM_ALIGN (args[i].value)
	    < (unsigned int) MIN (BIGGEST_ALIGNMENT, BITS_PER_WORD)))
      {
	int bytes = int_size_in_bytes (TREE_TYPE (args[i].tree_value));
	int endian_correction = 0;

	if (args[i].partial)
	  {
	    gcc_assert (args[i].partial % UNITS_PER_WORD == 0);
	    args[i].n_aligned_regs = args[i].partial / UNITS_PER_WORD;
	  }
	else
	  args[i].n_aligned_regs
	    = (bytes + UNITS_PER_WORD - 1) / UNITS_PER_WORD;

	args[i].aligned_regs = XNEWVEC (rtx, args[i].n_aligned_regs);

	/* Structures smaller than a word are normally aligned to the
	   least significant byte.  On a BYTES_BIG_ENDIAN machine,
	   this means we must skip the empty high order bytes when
	   calculating the bit offset.  */
	if (bytes < UNITS_PER_WORD
#ifdef BLOCK_REG_PADDING
	    && (BLOCK_REG_PADDING (args[i].mode,
				   TREE_TYPE (args[i].tree_value), 1)
		== PAD_DOWNWARD)
#else
	    && BYTES_BIG_ENDIAN
#endif
	    )
	  endian_correction = BITS_PER_WORD - bytes * BITS_PER_UNIT;

	for (j = 0; j < args[i].n_aligned_regs; j++)
	  {
	    rtx reg = gen_reg_rtx (word_mode);
	    rtx word = operand_subword_force (args[i].value, j, BLKmode);
	    int bitsize = MIN (bytes * BITS_PER_UNIT, BITS_PER_WORD);

	    args[i].aligned_regs[j] = reg;
	    word = extract_bit_field (word, bitsize, 0, 1, NULL_RTX,
				      word_mode, word_mode, false, NULL);

	    /* There is no need to restrict this code to loading items
	       in TYPE_ALIGN sized hunks.  The bitfield instructions can
	       load up entire word sized registers efficiently.

	       ??? This may not be needed anymore.
	       We used to emit a clobber here but that doesn't let later
	       passes optimize the instructions we emit.  By storing 0 into
	       the register later passes know the first AND to zero out the
	       bitfield being set in the register is unnecessary.  The store
	       of 0 will be deleted as will at least the first AND.  */

	    emit_move_insn (reg, const0_rtx);

	    bytes -= bitsize / BITS_PER_UNIT;
	    store_bit_field (reg, bitsize, endian_correction, 0, 0,
			     word_mode, word, false);
	  }
      }
}

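/* A concrete case, as a sketch: a byte-aligned packed structure passed
   in two registers on a 32-bit target arrives here with BLKmode and
   insufficient MEM_ALIGN, so each 32-bit piece is pulled out with
   extract_bit_field into a zeroed word-mode pseudo rather than copied
   with a full-word load that assumes alignment the memory does not
   have.  */
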
/* The limit set by -Walloc-larger-than=.  */
static GTY(()) tree alloc_object_size_limit;

/* Initialize ALLOC_OBJECT_SIZE_LIMIT based on the -Walloc-size-larger-than=
   setting if the option is specified, or to the maximum object size if it
   is not.  Return the initialized value.  */

static tree
alloc_max_size (void)
{
  if (!alloc_object_size_limit)
    {
      alloc_object_size_limit = max_object_size ();

      if (warn_alloc_size_limit)
	{
	  char *end = NULL;
	  errno = 0;
	  unsigned HOST_WIDE_INT unit = 1;
	  unsigned HOST_WIDE_INT limit
	    = strtoull (warn_alloc_size_limit, &end, 10);

	  if (!errno)
	    {
	      if (end && *end)
		{
		  /* Numeric option arguments are at most INT_MAX.  Make it
		     possible to specify a larger value by accepting common
		     suffixes.  */
		  if (!strcmp (end, "kB"))
		    unit = 1000;
		  else if (!strcasecmp (end, "KiB") || !strcmp (end, "KB"))
		    unit = 1024;
		  else if (!strcmp (end, "MB"))
		    unit = HOST_WIDE_INT_UC (1000) * 1000;
		  else if (!strcasecmp (end, "MiB"))
		    unit = HOST_WIDE_INT_UC (1024) * 1024;
		  else if (!strcasecmp (end, "GB"))
		    unit = HOST_WIDE_INT_UC (1000) * 1000 * 1000;
		  else if (!strcasecmp (end, "GiB"))
		    unit = HOST_WIDE_INT_UC (1024) * 1024 * 1024;
		  else if (!strcasecmp (end, "TB"))
		    unit = HOST_WIDE_INT_UC (1000) * 1000 * 1000 * 1000;
		  else if (!strcasecmp (end, "TiB"))
		    unit = HOST_WIDE_INT_UC (1024) * 1024 * 1024 * 1024;
		  else if (!strcasecmp (end, "PB"))
		    unit = HOST_WIDE_INT_UC (1000) * 1000 * 1000 * 1000 * 1000;
		  else if (!strcasecmp (end, "PiB"))
		    unit = HOST_WIDE_INT_UC (1024) * 1024 * 1024 * 1024 * 1024;
		  else if (!strcasecmp (end, "EB"))
		    unit = HOST_WIDE_INT_UC (1000) * 1000 * 1000 * 1000 * 1000
			   * 1000;
		  else if (!strcasecmp (end, "EiB"))
		    unit = HOST_WIDE_INT_UC (1024) * 1024 * 1024 * 1024 * 1024
			   * 1024;
		  else
		    unit = 0;
		}

	      if (unit)
		{
		  widest_int w = wi::mul (limit, unit);
		  if (w < wi::to_widest (alloc_object_size_limit))
		    alloc_object_size_limit
		      = wide_int_to_tree (ptrdiff_type_node, w);
		}
	    }
	}
    }
  return alloc_object_size_limit;
}

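/* Example: -Walloc-size-larger-than=2MiB parses as limit 2 with unit
   1024 * 1024, so the effective limit becomes 2097152 bytes, provided
   that is smaller than the default maximum object size.  */
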
/* Return true when EXP's range can be determined and set RANGE[] to it
   after adjusting it if necessary to make it represent a valid size
   of an object, or a valid size argument to an allocation function declared
   with attribute alloc_size (whose argument may be signed), or to a string
   manipulation function like memset.  When ALLOW_ZERO is true, allow
   returning a range of [0, 0] for a size in an anti-range [1, N] where
   N > PTRDIFF_MAX.  A zero range is a (nearly) invalid argument to
   allocation functions like malloc but it is a valid argument to
   functions like memset.  */

bool
get_size_range (tree exp, tree range[2], bool allow_zero /* = false */)
{
  if (tree_fits_uhwi_p (exp))
    {
      /* EXP is a constant.  */
      range[0] = range[1] = exp;
      return true;
    }

  tree exptype = TREE_TYPE (exp);
  bool integral = INTEGRAL_TYPE_P (exptype);

  wide_int min, max;
  enum value_range_type range_type;

  if (TREE_CODE (exp) == SSA_NAME && integral)
    range_type = get_range_info (exp, &min, &max);
  else
    range_type = VR_VARYING;

  if (range_type == VR_VARYING)
    {
      if (integral)
	{
	  /* Use the full range of the type of the expression when
	     no value range information is available.  */
	  range[0] = TYPE_MIN_VALUE (exptype);
	  range[1] = TYPE_MAX_VALUE (exptype);
	  return true;
	}

      range[0] = NULL_TREE;
      range[1] = NULL_TREE;
      return false;
    }

  unsigned expprec = TYPE_PRECISION (exptype);

  bool signed_p = !TYPE_UNSIGNED (exptype);

  if (range_type == VR_ANTI_RANGE)
    {
      if (signed_p)
	{
	  if (wi::les_p (max, 0))
	    {
	      /* EXP is not in a strictly negative range.  That means
		 it must be in some (not necessarily strictly) positive
		 range which includes zero.  Since in signed to unsigned
		 conversions negative values end up converted to large
		 positive values, and otherwise they are not valid sizes,
		 the resulting range is in both cases [0, TYPE_MAX].  */
	      min = wi::zero (expprec);
	      max = wi::to_wide (TYPE_MAX_VALUE (exptype));
	    }
	  else if (wi::les_p (min - 1, 0))
	    {
	      /* EXP is not in a negative-positive range.  That means EXP
		 is either negative, or greater than max.  Since negative
		 sizes are invalid make the range [MAX + 1, TYPE_MAX].  */
	      min = max + 1;
	      max = wi::to_wide (TYPE_MAX_VALUE (exptype));
	    }
	  else
	    {
	      max = min - 1;
	      min = wi::zero (expprec);
	    }
	}
      else if (wi::eq_p (0, min - 1))
	{
	  /* EXP is unsigned and not in the range [1, MAX].  That means
	     it's either zero or greater than MAX.  Even though 0 would
	     normally be detected by -Walloc-zero, unless ALLOW_ZERO
	     is true, set the range to [MAX, TYPE_MAX] so that when MAX
	     is greater than the limit the whole range is diagnosed.  */
	  if (allow_zero)
	    min = max = wi::zero (expprec);
	  else
	    {
	      min = max + 1;
	      max = wi::to_wide (TYPE_MAX_VALUE (exptype));
	    }
	}
      else
	{
	  max = min - 1;
	  min = wi::zero (expprec);
	}
    }

  range[0] = wide_int_to_tree (exptype, min);
  range[1] = wide_int_to_tree (exptype, max);

  return true;
}

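/* Worked example of the anti-range handling: for a signed EXP known to
   be outside [-3, 7] (min = -3, max = 7), max is positive and
   min - 1 = -4 is negative, so the middle branch applies and the
   returned size range is [8, TYPE_MAX]; the excluded negative values
   would convert to huge unsigned sizes that fall in that range
   anyway.  */
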
/* Diagnose a call EXP to function FN decorated with attribute alloc_size
   whose argument numbers given by IDX with values given by ARGS exceed
   the maximum object size or cause an unsigned overflow (wrapping) when
   multiplied.  When ARGS[0] is null the function does nothing.  ARGS[1]
   may be null for functions like malloc, and non-null for those like
   calloc that are decorated with a two-argument attribute alloc_size.  */

void
maybe_warn_alloc_args_overflow (tree fn, tree exp, tree args[2], int idx[2])
{
  /* The range each of the (up to) two arguments is known to be in.  */
  tree argrange[2][2] = { { NULL_TREE, NULL_TREE }, { NULL_TREE, NULL_TREE } };

  /* Maximum object size set by -Walloc-size-larger-than= or SIZE_MAX / 2.  */
  tree maxobjsize = alloc_max_size ();

  location_t loc = EXPR_LOCATION (exp);

  bool warned = false;

  /* Validate each argument individually.  */
  for (unsigned i = 0; i != 2 && args[i]; ++i)
    {
      if (TREE_CODE (args[i]) == INTEGER_CST)
	{
	  argrange[i][0] = args[i];
	  argrange[i][1] = args[i];

	  if (tree_int_cst_lt (args[i], integer_zero_node))
	    {
	      warned = warning_at (loc, OPT_Walloc_size_larger_than_,
				   "%Kargument %i value %qE is negative",
				   exp, idx[i] + 1, args[i]);
	    }
	  else if (integer_zerop (args[i]))
	    {
	      /* Avoid issuing -Walloc-zero for allocation functions other
		 than __builtin_alloca that are declared with attribute
		 returns_nonnull because there's no portability risk.  This
		 avoids warning for such calls to libiberty's xmalloc and
		 friends.
		 Also avoid issuing the warning for calls to function named
		 "alloca".  */
	      if ((DECL_FUNCTION_CODE (fn) == BUILT_IN_ALLOCA
		   && IDENTIFIER_LENGTH (DECL_NAME (fn)) != 6)
		  || (DECL_FUNCTION_CODE (fn) != BUILT_IN_ALLOCA
		      && !lookup_attribute ("returns_nonnull",
					    TYPE_ATTRIBUTES (TREE_TYPE (fn)))))
		warned = warning_at (loc, OPT_Walloc_zero,
				     "%Kargument %i value is zero",
				     exp, idx[i] + 1);
	    }
	  else if (tree_int_cst_lt (maxobjsize, args[i]))
	    {
	      /* G++ emits calls to ::operator new[](SIZE_MAX) in C++98
		 mode and with -fno-exceptions as a way to indicate array
		 size overflow.  There's no good way to detect C++98 here
		 so avoid diagnosing these calls for all C++ modes.  */
	      if (i == 0
		  && !args[1]
		  && lang_GNU_CXX ()
		  && DECL_IS_OPERATOR_NEW (fn)
		  && integer_all_onesp (args[i]))
		continue;

	      warned = warning_at (loc, OPT_Walloc_size_larger_than_,
				   "%Kargument %i value %qE exceeds "
				   "maximum object size %E",
				   exp, idx[i] + 1, args[i], maxobjsize);
	    }
	}
      else if (TREE_CODE (args[i]) == SSA_NAME
	       && get_size_range (args[i], argrange[i]))
	{
	  /* Verify that the argument's range is not negative (including
	     upper bound of zero).  */
	  if (tree_int_cst_lt (argrange[i][0], integer_zero_node)
	      && tree_int_cst_le (argrange[i][1], integer_zero_node))
	    {
	      warned = warning_at (loc, OPT_Walloc_size_larger_than_,
				   "%Kargument %i range [%E, %E] is negative",
				   exp, idx[i] + 1,
				   argrange[i][0], argrange[i][1]);
	    }
	  else if (tree_int_cst_lt (maxobjsize, argrange[i][0]))
	    {
	      warned = warning_at (loc, OPT_Walloc_size_larger_than_,
				   "%Kargument %i range [%E, %E] exceeds "
				   "maximum object size %E",
				   exp, idx[i] + 1,
				   argrange[i][0], argrange[i][1],
				   maxobjsize);
	    }
	}
    }

  if (!argrange[0][0])
    return;

  /* For a two-argument alloc_size, validate the product of the two
     arguments if both of their values or ranges are known.  */
  if (!warned && tree_fits_uhwi_p (argrange[0][0])
      && argrange[1][0] && tree_fits_uhwi_p (argrange[1][0])
      && !integer_onep (argrange[0][0])
      && !integer_onep (argrange[1][0]))
    {
      /* Check for overflow in the product of a function decorated with
	 attribute alloc_size (X, Y).  */
      unsigned szprec = TYPE_PRECISION (size_type_node);
      wide_int x = wi::to_wide (argrange[0][0], szprec);
      wide_int y = wi::to_wide (argrange[1][0], szprec);

      bool vflow;
      wide_int prod = wi::umul (x, y, &vflow);

      if (vflow)
	warned = warning_at (loc, OPT_Walloc_size_larger_than_,
			     "%Kproduct %<%E * %E%> of arguments %i and %i "
			     "exceeds %<SIZE_MAX%>",
			     exp, argrange[0][0], argrange[1][0],
			     idx[0] + 1, idx[1] + 1);
      else if (wi::ltu_p (wi::to_wide (maxobjsize, szprec), prod))
	warned = warning_at (loc, OPT_Walloc_size_larger_than_,
			     "%Kproduct %<%E * %E%> of arguments %i and %i "
			     "exceeds maximum object size %E",
			     exp, argrange[0][0], argrange[1][0],
			     idx[0] + 1, idx[1] + 1,
			     maxobjsize);

      if (warned)
	{
	  /* Print the full range of each of the two arguments to make
	     it clear when it is, in fact, in a range and not constant.  */
	  if (argrange[0][0] != argrange [0][1])
	    inform (loc, "argument %i in the range [%E, %E]",
		    idx[0] + 1, argrange[0][0], argrange[0][1]);
	  if (argrange[1][0] != argrange [1][1])
	    inform (loc, "argument %i in the range [%E, %E]",
		    idx[1] + 1, argrange[1][0], argrange[1][1]);
	}
    }

  if (warned)
    {
      location_t fnloc = DECL_SOURCE_LOCATION (fn);

      if (DECL_IS_BUILTIN (fn))
	inform (loc,
		"in a call to built-in allocation function %qD", fn);
      else
	inform (fnloc,
		"in a call to allocation function %qD declared here", fn);
    }
}

/* If EXPR refers to a character array or pointer declared attribute
   nonstring return a decl for that array or pointer and set *REF to
   the referenced enclosing object or pointer.  Otherwise returns
   null.  */

tree
get_attr_nonstring_decl (tree expr, tree *ref)
{
  tree decl = expr;
  if (TREE_CODE (decl) == SSA_NAME)
    {
      gimple *def = SSA_NAME_DEF_STMT (decl);

      if (is_gimple_assign (def))
	{
	  tree_code code = gimple_assign_rhs_code (def);
	  if (code == ADDR_EXPR
	      || code == COMPONENT_REF
	      || code == VAR_DECL)
	    decl = gimple_assign_rhs1 (def);
	}
      else if (tree var = SSA_NAME_VAR (decl))
	decl = var;
    }

  if (TREE_CODE (decl) == ADDR_EXPR)
    decl = TREE_OPERAND (decl, 0);

  if (ref)
    *ref = decl;

  if (TREE_CODE (decl) == COMPONENT_REF)
    decl = TREE_OPERAND (decl, 1);

  if (DECL_P (decl)
      && lookup_attribute ("nonstring", DECL_ATTRIBUTES (decl)))
    return decl;

  return NULL_TREE;
}

/* Warn about passing a non-string array/pointer to a function that
   expects a nul-terminated string argument.  */

void
maybe_warn_nonstring_arg (tree fndecl, tree exp)
{
  if (!fndecl || DECL_BUILT_IN_CLASS (fndecl) != BUILT_IN_NORMAL)
    return;

  bool with_bounds = CALL_WITH_BOUNDS_P (exp);

  /* The bound argument to a bounded string function like strncpy.  */
  tree bound = NULL_TREE;

  /* It's safe to call "bounded" string functions with a non-string
     argument since the functions provide an explicit bound for this
     purpose.  */
  switch (DECL_FUNCTION_CODE (fndecl))
    {
    case BUILT_IN_STPNCPY:
    case BUILT_IN_STPNCPY_CHK:
    case BUILT_IN_STRNCMP:
    case BUILT_IN_STRNCASECMP:
    case BUILT_IN_STRNCPY:
    case BUILT_IN_STRNCPY_CHK:
      bound = CALL_EXPR_ARG (exp, with_bounds ? 4 : 2);
      break;

    case BUILT_IN_STRNDUP:
      bound = CALL_EXPR_ARG (exp, with_bounds ? 2 : 1);
      break;

    default:
      break;
    }

  /* Determine the range of the bound argument (if specified).  */
  tree bndrng[2] = { NULL_TREE, NULL_TREE };
  if (bound)
    get_size_range (bound, bndrng);

  /* Iterate over the built-in function's formal arguments and check
     each const char* against the actual argument.  If the actual
     argument is declared attribute non-string issue a warning unless
     the argument's maximum length is bounded.  */
  function_args_iterator it;
  function_args_iter_init (&it, TREE_TYPE (fndecl));

  for (unsigned argno = 0; ; ++argno, function_args_iter_next (&it))
    {
      tree argtype = function_args_iter_cond (&it);
      if (!argtype)
	break;

      if (TREE_CODE (argtype) != POINTER_TYPE)
	continue;

      argtype = TREE_TYPE (argtype);

      if (TREE_CODE (argtype) != INTEGER_TYPE
	  || !TYPE_READONLY (argtype))
	continue;

      argtype = TYPE_MAIN_VARIANT (argtype);
      if (argtype != char_type_node)
	continue;

      tree callarg = CALL_EXPR_ARG (exp, argno);
      if (TREE_CODE (callarg) == ADDR_EXPR)
	callarg = TREE_OPERAND (callarg, 0);

      /* See if the destination is declared with attribute "nonstring".  */
      tree decl = get_attr_nonstring_decl (callarg);
      if (!decl)
	continue;

      tree type = TREE_TYPE (decl);

      offset_int wibnd = 0;
      if (bndrng[0])
	wibnd = wi::to_offset (bndrng[0]);

      offset_int asize = wibnd;

      if (TREE_CODE (type) == ARRAY_TYPE)
	if (tree arrbnd = TYPE_DOMAIN (type))
	  {
	    if ((arrbnd = TYPE_MAX_VALUE (arrbnd)))
	      asize = wi::to_offset (arrbnd) + 1;
	  }

      location_t loc = EXPR_LOCATION (exp);

      bool warned = false;

      if (wi::ltu_p (asize, wibnd))
	warned = warning_at (loc, OPT_Wstringop_overflow_,
			     "%qD argument %i declared attribute %<nonstring%> "
			     "is smaller than the specified bound %E",
			     fndecl, argno + 1, bndrng[0]);
      else if (!bound)
	warned = warning_at (loc, OPT_Wstringop_overflow_,
			     "%qD argument %i declared attribute %<nonstring%>",
			     fndecl, argno + 1);

      if (warned)
	inform (DECL_SOURCE_LOCATION (decl),
		"argument %qD declared here", decl);
    }
}

/* Issue an error if CALL_EXPR was flagged as requiring
   tail-call optimization.  */

static void
maybe_complain_about_tail_call (tree call_expr, const char *reason)
{
  gcc_assert (TREE_CODE (call_expr) == CALL_EXPR);
  if (!CALL_EXPR_MUST_TAIL_CALL (call_expr))
    return;

  error_at (EXPR_LOCATION (call_expr), "cannot tail-call: %s", reason);
}

/* Fill in ARGS_SIZE and ARGS array based on the parameters found in
   CALL_EXPR EXP.

   NUM_ACTUALS is the total number of parameters.

   N_NAMED_ARGS is the total number of named arguments.

   STRUCT_VALUE_ADDR_VALUE is the implicit argument for a struct return
   value, or null.

   FNDECL is the tree code for the target of this call (if known)

   ARGS_SO_FAR holds state needed by the target to know where to place
   the next argument.

   REG_PARM_STACK_SPACE is the number of bytes of stack space reserved
   for arguments which are passed in registers.

   OLD_STACK_LEVEL is a pointer to an rtx which holds the old stack level
   and may be modified by this routine.

   OLD_PENDING_ADJ, MUST_PREALLOCATE and FLAGS are pointers to integer
   flags which may be modified by this routine.

   MAY_TAILCALL is cleared if we encounter an invisible pass-by-reference
   that requires allocation of stack space.

   CALL_FROM_THUNK_P is true if this call is the jump from a thunk to
   the thunked-to function.  */

1716 static void
1717 initialize_argument_information (int num_actuals ATTRIBUTE_UNUSED,
1718 struct arg_data *args,
1719 struct args_size *args_size,
1720 int n_named_args ATTRIBUTE_UNUSED,
1721 tree exp, tree struct_value_addr_value,
1722 tree fndecl, tree fntype,
1723 cumulative_args_t args_so_far,
1724 int reg_parm_stack_space,
1725 rtx *old_stack_level, int *old_pending_adj,
1726 int *must_preallocate, int *ecf_flags,
1727 bool *may_tailcall, bool call_from_thunk_p)
1729 CUMULATIVE_ARGS *args_so_far_pnt = get_cumulative_args (args_so_far);
1730 location_t loc = EXPR_LOCATION (exp);
1732 /* Count arg position in order args appear. */
1733 int argpos;
1735 int i;
1737 args_size->constant = 0;
1738 args_size->var = 0;
1740 bitmap_obstack_initialize (NULL);
1742 /* In this loop, we consider args in the order they are written.
1743 We fill up ARGS from the back. */
1745 i = num_actuals - 1;
1747 int j = i, ptr_arg = -1;
1748 call_expr_arg_iterator iter;
1749 tree arg;
1750 bitmap slots = NULL;
1752 if (struct_value_addr_value)
1754 args[j].tree_value = struct_value_addr_value;
1755 j--;
1757 /* If we pass structure address then we need to
1758 create bounds for it. Since created bounds is
1759 a call statement, we expand it right here to avoid
1760 fixing all other places where it may be expanded. */
1761 if (CALL_WITH_BOUNDS_P (exp))
1763 args[j].value = gen_reg_rtx (targetm.chkp_bound_mode ());
1764 args[j].tree_value
1765 = chkp_make_bounds_for_struct_addr (struct_value_addr_value);
1766 expand_expr_real (args[j].tree_value, args[j].value, VOIDmode,
1767 EXPAND_NORMAL, 0, false);
1768 args[j].pointer_arg = j + 1;
1769 j--;
1772 argpos = 0;
1773 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
1775 tree argtype = TREE_TYPE (arg);
1777 /* Remember last param with pointer and associate it
1778 with following pointer bounds. */
1779 if (CALL_WITH_BOUNDS_P (exp)
1780 && chkp_type_has_pointer (argtype))
1782 if (slots)
1783 BITMAP_FREE (slots);
1784 ptr_arg = j;
1785 if (!BOUNDED_TYPE_P (argtype))
1787 slots = BITMAP_ALLOC (NULL);
1788 chkp_find_bound_slots (argtype, slots);
1791 else if (CALL_WITH_BOUNDS_P (exp)
1792 && pass_by_reference (NULL, TYPE_MODE (argtype), argtype,
1793 argpos < n_named_args))
1795 if (slots)
1796 BITMAP_FREE (slots);
1797 ptr_arg = j;
1799 else if (POINTER_BOUNDS_TYPE_P (argtype))
1801 /* We expect bounds in instrumented calls only.
1802 Otherwise it is a sign we lost flag due to some optimization
1803 and may emit call args incorrectly. */
1804 gcc_assert (CALL_WITH_BOUNDS_P (exp));
1806 /* For structures look for the next available pointer. */
1807 if (ptr_arg != -1 && slots)
1809 unsigned bnd_no = bitmap_first_set_bit (slots);
1810 args[j].pointer_offset =
1811 bnd_no * POINTER_SIZE / BITS_PER_UNIT;
1813 bitmap_clear_bit (slots, bnd_no);
1815 /* Check we have no more pointers in the structure. */
1816 if (bitmap_empty_p (slots))
1817 BITMAP_FREE (slots);
1819 args[j].pointer_arg = ptr_arg;
1821 /* Check we covered all pointers in the previous
1822 non bounds arg. */
1823 if (!slots)
1824 ptr_arg = -1;
1826 else
1827 ptr_arg = -1;
1829 if (targetm.calls.split_complex_arg
1830 && argtype
1831 && TREE_CODE (argtype) == COMPLEX_TYPE
1832 && targetm.calls.split_complex_arg (argtype))
1834 tree subtype = TREE_TYPE (argtype);
1835 args[j].tree_value = build1 (REALPART_EXPR, subtype, arg);
1836 j--;
1837 args[j].tree_value = build1 (IMAGPART_EXPR, subtype, arg);
1839 else
1840 args[j].tree_value = arg;
1841 j--;
1842 argpos++;
1845 if (slots)
1846 BITMAP_FREE (slots);
1849 bitmap_obstack_release (NULL);
1851 /* Extract the attribute alloc_size and, if set, store the indices of
1852 the corresponding arguments in ALLOC_IDX, and then the actual
1853 argument(s) at those indices in ALLOC_ARGS. */
1854 int alloc_idx[2] = { -1, -1 };
1855 if (tree alloc_size
1856 = (fndecl ? lookup_attribute ("alloc_size",
1857 TYPE_ATTRIBUTES (TREE_TYPE (fndecl)))
1858 : NULL_TREE))
1860 tree args = TREE_VALUE (alloc_size);
1861 alloc_idx[0] = TREE_INT_CST_LOW (TREE_VALUE (args)) - 1;
1862 if (TREE_CHAIN (args))
1863 alloc_idx[1] = TREE_INT_CST_LOW (TREE_VALUE (TREE_CHAIN (args))) - 1;
1866 /* Array for up to the two attribute alloc_size arguments. */
1867 tree alloc_args[] = { NULL_TREE, NULL_TREE };
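/* For illustration (not part of this file): given a declaration such as

     void *my_alloc (size_t n, size_t sz)
       __attribute__ ((alloc_size (1, 2)));

   where my_alloc is a hypothetical function, the attribute positions
   are 1-based, so ALLOC_IDX becomes { 0, 1 } and the loop below records
   the actual N and SZ argument expressions in ALLOC_ARGS.  */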
1869 /* I counts args in order (to be) pushed; ARGPOS counts in order written. */
1870 for (argpos = 0; argpos < num_actuals; i--, argpos++)
1872 tree type = TREE_TYPE (args[i].tree_value);
1873 int unsignedp;
1874 machine_mode mode;
1876 /* Replace erroneous argument with constant zero. */
1877 if (type == error_mark_node || !COMPLETE_TYPE_P (type))
1878 args[i].tree_value = integer_zero_node, type = integer_type_node;
1880 /* If TYPE is a transparent union or record, pass things the way
1881 we would pass the first field of the union or record. We have
1882 already verified that the modes are the same. */
1883 if ((TREE_CODE (type) == UNION_TYPE || TREE_CODE (type) == RECORD_TYPE)
1884 && TYPE_TRANSPARENT_AGGR (type))
1885 type = TREE_TYPE (first_field (type));
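/* For example (illustrative only), given

     typedef union { int *ip; const int *cip; } arg_u
       __attribute__ ((__transparent_union__));

   an argument of type arg_u is passed exactly as its first field,
   an int *, would be.  */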
1887 /* Decide where to pass this arg.
1889 args[i].reg is nonzero if all or part is passed in registers.
1891 args[i].partial is nonzero if part but not all is passed in registers,
1892 and the exact value says how many bytes are passed in registers.
1894 args[i].pass_on_stack is nonzero if the argument must at least be
1895 computed on the stack. It may then be loaded back into registers
1896 if args[i].reg is nonzero.
1898 These decisions are driven by the FUNCTION_... macros and must agree
1899 with those made by function.c. */
1901 /* See if this argument should be passed by invisible reference. */
1902 if (pass_by_reference (args_so_far_pnt, TYPE_MODE (type),
1903 type, argpos < n_named_args))
1905 bool callee_copies;
1906 tree base = NULL_TREE;
1908 callee_copies
1909 = reference_callee_copied (args_so_far_pnt, TYPE_MODE (type),
1910 type, argpos < n_named_args);
1912 /* If we're compiling a thunk, pass through invisible references
1913 instead of making a copy. */
1914 if (call_from_thunk_p
1915 || (callee_copies
1916 && !TREE_ADDRESSABLE (type)
1917 && (base = get_base_address (args[i].tree_value))
1918 && TREE_CODE (base) != SSA_NAME
1919 && (!DECL_P (base) || MEM_P (DECL_RTL (base)))))
1921 /* We may have turned the parameter value into an SSA name.
1922 Go back to the original parameter so we can take the
1923 address. */
1924 if (TREE_CODE (args[i].tree_value) == SSA_NAME)
1926 gcc_assert (SSA_NAME_IS_DEFAULT_DEF (args[i].tree_value));
1927 args[i].tree_value = SSA_NAME_VAR (args[i].tree_value);
1928 gcc_assert (TREE_CODE (args[i].tree_value) == PARM_DECL);
1930 /* Argument setup code may have copied the value to a register. We
1931 revert that optimization now because the tail call code must
1932 use the original location. */
1933 if (TREE_CODE (args[i].tree_value) == PARM_DECL
1934 && !MEM_P (DECL_RTL (args[i].tree_value))
1935 && DECL_INCOMING_RTL (args[i].tree_value)
1936 && MEM_P (DECL_INCOMING_RTL (args[i].tree_value)))
1937 set_decl_rtl (args[i].tree_value,
1938 DECL_INCOMING_RTL (args[i].tree_value));
1940 mark_addressable (args[i].tree_value);
1942 /* We can't use sibcalls if a callee-copied argument is
1943 stored in the current function's frame. */
1944 if (!call_from_thunk_p && DECL_P (base) && !TREE_STATIC (base))
1946 *may_tailcall = false;
1947 maybe_complain_about_tail_call (exp,
1948 "a callee-copied argument is"
1949 " stored in the current "
1950 " function's frame");
1953 args[i].tree_value = build_fold_addr_expr_loc (loc,
1954 args[i].tree_value);
1955 type = TREE_TYPE (args[i].tree_value);
1957 if (*ecf_flags & ECF_CONST)
1958 *ecf_flags &= ~(ECF_CONST | ECF_LOOPING_CONST_OR_PURE);
1960 else
1962 /* We make a copy of the object and pass the address to the
1963 function being called. */
1964 rtx copy;
1966 if (!COMPLETE_TYPE_P (type)
1967 || TREE_CODE (TYPE_SIZE_UNIT (type)) != INTEGER_CST
1968 || (flag_stack_check == GENERIC_STACK_CHECK
1969 && compare_tree_int (TYPE_SIZE_UNIT (type),
1970 STACK_CHECK_MAX_VAR_SIZE) > 0))
1972 /* This is a variable-sized object. Make space on the stack
1973 for it. */
1974 rtx size_rtx = expr_size (args[i].tree_value);
1976 if (*old_stack_level == 0)
1978 emit_stack_save (SAVE_BLOCK, old_stack_level);
1979 *old_pending_adj = pending_stack_adjust;
1980 pending_stack_adjust = 0;
1983 /* We can pass TRUE as the 4th argument because we just
1984 saved the stack pointer and will restore it right after
1985 the call. */
1986 copy = allocate_dynamic_stack_space (size_rtx,
1987 TYPE_ALIGN (type),
1988 TYPE_ALIGN (type),
1989 max_int_size_in_bytes
1990 (type),
1991 true);
1992 copy = gen_rtx_MEM (BLKmode, copy);
1993 set_mem_attributes (copy, type, 1);
1995 else
1996 copy = assign_temp (type, 1, 0);
1998 store_expr (args[i].tree_value, copy, 0, false, false);
2000 /* Just change the const function to pure and then let
2001 the next test clear the pure based on
2002 callee_copies. */
2003 if (*ecf_flags & ECF_CONST)
2005 *ecf_flags &= ~ECF_CONST;
2006 *ecf_flags |= ECF_PURE;
2009 if (!callee_copies && *ecf_flags & ECF_PURE)
2010 *ecf_flags &= ~(ECF_PURE | ECF_LOOPING_CONST_OR_PURE);
2012 args[i].tree_value
2013 = build_fold_addr_expr_loc (loc, make_tree (type, copy));
2014 type = TREE_TYPE (args[i].tree_value);
2015 *may_tailcall = false;
2016 maybe_complain_about_tail_call (exp,
2017 "argument must be passed"
2018 " by copying");
2022 unsignedp = TYPE_UNSIGNED (type);
2023 mode = promote_function_mode (type, TYPE_MODE (type), &unsignedp,
2024 fndecl ? TREE_TYPE (fndecl) : fntype, 0);
2026 args[i].unsignedp = unsignedp;
2027 args[i].mode = mode;
2029 targetm.calls.warn_parameter_passing_abi (args_so_far, type);
2031 args[i].reg = targetm.calls.function_arg (args_so_far, mode, type,
2032 argpos < n_named_args);
2034 if (args[i].reg && CONST_INT_P (args[i].reg))
2036 args[i].special_slot = args[i].reg;
2037 args[i].reg = NULL;
2040 /* If this is a sibling call and the machine has register windows, the
2041 register window has to be unwound before calling the routine, so
2042 arguments have to go into the incoming registers. */
2043 if (targetm.calls.function_incoming_arg != targetm.calls.function_arg)
2044 args[i].tail_call_reg
2045 = targetm.calls.function_incoming_arg (args_so_far, mode, type,
2046 argpos < n_named_args);
2047 else
2048 args[i].tail_call_reg = args[i].reg;
2050 if (args[i].reg)
2051 args[i].partial
2052 = targetm.calls.arg_partial_bytes (args_so_far, mode, type,
2053 argpos < n_named_args);
2055 args[i].pass_on_stack = targetm.calls.must_pass_in_stack (mode, type);
2057 /* If FUNCTION_ARG returned a (parallel [(expr_list (nil) ...) ...]),
2058 it means that we are to pass this arg in the register(s) designated
2059 by the PARALLEL, but also to pass it in the stack. */
2060 if (args[i].reg && GET_CODE (args[i].reg) == PARALLEL
2061 && XEXP (XVECEXP (args[i].reg, 0, 0), 0) == 0)
2062 args[i].pass_on_stack = 1;
2064 /* If this is an addressable type, we must preallocate the stack
2065 since we must evaluate the object into its final location.
2067 If this is to be passed in both registers and the stack, it is simpler
2068 to preallocate. */
2069 if (TREE_ADDRESSABLE (type)
2070 || (args[i].pass_on_stack && args[i].reg != 0))
2071 *must_preallocate = 1;
2073 /* No stack allocation or padding for bounds. */
2074 if (POINTER_BOUNDS_P (args[i].tree_value))
2076 /* Compute the stack-size of this argument. */
2077 else if (args[i].reg == 0 || args[i].partial != 0
2078 || reg_parm_stack_space > 0
2079 || args[i].pass_on_stack)
2080 locate_and_pad_parm (mode, type,
2081 #ifdef STACK_PARMS_IN_REG_PARM_AREA
2082 1,
2083 #else
2084 args[i].reg != 0,
2085 #endif
2086 reg_parm_stack_space,
2087 args[i].pass_on_stack ? 0 : args[i].partial,
2088 fndecl, args_size, &args[i].locate);
2089 #ifdef BLOCK_REG_PADDING
2090 else
2091 /* The argument is passed entirely in registers. See at which
2092 end it should be padded. */
2093 args[i].locate.where_pad =
2094 BLOCK_REG_PADDING (mode, type,
2095 int_size_in_bytes (type) <= UNITS_PER_WORD);
2096 #endif
2098 /* Update ARGS_SIZE, the total stack space for args so far. */
2100 args_size->constant += args[i].locate.size.constant;
2101 if (args[i].locate.size.var)
2102 ADD_PARM_SIZE (*args_size, args[i].locate.size.var);
2104 /* Increment ARGS_SO_FAR, which has info about which arg-registers
2105 have been used, etc. */
2107 targetm.calls.function_arg_advance (args_so_far, TYPE_MODE (type),
2108 type, argpos < n_named_args);
2110 /* Store argument values for functions decorated with attribute
2111 alloc_size. */
2112 if (argpos == alloc_idx[0])
2113 alloc_args[0] = args[i].tree_value;
2114 else if (argpos == alloc_idx[1])
2115 alloc_args[1] = args[i].tree_value;
2118 if (alloc_args[0])
2120 /* Check the arguments of functions decorated with attribute
2121 alloc_size. */
2122 maybe_warn_alloc_args_overflow (fndecl, exp, alloc_args, alloc_idx);
2125 /* Detect passing non-string arguments to functions expecting
2126 nul-terminated strings. */
2127 maybe_warn_nonstring_arg (fndecl, exp);
2130 /* Update ARGS_SIZE to contain the total size for the argument block.
2131 Return the original constant component of the argument block's size.
2133 REG_PARM_STACK_SPACE holds the number of bytes of stack space reserved
2134 for arguments passed in registers. */
2136 static int
2137 compute_argument_block_size (int reg_parm_stack_space,
2138 struct args_size *args_size,
2139 tree fndecl ATTRIBUTE_UNUSED,
2140 tree fntype ATTRIBUTE_UNUSED,
2141 int preferred_stack_boundary ATTRIBUTE_UNUSED)
2143 int unadjusted_args_size = args_size->constant;
2145 /* For accumulate outgoing args mode we don't need to align, since the frame
2146 will already be aligned. Align to STACK_BOUNDARY in order to prevent
2147 backends from generating misaligned frame sizes. */
2148 if (ACCUMULATE_OUTGOING_ARGS && preferred_stack_boundary > STACK_BOUNDARY)
2149 preferred_stack_boundary = STACK_BOUNDARY;
2151 /* Compute the actual size of the argument block required. The variable
2152 and constant sizes must be combined, the size may have to be rounded,
2153 and there may be a minimum required size. */
2155 if (args_size->var)
2157 args_size->var = ARGS_SIZE_TREE (*args_size);
2158 args_size->constant = 0;
2160 preferred_stack_boundary /= BITS_PER_UNIT;
2161 if (preferred_stack_boundary > 1)
2163 /* We don't handle this case yet. To handle it correctly we have
2164 to add the delta, round and subtract the delta.
2165 Currently no machine description requires this support. */
2166 gcc_assert (!(stack_pointer_delta & (preferred_stack_boundary - 1)));
2167 args_size->var = round_up (args_size->var, preferred_stack_boundary);
2170 if (reg_parm_stack_space > 0)
2172 args_size->var
2173 = size_binop (MAX_EXPR, args_size->var,
2174 ssize_int (reg_parm_stack_space));
2176 /* The area corresponding to register parameters is not to be counted in
2177 the size of the block we need. So make the adjustment. */
2178 if (! OUTGOING_REG_PARM_STACK_SPACE ((!fndecl ? fntype : TREE_TYPE (fndecl))))
2179 args_size->var
2180 = size_binop (MINUS_EXPR, args_size->var,
2181 ssize_int (reg_parm_stack_space));
2184 else
2186 preferred_stack_boundary /= BITS_PER_UNIT;
2187 if (preferred_stack_boundary < 1)
2188 preferred_stack_boundary = 1;
2189 args_size->constant = (((args_size->constant
2190 + stack_pointer_delta
2191 + preferred_stack_boundary - 1)
2192 / preferred_stack_boundary
2193 * preferred_stack_boundary)
2194 - stack_pointer_delta);
2196 args_size->constant = MAX (args_size->constant,
2197 reg_parm_stack_space);
2199 if (! OUTGOING_REG_PARM_STACK_SPACE ((!fndecl ? fntype : TREE_TYPE (fndecl))))
2200 args_size->constant -= reg_parm_stack_space;
2202 return unadjusted_args_size;
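/* A minimal standalone sketch (not used by GCC) of the rounding done
   above for the constant case: bias the size by the current stack
   pointer delta, round up to the preferred boundary, then remove the
   bias again.  E.g. SIZE == 20 with DELTA == 4 and BOUNDARY == 16
   yields 28, so that DELTA + 28 == 32 remains a multiple of 16.  */

static int
round_args_size_sketch (int size, int delta, int boundary)
{
  return (size + delta + boundary - 1) / boundary * boundary - delta;
}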
2205 /* Precompute parameters as needed for a function call.
2207 FLAGS is mask of ECF_* constants.
2209 NUM_ACTUALS is the number of arguments.
2211 ARGS is an array containing information for each argument; this
2212 routine fills in the INITIAL_VALUE and VALUE fields for each
2213 precomputed argument. */
2215 static void
2216 precompute_arguments (int num_actuals, struct arg_data *args)
2218 int i;
2220 /* If this is a libcall, then precompute all arguments so that we do not
2221 get extraneous instructions emitted as part of the libcall sequence. */
2223 /* If we preallocated the stack space, and some arguments must be passed
2224 on the stack, then we must precompute any parameter which contains a
2225 function call which will store arguments on the stack.
2226 Otherwise, evaluating the parameter may clobber previous parameters
2227 which have already been stored into the stack. (We have code to avoid
2228 such a case by saving the outgoing stack arguments, but it results in
2229 worse code.) */
2230 if (!ACCUMULATE_OUTGOING_ARGS)
2231 return;
2233 for (i = 0; i < num_actuals; i++)
2235 tree type;
2236 machine_mode mode;
2238 if (TREE_CODE (args[i].tree_value) != CALL_EXPR)
2239 continue;
2241 /* If this is an addressable type, we cannot pre-evaluate it. */
2242 type = TREE_TYPE (args[i].tree_value);
2243 gcc_assert (!TREE_ADDRESSABLE (type));
2245 args[i].initial_value = args[i].value
2246 = expand_normal (args[i].tree_value);
2248 mode = TYPE_MODE (type);
2249 if (mode != args[i].mode)
2251 int unsignedp = args[i].unsignedp;
2252 args[i].value
2253 = convert_modes (args[i].mode, mode,
2254 args[i].value, args[i].unsignedp);
2256 /* CSE will replace this only if it contains args[i].value
2257 pseudo, so convert it down to the declared mode using
2258 a SUBREG. */
2259 if (REG_P (args[i].value)
2260 && GET_MODE_CLASS (args[i].mode) == MODE_INT
2261 && promote_mode (type, mode, &unsignedp) != args[i].mode)
2263 args[i].initial_value
2264 = gen_lowpart_SUBREG (mode, args[i].value);
2265 SUBREG_PROMOTED_VAR_P (args[i].initial_value) = 1;
2266 SUBREG_PROMOTED_SET (args[i].initial_value, args[i].unsignedp);
2272 /* Given the current state of MUST_PREALLOCATE and information about
2273 arguments to a function call in NUM_ACTUALS, ARGS and ARGS_SIZE,
2274 compute and return the final value for MUST_PREALLOCATE. */
2276 static int
2277 finalize_must_preallocate (int must_preallocate, int num_actuals,
2278 struct arg_data *args, struct args_size *args_size)
2280 /* See if we have or want to preallocate stack space.
2282 If we would have to push a partially-in-regs parm
2283 before other stack parms, preallocate stack space instead.
2285 If the size of some parm is not a multiple of the required stack
2286 alignment, we must preallocate.
2288 If the total size of arguments that would otherwise create a copy in
2289 a temporary (such as a CALL) is more than half the total argument list
2290 size, preallocation is faster.
2292 Another reason to preallocate is if we have a machine (like the m88k)
2293 where stack alignment is required to be maintained between every
2294 pair of insns, not just when the call is made. However, we assume here
2295 that such machines either do not have push insns (and hence preallocation
2296 would occur anyway) or the problem is taken care of with
2297 PUSH_ROUNDING. */
2299 if (! must_preallocate)
2301 int partial_seen = 0;
2302 int copy_to_evaluate_size = 0;
2303 int i;
2305 for (i = 0; i < num_actuals && ! must_preallocate; i++)
2307 if (args[i].partial > 0 && ! args[i].pass_on_stack)
2308 partial_seen = 1;
2309 else if (partial_seen && args[i].reg == 0)
2310 must_preallocate = 1;
2311 /* We preallocate in case there are bounds passed
2312 in the bounds table, so that we have a precomputed
2313 address for bounds association. */
2314 else if (POINTER_BOUNDS_P (args[i].tree_value)
2315 && !args[i].reg)
2316 must_preallocate = 1;
2318 if (TYPE_MODE (TREE_TYPE (args[i].tree_value)) == BLKmode
2319 && (TREE_CODE (args[i].tree_value) == CALL_EXPR
2320 || TREE_CODE (args[i].tree_value) == TARGET_EXPR
2321 || TREE_CODE (args[i].tree_value) == COND_EXPR
2322 || TREE_ADDRESSABLE (TREE_TYPE (args[i].tree_value))))
2323 copy_to_evaluate_size
2324 += int_size_in_bytes (TREE_TYPE (args[i].tree_value));
2327 if (copy_to_evaluate_size * 2 >= args_size->constant
2328 && args_size->constant > 0)
2329 must_preallocate = 1;
2331 return must_preallocate;
2334 /* If we preallocated stack space, compute the address of each argument
2335 and store it into the ARGS array.
2337 We need not ensure it is a valid memory address here; it will be
2338 validated when it is used.
2340 ARGBLOCK is an rtx for the address of the outgoing arguments. */
2342 static void
2343 compute_argument_addresses (struct arg_data *args, rtx argblock, int num_actuals)
2345 if (argblock)
2347 rtx arg_reg = argblock;
2348 int i, arg_offset = 0;
2350 if (GET_CODE (argblock) == PLUS)
2351 arg_reg = XEXP (argblock, 0), arg_offset = INTVAL (XEXP (argblock, 1));
2353 for (i = 0; i < num_actuals; i++)
2355 rtx offset = ARGS_SIZE_RTX (args[i].locate.offset);
2356 rtx slot_offset = ARGS_SIZE_RTX (args[i].locate.slot_offset);
2357 rtx addr;
2358 unsigned int align, boundary;
2359 unsigned int units_on_stack = 0;
2360 machine_mode partial_mode = VOIDmode;
2362 /* Skip this parm if it will not be passed on the stack. */
2363 if (! args[i].pass_on_stack
2364 && args[i].reg != 0
2365 && args[i].partial == 0)
2366 continue;
2368 if (TYPE_EMPTY_P (TREE_TYPE (args[i].tree_value)))
2369 continue;
2371 /* Pointer bounds are never passed on the stack. */
2372 if (POINTER_BOUNDS_P (args[i].tree_value))
2373 continue;
2375 addr = simplify_gen_binary (PLUS, Pmode, arg_reg, offset);
2376 addr = plus_constant (Pmode, addr, arg_offset);
2378 if (args[i].partial != 0)
2380 /* Only part of the parameter is being passed on the stack.
2381 Generate a simple memory reference of the correct size. */
2382 units_on_stack = args[i].locate.size.constant;
2383 unsigned int bits_on_stack = units_on_stack * BITS_PER_UNIT;
2384 partial_mode = int_mode_for_size (bits_on_stack, 1).else_blk ();
2385 args[i].stack = gen_rtx_MEM (partial_mode, addr);
2386 set_mem_size (args[i].stack, units_on_stack);
2388 else
2390 args[i].stack = gen_rtx_MEM (args[i].mode, addr);
2391 set_mem_attributes (args[i].stack,
2392 TREE_TYPE (args[i].tree_value), 1);
2394 align = BITS_PER_UNIT;
2395 boundary = args[i].locate.boundary;
2396 if (args[i].locate.where_pad != PAD_DOWNWARD)
2397 align = boundary;
2398 else if (CONST_INT_P (offset))
2400 align = INTVAL (offset) * BITS_PER_UNIT | boundary;
2401 align = least_bit_hwi (align);
2403 set_mem_align (args[i].stack, align);
2405 addr = simplify_gen_binary (PLUS, Pmode, arg_reg, slot_offset);
2406 addr = plus_constant (Pmode, addr, arg_offset);
2408 if (args[i].partial != 0)
2410 /* Only part of the parameter is being passed on the stack.
2411 Generate a simple memory reference of the correct size. */
2413 args[i].stack_slot = gen_rtx_MEM (partial_mode, addr);
2414 set_mem_size (args[i].stack_slot, units_on_stack);
2416 else
2418 args[i].stack_slot = gen_rtx_MEM (args[i].mode, addr);
2419 set_mem_attributes (args[i].stack_slot,
2420 TREE_TYPE (args[i].tree_value), 1);
2422 set_mem_align (args[i].stack_slot, args[i].locate.boundary);
2424 /* Function incoming arguments may overlap with sibling call
2425 outgoing arguments and we cannot allow reordering of reads
2426 from function arguments with stores to outgoing arguments
2427 of sibling calls. */
2428 set_mem_alias_set (args[i].stack, 0);
2429 set_mem_alias_set (args[i].stack_slot, 0);
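/* Standalone sketch (not used by GCC) of the PAD_DOWNWARD alignment
   computation above: the least set bit of
   (byte_offset * BITS_PER_UNIT | boundary) is the strictest alignment
   the slot is still guaranteed to have.  E.g. a slot at byte offset 12
   with a 64-bit boundary gives (96 | 64) == 0x60, whose least set bit
   is 32, so only 32-bit alignment can be assumed.  */

static unsigned int
slot_align_sketch (unsigned int byte_offset, unsigned int boundary_bits)
{
  unsigned int align = byte_offset * 8 /* BITS_PER_UNIT */ | boundary_bits;
  return align & -align;	/* what least_bit_hwi computes */
}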
2434 /* Given a FNDECL and EXP, return an rtx suitable for use as a target address
2435 in a call instruction.
2437 FNDECL is the tree node for the target function. For an indirect call
2438 FNDECL will be NULL_TREE.
2440 ADDR is the operand 0 of CALL_EXPR for this call. */
2442 static rtx
2443 rtx_for_function_call (tree fndecl, tree addr)
2445 rtx funexp;
2447 /* Get the function to call, in the form of RTL. */
2448 if (fndecl)
2450 if (!TREE_USED (fndecl) && fndecl != current_function_decl)
2451 TREE_USED (fndecl) = 1;
2453 /* Get a SYMBOL_REF rtx for the function address. */
2454 funexp = XEXP (DECL_RTL (fndecl), 0);
2456 else
2457 /* Generate an rtx (probably a pseudo-register) for the address. */
2459 push_temp_slots ();
2460 funexp = expand_normal (addr);
2461 pop_temp_slots (); /* FUNEXP can't be BLKmode. */
2463 return funexp;
2466 /* Return the static chain for this function, if any. */
2468 rtx
2469 rtx_for_static_chain (const_tree fndecl_or_type, bool incoming_p)
2471 if (DECL_P (fndecl_or_type) && !DECL_STATIC_CHAIN (fndecl_or_type))
2472 return NULL;
2474 return targetm.calls.static_chain (fndecl_or_type, incoming_p);
2477 /* Internal state for internal_arg_pointer_based_exp and its helpers. */
2478 static struct
2480 /* Last insn that has been scanned by internal_arg_pointer_based_exp_scan,
2481 or NULL_RTX if none has been scanned yet. */
2482 rtx_insn *scan_start;
2483 /* Vector indexed by REGNO - FIRST_PSEUDO_REGISTER, recording if a pseudo is
2484 based on crtl->args.internal_arg_pointer. The element is NULL_RTX if the
2485 pseudo isn't based on it, a CONST_INT offset if the pseudo is based on it
2486 with fixed offset, or PC if this is with variable or unknown offset. */
2487 vec<rtx> cache;
2488 } internal_arg_pointer_exp_state;
2490 static rtx internal_arg_pointer_based_exp (const_rtx, bool);
2492 /* Helper function for internal_arg_pointer_based_exp. Scan insns in
2493 the tail call sequence, starting with the first insn that hasn't been
2494 scanned yet, and note for each pseudo on the LHS whether it is based
2495 on crtl->args.internal_arg_pointer or not, and what offset from
2496 that pointer it has. */
2498 static void
2499 internal_arg_pointer_based_exp_scan (void)
2501 rtx_insn *insn, *scan_start = internal_arg_pointer_exp_state.scan_start;
2503 if (scan_start == NULL_RTX)
2504 insn = get_insns ();
2505 else
2506 insn = NEXT_INSN (scan_start);
2508 while (insn)
2510 rtx set = single_set (insn);
2511 if (set && REG_P (SET_DEST (set)) && !HARD_REGISTER_P (SET_DEST (set)))
2513 rtx val = NULL_RTX;
2514 unsigned int idx = REGNO (SET_DEST (set)) - FIRST_PSEUDO_REGISTER;
2515 /* Punt on pseudos set multiple times. */
2516 if (idx < internal_arg_pointer_exp_state.cache.length ()
2517 && (internal_arg_pointer_exp_state.cache[idx]
2518 != NULL_RTX))
2519 val = pc_rtx;
2520 else
2521 val = internal_arg_pointer_based_exp (SET_SRC (set), false);
2522 if (val != NULL_RTX)
2524 if (idx >= internal_arg_pointer_exp_state.cache.length ())
2525 internal_arg_pointer_exp_state.cache
2526 .safe_grow_cleared (idx + 1);
2527 internal_arg_pointer_exp_state.cache[idx] = val;
2530 if (NEXT_INSN (insn) == NULL_RTX)
2531 scan_start = insn;
2532 insn = NEXT_INSN (insn);
2535 internal_arg_pointer_exp_state.scan_start = scan_start;
2538 /* Compute whether RTL is based on crtl->args.internal_arg_pointer. Return
2539 NULL_RTX if RTL isn't based on it, a CONST_INT offset if RTL is based on
2540 it with fixed offset, or PC if this is with variable or unknown offset.
2541 TOPLEVEL is true if the function is invoked at the topmost level. */
2543 static rtx
2544 internal_arg_pointer_based_exp (const_rtx rtl, bool toplevel)
2546 if (CONSTANT_P (rtl))
2547 return NULL_RTX;
2549 if (rtl == crtl->args.internal_arg_pointer)
2550 return const0_rtx;
2552 if (REG_P (rtl) && HARD_REGISTER_P (rtl))
2553 return NULL_RTX;
2555 if (GET_CODE (rtl) == PLUS && CONST_INT_P (XEXP (rtl, 1)))
2557 rtx val = internal_arg_pointer_based_exp (XEXP (rtl, 0), toplevel);
2558 if (val == NULL_RTX || val == pc_rtx)
2559 return val;
2560 return plus_constant (Pmode, val, INTVAL (XEXP (rtl, 1)));
2563 /* When called at the topmost level, scan pseudo assignments in between the
2564 last scanned instruction in the tail call sequence and the latest insn
2565 in that sequence. */
2566 if (toplevel)
2567 internal_arg_pointer_based_exp_scan ();
2569 if (REG_P (rtl))
2571 unsigned int idx = REGNO (rtl) - FIRST_PSEUDO_REGISTER;
2572 if (idx < internal_arg_pointer_exp_state.cache.length ())
2573 return internal_arg_pointer_exp_state.cache[idx];
2575 return NULL_RTX;
2578 subrtx_iterator::array_type array;
2579 FOR_EACH_SUBRTX (iter, array, rtl, NONCONST)
2581 const_rtx x = *iter;
2582 if (REG_P (x) && internal_arg_pointer_based_exp (x, false) != NULL_RTX)
2583 return pc_rtx;
2584 if (MEM_P (x))
2585 iter.skip_subrtxes ();
2588 return NULL_RTX;
2591 /* Return true if and only if SIZE storage units (usually bytes)
2592 starting from address ADDR overlap with the already clobbered argument
2593 area. This function is used to determine if we should give up a
2594 sibcall. */
2596 static bool
2597 mem_overlaps_already_clobbered_arg_p (rtx addr, unsigned HOST_WIDE_INT size)
2599 HOST_WIDE_INT i;
2600 rtx val;
2602 if (bitmap_empty_p (stored_args_map))
2603 return false;
2604 val = internal_arg_pointer_based_exp (addr, true);
2605 if (val == NULL_RTX)
2606 return false;
2607 else if (val == pc_rtx)
2608 return true;
2609 else
2610 i = INTVAL (val);
2612 if (STACK_GROWS_DOWNWARD)
2613 i -= crtl->args.pretend_args_size;
2614 else
2615 i += crtl->args.pretend_args_size;
2618 if (ARGS_GROW_DOWNWARD)
2619 i = -i - size;
2621 if (size > 0)
2623 unsigned HOST_WIDE_INT k;
2625 for (k = 0; k < size; k++)
2626 if (i + k < SBITMAP_SIZE (stored_args_map)
2627 && bitmap_bit_p (stored_args_map, i + k))
2628 return true;
2631 return false;
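/* Illustrative sketch (not used by GCC) of the test above:
   stored_args_map has one bit per byte of the outgoing argument area,
   so a reference to SIZE bytes at normalized offset I overlaps a
   clobbered slot iff any bit in [I, I + SIZE) is set.  */

static bool
overlaps_sketch (const unsigned char *byte_clobbered, int i, int size)
{
  for (int k = 0; k < size; k++)
    if (byte_clobbered[i + k])
      return true;
  return false;
}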
2634 /* Do the register loads required for any wholly-register parms or any
2635 parms which are passed both on the stack and in a register. Their
2636 expressions were already evaluated.
2638 Mark all register-parms as living through the call, putting these USE
2639 insns in the CALL_INSN_FUNCTION_USAGE field.
2641 When IS_SIBCALL, perform the check_sibcall_argument_overlap
2642 checking, setting *SIBCALL_FAILURE if appropriate. */
2644 static void
2645 load_register_parameters (struct arg_data *args, int num_actuals,
2646 rtx *call_fusage, int flags, int is_sibcall,
2647 int *sibcall_failure)
2649 int i, j;
2651 for (i = 0; i < num_actuals; i++)
2653 rtx reg = ((flags & ECF_SIBCALL)
2654 ? args[i].tail_call_reg : args[i].reg);
2655 if (reg)
2657 int partial = args[i].partial;
2658 int nregs;
2659 int size = 0;
2660 rtx_insn *before_arg = get_last_insn ();
2661 /* Set non-negative if we must move a word at a time, even if
2662 just one word (e.g., partial == 4 && mode == DFmode). Set
2663 to -1 if we just use a normal move insn. This value can be
2664 zero if the argument is a zero-sized structure. */
2665 nregs = -1;
2666 if (GET_CODE (reg) == PARALLEL)
2668 else if (partial)
2670 gcc_assert (partial % UNITS_PER_WORD == 0);
2671 nregs = partial / UNITS_PER_WORD;
2673 else if (TYPE_MODE (TREE_TYPE (args[i].tree_value)) == BLKmode)
2675 size = int_size_in_bytes (TREE_TYPE (args[i].tree_value));
2676 nregs = (size + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
2678 else
2679 size = GET_MODE_SIZE (args[i].mode);
2681 /* Handle calls that pass values in multiple non-contiguous
2682 locations. The Irix 6 ABI has examples of this. */
2684 if (GET_CODE (reg) == PARALLEL)
2685 emit_group_move (reg, args[i].parallel_value);
2687 /* If simple case, just do move. If normal partial, store_one_arg
2688 has already loaded the register for us. In all other cases,
2689 load the register(s) from memory. */
2691 else if (nregs == -1)
2693 emit_move_insn (reg, args[i].value);
2694 #ifdef BLOCK_REG_PADDING
2695 /* Handle the case where we have a value that needs shifting
2696 up to the MSB, e.g. a QImode value that we're padding
2697 upward on a BYTES_BIG_ENDIAN machine. */
2698 if (size < UNITS_PER_WORD
2699 && (args[i].locate.where_pad
2700 == (BYTES_BIG_ENDIAN ? PAD_UPWARD : PAD_DOWNWARD)))
2702 rtx x;
2703 int shift = (UNITS_PER_WORD - size) * BITS_PER_UNIT;
2705 /* Assigning REG here rather than a temp makes CALL_FUSAGE
2706 report the whole reg as used. Strictly speaking, the
2707 call only uses SIZE bytes at the msb end, but it doesn't
2708 seem worth generating rtl to say that. */
2709 reg = gen_rtx_REG (word_mode, REGNO (reg));
2710 x = expand_shift (LSHIFT_EXPR, word_mode, reg, shift, reg, 1);
2711 if (x != reg)
2712 emit_move_insn (reg, x);
2714 #endif
2717 /* If we have pre-computed the values to put in the registers in
2718 the case of non-aligned structures, copy them in now. */
2720 else if (args[i].n_aligned_regs != 0)
2721 for (j = 0; j < args[i].n_aligned_regs; j++)
2722 emit_move_insn (gen_rtx_REG (word_mode, REGNO (reg) + j),
2723 args[i].aligned_regs[j]);
2725 else if (partial == 0 || args[i].pass_on_stack)
2727 rtx mem = validize_mem (copy_rtx (args[i].value));
2729 /* Check for overlap with already clobbered argument area,
2730 provided that this has non-zero size. */
2731 if (is_sibcall
2732 && size != 0
2733 && (mem_overlaps_already_clobbered_arg_p
2734 (XEXP (args[i].value, 0), size)))
2735 *sibcall_failure = 1;
2737 if (size % UNITS_PER_WORD == 0
2738 || MEM_ALIGN (mem) % BITS_PER_WORD == 0)
2739 move_block_to_reg (REGNO (reg), mem, nregs, args[i].mode);
2740 else
2742 if (nregs > 1)
2743 move_block_to_reg (REGNO (reg), mem, nregs - 1,
2744 args[i].mode);
2745 rtx dest = gen_rtx_REG (word_mode, REGNO (reg) + nregs - 1);
2746 unsigned int bitoff = (nregs - 1) * BITS_PER_WORD;
2747 unsigned int bitsize = size * BITS_PER_UNIT - bitoff;
2748 rtx x = extract_bit_field (mem, bitsize, bitoff, 1, dest,
2749 word_mode, word_mode, false,
2750 NULL);
2751 if (BYTES_BIG_ENDIAN)
2752 x = expand_shift (LSHIFT_EXPR, word_mode, x,
2753 BITS_PER_WORD - bitsize, dest, 1);
2754 if (x != dest)
2755 emit_move_insn (dest, x);
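/* E.g. (illustrative) for the partial-word load just above: a 10-byte
   BLKmode value with 8-byte words gives NREGS == 2; the first word is
   block-moved, then the remaining BITSIZE == 80 - 64 == 16 bits are
   extracted into the last register and, on a big-endian target,
   shifted left by 64 - 16 == 48 bits.  */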
2758 /* Handle a BLKmode that needs shifting. */
2759 if (nregs == 1 && size < UNITS_PER_WORD
2760 #ifdef BLOCK_REG_PADDING
2761 && args[i].locate.where_pad == PAD_DOWNWARD
2762 #else
2763 && BYTES_BIG_ENDIAN
2764 #endif
2767 rtx dest = gen_rtx_REG (word_mode, REGNO (reg));
2768 int shift = (UNITS_PER_WORD - size) * BITS_PER_UNIT;
2769 enum tree_code dir = (BYTES_BIG_ENDIAN
2770 ? RSHIFT_EXPR : LSHIFT_EXPR);
2771 rtx x;
2773 x = expand_shift (dir, word_mode, dest, shift, dest, 1);
2774 if (x != dest)
2775 emit_move_insn (dest, x);
2779 /* When a parameter is a block, and perhaps in other cases, it is
2780 possible that it did a load from an argument slot that was
2781 already clobbered. */
2782 if (is_sibcall
2783 && check_sibcall_argument_overlap (before_arg, &args[i], 0))
2784 *sibcall_failure = 1;
2786 /* Handle calls that pass values in multiple non-contiguous
2787 locations. The Irix 6 ABI has examples of this. */
2788 if (GET_CODE (reg) == PARALLEL)
2789 use_group_regs (call_fusage, reg);
2790 else if (nregs == -1)
2791 use_reg_mode (call_fusage, reg,
2792 TYPE_MODE (TREE_TYPE (args[i].tree_value)));
2793 else if (nregs > 0)
2794 use_regs (call_fusage, REGNO (reg), nregs);
2799 /* We need to pop PENDING_STACK_ADJUST bytes. But, if the arguments
2800 wouldn't fill up an even multiple of PREFERRED_UNIT_STACK_BOUNDARY
2801 bytes, then we would need to push some additional bytes to pad the
2802 arguments. So, we compute an adjustment to the stack pointer for an
2803 amount that will leave the stack under-aligned by UNADJUSTED_ARGS_SIZE
2804 bytes. Then, when the arguments are pushed the stack will be perfectly
2805 aligned. ARGS_SIZE->CONSTANT is set to the number of bytes that should
2806 be popped after the call. Returns the adjustment. */
2808 static int
2809 combine_pending_stack_adjustment_and_call (int unadjusted_args_size,
2810 struct args_size *args_size,
2811 unsigned int preferred_unit_stack_boundary)
2813 /* The number of bytes to pop so that the stack will be
2814 under-aligned by UNADJUSTED_ARGS_SIZE bytes. */
2815 HOST_WIDE_INT adjustment;
2816 /* The alignment of the stack after the arguments are pushed, if we
2817 just pushed the arguments without adjusting the stack here. */
2818 unsigned HOST_WIDE_INT unadjusted_alignment;
2820 unadjusted_alignment
2821 = ((stack_pointer_delta + unadjusted_args_size)
2822 % preferred_unit_stack_boundary);
2824 /* We want to get rid of as many of the PENDING_STACK_ADJUST bytes
2825 as possible -- leaving just enough left to cancel out the
2826 UNADJUSTED_ALIGNMENT. In other words, we want to ensure that the
2827 PENDING_STACK_ADJUST is non-negative, and congruent to
2828 -UNADJUSTED_ALIGNMENT modulo the PREFERRED_UNIT_STACK_BOUNDARY. */
2830 /* Begin by trying to pop all the bytes. */
2831 unadjusted_alignment
2832 = (unadjusted_alignment
2833 - (pending_stack_adjust % preferred_unit_stack_boundary));
2834 adjustment = pending_stack_adjust;
2835 /* Push enough additional bytes that the stack will be aligned
2836 after the arguments are pushed. */
2837 if (preferred_unit_stack_boundary > 1 && unadjusted_alignment)
2838 adjustment -= preferred_unit_stack_boundary - unadjusted_alignment;
2840 /* Now, set ARGS_SIZE->CONSTANT so that we pop the right number of
2841 bytes after the call. The right number is the entire
2842 PENDING_STACK_ADJUST less our ADJUSTMENT plus the amount required
2843 by the arguments in the first place. */
2844 args_size->constant
2845 = pending_stack_adjust - adjustment + unadjusted_args_size;
2847 return adjustment;
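/* Worked example (illustrative) for the function above: with
   PREFERRED_UNIT_STACK_BOUNDARY == 16, STACK_POINTER_DELTA == 0,
   UNADJUSTED_ARGS_SIZE == 4 and PENDING_STACK_ADJUST == 20,
   UNADJUSTED_ALIGNMENT starts as (0 + 4) % 16 == 4 and becomes
   4 - (20 % 16) == 0, so ADJUSTMENT stays 20 (all pending bytes are
   popped) and ARGS_SIZE->CONSTANT becomes 20 - 20 + 4 == 4: pushing
   the 4 bytes of arguments then leaves the stack 16-byte aligned.  */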
2850 /* Scan expression X to check whether it dereferences any argument slots
2851 we have already clobbered by tail call arguments (as noted in the
2852 stored_args_map bitmap).
2853 Return nonzero if X dereferences such an argument slot,
2854 zero otherwise. */
2856 static int
2857 check_sibcall_argument_overlap_1 (rtx x)
2859 RTX_CODE code;
2860 int i, j;
2861 const char *fmt;
2863 if (x == NULL_RTX)
2864 return 0;
2866 code = GET_CODE (x);
2868 /* We need not check the operands of the CALL expression itself. */
2869 if (code == CALL)
2870 return 0;
2872 if (code == MEM)
2873 return mem_overlaps_already_clobbered_arg_p (XEXP (x, 0),
2874 GET_MODE_SIZE (GET_MODE (x)));
2876 /* Scan all subexpressions. */
2877 fmt = GET_RTX_FORMAT (code);
2878 for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
2880 if (*fmt == 'e')
2882 if (check_sibcall_argument_overlap_1 (XEXP (x, i)))
2883 return 1;
2885 else if (*fmt == 'E')
2887 for (j = 0; j < XVECLEN (x, i); j++)
2888 if (check_sibcall_argument_overlap_1 (XVECEXP (x, i, j)))
2889 return 1;
2892 return 0;
2895 /* Scan the sequence after INSN to check whether it dereferences any
2896 argument slots we have already clobbered by tail call arguments (as
2897 noted in the stored_args_map bitmap). If MARK_STORED_ARGS_MAP is
2898 nonzero, add the stack slots for ARG to the stored_args_map bitmap
2899 afterwards (when ARG is a register, MARK_STORED_ARGS_MAP should be 0).
2900 Return nonzero if the sequence after INSN dereferences such slots. */
2902 static int
2903 check_sibcall_argument_overlap (rtx_insn *insn, struct arg_data *arg,
2904 int mark_stored_args_map)
2906 int low, high;
2908 if (insn == NULL_RTX)
2909 insn = get_insns ();
2910 else
2911 insn = NEXT_INSN (insn);
2913 for (; insn; insn = NEXT_INSN (insn))
2914 if (INSN_P (insn)
2915 && check_sibcall_argument_overlap_1 (PATTERN (insn)))
2916 break;
2918 if (mark_stored_args_map)
2920 if (ARGS_GROW_DOWNWARD)
2921 low = -arg->locate.slot_offset.constant - arg->locate.size.constant;
2922 else
2923 low = arg->locate.slot_offset.constant;
2925 for (high = low + arg->locate.size.constant; low < high; low++)
2926 bitmap_set_bit (stored_args_map, low);
2928 return insn != NULL_RTX;
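/* E.g. (illustrative): an argument whose slot starts at offset 16 with
   size 8 and upward-growing arguments sets bits [16, 24) of
   stored_args_map, so any later load from those bytes is caught by the
   scan above.  */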
2931 /* Given that a function returns a value of mode MODE at the most
2932 significant end of hard register VALUE, shift VALUE left or right
2933 as specified by LEFT_P. Return true if some action was needed. */
2935 bool
2936 shift_return_value (machine_mode mode, bool left_p, rtx value)
2938 HOST_WIDE_INT shift;
2940 gcc_assert (REG_P (value) && HARD_REGISTER_P (value));
2941 machine_mode value_mode = GET_MODE (value);
2942 shift = GET_MODE_BITSIZE (value_mode) - GET_MODE_BITSIZE (mode);
2943 if (shift == 0)
2944 return false;
2946 /* Use ashr rather than lshr for right shifts. This is for the benefit
2947 of the MIPS port, which requires SImode values to be sign-extended
2948 when stored in 64-bit registers. */
2949 if (!force_expand_binop (value_mode, left_p ? ashl_optab : ashr_optab,
2950 value, gen_int_shift_amount (value_mode, shift),
2951 value, 1, OPTAB_WIDEN))
2952 gcc_unreachable ();
2953 return true;
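/* Illustrative sketch (not part of GCC): for an ABI that returns a
   32-bit value at the most significant end of a 64-bit register, the
   shift computed above is 64 - 32 == 32, and the arithmetic right
   shift moves the value to the LSB end while sign-extending it.  */

static long long
msb_return_sketch (long long raw_reg_value)
{
  return raw_reg_value >> 32;	/* ashr, as the MIPS note above wants */
}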
2956 /* If X is a likely-spilled register value, copy it to a pseudo
2957 register and return that register. Return X otherwise. */
2959 static rtx
2960 avoid_likely_spilled_reg (rtx x)
2962 rtx new_rtx;
2964 if (REG_P (x)
2965 && HARD_REGISTER_P (x)
2966 && targetm.class_likely_spilled_p (REGNO_REG_CLASS (REGNO (x))))
2968 /* Make sure that we generate a REG rather than a CONCAT.
2969 Moves into CONCATs can need nontrivial instructions,
2970 and the whole point of this function is to avoid
2971 using the hard register directly in such a situation. */
2972 generating_concat_p = 0;
2973 new_rtx = gen_reg_rtx (GET_MODE (x));
2974 generating_concat_p = 1;
2975 emit_move_insn (new_rtx, x);
2976 return new_rtx;
2978 return x;
2981 /* Helper function for expand_call.
2982 Return false if EXP is not implementable as a sibling call. */
2984 static bool
2985 can_implement_as_sibling_call_p (tree exp,
2986 rtx structure_value_addr,
2987 tree funtype,
2988 int reg_parm_stack_space ATTRIBUTE_UNUSED,
2989 tree fndecl,
2990 int flags,
2991 tree addr,
2992 const args_size &args_size)
2994 if (!targetm.have_sibcall_epilogue ())
2996 maybe_complain_about_tail_call
2997 (exp,
2998 "machine description does not have"
2999 " a sibcall_epilogue instruction pattern");
3000 return false;
3003 /* Doing sibling call optimization needs some work, since
3004 structure_value_addr can be allocated on the stack.
3005 It does not seem worth the effort since few optimizable
3006 sibling calls will return a structure. */
3007 if (structure_value_addr != NULL_RTX)
3009 maybe_complain_about_tail_call (exp, "callee returns a structure");
3010 return false;
3013 #ifdef REG_PARM_STACK_SPACE
3014 /* If outgoing reg parm stack space changes, we cannot do a sibcall. */
3015 if (OUTGOING_REG_PARM_STACK_SPACE (funtype)
3016 != OUTGOING_REG_PARM_STACK_SPACE (TREE_TYPE (current_function_decl))
3017 || (reg_parm_stack_space != REG_PARM_STACK_SPACE (current_function_decl)))
3019 maybe_complain_about_tail_call (exp,
3020 "inconsistent size of stack space"
3021 " allocated for arguments which are"
3022 " passed in registers");
3023 return false;
3025 #endif
3027 /* Check whether the target is able to optimize the call
3028 into a sibcall. */
3029 if (!targetm.function_ok_for_sibcall (fndecl, exp))
3031 maybe_complain_about_tail_call (exp,
3032 "target is not able to optimize the"
3033 " call into a sibling call");
3034 return false;
3037 /* Functions that do not return exactly once may not be sibcall
3038 optimized. */
3039 if (flags & ECF_RETURNS_TWICE)
3041 maybe_complain_about_tail_call (exp, "callee returns twice");
3042 return false;
3044 if (flags & ECF_NORETURN)
3046 maybe_complain_about_tail_call (exp, "callee does not return");
3047 return false;
3050 if (TYPE_VOLATILE (TREE_TYPE (TREE_TYPE (addr))))
3052 maybe_complain_about_tail_call (exp, "volatile function type");
3053 return false;
3056 /* If the called function is nested in the current one, it might access
3057 some of the caller's arguments, but could clobber them beforehand if
3058 the argument areas are shared. */
3059 if (fndecl && decl_function_context (fndecl) == current_function_decl)
3061 maybe_complain_about_tail_call (exp, "nested function");
3062 return false;
3065 /* If this function requires more stack slots than the current
3066 function, we cannot change it into a sibling call.
3067 crtl->args.pretend_args_size is not part of the
3068 stack allocated by our caller. */
3069 if (args_size.constant > (crtl->args.size - crtl->args.pretend_args_size))
3071 maybe_complain_about_tail_call (exp,
3072 "callee required more stack slots"
3073 " than the caller");
3074 return false;
3077 /* If the callee pops its own arguments, then it must pop exactly
3078 the same number of arguments as the current function. */
3079 if (targetm.calls.return_pops_args (fndecl, funtype, args_size.constant)
3080 != targetm.calls.return_pops_args (current_function_decl,
3081 TREE_TYPE (current_function_decl),
3082 crtl->args.size))
3084 maybe_complain_about_tail_call (exp,
3085 "inconsistent number of"
3086 " popped arguments");
3087 return false;
3090 if (!lang_hooks.decls.ok_for_sibcall (fndecl))
3092 maybe_complain_about_tail_call (exp, "frontend does not support"
3093 " sibling call");
3094 return false;
3097 /* All checks passed. */
3098 return true;
3101 /* Generate all the code for a CALL_EXPR exp
3102 and return an rtx for its value.
3103 Store the value in TARGET (specified as an rtx) if convenient.
3104 If the value is stored in TARGET then TARGET is returned.
3105 If IGNORE is nonzero, then we ignore the value of the function call. */
3107 rtx
3108 expand_call (tree exp, rtx target, int ignore)
3110 /* Nonzero if we are currently expanding a call. */
3111 static int currently_expanding_call = 0;
3113 /* RTX for the function to be called. */
3114 rtx funexp;
3115 /* Sequence of insns to perform a normal "call". */
3116 rtx_insn *normal_call_insns = NULL;
3117 /* Sequence of insns to perform a tail "call". */
3118 rtx_insn *tail_call_insns = NULL;
3119 /* Data type of the function. */
3120 tree funtype;
3121 tree type_arg_types;
3122 tree rettype;
3123 /* Declaration of the function being called,
3124 or 0 if the function is computed (not known by name). */
3125 tree fndecl = 0;
3126 /* The type of the function being called. */
3127 tree fntype;
3128 bool try_tail_call = CALL_EXPR_TAILCALL (exp);
3129 bool must_tail_call = CALL_EXPR_MUST_TAIL_CALL (exp);
3130 int pass;
3132 /* Register in which non-BLKmode value will be returned,
3133 or 0 if no value or if value is BLKmode. */
3134 rtx valreg;
3135 /* Register(s) in which bounds are returned. */
3136 rtx valbnd = NULL;
3137 /* Address where we should return a BLKmode value;
3138 0 if value not BLKmode. */
3139 rtx structure_value_addr = 0;
3140 /* Nonzero if that address is being passed by treating it as
3141 an extra, implicit first parameter. Otherwise,
3142 it is passed by being copied directly into struct_value_rtx. */
3143 int structure_value_addr_parm = 0;
3144 /* Holds the value of implicit argument for the struct value. */
3145 tree structure_value_addr_value = NULL_TREE;
3146 /* Size of aggregate value wanted, or zero if none wanted
3147 or if we are using the non-reentrant PCC calling convention
3148 or expecting the value in registers. */
3149 HOST_WIDE_INT struct_value_size = 0;
3150 /* Nonzero if called function returns an aggregate in memory PCC style,
3151 by returning the address of where to find it. */
3152 int pcc_struct_value = 0;
3153 rtx struct_value = 0;
3155 /* Number of actual parameters in this call, including struct value addr. */
3156 int num_actuals;
3157 /* Number of named args. Args after this are anonymous ones
3158 and they must all go on the stack. */
3159 int n_named_args;
3160 /* Number of complex actual arguments that need to be split. */
3161 int num_complex_actuals = 0;
3163 /* Vector of information about each argument.
3164 Arguments are numbered in the order they will be pushed,
3165 not the order they are written. */
3166 struct arg_data *args;
3168 /* Total size in bytes of all the stack-parms scanned so far. */
3169 struct args_size args_size;
3170 struct args_size adjusted_args_size;
3171 /* Size of arguments before any adjustments (such as rounding). */
3172 int unadjusted_args_size;
3173 /* Data on reg parms scanned so far. */
3174 CUMULATIVE_ARGS args_so_far_v;
3175 cumulative_args_t args_so_far;
3176 /* Nonzero if a reg parm has been scanned. */
3177 int reg_parm_seen;
3178 /* Nonzero if this is an indirect function call. */
3180 /* Nonzero if we must avoid push-insns in the args for this call.
3181 If stack space is allocated for register parameters, but not by the
3182 caller, then it is preallocated in the fixed part of the stack frame.
3183 So the entire argument block must then be preallocated (i.e., we
3184 ignore PUSH_ROUNDING in that case). */
3186 int must_preallocate = !PUSH_ARGS;
3188 /* Size of the stack reserved for parameter registers. */
3189 int reg_parm_stack_space = 0;
3191 /* Address of space preallocated for stack parms
3192 (on machines that lack push insns), or 0 if space not preallocated. */
3193 rtx argblock = 0;
3195 /* Mask of ECF_ and ERF_ flags. */
3196 int flags = 0;
3197 int return_flags = 0;
3198 #ifdef REG_PARM_STACK_SPACE
3199 /* Define the boundary of the register parm stack space that needs to be
3200 saved, if any. */
3201 int low_to_save, high_to_save;
3202 rtx save_area = 0; /* Place that it is saved */
3203 #endif
3205 int initial_highest_arg_in_use = highest_outgoing_arg_in_use;
3206 char *initial_stack_usage_map = stack_usage_map;
3207 char *stack_usage_map_buf = NULL;
3209 int old_stack_allocated;
3211 /* State variables to track stack modifications. */
3212 rtx old_stack_level = 0;
3213 int old_stack_arg_under_construction = 0;
3214 int old_pending_adj = 0;
3215 int old_inhibit_defer_pop = inhibit_defer_pop;
3217 /* Some stack pointer alterations we make are performed via
3218 allocate_dynamic_stack_space. This modifies the stack_pointer_delta,
3219 which we then also need to save/restore along the way. */
3220 int old_stack_pointer_delta = 0;
3222 rtx call_fusage;
3223 tree addr = CALL_EXPR_FN (exp);
3224 int i;
3225 /* The alignment of the stack, in bits. */
3226 unsigned HOST_WIDE_INT preferred_stack_boundary;
3227 /* The alignment of the stack, in bytes. */
3228 unsigned HOST_WIDE_INT preferred_unit_stack_boundary;
3229 /* The static chain value to use for this call. */
3230 rtx static_chain_value;
3231 /* See if this is a "nothrow" function call. */
3232 if (TREE_NOTHROW (exp))
3233 flags |= ECF_NOTHROW;
3235 /* See if we can find a DECL-node for the actual function, and get the
3236 function attributes (flags) from the function decl or type node. */
3237 fndecl = get_callee_fndecl (exp);
3238 if (fndecl)
3240 fntype = TREE_TYPE (fndecl);
3241 flags |= flags_from_decl_or_type (fndecl);
3242 return_flags |= decl_return_flags (fndecl);
3244 else
3246 fntype = TREE_TYPE (TREE_TYPE (addr));
3247 flags |= flags_from_decl_or_type (fntype);
3248 if (CALL_EXPR_BY_DESCRIPTOR (exp))
3249 flags |= ECF_BY_DESCRIPTOR;
3251 rettype = TREE_TYPE (exp);
3253 struct_value = targetm.calls.struct_value_rtx (fntype, 0);
3255 /* Warn if this value is an aggregate type,
3256 regardless of which calling convention we are using for it. */
3257 if (AGGREGATE_TYPE_P (rettype))
3258 warning (OPT_Waggregate_return, "function call has aggregate value");
3260 /* If the result of a non-looping pure or const function call is
3261 ignored (or void), and none of its arguments are volatile, we can
3262 avoid expanding the call and just evaluate the arguments for
3263 side-effects. */
3264 if ((flags & (ECF_CONST | ECF_PURE))
3265 && (!(flags & ECF_LOOPING_CONST_OR_PURE))
3266 && (ignore || target == const0_rtx
3267 || TYPE_MODE (rettype) == VOIDmode))
3269 bool volatilep = false;
3270 tree arg;
3271 call_expr_arg_iterator iter;
3273 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
3274 if (TREE_THIS_VOLATILE (arg))
3276 volatilep = true;
3277 break;
3280 if (! volatilep)
3282 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
3283 expand_expr (arg, const0_rtx, VOIDmode, EXPAND_NORMAL);
3284 return const0_rtx;
3288 #ifdef REG_PARM_STACK_SPACE
3289 reg_parm_stack_space = REG_PARM_STACK_SPACE (!fndecl ? fntype : fndecl);
3290 #endif
3292 if (! OUTGOING_REG_PARM_STACK_SPACE ((!fndecl ? fntype : TREE_TYPE (fndecl)))
3293 && reg_parm_stack_space > 0 && PUSH_ARGS)
3294 must_preallocate = 1;
3296 /* Set up a place to return a structure. */
3298 /* Cater to broken compilers. */
3299 if (aggregate_value_p (exp, fntype))
3301 /* This call returns a big structure. */
3302 flags &= ~(ECF_CONST | ECF_PURE | ECF_LOOPING_CONST_OR_PURE);
3304 #ifdef PCC_STATIC_STRUCT_RETURN
3306 pcc_struct_value = 1;
3308 #else /* not PCC_STATIC_STRUCT_RETURN */
3310 struct_value_size = int_size_in_bytes (rettype);
3312 /* Even if it is semantically safe to use the target as the return
3313 slot, it may not be sufficiently aligned for the return type. */
3314 if (CALL_EXPR_RETURN_SLOT_OPT (exp)
3315 && target
3316 && MEM_P (target)
3317 && !(MEM_ALIGN (target) < TYPE_ALIGN (rettype)
3318 && targetm.slow_unaligned_access (TYPE_MODE (rettype),
3319 MEM_ALIGN (target))))
3320 structure_value_addr = XEXP (target, 0);
3321 else
3323 /* For variable-sized objects, we must be called with a target
3324 specified. If we were to allocate space on the stack here,
3325 we would have no way of knowing when to free it. */
3326 rtx d = assign_temp (rettype, 1, 1);
3327 structure_value_addr = XEXP (d, 0);
3328 target = 0;
3331 #endif /* not PCC_STATIC_STRUCT_RETURN */
3334 /* Figure out the amount to which the stack should be aligned. */
3335 preferred_stack_boundary = PREFERRED_STACK_BOUNDARY;
3336 if (fndecl)
3338 struct cgraph_rtl_info *i = cgraph_node::rtl_info (fndecl);
3339 /* Without automatic stack alignment, we can't increase preferred
3340 stack boundary. With automatic stack alignment, it is
3341 unnecessary: unless we can guarantee that all callers will
3342 align the outgoing stack properly, the callee has to align its
3343 stack anyway. */
3344 if (i
3345 && i->preferred_incoming_stack_boundary
3346 && i->preferred_incoming_stack_boundary < preferred_stack_boundary)
3347 preferred_stack_boundary = i->preferred_incoming_stack_boundary;
3350 /* Operand 0 is a pointer-to-function; get the type of the function. */
3351 funtype = TREE_TYPE (addr);
3352 gcc_assert (POINTER_TYPE_P (funtype));
3353 funtype = TREE_TYPE (funtype);
3355 /* Count whether there are actual complex arguments that need to be split
3356 into their real and imaginary parts. Munge the type_arg_types
3357 appropriately here as well. */
3358 if (targetm.calls.split_complex_arg)
3360 call_expr_arg_iterator iter;
3361 tree arg;
3362 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
3364 tree type = TREE_TYPE (arg);
3365 if (type && TREE_CODE (type) == COMPLEX_TYPE
3366 && targetm.calls.split_complex_arg (type))
3367 num_complex_actuals++;
3369 type_arg_types = split_complex_types (TYPE_ARG_TYPES (funtype));
3371 else
3372 type_arg_types = TYPE_ARG_TYPES (funtype);
3374 if (flags & ECF_MAY_BE_ALLOCA)
3375 cfun->calls_alloca = 1;
3377 /* If struct_value_rtx is 0, it means pass the address
3378 as if it were an extra parameter. Put the argument expression
3379 in structure_value_addr_value. */
3380 if (structure_value_addr && struct_value == 0)
3382 /* If structure_value_addr is a REG other than
3383 virtual_outgoing_args_rtx, we can always use it. If it
3384 is not a REG, we must always copy it into a register.
3385 If it is virtual_outgoing_args_rtx, we must copy it to another
3386 register in some cases. */
3387 rtx temp = (!REG_P (structure_value_addr)
3388 || (ACCUMULATE_OUTGOING_ARGS
3389 && stack_arg_under_construction
3390 && structure_value_addr == virtual_outgoing_args_rtx)
3391 ? copy_addr_to_reg (convert_memory_address
3392 (Pmode, structure_value_addr))
3393 : structure_value_addr);
3395 structure_value_addr_value =
3396 make_tree (build_pointer_type (TREE_TYPE (funtype)), temp);
3397 structure_value_addr_parm = CALL_WITH_BOUNDS_P (exp) ? 2 : 1;
3400 /* Count the arguments and set NUM_ACTUALS. */
3401 num_actuals =
3402 call_expr_nargs (exp) + num_complex_actuals + structure_value_addr_parm;
3404 /* Compute number of named args.
3405 First, do a raw count of the args for INIT_CUMULATIVE_ARGS. */
3407 if (type_arg_types != 0)
3408 n_named_args
3409 = (list_length (type_arg_types)
3410 /* Count the struct value address, if it is passed as a parm. */
3411 + structure_value_addr_parm);
3412 else
3413 /* If we know nothing, treat all args as named. */
3414 n_named_args = num_actuals;
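/* E.g. (illustrative) for a call with three actual arguments to a
   prototyped variadic function int f (int, ...): TYPE_ARG_TYPES lists
   one named type, so N_NAMED_ARGS starts at 1 and, depending on the
   two naming hooks checked further below, either stays 1, drops to 0
   (the last named arg excluded), or becomes NUM_ACTUALS == 3 (all
   args treated as named).  */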
3416 /* Start updating where the next arg would go.
3418 On some machines (such as the PA) indirect calls have a different
3419 calling convention than normal calls. The fourth argument in
3420 INIT_CUMULATIVE_ARGS tells the backend if this is an indirect call
3421 or not. */
3422 INIT_CUMULATIVE_ARGS (args_so_far_v, funtype, NULL_RTX, fndecl, n_named_args);
3423 args_so_far = pack_cumulative_args (&args_so_far_v);
3425 /* Now possibly adjust the number of named args.
3426 Normally, don't include the last named arg if anonymous args follow.
3427 We do include the last named arg if
3428 targetm.calls.strict_argument_naming() returns nonzero.
3429 (If no anonymous args follow, the result of list_length is actually
3430 one too large. This is harmless.)
3432 If targetm.calls.pretend_outgoing_varargs_named() returns
3433 nonzero, and targetm.calls.strict_argument_naming() returns zero,
3434 this machine will be able to place unnamed args that were passed
3435 in registers into the stack. So treat all args as named. This
3436 allows the insns emitting for a specific argument list to be
3437 independent of the function declaration.
3439 If targetm.calls.pretend_outgoing_varargs_named() returns zero,
3440 we do not have any reliable way to pass unnamed args in
3441 registers, so we must force them into memory. */
3443 if (type_arg_types != 0
3444 && targetm.calls.strict_argument_naming (args_so_far))
3446 else if (type_arg_types != 0
3447 && ! targetm.calls.pretend_outgoing_varargs_named (args_so_far))
3448 /* Don't include the last named arg. */
3449 --n_named_args;
3450 else
3451 /* Treat all args as named. */
3452 n_named_args = num_actuals;
3454 /* Make a vector to hold all the information about each arg. */
3455 args = XCNEWVEC (struct arg_data, num_actuals);
3457 /* Build up entries in the ARGS array, compute the size of the
3458 arguments into ARGS_SIZE, etc. */
3459 initialize_argument_information (num_actuals, args, &args_size,
3460 n_named_args, exp,
3461 structure_value_addr_value, fndecl, fntype,
3462 args_so_far, reg_parm_stack_space,
3463 &old_stack_level, &old_pending_adj,
3464 &must_preallocate, &flags,
3465 &try_tail_call, CALL_FROM_THUNK_P (exp));
3467 if (args_size.var)
3468 must_preallocate = 1;
3470 /* Now make final decision about preallocating stack space. */
3471 must_preallocate = finalize_must_preallocate (must_preallocate,
3472 num_actuals, args,
3473 &args_size);
3475 /* If the structure value address will reference the stack pointer, we
3476 must stabilize it. We don't need to do this if we know that we are
3477 not going to adjust the stack pointer in processing this call. */
3479 if (structure_value_addr
3480 && (reg_mentioned_p (virtual_stack_dynamic_rtx, structure_value_addr)
3481 || reg_mentioned_p (virtual_outgoing_args_rtx,
3482 structure_value_addr))
3483 && (args_size.var
3484 || (!ACCUMULATE_OUTGOING_ARGS && args_size.constant)))
3485 structure_value_addr = copy_to_reg (structure_value_addr);
3487 /* Tail calls can make things harder to debug, and we've traditionally
3488 pushed these optimizations into -O2. Don't try if we're already
3489 expanding a call, as that means we're an argument. Don't try if
3490 there are cleanups, as we know there's code to follow the call. */
3492 if (currently_expanding_call++ != 0
3493 || !flag_optimize_sibling_calls
3494 || args_size.var
3495 || dbg_cnt (tail_call) == false)
3496 try_tail_call = 0;
3498 /* If the user has marked the function as requiring tail-call
3499 optimization, attempt it. */
3500 if (must_tail_call)
3501 try_tail_call = 1;
3503 /* Remaining reasons for tail call optimization to fail. */
3504 if (try_tail_call)
3505 try_tail_call = can_implement_as_sibling_call_p (exp,
3506 structure_value_addr,
3507 funtype,
3508 reg_parm_stack_space,
3509 fndecl,
3510 flags, addr, args_size);
3512 /* Check if caller and callee disagree in promotion of function
3513 return value. */
3514 if (try_tail_call)
3516 machine_mode caller_mode, caller_promoted_mode;
3517 machine_mode callee_mode, callee_promoted_mode;
3518 int caller_unsignedp, callee_unsignedp;
3519 tree caller_res = DECL_RESULT (current_function_decl);
3521 caller_unsignedp = TYPE_UNSIGNED (TREE_TYPE (caller_res));
3522 caller_mode = DECL_MODE (caller_res);
3523 callee_unsignedp = TYPE_UNSIGNED (TREE_TYPE (funtype));
3524 callee_mode = TYPE_MODE (TREE_TYPE (funtype));
3525 caller_promoted_mode
3526 = promote_function_mode (TREE_TYPE (caller_res), caller_mode,
3527 &caller_unsignedp,
3528 TREE_TYPE (current_function_decl), 1);
3529 callee_promoted_mode
3530 = promote_function_mode (TREE_TYPE (funtype), callee_mode,
3531 &callee_unsignedp,
3532 funtype, 1);
3533 if (caller_mode != VOIDmode
3534 && (caller_promoted_mode != callee_promoted_mode
3535 || ((caller_mode != caller_promoted_mode
3536 || callee_mode != callee_promoted_mode)
3537 && (caller_unsignedp != callee_unsignedp
3538 || partial_subreg_p (caller_mode, callee_mode)))))
3540 try_tail_call = 0;
3541 maybe_complain_about_tail_call (exp,
3542 "caller and callee disagree in"
3543 " promotion of function"
3544 " return value");
3548 /* Ensure current function's preferred stack boundary is at least
3549 what we need. Stack alignment may also increase preferred stack
3550 boundary. */
3551 if (crtl->preferred_stack_boundary < preferred_stack_boundary)
3552 crtl->preferred_stack_boundary = preferred_stack_boundary;
3553 else
3554 preferred_stack_boundary = crtl->preferred_stack_boundary;
3556 preferred_unit_stack_boundary = preferred_stack_boundary / BITS_PER_UNIT;
3558 /* We want to make two insn chains; one for a sibling call, the other
3559 for a normal call. We will select one of the two chains after
3560 initial RTL generation is complete. */
3561 for (pass = try_tail_call ? 0 : 1; pass < 2; pass++)
3563 int sibcall_failure = 0;
3564 /* We want to emit any pending stack adjustments before the tail
3565 recursion "call". That way we know any adjustment after the tail
3566 recursion call can be ignored if we indeed use the tail
3567 call expansion. */
3568 saved_pending_stack_adjust save;
3569 rtx_insn *insns, *before_call, *after_args;
3570 rtx next_arg_reg;
3572 if (pass == 0)
3574 /* State variables we need to save and restore between
3575 iterations. */
3576 save_pending_stack_adjust (&save);
3578 if (pass)
3579 flags &= ~ECF_SIBCALL;
3580 else
3581 flags |= ECF_SIBCALL;
3583 /* Other state variables that we must reinitialize each time
3584 through the loop (that are not initialized by the loop itself). */
3585 argblock = 0;
3586 call_fusage = 0;
3588 /* Start a new sequence for the normal call case.
3590 From this point on, if the sibling call fails, we want to set
3591 sibcall_failure instead of continuing the loop. */
3592 start_sequence ();
3594 /* Don't let pending stack adjusts add up to too much.
3595 Also, do all pending adjustments now if there is any chance
3596 this might be a call to alloca or if we are expanding a sibling
3597 call sequence.
3598 Also do the adjustments before a throwing call, otherwise
3599 exception handling can fail; PR 19225. */
3600 if (pending_stack_adjust >= 32
3601 || (pending_stack_adjust > 0
3602 && (flags & ECF_MAY_BE_ALLOCA))
3603 || (pending_stack_adjust > 0
3604 && flag_exceptions && !(flags & ECF_NOTHROW))
3605 || pass == 0)
3606 do_pending_stack_adjust ();
3608 /* Precompute any arguments as needed. */
3609 if (pass)
3610 precompute_arguments (num_actuals, args);
3612 /* Now we are about to start emitting insns that can be deleted
3613 if a libcall is deleted. */
3614 if (pass && (flags & ECF_MALLOC))
3615 start_sequence ();
3617 if (pass == 0
3618 && crtl->stack_protect_guard
3619 && targetm.stack_protect_runtime_enabled_p ())
3620 stack_protect_epilogue ();
3622 adjusted_args_size = args_size;
3623 /* Compute the actual size of the argument block required. The variable
3624 and constant sizes must be combined, the size may have to be rounded,
3625 and there may be a minimum required size. When generating a sibcall
3626 pattern, do not round up, since we'll be re-using whatever space our
3627 caller provided. */
3628 unadjusted_args_size
3629 = compute_argument_block_size (reg_parm_stack_space,
3630 &adjusted_args_size,
3631 fndecl, fntype,
3632 (pass == 0 ? 0
3633 : preferred_stack_boundary));
3635 old_stack_allocated = stack_pointer_delta - pending_stack_adjust;
3637 /* The argument block when performing a sibling call is the
3638 incoming argument block. */
3639 if (pass == 0)
3641 argblock = crtl->args.internal_arg_pointer;
3642 if (STACK_GROWS_DOWNWARD)
3643 argblock
3644 = plus_constant (Pmode, argblock, crtl->args.pretend_args_size);
3645 else
3646 argblock
3647 = plus_constant (Pmode, argblock, -crtl->args.pretend_args_size);
3649 stored_args_map = sbitmap_alloc (args_size.constant);
3650 bitmap_clear (stored_args_map);
3653 /* If we have no actual push instructions, or shouldn't use them,
3654 make space for all args right now. */
3655 else if (adjusted_args_size.var != 0)
3657 if (old_stack_level == 0)
3659 emit_stack_save (SAVE_BLOCK, &old_stack_level);
3660 old_stack_pointer_delta = stack_pointer_delta;
3661 old_pending_adj = pending_stack_adjust;
3662 pending_stack_adjust = 0;
3663 /* stack_arg_under_construction says whether a stack arg is
3664 being constructed at the old stack level. Pushing the stack
3665 gets a clean outgoing argument block. */
3666 old_stack_arg_under_construction = stack_arg_under_construction;
3667 stack_arg_under_construction = 0;
3669 argblock = push_block (ARGS_SIZE_RTX (adjusted_args_size), 0, 0);
3670 if (flag_stack_usage_info)
3671 current_function_has_unbounded_dynamic_stack_size = 1;
3673 else
3675 /* Note that we must go through the motions of allocating an argument
3676 block even if the size is zero because we may be storing args
3677 in the area reserved for register arguments, which may be part of
3678 the stack frame. */
3680 int needed = adjusted_args_size.constant;
3682 /* Store the maximum argument space used. It will be pushed by
3683 the prologue (if ACCUMULATE_OUTGOING_ARGS, or stack overflow
3684 checking). */
3686 if (needed > crtl->outgoing_args_size)
3687 crtl->outgoing_args_size = needed;
3689 if (must_preallocate)
3691 if (ACCUMULATE_OUTGOING_ARGS)
3693 /* Since the stack pointer will never be pushed, it is
3694 possible for the evaluation of a parm to clobber
3695 something we have already written to the stack.
3696 Since most function calls on RISC machines do not use
3697 the stack, this is uncommon, but must work correctly.
3699 Therefore, we save any area of the stack that was already
3700 written and that we are using. Here we set up to do this
3701 by making a new stack usage map from the old one. The
3702 actual save will be done by store_one_arg.
3704 Another approach might be to try to reorder the argument
3705 evaluations to avoid this conflicting stack usage. */
3707 /* Since we will be writing into the entire argument area,
3708 the map must be allocated for its entire size, not just
3709 the part that is the responsibility of the caller. */
3710 if (! OUTGOING_REG_PARM_STACK_SPACE ((!fndecl ? fntype : TREE_TYPE (fndecl))))
3711 needed += reg_parm_stack_space;
3713 if (ARGS_GROW_DOWNWARD)
3714 highest_outgoing_arg_in_use
3715 = MAX (initial_highest_arg_in_use, needed + 1);
3716 else
3717 highest_outgoing_arg_in_use
3718 = MAX (initial_highest_arg_in_use, needed);
3720 free (stack_usage_map_buf);
3721 stack_usage_map_buf = XNEWVEC (char, highest_outgoing_arg_in_use);
3722 stack_usage_map = stack_usage_map_buf;
3724 if (initial_highest_arg_in_use)
3725 memcpy (stack_usage_map, initial_stack_usage_map,
3726 initial_highest_arg_in_use);
3728 if (initial_highest_arg_in_use != highest_outgoing_arg_in_use)
3729 memset (&stack_usage_map[initial_highest_arg_in_use], 0,
3730 (highest_outgoing_arg_in_use
3731 - initial_highest_arg_in_use));
3732 needed = 0;
3734 /* The address of the outgoing argument list must not be
3735 copied to a register here, because argblock would be left
3736 pointing to the wrong place after the call to
3737 allocate_dynamic_stack_space below. */
3739 argblock = virtual_outgoing_args_rtx;
3741 else
3743 if (inhibit_defer_pop == 0)
3745 /* Try to reuse some or all of the pending_stack_adjust
3746 to get this space. */
3747 needed
3748 = (combine_pending_stack_adjustment_and_call
3749 (unadjusted_args_size,
3750 &adjusted_args_size,
3751 preferred_unit_stack_boundary));
3753 /* combine_pending_stack_adjustment_and_call computes
3754 an adjustment before the arguments are allocated.
3755 Account for them and see whether or not the stack
3756 needs to go up or down. */
3757 needed = unadjusted_args_size - needed;
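/* At this point NEEDED is the number of bytes still to allocate: if
   the pending adjustment already covers UNADJUSTED_ARGS_SIZE and more,
   NEEDED goes negative and the surplus is released just below;
   otherwise the remaining bytes are allocated by push_block.  */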
3759 if (needed < 0)
3761 /* We're releasing stack space. */
3762 /* ??? We can avoid any adjustment at all if we're
3763 already aligned. FIXME. */
3764 pending_stack_adjust = -needed;
3765 do_pending_stack_adjust ();
3766 needed = 0;
3768 else
3769 /* We need to allocate space. We'll do that in
3770 push_block below. */
3771 pending_stack_adjust = 0;
3774 /* Special case this because overhead of `push_block' in
3775 this case is non-trivial. */
3776 if (needed == 0)
3777 argblock = virtual_outgoing_args_rtx;
3778 else
3780 argblock = push_block (GEN_INT (needed), 0, 0);
3781 if (ARGS_GROW_DOWNWARD)
3782 argblock = plus_constant (Pmode, argblock, needed);
3785 /* We only really need to call `copy_to_reg' in the case
3786 where push insns are going to be used to pass ARGBLOCK
3787 to a function call in ARGS. In that case, the stack
3788 pointer changes value from the allocation point to the
3789 call point, and hence the value of
3790 VIRTUAL_OUTGOING_ARGS_RTX changes as well. But might
3791 as well always do it. */
3792 argblock = copy_to_reg (argblock);
3797 if (ACCUMULATE_OUTGOING_ARGS)
3799 /* The save/restore code in store_one_arg handles all
3800 cases except one: a constructor call (including a C
3801 function returning a BLKmode struct) to initialize
3802 an argument. */
3803 if (stack_arg_under_construction)
3805 rtx push_size
3806 = GEN_INT (adjusted_args_size.constant
3807 + (OUTGOING_REG_PARM_STACK_SPACE ((!fndecl ? fntype
3808 : TREE_TYPE (fndecl))) ? 0
3809 : reg_parm_stack_space));
3810 if (old_stack_level == 0)
3812 emit_stack_save (SAVE_BLOCK, &old_stack_level);
3813 old_stack_pointer_delta = stack_pointer_delta;
3814 old_pending_adj = pending_stack_adjust;
3815 pending_stack_adjust = 0;
3816 /* stack_arg_under_construction says whether a stack
3817 arg is being constructed at the old stack level.
3818 Pushing the stack gets a clean outgoing argument
3819 block. */
3820 old_stack_arg_under_construction
3821 = stack_arg_under_construction;
3822 stack_arg_under_construction = 0;
3823 /* Make a new map for the new argument list. */
3824 free (stack_usage_map_buf);
3825 stack_usage_map_buf = XCNEWVEC (char, highest_outgoing_arg_in_use);
3826 stack_usage_map = stack_usage_map_buf;
3827 highest_outgoing_arg_in_use = 0;
3829 /* We can pass TRUE as the last argument because we just
3830 saved the stack pointer and will restore it right after
3831 the call. */
3832 allocate_dynamic_stack_space (push_size, 0, BIGGEST_ALIGNMENT,
3833 -1, true);
3836 /* If argument evaluation might modify the stack pointer,
3837 copy the address of the argument list to a register. */
3838 for (i = 0; i < num_actuals; i++)
3839 if (args[i].pass_on_stack)
3841 argblock = copy_addr_to_reg (argblock);
3842 break;
3846 compute_argument_addresses (args, argblock, num_actuals);
3848 /* Stack is properly aligned, pops can't safely be deferred during
3849 the evaluation of the arguments. */
3850 NO_DEFER_POP;
3852 /* Precompute all register parameters. It isn't safe to compute
3853 anything once we have started filling any specific hard regs.
3854 TLS symbols sometimes need a call to resolve. Precompute
3855 register parameters before any stack pointer manipulation
3856 to avoid unaligned stack in the called function. */
3857 precompute_register_parameters (num_actuals, args, &reg_parm_seen);
3859 OK_DEFER_POP;
3861 /* Perform stack alignment before the first push (the last arg). */
3862 if (argblock == 0
3863 && adjusted_args_size.constant > reg_parm_stack_space
3864 && adjusted_args_size.constant != unadjusted_args_size)
3866 /* When the stack adjustment is pending, we get better code
3867 by combining the adjustments. */
3868 if (pending_stack_adjust
3869 && ! inhibit_defer_pop)
3871 pending_stack_adjust
3872 = (combine_pending_stack_adjustment_and_call
3873 (unadjusted_args_size,
3874 &adjusted_args_size,
3875 preferred_unit_stack_boundary));
3876 do_pending_stack_adjust ();
3878 else if (argblock == 0)
3879 anti_adjust_stack (GEN_INT (adjusted_args_size.constant
3880 - unadjusted_args_size));
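/* For instance, with a 16-byte preferred boundary and 20 bytes of
   unrounded arguments, ADJUSTED_ARGS_SIZE.CONSTANT will have been
   rounded to 32, so an extra 12 bytes of padding are pushed here.  */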
3882 /* Now that the stack is properly aligned, pops can't safely
3883 be deferred during the evaluation of the arguments. */
3884 NO_DEFER_POP;
3886 /* Record the maximum pushed stack space size. We need to delay
3887 doing it this far to take into account the optimization done
3888 by combine_pending_stack_adjustment_and_call. */
3889 if (flag_stack_usage_info
3890 && !ACCUMULATE_OUTGOING_ARGS
3891 && pass
3892 && adjusted_args_size.var == 0)
3894 int pushed = adjusted_args_size.constant + pending_stack_adjust;
3895 if (pushed > current_function_pushed_stack_size)
3896 current_function_pushed_stack_size = pushed;
3899 funexp = rtx_for_function_call (fndecl, addr);
3901 if (CALL_EXPR_STATIC_CHAIN (exp))
3902 static_chain_value = expand_normal (CALL_EXPR_STATIC_CHAIN (exp));
3903 else
3904 static_chain_value = 0;
3906 #ifdef REG_PARM_STACK_SPACE
3907 /* Save the fixed argument area if it's part of the caller's frame and
3908 is clobbered by argument setup for this call. */
3909 if (ACCUMULATE_OUTGOING_ARGS && pass)
3910 save_area = save_fixed_argument_area (reg_parm_stack_space, argblock,
3911 &low_to_save, &high_to_save);
3912 #endif
3914 /* Now store (and compute if necessary) all non-register parms.
3915 These come before register parms, since they can require block-moves,
3916 which could clobber the registers used for register parms.
3917 Parms which have partial registers are not stored here,
3918 but we do preallocate space here if they want that. */
3920 for (i = 0; i < num_actuals; i++)
3922 /* Delay bounds until all other args are stored. */
3923 if (POINTER_BOUNDS_P (args[i].tree_value))
3924 continue;
3925 else if (args[i].reg == 0 || args[i].pass_on_stack)
3927 rtx_insn *before_arg = get_last_insn ();
3929 /* We don't allow passing huge (> 2^30 B) arguments
3930 by value. It would cause an overflow later on. */
3931 if (adjusted_args_size.constant
3932 >= (1 << (HOST_BITS_PER_INT - 2)))
3934 sorry ("passing too large argument on stack");
3935 continue;
3938 if (store_one_arg (&args[i], argblock, flags,
3939 adjusted_args_size.var != 0,
3940 reg_parm_stack_space)
3941 || (pass == 0
3942 && check_sibcall_argument_overlap (before_arg,
3943 &args[i], 1)))
3944 sibcall_failure = 1;
3947 if (args[i].stack)
3948 call_fusage
3949 = gen_rtx_EXPR_LIST (TYPE_MODE (TREE_TYPE (args[i].tree_value)),
3950 gen_rtx_USE (VOIDmode, args[i].stack),
3951 call_fusage);
3954 /* If we have a parm that is passed in registers but not in memory
3955 and whose alignment does not permit a direct copy into registers,
3956 make a group of pseudos that correspond to each register that we
3957 will later fill. */
3958 if (STRICT_ALIGNMENT)
3959 store_unaligned_arguments_into_pseudos (args, num_actuals);
3961 /* Now store any partially-in-registers parm.
3962 This is the last place a block-move can happen. */
3963 if (reg_parm_seen)
3964 for (i = 0; i < num_actuals; i++)
3965 if (args[i].partial != 0 && ! args[i].pass_on_stack)
3967 rtx_insn *before_arg = get_last_insn ();
3969 /* On targets with weird calling conventions (e.g. PA) it's
3970 hard to ensure that all cases of argument overlap between
3971 stack and registers work. Play it safe and bail out. */
3972 if (ARGS_GROW_DOWNWARD && !STACK_GROWS_DOWNWARD)
3974 sibcall_failure = 1;
3975 break;
3978 if (store_one_arg (&args[i], argblock, flags,
3979 adjusted_args_size.var != 0,
3980 reg_parm_stack_space)
3981 || (pass == 0
3982 && check_sibcall_argument_overlap (before_arg,
3983 &args[i], 1)))
3984 sibcall_failure = 1;
3987 bool any_regs = false;
3988 for (i = 0; i < num_actuals; i++)
3989 if (args[i].reg != NULL_RTX)
3991 any_regs = true;
3992 targetm.calls.call_args (args[i].reg, funtype);
3994 if (!any_regs)
3995 targetm.calls.call_args (pc_rtx, funtype);
3997 /* Figure out the register where the value, if any, will come back. */
3998 valreg = 0;
3999 valbnd = 0;
4000 if (TYPE_MODE (rettype) != VOIDmode
4001 && ! structure_value_addr)
4003 if (pcc_struct_value)
4005 valreg = hard_function_value (build_pointer_type (rettype),
4006 fndecl, NULL, (pass == 0));
4007 if (CALL_WITH_BOUNDS_P (exp))
4008 valbnd = targetm.calls.
4009 chkp_function_value_bounds (build_pointer_type (rettype),
4010 fndecl, (pass == 0));
4012 else
4014 valreg = hard_function_value (rettype, fndecl, fntype,
4015 (pass == 0));
4016 if (CALL_WITH_BOUNDS_P (exp))
4017 valbnd = targetm.calls.chkp_function_value_bounds (rettype,
4018 fndecl,
4019 (pass == 0));
4022 /* If VALREG is a PARALLEL whose first member has a zero
4023 offset, use that. This is for targets such as m68k that
4024 return the same value in multiple places. */
4025 if (GET_CODE (valreg) == PARALLEL)
4027 rtx elem = XVECEXP (valreg, 0, 0);
4028 rtx where = XEXP (elem, 0);
4029 rtx offset = XEXP (elem, 1);
4030 if (offset == const0_rtx
4031 && GET_MODE (where) == GET_MODE (valreg))
4032 valreg = where;
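/* E.g. a target such as m68k may return a pointer in two registers at
   once; the PARALLEL lists one (register, offset) pair per location,
   and VALREG becomes the first register when offset and mode match.  */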
4036 /* Store all bounds not passed in registers. */
4037 for (i = 0; i < num_actuals; i++)
4039 if (POINTER_BOUNDS_P (args[i].tree_value)
4040 && !args[i].reg)
4041 store_bounds (&args[i],
4042 args[i].pointer_arg == -1
4043 ? NULL
4044 : &args[args[i].pointer_arg]);
4047 /* If register arguments require space on the stack and stack space
4048 was not preallocated, allocate stack space here for arguments
4049 passed in registers. */
4050 if (OUTGOING_REG_PARM_STACK_SPACE ((!fndecl ? fntype : TREE_TYPE (fndecl)))
4051 && !ACCUMULATE_OUTGOING_ARGS
4052 && must_preallocate == 0 && reg_parm_stack_space > 0)
4053 anti_adjust_stack (GEN_INT (reg_parm_stack_space));
4055 /* Pass the function the address in which to return a
4056 structure value. */
4057 if (pass != 0 && structure_value_addr && ! structure_value_addr_parm)
4059 structure_value_addr
4060 = convert_memory_address (Pmode, structure_value_addr);
4061 emit_move_insn (struct_value,
4062 force_reg (Pmode,
4063 force_operand (structure_value_addr,
4064 NULL_RTX)));
4066 if (REG_P (struct_value))
4067 use_reg (&call_fusage, struct_value);
4070 after_args = get_last_insn ();
4071 funexp = prepare_call_address (fndecl ? fndecl : fntype, funexp,
4072 static_chain_value, &call_fusage,
4073 reg_parm_seen, flags);
4075 load_register_parameters (args, num_actuals, &call_fusage, flags,
4076 pass == 0, &sibcall_failure);
4078 /* Save a pointer to the last insn before the call, so that we can
4079 later safely search backwards to find the CALL_INSN. */
4080 before_call = get_last_insn ();
4082 /* Set up next argument register. For sibling calls on machines
4083 with register windows this should be the incoming register. */
4084 if (pass == 0)
4085 next_arg_reg = targetm.calls.function_incoming_arg (args_so_far,
4086 VOIDmode,
4087 void_type_node,
4088 true);
4089 else
4090 next_arg_reg = targetm.calls.function_arg (args_so_far,
4091 VOIDmode, void_type_node,
4092 true);
4094 if (pass == 1 && (return_flags & ERF_RETURNS_ARG))
4096 int arg_nr = return_flags & ERF_RETURN_ARG_MASK;
4097 arg_nr = num_actuals - arg_nr - 1;
4098 if (arg_nr >= 0
4099 && arg_nr < num_actuals
4100 && args[arg_nr].reg
4101 && valreg
4102 && REG_P (valreg)
4103 && GET_MODE (args[arg_nr].reg) == GET_MODE (valreg))
4104 call_fusage
4105 = gen_rtx_EXPR_LIST (TYPE_MODE (TREE_TYPE (args[arg_nr].tree_value)),
4106 gen_rtx_SET (valreg, args[arg_nr].reg),
4107 call_fusage);
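/* E.g. for a built-in memcpy, which returns its first argument: with
   three actuals the returned argument lives at ARGS[2], since ARGS is
   stored in reverse order.  */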
4109 /* All arguments and registers used for the call must be set up by
4110 now! */
4112 /* Stack must be properly aligned now. */
4113 gcc_assert (!pass
4114 || !(stack_pointer_delta % preferred_unit_stack_boundary));
4116 /* Generate the actual call instruction. */
4117 emit_call_1 (funexp, exp, fndecl, funtype, unadjusted_args_size,
4118 adjusted_args_size.constant, struct_value_size,
4119 next_arg_reg, valreg, old_inhibit_defer_pop, call_fusage,
4120 flags, args_so_far);
4122 if (flag_ipa_ra)
4124 rtx_call_insn *last;
4125 rtx datum = NULL_RTX;
4126 if (fndecl != NULL_TREE)
4128 datum = XEXP (DECL_RTL (fndecl), 0);
4129 gcc_assert (datum != NULL_RTX
4130 && GET_CODE (datum) == SYMBOL_REF);
4132 last = last_call_insn ();
4133 add_reg_note (last, REG_CALL_DECL, datum);
4136 /* If the call setup or the call itself overlaps with anything
4137 of the argument setup we probably clobbered our call address.
4138 In that case we can't do sibcalls. */
4139 if (pass == 0
4140 && check_sibcall_argument_overlap (after_args, 0, 0))
4141 sibcall_failure = 1;
4143 /* If a non-BLKmode value is returned at the most significant end
4144 of a register, shift the register right by the appropriate amount
4145 and update VALREG accordingly. BLKmode values are handled by the
4146 group load/store machinery below. */
4147 if (!structure_value_addr
4148 && !pcc_struct_value
4149 && TYPE_MODE (rettype) != VOIDmode
4150 && TYPE_MODE (rettype) != BLKmode
4151 && REG_P (valreg)
4152 && targetm.calls.return_in_msb (rettype))
4154 if (shift_return_value (TYPE_MODE (rettype), false, valreg))
4155 sibcall_failure = 1;
4156 valreg = gen_rtx_REG (TYPE_MODE (rettype), REGNO (valreg));
4159 if (pass && (flags & ECF_MALLOC))
4161 rtx temp = gen_reg_rtx (GET_MODE (valreg));
4162 rtx_insn *last, *insns;
4164 /* The return value from a malloc-like function is a pointer. */
4165 if (TREE_CODE (rettype) == POINTER_TYPE)
4166 mark_reg_pointer (temp, MALLOC_ABI_ALIGNMENT);
4168 emit_move_insn (temp, valreg);
4170 /* The return value from a malloc-like function cannot alias
4171 anything else. */
4172 last = get_last_insn ();
4173 add_reg_note (last, REG_NOALIAS, temp);
4175 /* Write out the sequence. */
4176 insns = get_insns ();
4177 end_sequence ();
4178 emit_insn (insns);
4179 valreg = temp;
4182 /* For calls to `setjmp', etc., inform
4183 function.c:setjmp_warnings that it should complain if
4184 nonvolatile values are live. For functions that cannot
4185 return, inform flow that control does not fall through. */
4187 if ((flags & ECF_NORETURN) || pass == 0)
4189 /* The barrier must be emitted
4190 immediately after the CALL_INSN. Some ports emit more
4191 than just a CALL_INSN above, so we must search for it here. */
4193 rtx_insn *last = get_last_insn ();
4194 while (!CALL_P (last))
4196 last = PREV_INSN (last);
4197 /* There was no CALL_INSN? */
4198 gcc_assert (last != before_call);
4201 emit_barrier_after (last);
4203 /* Stack adjustments after a noreturn call are dead code.
4204 However when NO_DEFER_POP is in effect, we must preserve
4205 stack_pointer_delta. */
4206 if (inhibit_defer_pop == 0)
4208 stack_pointer_delta = old_stack_allocated;
4209 pending_stack_adjust = 0;
4213 /* If value type not void, return an rtx for the value. */
4215 if (TYPE_MODE (rettype) == VOIDmode
4216 || ignore)
4217 target = const0_rtx;
4218 else if (structure_value_addr)
4220 if (target == 0 || !MEM_P (target))
4222 target
4223 = gen_rtx_MEM (TYPE_MODE (rettype),
4224 memory_address (TYPE_MODE (rettype),
4225 structure_value_addr));
4226 set_mem_attributes (target, rettype, 1);
4229 else if (pcc_struct_value)
4231 /* This is the special C++ case where we need to
4232 know what the true target was. We take care to
4233 never use this value more than once in one expression. */
4234 target = gen_rtx_MEM (TYPE_MODE (rettype),
4235 copy_to_reg (valreg));
4236 set_mem_attributes (target, rettype, 1);
4238 /* Handle calls that return values in multiple non-contiguous locations.
4239 The Irix 6 ABI has examples of this. */
4240 else if (GET_CODE (valreg) == PARALLEL)
4242 if (target == 0)
4243 target = emit_group_move_into_temps (valreg);
4244 else if (rtx_equal_p (target, valreg))
4246 else if (GET_CODE (target) == PARALLEL)
4247 /* Handle the result of an emit_group_move_into_temps
4248 call in the previous pass. */
4249 emit_group_move (target, valreg);
4250 else
4251 emit_group_store (target, valreg, rettype,
4252 int_size_in_bytes (rettype));
4254 else if (target
4255 && GET_MODE (target) == TYPE_MODE (rettype)
4256 && GET_MODE (target) == GET_MODE (valreg))
4258 bool may_overlap = false;
4260 /* We have to copy a return value in a CLASS_LIKELY_SPILLED hard
4261 reg to a plain register. */
4262 if (!REG_P (target) || HARD_REGISTER_P (target))
4263 valreg = avoid_likely_spilled_reg (valreg);
4265 /* If TARGET is a MEM in the argument area, and we have
4266 saved part of the argument area, then we can't store
4267 directly into TARGET as it may get overwritten when we
4268 restore the argument save area below. Don't work too
4269 hard though and simply force TARGET to a register if it
4270 is a MEM; the optimizer is quite likely to sort it out. */
4271 if (ACCUMULATE_OUTGOING_ARGS && pass && MEM_P (target))
4272 for (i = 0; i < num_actuals; i++)
4273 if (args[i].save_area)
4275 may_overlap = true;
4276 break;
4279 if (may_overlap)
4280 target = copy_to_reg (valreg);
4281 else
4283 /* TARGET and VALREG cannot be equal at this point
4284 because the latter would not have
4285 REG_FUNCTION_VALUE_P true, while the former would if
4286 it were referring to the same register.
4288 If they refer to the same register, this move will be
4289 a no-op, except when function inlining is being
4290 done. */
4291 emit_move_insn (target, valreg);
4293 /* If we are setting a MEM, this code must be executed.
4294 Since it is emitted after the call insn, sibcall
4295 optimization cannot be performed in that case. */
4296 if (MEM_P (target))
4297 sibcall_failure = 1;
4300 else
4301 target = copy_to_reg (avoid_likely_spilled_reg (valreg));
4303 /* If we promoted this return value, make the proper SUBREG.
4304 TARGET might be const0_rtx here, so be careful. */
4305 if (REG_P (target)
4306 && TYPE_MODE (rettype) != BLKmode
4307 && GET_MODE (target) != TYPE_MODE (rettype))
4309 tree type = rettype;
4310 int unsignedp = TYPE_UNSIGNED (type);
4311 machine_mode pmode;
4313 /* Ensure we promote as expected, and get the new unsignedness. */
4314 pmode = promote_function_mode (type, TYPE_MODE (type), &unsignedp,
4315 funtype, 1);
4316 gcc_assert (GET_MODE (target) == pmode);
4318 unsigned int offset = subreg_lowpart_offset (TYPE_MODE (type),
4319 GET_MODE (target));
4320 target = gen_rtx_SUBREG (TYPE_MODE (type), target, offset);
4321 SUBREG_PROMOTED_VAR_P (target) = 1;
4322 SUBREG_PROMOTED_SET (target, unsignedp);
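/* E.g. if the target promotes a QImode return value to SImode, TARGET
   is the SImode register copy and we wrap it in a lowpart
   (subreg:QI (reg:SI)) with SUBREG_PROMOTED_VAR_P set, so later code
   knows the upper bits already hold a valid extension.  */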
4325 /* If size of args is variable or this was a constructor call for a stack
4326 argument, restore saved stack-pointer value. */
4328 if (old_stack_level)
4330 rtx_insn *prev = get_last_insn ();
4332 emit_stack_restore (SAVE_BLOCK, old_stack_level);
4333 stack_pointer_delta = old_stack_pointer_delta;
4335 fixup_args_size_notes (prev, get_last_insn (), stack_pointer_delta);
4337 pending_stack_adjust = old_pending_adj;
4338 old_stack_allocated = stack_pointer_delta - pending_stack_adjust;
4339 stack_arg_under_construction = old_stack_arg_under_construction;
4340 highest_outgoing_arg_in_use = initial_highest_arg_in_use;
4341 stack_usage_map = initial_stack_usage_map;
4342 sibcall_failure = 1;
4344 else if (ACCUMULATE_OUTGOING_ARGS && pass)
4346 #ifdef REG_PARM_STACK_SPACE
4347 if (save_area)
4348 restore_fixed_argument_area (save_area, argblock,
4349 high_to_save, low_to_save);
4350 #endif
4352 /* If we saved any argument areas, restore them. */
4353 for (i = 0; i < num_actuals; i++)
4354 if (args[i].save_area)
4356 machine_mode save_mode = GET_MODE (args[i].save_area);
4357 rtx stack_area
4358 = gen_rtx_MEM (save_mode,
4359 memory_address (save_mode,
4360 XEXP (args[i].stack_slot, 0)));
4362 if (save_mode != BLKmode)
4363 emit_move_insn (stack_area, args[i].save_area);
4364 else
4365 emit_block_move (stack_area, args[i].save_area,
4366 GEN_INT (args[i].locate.size.constant),
4367 BLOCK_OP_CALL_PARM);
4370 highest_outgoing_arg_in_use = initial_highest_arg_in_use;
4371 stack_usage_map = initial_stack_usage_map;
4374 /* If this was alloca, record the new stack level. */
4375 if (flags & ECF_MAY_BE_ALLOCA)
4376 record_new_stack_level ();
4378 /* Free up storage we no longer need. */
4379 for (i = 0; i < num_actuals; ++i)
4380 free (args[i].aligned_regs);
4382 targetm.calls.end_call_args ();
4384 insns = get_insns ();
4385 end_sequence ();
4387 if (pass == 0)
4389 tail_call_insns = insns;
4391 /* Restore the pending stack adjustment now that we have
4392 finished generating the sibling call sequence. */
4394 restore_pending_stack_adjust (&save);
4396 /* Prepare arg structure for next iteration. */
4397 for (i = 0; i < num_actuals; i++)
4399 args[i].value = 0;
4400 args[i].aligned_regs = 0;
4401 args[i].stack = 0;
4404 sbitmap_free (stored_args_map);
4405 internal_arg_pointer_exp_state.scan_start = NULL;
4406 internal_arg_pointer_exp_state.cache.release ();
4408 else
4410 normal_call_insns = insns;
4412 /* Verify that we've deallocated all the stack we used. */
4413 gcc_assert ((flags & ECF_NORETURN)
4414 || (old_stack_allocated
4415 == stack_pointer_delta - pending_stack_adjust));
4418 /* If something prevents making this a sibling call,
4419 zero out the sequence. */
4420 if (sibcall_failure)
4421 tail_call_insns = NULL;
4422 else
4423 break;
4426 /* If tail call production succeeded, we need to remove REG_EQUIV notes on
4427 arguments too, as argument area is now clobbered by the call. */
4428 if (tail_call_insns)
4430 emit_insn (tail_call_insns);
4431 crtl->tail_call_emit = true;
4433 else
4435 emit_insn (normal_call_insns);
4436 if (try_tail_call)
4437 /* Ideally we'd emit a message for all of the ways that it could
4438 have failed. */
4439 maybe_complain_about_tail_call (exp, "tail call production failed");
4442 currently_expanding_call--;
4444 free (stack_usage_map_buf);
4445 free (args);
4447 /* Join result with returned bounds so caller may use them if needed. */
4448 target = chkp_join_splitted_slot (target, valbnd);
4450 return target;
4453 /* A sibling call sequence invalidates any REG_EQUIV notes made for
4454 this function's incoming arguments.
4456 At the start of RTL generation we know the only REG_EQUIV notes
4457 in the rtl chain are those for incoming arguments, so we can look
4458 for REG_EQUIV notes between the start of the function and the
4459 NOTE_INSN_FUNCTION_BEG.
4461 This is (slight) overkill. We could keep track of the highest
4462 argument we clobber and be more selective in removing notes, but it
4463 does not seem to be worth the effort. */
4465 void
4466 fixup_tail_calls (void)
4468 rtx_insn *insn;
4470 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
4472 rtx note;
4474 /* There are never REG_EQUIV notes for the incoming arguments
4475 after the NOTE_INSN_FUNCTION_BEG note, so stop if we see it. */
4476 if (NOTE_P (insn)
4477 && NOTE_KIND (insn) == NOTE_INSN_FUNCTION_BEG)
4478 break;
4480 note = find_reg_note (insn, REG_EQUIV, 0);
4481 if (note)
4482 remove_note (insn, note);
4483 note = find_reg_note (insn, REG_EQUIV, 0);
4484 gcc_assert (!note);
4488 /* Traverse a list of TYPES and expand all complex types into their
4489 components. */
4490 static tree
4491 split_complex_types (tree types)
4493 tree p;
4495 /* Before allocating memory, check for the common case of no complex. */
4496 for (p = types; p; p = TREE_CHAIN (p))
4498 tree type = TREE_VALUE (p);
4499 if (TREE_CODE (type) == COMPLEX_TYPE
4500 && targetm.calls.split_complex_arg (type))
4501 goto found;
4503 return types;
4505 found:
4506 types = copy_list (types);
4508 for (p = types; p; p = TREE_CHAIN (p))
4510 tree complex_type = TREE_VALUE (p);
4512 if (TREE_CODE (complex_type) == COMPLEX_TYPE
4513 && targetm.calls.split_complex_arg (complex_type))
4515 tree next, imag;
4517 /* Rewrite complex type with component type. */
4518 TREE_VALUE (p) = TREE_TYPE (complex_type);
4519 next = TREE_CHAIN (p);
4521 /* Add another component type for the imaginary part. */
4522 imag = build_tree_list (NULL_TREE, TREE_VALUE (p));
4523 TREE_CHAIN (p) = imag;
4524 TREE_CHAIN (imag) = next;
4526 /* Skip the newly created node. */
4527 p = TREE_CHAIN (p);
4531 return types;
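/* For instance, if the target splits complex double, the type list
   (complex double, int) becomes (double, double, int): each complex
   node is rewritten to its component type and a second component is
   chained in for the imaginary part.  */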
4534 /* Output a library call to function ORGFUN (a SYMBOL_REF rtx)
4535 for a value of mode OUTMODE,
4536 with NARGS different arguments, passed as ARGS.
4537 Store the return value if RETVAL is nonzero: store it in VALUE if
4538 VALUE is nonnull, otherwise pick a convenient location. In either
4539 case return the location of the stored value.
4541 FN_TYPE should be LCT_NORMAL for `normal' calls, LCT_CONST for
4542 `const' calls, LCT_PURE for `pure' calls, or another LCT_ value for
4543 other types of library calls. */
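/* A minimal, illustrative sketch of a call to this routine, emitting
   "res = __mulsi3 (a, b)" by hand.  A, B and MULSI3_LIBFUNC are
   hypothetical: the latter stands for a SYMBOL_REF obtained elsewhere
   (e.g. from optab_libfunc).  Guarded out; not part of this file.  */
#if 0
rtx_mode_t ops[2] = { rtx_mode_t (a, SImode), rtx_mode_t (b, SImode) };
rtx res = emit_library_call_value_1 (1, mulsi3_libfunc, NULL_RTX,
				     LCT_CONST, SImode, 2, ops);
#endif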
4545 rtx
4546 emit_library_call_value_1 (int retval, rtx orgfun, rtx value,
4547 enum libcall_type fn_type,
4548 machine_mode outmode, int nargs, rtx_mode_t *args)
4550 /* Total size in bytes of all the stack-parms scanned so far. */
4551 struct args_size args_size;
4552 /* Size of arguments before any adjustments (such as rounding). */
4553 struct args_size original_args_size;
4554 int argnum;
4555 rtx fun;
4556 /* TODO: choose the correct decl type of orgfun. Sadly this information
4557 isn't present here, so we default to the native calling ABI. */
4558 tree fndecl ATTRIBUTE_UNUSED = NULL_TREE; /* library calls default to the host calling ABI? */
4559 tree fntype ATTRIBUTE_UNUSED = NULL_TREE; /* library calls default to the host calling ABI? */
4560 int count;
4561 rtx argblock = 0;
4562 CUMULATIVE_ARGS args_so_far_v;
4563 cumulative_args_t args_so_far;
4564 struct arg
4566 rtx value;
4567 machine_mode mode;
4568 rtx reg;
4569 int partial;
4570 struct locate_and_pad_arg_data locate;
4571 rtx save_area;
4573 struct arg *argvec;
4574 int old_inhibit_defer_pop = inhibit_defer_pop;
4575 rtx call_fusage = 0;
4576 rtx mem_value = 0;
4577 rtx valreg;
4578 int pcc_struct_value = 0;
4579 int struct_value_size = 0;
4580 int flags;
4581 int reg_parm_stack_space = 0;
4582 int needed;
4583 rtx_insn *before_call;
4584 bool have_push_fusage;
4585 tree tfom; /* type_for_mode (outmode, 0) */
4587 #ifdef REG_PARM_STACK_SPACE
4588 /* Define the boundary of the register parm stack space that needs to be
4589 saved, if any. */
4590 int low_to_save = 0, high_to_save = 0;
4591 rtx save_area = 0; /* Place that it is saved. */
4592 #endif
4594 /* Size of the stack reserved for parameter registers. */
4595 int initial_highest_arg_in_use = highest_outgoing_arg_in_use;
4596 char *initial_stack_usage_map = stack_usage_map;
4597 char *stack_usage_map_buf = NULL;
4599 rtx struct_value = targetm.calls.struct_value_rtx (0, 0);
4601 #ifdef REG_PARM_STACK_SPACE
4602 reg_parm_stack_space = REG_PARM_STACK_SPACE ((tree) 0);
4603 #endif
4605 /* By default, library functions cannot throw. */
4606 flags = ECF_NOTHROW;
4608 switch (fn_type)
4610 case LCT_NORMAL:
4611 break;
4612 case LCT_CONST:
4613 flags |= ECF_CONST;
4614 break;
4615 case LCT_PURE:
4616 flags |= ECF_PURE;
4617 break;
4618 case LCT_NORETURN:
4619 flags |= ECF_NORETURN;
4620 break;
4621 case LCT_THROW:
4622 flags &= ~ECF_NOTHROW;
4623 break;
4624 case LCT_RETURNS_TWICE:
4625 flags = ECF_RETURNS_TWICE;
4626 break;
4628 fun = orgfun;
4630 /* Ensure current function's preferred stack boundary is at least
4631 what we need. */
4632 if (crtl->preferred_stack_boundary < PREFERRED_STACK_BOUNDARY)
4633 crtl->preferred_stack_boundary = PREFERRED_STACK_BOUNDARY;
4635 /* If this kind of value comes back in memory,
4636 decide where in memory it should come back. */
4637 if (outmode != VOIDmode)
4639 tfom = lang_hooks.types.type_for_mode (outmode, 0);
4640 if (aggregate_value_p (tfom, 0))
4642 #ifdef PCC_STATIC_STRUCT_RETURN
4643 rtx pointer_reg
4644 = hard_function_value (build_pointer_type (tfom), 0, 0, 0);
4645 mem_value = gen_rtx_MEM (outmode, pointer_reg);
4646 pcc_struct_value = 1;
4647 if (value == 0)
4648 value = gen_reg_rtx (outmode);
4649 #else /* not PCC_STATIC_STRUCT_RETURN */
4650 struct_value_size = GET_MODE_SIZE (outmode);
4651 if (value != 0 && MEM_P (value))
4652 mem_value = value;
4653 else
4654 mem_value = assign_temp (tfom, 1, 1);
4655 #endif
4656 /* This call returns a big structure. */
4657 flags &= ~(ECF_CONST | ECF_PURE | ECF_LOOPING_CONST_OR_PURE);
4660 else
4661 tfom = void_type_node;
4663 /* ??? Unfinished: must pass the memory address as an argument. */
4665 /* Copy all the libcall arguments out of the ARGS array
4666 and into a vector ARGVEC.
4668 Compute how to pass each argument. We only support a very small subset
4669 of the full argument passing conventions to limit complexity here since
4670 library functions shouldn't have many args. */
4672 argvec = XALLOCAVEC (struct arg, nargs + 1);
4673 memset (argvec, 0, (nargs + 1) * sizeof (struct arg));
4675 #ifdef INIT_CUMULATIVE_LIBCALL_ARGS
4676 INIT_CUMULATIVE_LIBCALL_ARGS (args_so_far_v, outmode, fun);
4677 #else
4678 INIT_CUMULATIVE_ARGS (args_so_far_v, NULL_TREE, fun, 0, nargs);
4679 #endif
4680 args_so_far = pack_cumulative_args (&args_so_far_v);
4682 args_size.constant = 0;
4683 args_size.var = 0;
4685 count = 0;
4687 push_temp_slots ();
4689 /* If there's a structure value address to be passed,
4690 either pass it in the special place, or pass it as an extra argument. */
4691 if (mem_value && struct_value == 0 && ! pcc_struct_value)
4693 rtx addr = XEXP (mem_value, 0);
4695 nargs++;
4697 /* Make sure it is a reasonable operand for a move or push insn. */
4698 if (!REG_P (addr) && !MEM_P (addr)
4699 && !(CONSTANT_P (addr)
4700 && targetm.legitimate_constant_p (Pmode, addr)))
4701 addr = force_operand (addr, NULL_RTX);
4703 argvec[count].value = addr;
4704 argvec[count].mode = Pmode;
4705 argvec[count].partial = 0;
4707 argvec[count].reg = targetm.calls.function_arg (args_so_far,
4708 Pmode, NULL_TREE, true);
4709 gcc_assert (targetm.calls.arg_partial_bytes (args_so_far, Pmode,
4710 NULL_TREE, 1) == 0);
4712 locate_and_pad_parm (Pmode, NULL_TREE,
4713 #ifdef STACK_PARMS_IN_REG_PARM_AREA
4715 #else
4716 argvec[count].reg != 0,
4717 #endif
4718 reg_parm_stack_space, 0,
4719 NULL_TREE, &args_size, &argvec[count].locate);
4721 if (argvec[count].reg == 0 || argvec[count].partial != 0
4722 || reg_parm_stack_space > 0)
4723 args_size.constant += argvec[count].locate.size.constant;
4725 targetm.calls.function_arg_advance (args_so_far, Pmode, (tree) 0, true);
4727 count++;
4730 for (unsigned int i = 0; count < nargs; i++, count++)
4732 rtx val = args[i].first;
4733 machine_mode mode = args[i].second;
4734 int unsigned_p = 0;
4736 /* We cannot convert the arg value to the mode the library wants here;
4737 must do it earlier where we know the signedness of the arg. */
4738 gcc_assert (mode != BLKmode
4739 && (GET_MODE (val) == mode || GET_MODE (val) == VOIDmode));
4741 /* Make sure it is a reasonable operand for a move or push insn. */
4742 if (!REG_P (val) && !MEM_P (val)
4743 && !(CONSTANT_P (val) && targetm.legitimate_constant_p (mode, val)))
4744 val = force_operand (val, NULL_RTX);
4746 if (pass_by_reference (&args_so_far_v, mode, NULL_TREE, 1))
4748 rtx slot;
4749 int must_copy
4750 = !reference_callee_copied (&args_so_far_v, mode, NULL_TREE, 1);
4752 /* If this was a CONST function, it is now PURE since it now
4753 reads memory. */
4754 if (flags & ECF_CONST)
4756 flags &= ~ECF_CONST;
4757 flags |= ECF_PURE;
4760 if (MEM_P (val) && !must_copy)
4762 tree val_expr = MEM_EXPR (val);
4763 if (val_expr)
4764 mark_addressable (val_expr);
4765 slot = val;
4767 else
4769 slot = assign_temp (lang_hooks.types.type_for_mode (mode, 0),
4770 1, 1);
4771 emit_move_insn (slot, val);
4774 call_fusage = gen_rtx_EXPR_LIST (VOIDmode,
4775 gen_rtx_USE (VOIDmode, slot),
4776 call_fusage);
4777 if (must_copy)
4778 call_fusage = gen_rtx_EXPR_LIST (VOIDmode,
4779 gen_rtx_CLOBBER (VOIDmode,
4780 slot),
4781 call_fusage);
4783 mode = Pmode;
4784 val = force_operand (XEXP (slot, 0), NULL_RTX);
4787 mode = promote_function_mode (NULL_TREE, mode, &unsigned_p, NULL_TREE, 0);
4788 argvec[count].mode = mode;
4789 argvec[count].value = convert_modes (mode, GET_MODE (val), val, unsigned_p);
4790 argvec[count].reg = targetm.calls.function_arg (args_so_far, mode,
4791 NULL_TREE, true);
4793 argvec[count].partial
4794 = targetm.calls.arg_partial_bytes (args_so_far, mode, NULL_TREE, 1);
4796 if (argvec[count].reg == 0
4797 || argvec[count].partial != 0
4798 || reg_parm_stack_space > 0)
4800 locate_and_pad_parm (mode, NULL_TREE,
4801 #ifdef STACK_PARMS_IN_REG_PARM_AREA
4803 #else
4804 argvec[count].reg != 0,
4805 #endif
4806 reg_parm_stack_space, argvec[count].partial,
4807 NULL_TREE, &args_size, &argvec[count].locate);
4808 args_size.constant += argvec[count].locate.size.constant;
4809 gcc_assert (!argvec[count].locate.size.var);
4811 #ifdef BLOCK_REG_PADDING
4812 else
4813 /* The argument is passed entirely in registers. See at which
4814 end it should be padded. */
4815 argvec[count].locate.where_pad =
4816 BLOCK_REG_PADDING (mode, NULL_TREE,
4817 GET_MODE_SIZE (mode) <= UNITS_PER_WORD);
4818 #endif
4820 targetm.calls.function_arg_advance (args_so_far, mode, (tree) 0, true);
4823 /* If this machine requires an external definition for library
4824 functions, write one out. */
4825 assemble_external_libcall (fun);
4827 original_args_size = args_size;
4828 args_size.constant = (((args_size.constant
4829 + stack_pointer_delta
4830 + STACK_BYTES - 1)
4831 / STACK_BYTES
4832 * STACK_BYTES)
4833 - stack_pointer_delta);
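/* For instance, with STACK_BYTES == 16, STACK_POINTER_DELTA == 8 and 20
   bytes of arguments: (20 + 8 + 15) / 16 * 16 - 8 == 24, so the delta
   plus the rounded size (8 + 24 == 32) lands on the boundary.  */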
4835 args_size.constant = MAX (args_size.constant,
4836 reg_parm_stack_space);
4838 if (! OUTGOING_REG_PARM_STACK_SPACE ((!fndecl ? fntype : TREE_TYPE (fndecl))))
4839 args_size.constant -= reg_parm_stack_space;
4841 if (args_size.constant > crtl->outgoing_args_size)
4842 crtl->outgoing_args_size = args_size.constant;
4844 if (flag_stack_usage_info && !ACCUMULATE_OUTGOING_ARGS)
4846 int pushed = args_size.constant + pending_stack_adjust;
4847 if (pushed > current_function_pushed_stack_size)
4848 current_function_pushed_stack_size = pushed;
4851 if (ACCUMULATE_OUTGOING_ARGS)
4853 /* Since the stack pointer will never be pushed, it is possible for
4854 the evaluation of a parm to clobber something we have already
4855 written to the stack. Since most function calls on RISC machines
4856 do not use the stack, this is uncommon, but must work correctly.
4858 Therefore, we save any area of the stack that was already written
4859 and that we are using. Here we set up to do this by making a new
4860 stack usage map from the old one.
4862 Another approach might be to try to reorder the argument
4863 evaluations to avoid this conflicting stack usage. */
4865 needed = args_size.constant;
4867 /* Since we will be writing into the entire argument area, the
4868 map must be allocated for its entire size, not just the part that
4869 is the responsibility of the caller. */
4870 if (! OUTGOING_REG_PARM_STACK_SPACE ((!fndecl ? fntype : TREE_TYPE (fndecl))))
4871 needed += reg_parm_stack_space;
4873 if (ARGS_GROW_DOWNWARD)
4874 highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
4875 needed + 1);
4876 else
4877 highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use, needed);
4879 stack_usage_map_buf = XNEWVEC (char, highest_outgoing_arg_in_use);
4880 stack_usage_map = stack_usage_map_buf;
4882 if (initial_highest_arg_in_use)
4883 memcpy (stack_usage_map, initial_stack_usage_map,
4884 initial_highest_arg_in_use);
4886 if (initial_highest_arg_in_use != highest_outgoing_arg_in_use)
4887 memset (&stack_usage_map[initial_highest_arg_in_use], 0,
4888 highest_outgoing_arg_in_use - initial_highest_arg_in_use);
4889 needed = 0;
4891 /* We must be careful to use virtual regs before they're instantiated,
4892 and real regs afterwards. Loop optimization, for example, can create
4893 new libcalls after we've instantiated the virtual regs, and if we
4894 use virtuals anyway, they won't match the rtl patterns. */
4896 if (virtuals_instantiated)
4897 argblock = plus_constant (Pmode, stack_pointer_rtx,
4898 STACK_POINTER_OFFSET);
4899 else
4900 argblock = virtual_outgoing_args_rtx;
4902 else
4904 if (!PUSH_ARGS)
4905 argblock = push_block (GEN_INT (args_size.constant), 0, 0);
4908 /* We push args individually in reverse order, performing stack alignment
4909 before the first push (the last arg). */
4910 if (argblock == 0)
4911 anti_adjust_stack (GEN_INT (args_size.constant
4912 - original_args_size.constant));
4914 argnum = nargs - 1;
4916 #ifdef REG_PARM_STACK_SPACE
4917 if (ACCUMULATE_OUTGOING_ARGS)
4919 /* The argument list is the property of the called routine and it
4920 may clobber it. If the fixed area has been used for previous
4921 parameters, we must save and restore it. */
4922 save_area = save_fixed_argument_area (reg_parm_stack_space, argblock,
4923 &low_to_save, &high_to_save);
4925 #endif
4927 /* When expanding a normal call, args are stored in push order,
4928 which is the reverse of what we have here. */
4929 bool any_regs = false;
4930 for (int i = nargs; i-- > 0; )
4931 if (argvec[i].reg != NULL_RTX)
4933 targetm.calls.call_args (argvec[i].reg, NULL_TREE);
4934 any_regs = true;
4936 if (!any_regs)
4937 targetm.calls.call_args (pc_rtx, NULL_TREE);
4939 /* Push the args that need to be pushed. */
4941 have_push_fusage = false;
4943 /* ARGNUM indexes the ARGVEC array in the order in which the arguments
4944 are to be pushed. */
4945 for (count = 0; count < nargs; count++, argnum--)
4947 machine_mode mode = argvec[argnum].mode;
4948 rtx val = argvec[argnum].value;
4949 rtx reg = argvec[argnum].reg;
4950 int partial = argvec[argnum].partial;
4951 unsigned int parm_align = argvec[argnum].locate.boundary;
4952 int lower_bound = 0, upper_bound = 0, i;
4954 if (! (reg != 0 && partial == 0))
4956 rtx use;
4958 if (ACCUMULATE_OUTGOING_ARGS)
4960 /* If this is being stored into a pre-allocated, fixed-size,
4961 stack area, save any previous data at that location. */
4963 if (ARGS_GROW_DOWNWARD)
4965 /* stack_slot is negative, but we want to index stack_usage_map
4966 with positive values. */
4967 upper_bound = -argvec[argnum].locate.slot_offset.constant + 1;
4968 lower_bound = upper_bound - argvec[argnum].locate.size.constant;
4970 else
4972 lower_bound = argvec[argnum].locate.slot_offset.constant;
4973 upper_bound = lower_bound + argvec[argnum].locate.size.constant;
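/* For instance, when args grow downward, a slot offset of -16 with an
   8-byte argument yields UPPER_BOUND == 17 and LOWER_BOUND == 9, so
   map entries 9 through 16 are checked below.  */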
4976 i = lower_bound;
4977 /* Don't worry about things in the fixed argument area;
4978 it has already been saved. */
4979 if (i < reg_parm_stack_space)
4980 i = reg_parm_stack_space;
4981 while (i < upper_bound && stack_usage_map[i] == 0)
4982 i++;
4984 if (i < upper_bound)
4986 /* We need to make a save area. */
4987 unsigned int size
4988 = argvec[argnum].locate.size.constant * BITS_PER_UNIT;
4989 machine_mode save_mode
4990 = int_mode_for_size (size, 1).else_blk ();
4991 rtx adr
4992 = plus_constant (Pmode, argblock,
4993 argvec[argnum].locate.offset.constant);
4994 rtx stack_area
4995 = gen_rtx_MEM (save_mode, memory_address (save_mode, adr));
4997 if (save_mode == BLKmode)
4999 argvec[argnum].save_area
5000 = assign_stack_temp (BLKmode,
5001 argvec[argnum].locate.size.constant
5002 );
5004 emit_block_move (validize_mem
5005 (copy_rtx (argvec[argnum].save_area)),
5006 stack_area,
5007 GEN_INT (argvec[argnum].locate.size.constant),
5008 BLOCK_OP_CALL_PARM);
5010 else
5012 argvec[argnum].save_area = gen_reg_rtx (save_mode);
5014 emit_move_insn (argvec[argnum].save_area, stack_area);
5019 emit_push_insn (val, mode, NULL_TREE, NULL_RTX, parm_align,
5020 partial, reg, 0, argblock,
5021 GEN_INT (argvec[argnum].locate.offset.constant),
5022 reg_parm_stack_space,
5023 ARGS_SIZE_RTX (argvec[argnum].locate.alignment_pad), false);
5025 /* Now mark the segment we just used. */
5026 if (ACCUMULATE_OUTGOING_ARGS)
5027 for (i = lower_bound; i < upper_bound; i++)
5028 stack_usage_map[i] = 1;
5030 NO_DEFER_POP;
5032 /* Indicate argument access so that alias.c knows that these
5033 values are live. */
5034 if (argblock)
5035 use = plus_constant (Pmode, argblock,
5036 argvec[argnum].locate.offset.constant);
5037 else if (have_push_fusage)
5038 continue;
5039 else
5041 /* When arguments are pushed, trying to tell alias.c where
5042 exactly this argument is won't work, because the
5043 auto-increment causes confusion. So we merely indicate
5044 that we access something with a known mode somewhere on
5045 the stack. */
5046 use = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
5047 gen_rtx_SCRATCH (Pmode));
5048 have_push_fusage = true;
5050 use = gen_rtx_MEM (argvec[argnum].mode, use);
5051 use = gen_rtx_USE (VOIDmode, use);
5052 call_fusage = gen_rtx_EXPR_LIST (VOIDmode, use, call_fusage);
5056 argnum = nargs - 1;
5058 fun = prepare_call_address (NULL, fun, NULL, &call_fusage, 0, 0);
5060 /* Now load any reg parms into their regs. */
5062 /* ARGNUM indexes the ARGVEC array in the order in which the arguments
5063 are to be pushed. */
5064 for (count = 0; count < nargs; count++, argnum--)
5066 machine_mode mode = argvec[argnum].mode;
5067 rtx val = argvec[argnum].value;
5068 rtx reg = argvec[argnum].reg;
5069 int partial = argvec[argnum].partial;
5070 #ifdef BLOCK_REG_PADDING
5071 int size = 0;
5072 #endif
5074 /* Handle calls that pass values in multiple non-contiguous
5075 locations. The PA64 has examples of this for library calls. */
5076 if (reg != 0 && GET_CODE (reg) == PARALLEL)
5077 emit_group_load (reg, val, NULL_TREE, GET_MODE_SIZE (mode));
5078 else if (reg != 0 && partial == 0)
5080 emit_move_insn (reg, val);
5081 #ifdef BLOCK_REG_PADDING
5082 size = GET_MODE_SIZE (argvec[argnum].mode);
5084 /* Copied from load_register_parameters. */
5086 /* Handle the case where we have a value that needs shifting
5087 up to the msb, e.g. a QImode value when we're padding
5088 upward on a BYTES_BIG_ENDIAN machine. */
5089 if (size < UNITS_PER_WORD
5090 && (argvec[argnum].locate.where_pad
5091 == (BYTES_BIG_ENDIAN ? PAD_UPWARD : PAD_DOWNWARD)))
5093 rtx x;
5094 int shift = (UNITS_PER_WORD - size) * BITS_PER_UNIT;
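/* E.g. a QImode value (SIZE == 1) with 4-byte words gives SHIFT == 24,
   moving the byte into the most significant position of the word.  */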
5096 /* Assigning REG here rather than a temp makes CALL_FUSAGE
5097 report the whole reg as used. Strictly speaking, the
5098 call only uses SIZE bytes at the msb end, but it doesn't
5099 seem worth generating rtl to say that. */
5100 reg = gen_rtx_REG (word_mode, REGNO (reg));
5101 x = expand_shift (LSHIFT_EXPR, word_mode, reg, shift, reg, 1);
5102 if (x != reg)
5103 emit_move_insn (reg, x);
5105 #endif
5108 NO_DEFER_POP;
5111 /* Any regs containing parms remain in use through the call. */
5112 for (count = 0; count < nargs; count++)
5114 rtx reg = argvec[count].reg;
5115 if (reg != 0 && GET_CODE (reg) == PARALLEL)
5116 use_group_regs (&call_fusage, reg);
5117 else if (reg != 0)
5119 int partial = argvec[count].partial;
5120 if (partial)
5122 int nregs;
5123 gcc_assert (partial % UNITS_PER_WORD == 0);
5124 nregs = partial / UNITS_PER_WORD;
5125 use_regs (&call_fusage, REGNO (reg), nregs);
5127 else
5128 use_reg (&call_fusage, reg);
5132 /* Pass the function the address in which to return a structure value. */
5133 if (mem_value != 0 && struct_value != 0 && ! pcc_struct_value)
5135 emit_move_insn (struct_value,
5136 force_reg (Pmode,
5137 force_operand (XEXP (mem_value, 0),
5138 NULL_RTX)));
5139 if (REG_P (struct_value))
5140 use_reg (&call_fusage, struct_value);
5143 /* Don't allow popping to be deferred, since then
5144 cse'ing of library calls could delete a call and leave the pop. */
5145 NO_DEFER_POP;
5146 valreg = (mem_value == 0 && outmode != VOIDmode
5147 ? hard_libcall_value (outmode, orgfun) : NULL_RTX);
5149 /* Stack must be properly aligned now. */
5150 gcc_assert (!(stack_pointer_delta
5151 & (PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT - 1)));
5153 before_call = get_last_insn ();
5155 /* We pass the old value of inhibit_defer_pop + 1 to emit_call_1, which
5156 will set inhibit_defer_pop to that value. */
5157 /* The return type is needed to decide how many bytes the function pops.
5158 Signedness plays no role in that, so for simplicity, we pretend it's
5159 always signed. We also assume that the list of arguments passed has
5160 no impact, so we pretend it is unknown. */
5162 emit_call_1 (fun, NULL,
5163 get_identifier (XSTR (orgfun, 0)),
5164 build_function_type (tfom, NULL_TREE),
5165 original_args_size.constant, args_size.constant,
5166 struct_value_size,
5167 targetm.calls.function_arg (args_so_far,
5168 VOIDmode, void_type_node, true),
5169 valreg,
5170 old_inhibit_defer_pop + 1, call_fusage, flags, args_so_far);
5172 if (flag_ipa_ra)
5174 rtx datum = orgfun;
5175 gcc_assert (GET_CODE (datum) == SYMBOL_REF);
5176 rtx_call_insn *last = last_call_insn ();
5177 add_reg_note (last, REG_CALL_DECL, datum);
5180 /* Right-shift returned value if necessary. */
5181 if (!pcc_struct_value
5182 && TYPE_MODE (tfom) != BLKmode
5183 && targetm.calls.return_in_msb (tfom))
5185 shift_return_value (TYPE_MODE (tfom), false, valreg);
5186 valreg = gen_rtx_REG (TYPE_MODE (tfom), REGNO (valreg));
5189 targetm.calls.end_call_args ();
5191 /* For calls to `setjmp', etc., inform function.c:setjmp_warnings
5192 that it should complain if nonvolatile values are live. For
5193 functions that cannot return, inform flow that control does not
5194 fall through. */
5195 if (flags & ECF_NORETURN)
5197 /* The barrier note must be emitted
5198 immediately after the CALL_INSN. Some ports emit more than
5199 just a CALL_INSN above, so we must search for it here. */
5200 rtx_insn *last = get_last_insn ();
5201 while (!CALL_P (last))
5203 last = PREV_INSN (last);
5204 /* There was no CALL_INSN? */
5205 gcc_assert (last != before_call);
5208 emit_barrier_after (last);
5211 /* Consider that "regular" libcalls, i.e. all of them except for LCT_THROW
5212 and LCT_RETURNS_TWICE, cannot perform non-local gotos. */
5213 if (flags & ECF_NOTHROW)
5215 rtx_insn *last = get_last_insn ();
5216 while (!CALL_P (last))
5218 last = PREV_INSN (last);
5219 /* There was no CALL_INSN? */
5220 gcc_assert (last != before_call);
5223 make_reg_eh_region_note_nothrow_nononlocal (last);
5226 /* Now restore inhibit_defer_pop to its actual original value. */
5227 OK_DEFER_POP;
5229 pop_temp_slots ();
5231 /* Copy the value to the right place. */
5232 if (outmode != VOIDmode && retval)
5234 if (mem_value)
5236 if (value == 0)
5237 value = mem_value;
5238 if (value != mem_value)
5239 emit_move_insn (value, mem_value);
5241 else if (GET_CODE (valreg) == PARALLEL)
5243 if (value == 0)
5244 value = gen_reg_rtx (outmode);
5245 emit_group_store (value, valreg, NULL_TREE, GET_MODE_SIZE (outmode));
5247 else
5249 /* Convert to the proper mode if a promotion has been active. */
5250 if (GET_MODE (valreg) != outmode)
5252 int unsignedp = TYPE_UNSIGNED (tfom);
5254 gcc_assert (promote_function_mode (tfom, outmode, &unsignedp,
5255 fndecl ? TREE_TYPE (fndecl) : fntype, 1)
5256 == GET_MODE (valreg));
5257 valreg = convert_modes (outmode, GET_MODE (valreg), valreg, 0);
5260 if (value != 0)
5261 emit_move_insn (value, valreg);
5262 else
5263 value = valreg;
5267 if (ACCUMULATE_OUTGOING_ARGS)
5269 #ifdef REG_PARM_STACK_SPACE
5270 if (save_area)
5271 restore_fixed_argument_area (save_area, argblock,
5272 high_to_save, low_to_save);
5273 #endif
5275 /* If we saved any argument areas, restore them. */
5276 for (count = 0; count < nargs; count++)
5277 if (argvec[count].save_area)
5279 machine_mode save_mode = GET_MODE (argvec[count].save_area);
5280 rtx adr = plus_constant (Pmode, argblock,
5281 argvec[count].locate.offset.constant);
5282 rtx stack_area = gen_rtx_MEM (save_mode,
5283 memory_address (save_mode, adr));
5285 if (save_mode == BLKmode)
5286 emit_block_move (stack_area,
5287 validize_mem
5288 (copy_rtx (argvec[count].save_area)),
5289 GEN_INT (argvec[count].locate.size.constant),
5290 BLOCK_OP_CALL_PARM);
5291 else
5292 emit_move_insn (stack_area, argvec[count].save_area);
5295 highest_outgoing_arg_in_use = initial_highest_arg_in_use;
5296 stack_usage_map = initial_stack_usage_map;
5299 free (stack_usage_map_buf);
5301 return value;
5306 /* Store pointer bounds argument ARG into Bounds Table entry
5307 associated with PARM. */
5308 static void
5309 store_bounds (struct arg_data *arg, struct arg_data *parm)
5311 rtx slot = NULL, ptr = NULL, addr = NULL;

  /* We may pass bounds not associated with any pointer.  */
  if (!parm)
    {
      gcc_assert (arg->special_slot);
      slot = arg->special_slot;
      ptr = const0_rtx;
    }
  /* Find pointer associated with bounds and where it is
     passed.  */
  else
    {
      if (!parm->reg)
	{
	  gcc_assert (!arg->special_slot);

	  addr = adjust_address (parm->stack, Pmode, arg->pointer_offset);
	}
      else if (REG_P (parm->reg))
	{
	  gcc_assert (arg->special_slot);
	  slot = arg->special_slot;

	  if (MEM_P (parm->value))
	    addr = adjust_address (parm->value, Pmode, arg->pointer_offset);
	  else if (REG_P (parm->value))
	    ptr = gen_rtx_SUBREG (Pmode, parm->value, arg->pointer_offset);
	  else
	    {
	      gcc_assert (!arg->pointer_offset);
	      ptr = parm->value;
	    }
	}
      else
	{
	  gcc_assert (GET_CODE (parm->reg) == PARALLEL);

	  gcc_assert (arg->special_slot);
	  slot = arg->special_slot;

	  if (parm->parallel_value)
	    ptr = chkp_get_value_with_offs (parm->parallel_value,
					    GEN_INT (arg->pointer_offset));
	  else
	    gcc_unreachable ();
	}
    }

  /* Expand bounds.  */
  if (!arg->value)
    arg->value = expand_normal (arg->tree_value);

  targetm.calls.store_bounds_for_arg (ptr, addr, arg->value, slot);
}

/* Store a single argument for a function call
   into the register or memory area where it must be passed.
   *ARG describes the argument value and where to pass it.

   ARGBLOCK is the address of the stack-block for all the arguments,
   or 0 on a machine where arguments are pushed individually.

   MAY_BE_ALLOCA nonzero says this could be a call to `alloca',
   so we must be careful about how the stack is used.

   VARIABLE_SIZE nonzero says that this was a variable-sized outgoing
   argument stack.  This is used when ACCUMULATE_OUTGOING_ARGS is set,
   to indicate that we need not worry about saving and restoring the
   stack.

   FNDECL is the declaration of the function we are calling.

   Return nonzero if this arg should cause sibcall failure,
   zero otherwise.  */

static int
store_one_arg (struct arg_data *arg, rtx argblock, int flags,
	       int variable_size ATTRIBUTE_UNUSED, int reg_parm_stack_space)
{
  tree pval = arg->tree_value;
  rtx reg = 0;
  int partial = 0;
  int used = 0;
  int i, lower_bound = 0, upper_bound = 0;
  int sibcall_failure = 0;

  if (TREE_CODE (pval) == ERROR_MARK)
    return 1;

  /* Push a new temporary level for any temporaries we make for
     this argument.  */
  push_temp_slots ();

  if (ACCUMULATE_OUTGOING_ARGS && !(flags & ECF_SIBCALL))
    {
      /* If this is being stored into a pre-allocated, fixed-size stack
	 area, save any previous data at that location.  */
      if (argblock && ! variable_size && arg->stack)
	{
	  if (ARGS_GROW_DOWNWARD)
	    {
	      /* stack_slot is negative, but we want to index stack_usage_map
		 with positive values.  */
	      if (GET_CODE (XEXP (arg->stack_slot, 0)) == PLUS)
		upper_bound = -INTVAL (XEXP (XEXP (arg->stack_slot, 0), 1)) + 1;
	      else
		upper_bound = 0;

	      lower_bound = upper_bound - arg->locate.size.constant;
	    }
	  else
	    {
	      if (GET_CODE (XEXP (arg->stack_slot, 0)) == PLUS)
		lower_bound = INTVAL (XEXP (XEXP (arg->stack_slot, 0), 1));
	      else
		lower_bound = 0;

	      upper_bound = lower_bound + arg->locate.size.constant;
	    }

	  i = lower_bound;
	  /* Don't worry about things in the fixed argument area;
	     it has already been saved.  */
	  if (i < reg_parm_stack_space)
	    i = reg_parm_stack_space;
	  while (i < upper_bound && stack_usage_map[i] == 0)
	    i++;
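
	  /* If the scan stopped before UPPER_BOUND, some byte of the
	     slot is already occupied by earlier argument data, so the
	     current contents must be saved first.  */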
	  if (i < upper_bound)
	    {
	      /* We need to make a save area.  */
	      unsigned int size = arg->locate.size.constant * BITS_PER_UNIT;
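	      /* Prefer an integer mode wide enough to cover the whole
		 area, so a single move saves it; else_blk () falls back
		 to BLKmode, which forces the block copy below.  */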
	      machine_mode save_mode
		= int_mode_for_size (size, 1).else_blk ();
	      rtx adr = memory_address (save_mode, XEXP (arg->stack_slot, 0));
	      rtx stack_area = gen_rtx_MEM (save_mode, adr);

	      if (save_mode == BLKmode)
		{
		  arg->save_area
		    = assign_temp (TREE_TYPE (arg->tree_value), 1, 1);
		  preserve_temp_slots (arg->save_area);
		  emit_block_move (validize_mem (copy_rtx (arg->save_area)),
				   stack_area,
				   GEN_INT (arg->locate.size.constant),
				   BLOCK_OP_CALL_PARM);
		}
	      else
		{
		  arg->save_area = gen_reg_rtx (save_mode);
		  emit_move_insn (arg->save_area, stack_area);
		}
	    }
	}
    }
  /* If this isn't going to be placed on both the stack and in registers,
     set up the register and number of words.  */
  if (! arg->pass_on_stack)
    {
      if (flags & ECF_SIBCALL)
	reg = arg->tail_call_reg;
      else
	reg = arg->reg;
      partial = arg->partial;
    }

  /* Being passed entirely in a register.  We shouldn't be called in
     this case.  */
  gcc_assert (reg == 0 || partial != 0);

  /* If this arg needs special alignment, don't load the registers
     here.  */
  if (arg->n_aligned_regs != 0)
    reg = 0;

  /* If this is being passed partially in a register, we can't evaluate
     it directly into its stack slot.  Otherwise, we can.  */
  if (arg->value == 0)
    {
      /* stack_arg_under_construction is nonzero if a function argument is
	 being evaluated directly into the outgoing argument list and
	 expand_call must take special action to preserve the argument list
	 if it is called recursively.

	 For scalar function arguments stack_usage_map is sufficient to
	 determine which stack slots must be saved and restored.  Scalar
	 arguments in general have pass_on_stack == 0.

	 If this argument is initialized by a function which takes the
	 address of the argument (a C++ constructor or a C function
	 returning a BLKmode structure), then stack_usage_map is
	 insufficient and expand_call must push the stack around the
	 function call.  Such arguments have pass_on_stack == 1.

	 Note that it is always safe to set stack_arg_under_construction,
	 but this generates suboptimal code if set when not needed.  */

      if (arg->pass_on_stack)
	stack_arg_under_construction++;

      arg->value = expand_expr (pval,
				(partial
				 || TYPE_MODE (TREE_TYPE (pval)) != arg->mode)
				? NULL_RTX : arg->stack,
				VOIDmode, EXPAND_STACK_PARM);

      /* If we are promoting the object (or if for any other reason the
	 mode doesn't agree), convert it now.  */
      if (arg->mode != TYPE_MODE (TREE_TYPE (pval)))
	arg->value = convert_modes (arg->mode, TYPE_MODE (TREE_TYPE (pval)),
				    arg->value, arg->unsignedp);

      if (arg->pass_on_stack)
	stack_arg_under_construction--;
    }

  /* Check for overlap with already clobbered argument area.  */
  if ((flags & ECF_SIBCALL)
      && MEM_P (arg->value)
      && mem_overlaps_already_clobbered_arg_p (XEXP (arg->value, 0),
					       arg->locate.size.constant))
    sibcall_failure = 1;

  /* Don't allow anything left on stack from computation
     of argument to alloca.  */
  if (flags & ECF_MAY_BE_ALLOCA)
    do_pending_stack_adjust ();

  if (arg->value == arg->stack)
    /* If the value is already in the stack slot, we are done.  */
    ;
  else if (arg->mode != BLKmode)
    {
      int size;
      unsigned int parm_align;

      /* Argument is a scalar, not entirely passed in registers.
	 (If part is passed in registers, arg->partial says how much
	 and emit_push_insn will take care of putting it there.)

	 Push it, and if its size is less than the
	 amount of space allocated to it,
	 also bump stack pointer by the additional space.
	 Note that in C the default argument promotions
	 will prevent such mismatches.  */
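
      /* TYPE_EMPTY_P types occupy no argument space at all, so nothing
	 is pushed for them.  */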
      if (TYPE_EMPTY_P (TREE_TYPE (pval)))
	size = 0;
      else
	size = GET_MODE_SIZE (arg->mode);

      /* Compute how much space the push instruction will push.
	 On many machines, pushing a byte will advance the stack
	 pointer by a halfword.  */
#ifdef PUSH_ROUNDING
      size = PUSH_ROUNDING (size);
#endif
      used = size;

      /* Compute how much space the argument should get:
	 round up to a multiple of the alignment for arguments.  */
      if (targetm.calls.function_arg_padding (arg->mode, TREE_TYPE (pval))
	  != PAD_NONE)
	used = (((size + PARM_BOUNDARY / BITS_PER_UNIT - 1)
		 / (PARM_BOUNDARY / BITS_PER_UNIT))
		* (PARM_BOUNDARY / BITS_PER_UNIT));

      /* Compute the alignment of the pushed argument.  */
      parm_align = arg->locate.boundary;
      if (targetm.calls.function_arg_padding (arg->mode, TREE_TYPE (pval))
	  == PAD_DOWNWARD)
	{
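	  /* Padding below the data limits its guaranteed alignment to
	     the lowest set bit of the pad amount (least_bit_hwi),
	     converted from bytes to bits.  */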
	  int pad = used - size;
	  if (pad)
	    {
	      unsigned int pad_align = least_bit_hwi (pad) * BITS_PER_UNIT;
	      parm_align = MIN (parm_align, pad_align);
	    }
	}

      /* This isn't already where we want it on the stack, so put it there.
	 This can either be done with push or copy insns.  */
      if (used
	  && !emit_push_insn (arg->value, arg->mode, TREE_TYPE (pval),
			      NULL_RTX, parm_align, partial, reg, used - size,
			      argblock, ARGS_SIZE_RTX (arg->locate.offset),
			      reg_parm_stack_space,
			      ARGS_SIZE_RTX (arg->locate.alignment_pad), true))
	sibcall_failure = 1;

      /* Unless this is a partially-in-register argument, the argument is now
	 in the stack.  */
      if (partial == 0)
	arg->value = arg->stack;
    }
  else
    {
      /* BLKmode, at least partly to be pushed.  */

      unsigned int parm_align;
      int excess;
      rtx size_rtx;

      /* Pushing a nonscalar.
	 If part is passed in registers, PARTIAL says how much
	 and emit_push_insn will take care of putting it there.  */

      /* Round its size up to a multiple
	 of the allocation unit for arguments.  */

      if (arg->locate.size.var != 0)
	{
	  excess = 0;
	  size_rtx = ARGS_SIZE_RTX (arg->locate.size);
	}
      else
	{
	  /* PUSH_ROUNDING has no effect on us, because emit_push_insn
	     for BLKmode is careful to avoid it.  */
	  excess = (arg->locate.size.constant
		    - arg_int_size_in_bytes (TREE_TYPE (pval))
		    + partial);
	  size_rtx = expand_expr (arg_size_in_bytes (TREE_TYPE (pval)),
				  NULL_RTX, TYPE_MODE (sizetype),
				  EXPAND_NORMAL);
	}

      parm_align = arg->locate.boundary;

      /* When an argument is padded down, the block is aligned to
	 PARM_BOUNDARY, but the actual argument isn't.  */
      if (targetm.calls.function_arg_padding (arg->mode, TREE_TYPE (pval))
	  == PAD_DOWNWARD)
	{
	  if (arg->locate.size.var)
	    parm_align = BITS_PER_UNIT;
	  else if (excess)
	    {
	      unsigned int excess_align
		= least_bit_hwi (excess) * BITS_PER_UNIT;
	      parm_align = MIN (parm_align, excess_align);
	    }
	}

      if ((flags & ECF_SIBCALL) && MEM_P (arg->value))
	{
	  /* emit_push_insn might not work properly if arg->value and
	     argblock + arg->locate.offset areas overlap.  */
	  rtx x = arg->value;
	  int i = 0;

	  if (XEXP (x, 0) == crtl->args.internal_arg_pointer
	      || (GET_CODE (XEXP (x, 0)) == PLUS
		  && XEXP (XEXP (x, 0), 0)
		     == crtl->args.internal_arg_pointer
		  && CONST_INT_P (XEXP (XEXP (x, 0), 1))))
	    {
	      if (XEXP (x, 0) != crtl->args.internal_arg_pointer)
		i = INTVAL (XEXP (XEXP (x, 0), 1));

	      /* arg.locate doesn't contain the pretend_args_size offset,
		 it's part of argblock.  Ensure we don't count it in I.  */
	      if (STACK_GROWS_DOWNWARD)
		i -= crtl->args.pretend_args_size;
	      else
		i += crtl->args.pretend_args_size;

	      /* expand_call should ensure this.  */
	      gcc_assert (!arg->locate.offset.var
			  && arg->locate.size.var == 0
			  && CONST_INT_P (size_rtx));
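
	      /* Compare the incoming area at offset I with the outgoing
		 area at arg->locate.offset.constant; any byte overlap
		 means the push could clobber its own source, so the
		 sibcall must be abandoned.  */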
	      if (arg->locate.offset.constant > i)
		{
		  if (arg->locate.offset.constant < i + INTVAL (size_rtx))
		    sibcall_failure = 1;
		}
	      else if (arg->locate.offset.constant < i)
		{
		  /* Use arg->locate.size.constant instead of size_rtx
		     because we only care about the part of the argument
		     on the stack.  */
		  if (i < (arg->locate.offset.constant
			   + arg->locate.size.constant))
		    sibcall_failure = 1;
		}
	      else
		{
		  /* Even though they appear to be at the same location,
		     if part of the outgoing argument is in registers,
		     they aren't really at the same location.  Check for
		     this by making sure that the incoming size is the
		     same as the outgoing size.  */
		  if (arg->locate.size.constant != INTVAL (size_rtx))
		    sibcall_failure = 1;
		}
	    }
	}

      if (!CONST_INT_P (size_rtx) || INTVAL (size_rtx) != 0)
	emit_push_insn (arg->value, arg->mode, TREE_TYPE (pval), size_rtx,
			parm_align, partial, reg, excess, argblock,
			ARGS_SIZE_RTX (arg->locate.offset),
			reg_parm_stack_space,
			ARGS_SIZE_RTX (arg->locate.alignment_pad), false);

      /* Unless this is a partially-in-register argument, the argument is now
	 in the stack.

	 ??? Unlike the case above, in which we want the actual
	 address of the data, so that we can load it directly into a
	 register, here we want the address of the stack slot, so that
	 it's properly aligned for word-by-word copying or something
	 like that.  It's not clear that this is always correct.  */
      if (partial == 0)
	arg->value = arg->stack_slot;
    }
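
  /* If the argument is spread over multiple non-contiguous registers,
     pull it into pseudos now; the moves into the hard registers are
     emitted together with the call itself.  */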
  if (arg->reg && GET_CODE (arg->reg) == PARALLEL)
    {
      tree type = TREE_TYPE (arg->tree_value);
      arg->parallel_value
	= emit_group_load_into_temps (arg->reg, arg->value, type,
				      int_size_in_bytes (type));
    }

  /* Mark all slots this store used.  */
  if (ACCUMULATE_OUTGOING_ARGS && !(flags & ECF_SIBCALL)
      && argblock && ! variable_size && arg->stack)
    for (i = lower_bound; i < upper_bound; i++)
      stack_usage_map[i] = 1;

  /* Once we have pushed something, pops can't safely
     be deferred during the rest of the arguments.  */
  NO_DEFER_POP;

  /* Free any temporary slots made in processing this argument.  */
  pop_temp_slots ();

  return sibcall_failure;
}

/* Nonzero if we do not know how to pass TYPE solely in registers.  */

bool
must_pass_in_stack_var_size (machine_mode mode ATTRIBUTE_UNUSED,
			     const_tree type)
{
  if (!type)
    return false;

  /* If the type has variable size...  */
  if (TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
    return true;

  /* If the type is marked as addressable (it is required
     to be constructed into the stack)...  */
  if (TREE_ADDRESSABLE (type))
    return true;

  return false;
}

/* Another version of the TARGET_MUST_PASS_IN_STACK hook.  This one
   takes trailing padding of a structure into account.  */
/* ??? Should be able to merge these two by examining BLOCK_REG_PADDING.  */

bool
must_pass_in_stack_var_size_or_pad (machine_mode mode, const_tree type)
{
  if (!type)
    return false;

  /* If the type has variable size...  */
  if (TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
    return true;

  /* If the type is marked as addressable (it is required
     to be constructed into the stack)...  */
  if (TREE_ADDRESSABLE (type))
    return true;

  if (TYPE_EMPTY_P (type))
    return false;

  /* If the padding and mode of the type are such that a copy into
     a register would put it into the wrong part of the register.  */
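  /* For instance (illustrative; the exact sizes are target-dependent):
     with a 4-byte PARM_BOUNDARY, a 6-byte BLKmode structure carries two
     bytes of padding, and if that padding falls at the register's
     significant end, a register copy would misplace the data.  */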
  if (mode == BLKmode
      && int_size_in_bytes (type) % (PARM_BOUNDARY / BITS_PER_UNIT)
      && (targetm.calls.function_arg_padding (mode, type)
	  == (BYTES_BIG_ENDIAN ? PAD_UPWARD : PAD_DOWNWARD)))
    return true;

  return false;
}

/* Tell the garbage collector about GTY markers in this source file.  */
#include "gt-calls.h"