1 /* Convert function calls to rtl insns, for GNU C compiler.
2 Copyright (C) 1989-2020 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
20 #define INCLUDE_STRING
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "backend.h"
25 #include "target.h"
26 #include "rtl.h"
27 #include "tree.h"
28 #include "gimple.h"
29 #include "predict.h"
30 #include "memmodel.h"
31 #include "tm_p.h"
32 #include "stringpool.h"
33 #include "expmed.h"
34 #include "optabs.h"
35 #include "emit-rtl.h"
36 #include "cgraph.h"
37 #include "diagnostic-core.h"
38 #include "fold-const.h"
39 #include "stor-layout.h"
40 #include "varasm.h"
41 #include "internal-fn.h"
42 #include "dojump.h"
43 #include "explow.h"
44 #include "calls.h"
45 #include "expr.h"
46 #include "output.h"
47 #include "langhooks.h"
48 #include "except.h"
49 #include "dbgcnt.h"
50 #include "rtl-iter.h"
51 #include "tree-vrp.h"
52 #include "tree-ssanames.h"
53 #include "tree-ssa-strlen.h"
54 #include "intl.h"
55 #include "stringpool.h"
56 #include "hash-map.h"
57 #include "hash-traits.h"
58 #include "attribs.h"
59 #include "builtins.h"
60 #include "gimple-fold.h"
61 #include "attr-fnspec.h"
62 #include "value-query.h"
64 #include "tree-pretty-print.h"
66 /* Like PREFERRED_STACK_BOUNDARY but in units of bytes, not bits. */
67 #define STACK_BYTES (PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT)
69 /* Data structure and subroutines used within expand_call. */
71 struct arg_data
73 /* Tree node for this argument. */
74 tree tree_value;
75 /* Mode for value; TYPE_MODE unless promoted. */
76 machine_mode mode;
77 /* Current RTL value for argument, or 0 if it isn't precomputed. */
78 rtx value;
79 /* Initially-computed RTL value for argument; only for const functions. */
80 rtx initial_value;
81 /* Register to pass this argument in, 0 if passed on stack, or a
82 PARALLEL if the arg is to be copied into multiple non-contiguous
83 registers. */
84 rtx reg;
85 /* Register to pass this argument in when generating tail call sequence.
86 This is not the same register as for normal calls on machines with
87 register windows. */
88 rtx tail_call_reg;
89 /* If REG is a PARALLEL, this is a copy of VALUE pulled into the correct
90 form for emit_group_move. */
91 rtx parallel_value;
92 /* If REG was promoted from the actual mode of the argument expression,
93 indicates whether the promotion is sign- or zero-extended. */
94 int unsignedp;
95 /* Number of bytes to put in registers. 0 means put the whole arg
96 in registers. Also 0 if not passed in registers. */
97 int partial;
98 /* Nonzero if argument must be passed on stack.
99 Note that some arguments may be passed on the stack
100 even though pass_on_stack is zero, just because FUNCTION_ARG says so.
101 pass_on_stack identifies arguments that *cannot* go in registers. */
102 int pass_on_stack;
103 /* Some fields packaged up for locate_and_pad_parm. */
104 struct locate_and_pad_arg_data locate;
105 /* Location on the stack at which parameter should be stored. The store
106 has already been done if STACK == VALUE. */
107 rtx stack;
108 /* Location on the stack of the start of this argument slot. This can
109 differ from STACK if this arg pads downward. This location is known
110 to be aligned to TARGET_FUNCTION_ARG_BOUNDARY. */
111 rtx stack_slot;
112 /* Place that this stack area has been saved, if needed. */
113 rtx save_area;
114 /* If an argument's alignment does not permit direct copying into registers,
115 copy in smaller-sized pieces into pseudos. These are stored in a
116 block pointed to by this field. The next field says how many
117 word-sized pseudos we made. */
118 rtx *aligned_regs;
119 int n_aligned_regs;
122 /* A vector of one char per byte of stack space. A byte is nonzero if
123 the corresponding stack location has been used.
124 This vector is used to prevent a function call within an argument from
125 clobbering any stack already set up. */
126 static char *stack_usage_map;
128 /* Size of STACK_USAGE_MAP. */
129 static unsigned int highest_outgoing_arg_in_use;
131 /* Assume that any stack location at this byte index is used,
132 without checking the contents of stack_usage_map. */
133 static unsigned HOST_WIDE_INT stack_usage_watermark = HOST_WIDE_INT_M1U;
135 /* A bitmap of virtual-incoming stack space. Bit is set if the corresponding
136 stack location's tail call argument has been already stored into the stack.
137 This bitmap is used to prevent sibling call optimization if the function
138 tries to use the parent's incoming argument slots when they have already
139 been overwritten with tail call arguments. */
140 static sbitmap stored_args_map;
142 /* Assume that any virtual-incoming location at this byte index has been
143 stored, without checking the contents of stored_args_map. */
144 static unsigned HOST_WIDE_INT stored_args_watermark;
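/* Illustration (not part of calls.c): a minimal standalone sketch of the
   map-plus-watermark bookkeeping described above. All names here are
   hypothetical. Stores at constant offsets mark individual bytes in the
   map; a store whose upper bound is not a compile-time constant instead
   lowers the watermark, so every byte at or above it is conservatively
   treated as used. */

#include <stddef.h>

#define MAP_SIZE 256
static unsigned char usage_map[MAP_SIZE];
static size_t watermark = (size_t) -1;	/* No variable-offset store yet.  */

static void
mark_used (size_t lo, size_t hi, int hi_known)
{
  if (hi_known)
    for (size_t i = lo; i < hi && i < MAP_SIZE; i++)
      usage_map[i] = 1;			/* The exact bytes are known.  */
  else if (lo < watermark)
    watermark = lo;			/* Everything above LO may be used.  */
}

static int
maybe_used_p (size_t lo, size_t hi)
{
  if (hi > watermark)			/* Overlaps the conservative region.  */
    return 1;
  for (size_t i = lo; i < hi && i < MAP_SIZE; i++)
    if (usage_map[i])
      return 1;
  return 0;
}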
146 /* stack_arg_under_construction is nonzero when an argument may be
147 initialized with a constructor call (including a C function that
148 returns a BLKmode struct) and expand_call must take special action
149 to make sure the object being constructed does not overlap the
150 argument list for the constructor call. */
151 static int stack_arg_under_construction;
153 static void precompute_register_parameters (int, struct arg_data *, int *);
154 static int store_one_arg (struct arg_data *, rtx, int, int, int);
155 static void store_unaligned_arguments_into_pseudos (struct arg_data *, int);
156 static int finalize_must_preallocate (int, int, struct arg_data *,
157 struct args_size *);
158 static void precompute_arguments (int, struct arg_data *);
159 static void compute_argument_addresses (struct arg_data *, rtx, int);
160 static rtx rtx_for_function_call (tree, tree);
161 static void load_register_parameters (struct arg_data *, int, rtx *, int,
162 int, int *);
163 static int special_function_p (const_tree, int);
164 static int check_sibcall_argument_overlap_1 (rtx);
165 static int check_sibcall_argument_overlap (rtx_insn *, struct arg_data *, int);
167 static tree split_complex_types (tree);
169 #ifdef REG_PARM_STACK_SPACE
170 static rtx save_fixed_argument_area (int, rtx, int *, int *);
171 static void restore_fixed_argument_area (rtx, rtx, int, int);
172 #endif
174 /* Return true if bytes [LOWER_BOUND, UPPER_BOUND) of the outgoing
175 stack region might already be in use. */
177 static bool
178 stack_region_maybe_used_p (poly_uint64 lower_bound, poly_uint64 upper_bound,
179 unsigned int reg_parm_stack_space)
181 unsigned HOST_WIDE_INT const_lower, const_upper;
182 const_lower = constant_lower_bound (lower_bound);
183 if (!upper_bound.is_constant (&const_upper))
184 const_upper = HOST_WIDE_INT_M1U;
186 if (const_upper > stack_usage_watermark)
187 return true;
189 /* Don't worry about things in the fixed argument area;
190 it has already been saved. */
191 const_lower = MAX (const_lower, reg_parm_stack_space);
192 const_upper = MIN (const_upper, highest_outgoing_arg_in_use);
193 for (unsigned HOST_WIDE_INT i = const_lower; i < const_upper; ++i)
194 if (stack_usage_map[i])
195 return true;
196 return false;
199 /* Record that bytes [LOWER_BOUND, UPPER_BOUND) of the outgoing
200 stack region are now in use. */
202 static void
203 mark_stack_region_used (poly_uint64 lower_bound, poly_uint64 upper_bound)
205 unsigned HOST_WIDE_INT const_lower, const_upper;
206 const_lower = constant_lower_bound (lower_bound);
207 if (upper_bound.is_constant (&const_upper))
208 for (unsigned HOST_WIDE_INT i = const_lower; i < const_upper; ++i)
209 stack_usage_map[i] = 1;
210 else
211 stack_usage_watermark = MIN (stack_usage_watermark, const_lower);
214 /* Force FUNEXP into a form suitable for the address of a CALL,
215 and return that as an rtx. Also load the static chain register
216 if FNDECL is a nested function.
218 CALL_FUSAGE points to a variable holding the prospective
219 CALL_INSN_FUNCTION_USAGE information. */
222 prepare_call_address (tree fndecl_or_type, rtx funexp, rtx static_chain_value,
223 rtx *call_fusage, int reg_parm_seen, int flags)
225 /* Make a valid memory address and copy constants through pseudo-regs,
226 but not for a constant address if -fno-function-cse. */
227 if (GET_CODE (funexp) != SYMBOL_REF)
229 /* If it's an indirect call by descriptor, generate code to perform
230 runtime identification of the pointer and load the descriptor. */
231 if ((flags & ECF_BY_DESCRIPTOR) && !flag_trampolines)
233 const int bit_val = targetm.calls.custom_function_descriptors;
234 rtx call_lab = gen_label_rtx ();
236 gcc_assert (fndecl_or_type && TYPE_P (fndecl_or_type));
237 fndecl_or_type
238 = build_decl (UNKNOWN_LOCATION, FUNCTION_DECL, NULL_TREE,
239 fndecl_or_type);
240 DECL_STATIC_CHAIN (fndecl_or_type) = 1;
241 rtx chain = targetm.calls.static_chain (fndecl_or_type, false);
243 if (GET_MODE (funexp) != Pmode)
244 funexp = convert_memory_address (Pmode, funexp);
246 /* Avoid long live ranges around function calls. */
247 funexp = copy_to_mode_reg (Pmode, funexp);
249 if (REG_P (chain))
250 emit_insn (gen_rtx_CLOBBER (VOIDmode, chain));
252 /* Emit the runtime identification pattern. */
253 rtx mask = gen_rtx_AND (Pmode, funexp, GEN_INT (bit_val));
254 emit_cmp_and_jump_insns (mask, const0_rtx, EQ, NULL_RTX, Pmode, 1,
255 call_lab);
257 /* Statically predict the branch as very likely taken. */
258 rtx_insn *insn = get_last_insn ();
259 if (JUMP_P (insn))
260 predict_insn_def (insn, PRED_BUILTIN_EXPECT, TAKEN);
262 /* Load the descriptor. */
263 rtx mem = gen_rtx_MEM (ptr_mode,
264 plus_constant (Pmode, funexp, - bit_val));
265 MEM_NOTRAP_P (mem) = 1;
266 mem = convert_memory_address (Pmode, mem);
267 emit_move_insn (chain, mem);
269 mem = gen_rtx_MEM (ptr_mode,
270 plus_constant (Pmode, funexp,
271 POINTER_SIZE / BITS_PER_UNIT
272 - bit_val));
273 MEM_NOTRAP_P (mem) = 1;
274 mem = convert_memory_address (Pmode, mem);
275 emit_move_insn (funexp, mem);
277 emit_label (call_lab);
279 if (REG_P (chain))
281 use_reg (call_fusage, chain);
282 STATIC_CHAIN_REG_P (chain) = 1;
285 /* Make sure we're not going to be overwritten below. */
286 gcc_assert (!static_chain_value);
289 /* If we are using registers for parameters, force the
290 function address into a register now. */
291 funexp = ((reg_parm_seen
292 && targetm.small_register_classes_for_mode_p (FUNCTION_MODE))
293 ? force_not_mem (memory_address (FUNCTION_MODE, funexp))
294 : memory_address (FUNCTION_MODE, funexp));
296 else
298 /* funexp could be a SYMBOL_REF that represents a function pointer which
299 is of ptr_mode. In this case, it should be converted into address mode
300 to be a valid address for the memory rtx pattern. See PR 64971. */
301 if (GET_MODE (funexp) != Pmode)
302 funexp = convert_memory_address (Pmode, funexp);
304 if (!(flags & ECF_SIBCALL))
306 if (!NO_FUNCTION_CSE && optimize && ! flag_no_function_cse)
307 funexp = force_reg (Pmode, funexp);
311 if (static_chain_value != 0
312 && (TREE_CODE (fndecl_or_type) != FUNCTION_DECL
313 || DECL_STATIC_CHAIN (fndecl_or_type)))
315 rtx chain;
317 chain = targetm.calls.static_chain (fndecl_or_type, false);
318 static_chain_value = convert_memory_address (Pmode, static_chain_value);
320 emit_move_insn (chain, static_chain_value);
321 if (REG_P (chain))
323 use_reg (call_fusage, chain);
324 STATIC_CHAIN_REG_P (chain) = 1;
328 return funexp;
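/* Illustration (not part of calls.c): a GNU C nested function whose body
   refers to its enclosing function's local. Calling it goes through the
   DECL_STATIC_CHAIN path above: the static chain register is loaded with
   a pointer to OUTER's frame before the call is emitted. */

int
outer (int x)
{
  /* INNER reads X from OUTER's frame via the static chain.  */
  int inner (int y) { return x + y; }

  return inner (1) + inner (2);
}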
331 /* Generate instructions to call function FUNEXP,
332 and optionally pop the results.
333 The CALL_INSN is the first insn generated.
335 FNDECL is the declaration node of the function. This is given to the
336 hook TARGET_RETURN_POPS_ARGS to determine whether this function pops
337 its own args.
339 FUNTYPE is the data type of the function. This is given to the hook
340 TARGET_RETURN_POPS_ARGS to determine whether this function pops its
341 own args. We used to allow an identifier for library functions, but
342 that doesn't work when the return type is an aggregate type and the
343 calling convention says that the pointer to this aggregate is to be
344 popped by the callee.
346 STACK_SIZE is the number of bytes of arguments on the stack,
347 ROUNDED_STACK_SIZE is that number rounded up to
348 PREFERRED_STACK_BOUNDARY; zero if the size is variable. This is
349 both to put into the call insn and to generate explicit popping
350 code if necessary.
352 STRUCT_VALUE_SIZE is the number of bytes wanted in a structure value.
353 It is zero if this call doesn't want a structure value.
355 NEXT_ARG_REG is the rtx that results from executing
356 targetm.calls.function_arg (&args_so_far,
357 function_arg_info::end_marker ());
358 just after all the args have had their registers assigned.
359 This could be whatever you like, but normally it is the first
360 arg-register beyond those used for args in this call,
361 or 0 if all the arg-registers are used in this call.
362 It is passed on to `gen_call' so you can put this info in the call insn.
364 VALREG is a hard register in which a value is returned,
365 or 0 if the call does not return a value.
367 OLD_INHIBIT_DEFER_POP is the value that `inhibit_defer_pop' had before
368 the args to this call were processed.
369 We restore `inhibit_defer_pop' to that value.
371 CALL_FUSAGE is either empty or an EXPR_LIST of USE expressions that
372 denote registers used by the called function. */
374 static void
375 emit_call_1 (rtx funexp, tree fntree ATTRIBUTE_UNUSED, tree fndecl ATTRIBUTE_UNUSED,
376 tree funtype ATTRIBUTE_UNUSED,
377 poly_int64 stack_size ATTRIBUTE_UNUSED,
378 poly_int64 rounded_stack_size,
379 poly_int64 struct_value_size ATTRIBUTE_UNUSED,
380 rtx next_arg_reg ATTRIBUTE_UNUSED, rtx valreg,
381 int old_inhibit_defer_pop, rtx call_fusage, int ecf_flags,
382 cumulative_args_t args_so_far ATTRIBUTE_UNUSED)
384 rtx rounded_stack_size_rtx = gen_int_mode (rounded_stack_size, Pmode);
385 rtx call, funmem, pat;
386 int already_popped = 0;
387 poly_int64 n_popped = 0;
389 /* Sibling call patterns never pop arguments (no sibcall(_value)_pop
390 patterns exist). Any popping that the callee does on return will
391 be from our caller's frame rather than ours. */
392 if (!(ecf_flags & ECF_SIBCALL))
394 n_popped += targetm.calls.return_pops_args (fndecl, funtype, stack_size);
396 #ifdef CALL_POPS_ARGS
397 n_popped += CALL_POPS_ARGS (*get_cumulative_args (args_so_far));
398 #endif
401 /* Ensure address is valid. SYMBOL_REF is already valid, so no need,
402 and we don't want to load it into a register as an optimization,
403 because prepare_call_address already did it if it should be done. */
404 if (GET_CODE (funexp) != SYMBOL_REF)
405 funexp = memory_address (FUNCTION_MODE, funexp);
407 funmem = gen_rtx_MEM (FUNCTION_MODE, funexp);
408 if (fndecl && TREE_CODE (fndecl) == FUNCTION_DECL)
410 tree t = fndecl;
412 /* Although a built-in FUNCTION_DECL and its non-__builtin
413 counterpart compare equal and get a shared mem_attrs, they
414 produce different dump output in compare-debug compilations,
415 if an entry gets garbage collected in one compilation, then
416 adds a different (but equivalent) entry, while the other
417 doesn't run the garbage collector at the same spot and then
418 shares the mem_attr with the equivalent entry. */
419 if (DECL_BUILT_IN_CLASS (t) == BUILT_IN_NORMAL)
421 tree t2 = builtin_decl_explicit (DECL_FUNCTION_CODE (t));
422 if (t2)
423 t = t2;
426 set_mem_expr (funmem, t);
428 else if (fntree)
429 set_mem_expr (funmem, build_simple_mem_ref (CALL_EXPR_FN (fntree)));
431 if (ecf_flags & ECF_SIBCALL)
433 if (valreg)
434 pat = targetm.gen_sibcall_value (valreg, funmem,
435 rounded_stack_size_rtx,
436 next_arg_reg, NULL_RTX);
437 else
438 pat = targetm.gen_sibcall (funmem, rounded_stack_size_rtx,
439 next_arg_reg,
440 gen_int_mode (struct_value_size, Pmode));
442 /* If the target has "call" or "call_value" insns, then prefer them
443 if no arguments are actually popped. If the target does not have
444 "call" or "call_value" insns, then we must use the popping versions
445 even if the call has no arguments to pop. */
446 else if (maybe_ne (n_popped, 0)
447 || !(valreg
448 ? targetm.have_call_value ()
449 : targetm.have_call ()))
451 rtx n_pop = gen_int_mode (n_popped, Pmode);
453 /* If this subroutine pops its own args, record that in the call insn
454 if possible, for the sake of frame pointer elimination. */
456 if (valreg)
457 pat = targetm.gen_call_value_pop (valreg, funmem,
458 rounded_stack_size_rtx,
459 next_arg_reg, n_pop);
460 else
461 pat = targetm.gen_call_pop (funmem, rounded_stack_size_rtx,
462 next_arg_reg, n_pop);
464 already_popped = 1;
466 else
468 if (valreg)
469 pat = targetm.gen_call_value (valreg, funmem, rounded_stack_size_rtx,
470 next_arg_reg, NULL_RTX);
471 else
472 pat = targetm.gen_call (funmem, rounded_stack_size_rtx, next_arg_reg,
473 gen_int_mode (struct_value_size, Pmode));
475 emit_insn (pat);
477 /* Find the call we just emitted. */
478 rtx_call_insn *call_insn = last_call_insn ();
480 /* Some targets create a fresh MEM instead of reusing the one provided
481 above. Set its MEM_EXPR. */
482 call = get_call_rtx_from (call_insn);
483 if (call
484 && MEM_EXPR (XEXP (call, 0)) == NULL_TREE
485 && MEM_EXPR (funmem) != NULL_TREE)
486 set_mem_expr (XEXP (call, 0), MEM_EXPR (funmem));
488 /* Put the register usage information there. */
489 add_function_usage_to (call_insn, call_fusage);
491 /* If this is a const call, then set the insn's unchanging bit. */
492 if (ecf_flags & ECF_CONST)
493 RTL_CONST_CALL_P (call_insn) = 1;
495 /* If this is a pure call, then set the insn's unchanging bit. */
496 if (ecf_flags & ECF_PURE)
497 RTL_PURE_CALL_P (call_insn) = 1;
499 /* If this is a looping const or pure call, then set the corresponding bit. */
500 if (ecf_flags & ECF_LOOPING_CONST_OR_PURE)
501 RTL_LOOPING_CONST_OR_PURE_CALL_P (call_insn) = 1;
503 /* Create a nothrow REG_EH_REGION note, if needed. */
504 make_reg_eh_region_note (call_insn, ecf_flags, 0);
506 if (ecf_flags & ECF_NORETURN)
507 add_reg_note (call_insn, REG_NORETURN, const0_rtx);
509 if (ecf_flags & ECF_RETURNS_TWICE)
511 add_reg_note (call_insn, REG_SETJMP, const0_rtx);
512 cfun->calls_setjmp = 1;
515 SIBLING_CALL_P (call_insn) = ((ecf_flags & ECF_SIBCALL) != 0);
517 /* Restore this now, so that we do defer pops for this call's args
518 if the context of the call as a whole permits. */
519 inhibit_defer_pop = old_inhibit_defer_pop;
521 if (maybe_ne (n_popped, 0))
523 if (!already_popped)
524 CALL_INSN_FUNCTION_USAGE (call_insn)
525 = gen_rtx_EXPR_LIST (VOIDmode,
526 gen_rtx_CLOBBER (VOIDmode, stack_pointer_rtx),
527 CALL_INSN_FUNCTION_USAGE (call_insn));
528 rounded_stack_size -= n_popped;
529 rounded_stack_size_rtx = gen_int_mode (rounded_stack_size, Pmode);
530 stack_pointer_delta -= n_popped;
532 add_args_size_note (call_insn, stack_pointer_delta);
534 /* If popping is needed, stack realignment must use DRAP. */
535 if (SUPPORTS_STACK_ALIGNMENT)
536 crtl->need_drap = true;
538 /* For noreturn calls when not accumulating outgoing args force
539 REG_ARGS_SIZE note to prevent crossjumping of calls with different
540 args sizes. */
541 else if (!ACCUMULATE_OUTGOING_ARGS && (ecf_flags & ECF_NORETURN) != 0)
542 add_args_size_note (call_insn, stack_pointer_delta);
544 if (!ACCUMULATE_OUTGOING_ARGS)
546 /* If returning from the subroutine does not automatically pop the args,
547 we need an instruction to pop them sooner or later.
548 Perhaps do it now; perhaps just record how much space to pop later.
550 If returning from the subroutine does pop the args, indicate that the
551 stack pointer will be changed. */
553 if (maybe_ne (rounded_stack_size, 0))
555 if (ecf_flags & ECF_NORETURN)
556 /* Just pretend we did the pop. */
557 stack_pointer_delta -= rounded_stack_size;
558 else if (flag_defer_pop && inhibit_defer_pop == 0
559 && ! (ecf_flags & (ECF_CONST | ECF_PURE)))
560 pending_stack_adjust += rounded_stack_size;
561 else
562 adjust_stack (rounded_stack_size_rtx);
565 /* When we accumulate outgoing args, we must avoid any stack manipulations.
566 Restore the stack pointer to its original value now. Usually
567 ACCUMULATE_OUTGOING_ARGS targets don't get here, but there are exceptions.
568 On i386 ACCUMULATE_OUTGOING_ARGS can be enabled on demand, and
569 popping variants of functions exist as well.
571 ??? We may optimize similarly to defer_pop above, but it is
572 probably not worthwhile.
574 ??? It will be worthwhile to enable combine_stack_adjustments even for
575 such machines. */
576 else if (maybe_ne (n_popped, 0))
577 anti_adjust_stack (gen_int_mode (n_popped, Pmode));
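/* Illustration (not part of calls.c, target-specific): on 32-bit x86 a
   stdcall function pops its own stack arguments, so
   targetm.calls.return_pops_args yields a nonzero N_POPPED for the call
   below and the *_pop call patterns are used; the caller then emits no
   stack adjustment of its own. */

int __attribute__ ((stdcall)) callee (int a, int b);

int
caller (void)
{
  /* The 8 bytes pushed for A and B are popped by CALLEE on return.  */
  return callee (1, 2);
}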
580 /* Determine if the function identified by FNDECL is one with
581 special properties we wish to know about. Modify FLAGS accordingly.
583 For example, if the function might return more than one time (setjmp), then
584 set ECF_RETURNS_TWICE.
586 Set ECF_MAY_BE_ALLOCA for any memory allocation function that might allocate
587 space from the stack such as alloca. */
589 static int
590 special_function_p (const_tree fndecl, int flags)
592 tree name_decl = DECL_NAME (fndecl);
594 if (maybe_special_function_p (fndecl)
595 && IDENTIFIER_LENGTH (name_decl) <= 11)
597 const char *name = IDENTIFIER_POINTER (name_decl);
598 const char *tname = name;
600 /* We assume that alloca will always be called by name. It
601 makes no sense to pass it as a pointer-to-function to
602 anything that does not understand its behavior. */
603 if (IDENTIFIER_LENGTH (name_decl) == 6
604 && name[0] == 'a'
605 && ! strcmp (name, "alloca"))
606 flags |= ECF_MAY_BE_ALLOCA;
608 /* Disregard prefix _ or __. */
609 if (name[0] == '_')
611 if (name[1] == '_')
612 tname += 2;
613 else
614 tname += 1;
617 /* ECF_RETURNS_TWICE is safe even for -ffreestanding. */
618 if (! strcmp (tname, "setjmp")
619 || ! strcmp (tname, "sigsetjmp")
620 || ! strcmp (name, "savectx")
621 || ! strcmp (name, "vfork")
622 || ! strcmp (name, "getcontext"))
623 flags |= ECF_RETURNS_TWICE;
626 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
627 && ALLOCA_FUNCTION_CODE_P (DECL_FUNCTION_CODE (fndecl)))
628 flags |= ECF_MAY_BE_ALLOCA;
630 return flags;
633 /* Return fnspec for DECL. */
635 static attr_fnspec
636 decl_fnspec (tree fndecl)
638 tree attr;
639 tree type = TREE_TYPE (fndecl);
640 if (type)
642 attr = lookup_attribute ("fn spec", TYPE_ATTRIBUTES (type));
643 if (attr)
645 return TREE_VALUE (TREE_VALUE (attr));
648 if (fndecl_built_in_p (fndecl, BUILT_IN_NORMAL))
649 return builtin_fnspec (fndecl);
650 return "";
653 /* Similar to special_function_p; return a set of ERF_ flags for the
654 function FNDECL. */
655 static int
656 decl_return_flags (tree fndecl)
658 attr_fnspec fnspec = decl_fnspec (fndecl);
660 unsigned int arg;
661 if (fnspec.returns_arg (&arg))
662 return ERF_RETURNS_ARG | arg;
664 if (fnspec.returns_noalias_p ())
665 return ERF_NOALIAS;
666 return 0;
669 /* Return nonzero when FNDECL represents a call to setjmp. */
672 setjmp_call_p (const_tree fndecl)
674 if (DECL_IS_RETURNS_TWICE (fndecl))
675 return ECF_RETURNS_TWICE;
676 return special_function_p (fndecl, 0) & ECF_RETURNS_TWICE;
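/* Illustration (not part of calls.c): two ways a function ends up with
   ECF_RETURNS_TWICE. The first is the explicit returns_twice attribute
   (DECL_IS_RETURNS_TWICE); the others are recognized purely by name in
   special_function_p, which also disregards one or two leading
   underscores. */

int my_setjmp (void *env) __attribute__ ((returns_twice));
int setjmp (void *env);				/* Matched by name alone.  */
int __sigsetjmp (void *env, int savemask);	/* Ditto, after "__".  */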
680 /* Return true if STMT may be an alloca call. */
682 bool
683 gimple_maybe_alloca_call_p (const gimple *stmt)
685 tree fndecl;
687 if (!is_gimple_call (stmt))
688 return false;
690 fndecl = gimple_call_fndecl (stmt);
691 if (fndecl && (special_function_p (fndecl, 0) & ECF_MAY_BE_ALLOCA))
692 return true;
694 return false;
697 /* Return true if STMT is a builtin alloca call. */
699 bool
700 gimple_alloca_call_p (const gimple *stmt)
702 tree fndecl;
704 if (!is_gimple_call (stmt))
705 return false;
707 fndecl = gimple_call_fndecl (stmt);
708 if (fndecl && fndecl_built_in_p (fndecl, BUILT_IN_NORMAL))
709 switch (DECL_FUNCTION_CODE (fndecl))
711 CASE_BUILT_IN_ALLOCA:
712 return gimple_call_num_args (stmt) > 0;
713 default:
714 break;
717 return false;
720 /* Return true when EXP contains a builtin alloca call. */
722 bool
723 alloca_call_p (const_tree exp)
725 tree fndecl;
726 if (TREE_CODE (exp) == CALL_EXPR
727 && (fndecl = get_callee_fndecl (exp))
728 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
729 switch (DECL_FUNCTION_CODE (fndecl))
731 CASE_BUILT_IN_ALLOCA:
732 return true;
733 default:
734 break;
737 return false;
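/* Illustration (not part of calls.c): calls classified by the predicates
   above. The by-name match yields only ECF_MAY_BE_ALLOCA ("may be"),
   while the builtin form is a definite alloca call. */

void *alloca (unsigned long);		/* By-name: ECF_MAY_BE_ALLOCA.  */

void
use_frames (unsigned long n)
{
  char *p = alloca (n);			/* gimple_maybe_alloca_call_p.  */
  char *q = __builtin_alloca (n);	/* gimple_alloca_call_p as well.  */
  p[0] = q[0] = 0;
}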
740 /* Return TRUE if FNDECL is either a TM builtin or a TM cloned
741 function. Return FALSE otherwise. */
743 static bool
744 is_tm_builtin (const_tree fndecl)
746 if (fndecl == NULL)
747 return false;
749 if (decl_is_tm_clone (fndecl))
750 return true;
752 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
754 switch (DECL_FUNCTION_CODE (fndecl))
756 case BUILT_IN_TM_COMMIT:
757 case BUILT_IN_TM_COMMIT_EH:
758 case BUILT_IN_TM_ABORT:
759 case BUILT_IN_TM_IRREVOCABLE:
760 case BUILT_IN_TM_GETTMCLONE_IRR:
761 case BUILT_IN_TM_MEMCPY:
762 case BUILT_IN_TM_MEMMOVE:
763 case BUILT_IN_TM_MEMSET:
764 CASE_BUILT_IN_TM_STORE (1):
765 CASE_BUILT_IN_TM_STORE (2):
766 CASE_BUILT_IN_TM_STORE (4):
767 CASE_BUILT_IN_TM_STORE (8):
768 CASE_BUILT_IN_TM_STORE (FLOAT):
769 CASE_BUILT_IN_TM_STORE (DOUBLE):
770 CASE_BUILT_IN_TM_STORE (LDOUBLE):
771 CASE_BUILT_IN_TM_STORE (M64):
772 CASE_BUILT_IN_TM_STORE (M128):
773 CASE_BUILT_IN_TM_STORE (M256):
774 CASE_BUILT_IN_TM_LOAD (1):
775 CASE_BUILT_IN_TM_LOAD (2):
776 CASE_BUILT_IN_TM_LOAD (4):
777 CASE_BUILT_IN_TM_LOAD (8):
778 CASE_BUILT_IN_TM_LOAD (FLOAT):
779 CASE_BUILT_IN_TM_LOAD (DOUBLE):
780 CASE_BUILT_IN_TM_LOAD (LDOUBLE):
781 CASE_BUILT_IN_TM_LOAD (M64):
782 CASE_BUILT_IN_TM_LOAD (M128):
783 CASE_BUILT_IN_TM_LOAD (M256):
784 case BUILT_IN_TM_LOG:
785 case BUILT_IN_TM_LOG_1:
786 case BUILT_IN_TM_LOG_2:
787 case BUILT_IN_TM_LOG_4:
788 case BUILT_IN_TM_LOG_8:
789 case BUILT_IN_TM_LOG_FLOAT:
790 case BUILT_IN_TM_LOG_DOUBLE:
791 case BUILT_IN_TM_LOG_LDOUBLE:
792 case BUILT_IN_TM_LOG_M64:
793 case BUILT_IN_TM_LOG_M128:
794 case BUILT_IN_TM_LOG_M256:
795 return true;
796 default:
797 break;
800 return false;
803 /* Detect flags (function attributes) from the function decl or type node. */
806 flags_from_decl_or_type (const_tree exp)
808 int flags = 0;
810 if (DECL_P (exp))
812 /* The function exp may have the `malloc' attribute. */
813 if (DECL_IS_MALLOC (exp))
814 flags |= ECF_MALLOC;
816 /* The function exp may have the `returns_twice' attribute. */
817 if (DECL_IS_RETURNS_TWICE (exp))
818 flags |= ECF_RETURNS_TWICE;
820 /* Process the pure and const attributes. */
821 if (TREE_READONLY (exp))
822 flags |= ECF_CONST;
823 if (DECL_PURE_P (exp))
824 flags |= ECF_PURE;
825 if (DECL_LOOPING_CONST_OR_PURE_P (exp))
826 flags |= ECF_LOOPING_CONST_OR_PURE;
828 if (DECL_IS_NOVOPS (exp))
829 flags |= ECF_NOVOPS;
830 if (lookup_attribute ("leaf", DECL_ATTRIBUTES (exp)))
831 flags |= ECF_LEAF;
832 if (lookup_attribute ("cold", DECL_ATTRIBUTES (exp)))
833 flags |= ECF_COLD;
835 if (TREE_NOTHROW (exp))
836 flags |= ECF_NOTHROW;
838 if (flag_tm)
840 if (is_tm_builtin (exp))
841 flags |= ECF_TM_BUILTIN;
842 else if ((flags & (ECF_CONST|ECF_NOVOPS)) != 0
843 || lookup_attribute ("transaction_pure",
844 TYPE_ATTRIBUTES (TREE_TYPE (exp))))
845 flags |= ECF_TM_PURE;
848 flags = special_function_p (exp, flags);
850 else if (TYPE_P (exp))
852 if (TYPE_READONLY (exp))
853 flags |= ECF_CONST;
855 if (flag_tm
856 && ((flags & ECF_CONST) != 0
857 || lookup_attribute ("transaction_pure", TYPE_ATTRIBUTES (exp))))
858 flags |= ECF_TM_PURE;
860 else
861 gcc_unreachable ();
863 if (TREE_THIS_VOLATILE (exp))
865 flags |= ECF_NORETURN;
866 if (flags & (ECF_CONST|ECF_PURE))
867 flags |= ECF_LOOPING_CONST_OR_PURE;
870 return flags;
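/* Illustration (not part of calls.c): common declaration attributes and
   the ECF_* bits derived from them above. */

int isqrt (int) __attribute__ ((const));	/* TREE_READONLY -> ECF_CONST.  */
int count (const char *) __attribute__ ((pure)); /* DECL_PURE_P -> ECF_PURE.  */
void *grab (unsigned long) __attribute__ ((malloc)); /* DECL_IS_MALLOC -> ECF_MALLOC.  */
void die (void) __attribute__ ((noreturn));	/* TREE_THIS_VOLATILE -> ECF_NORETURN.  */
int peek (int *) __attribute__ ((leaf, cold));	/* ECF_LEAF | ECF_COLD.  */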
873 /* Detect flags from a CALL_EXPR. */
876 call_expr_flags (const_tree t)
878 int flags;
879 tree decl = get_callee_fndecl (t);
881 if (decl)
882 flags = flags_from_decl_or_type (decl);
883 else if (CALL_EXPR_FN (t) == NULL_TREE)
884 flags = internal_fn_flags (CALL_EXPR_IFN (t));
885 else
887 tree type = TREE_TYPE (CALL_EXPR_FN (t));
888 if (type && TREE_CODE (type) == POINTER_TYPE)
889 flags = flags_from_decl_or_type (TREE_TYPE (type));
890 else
891 flags = 0;
892 if (CALL_EXPR_BY_DESCRIPTOR (t))
893 flags |= ECF_BY_DESCRIPTOR;
896 return flags;
899 /* Return true if ARG should be passed by invisible reference. */
901 bool
902 pass_by_reference (CUMULATIVE_ARGS *ca, function_arg_info arg)
904 if (tree type = arg.type)
906 /* If this type contains non-trivial constructors, then it is
907 forbidden for the middle-end to create any new copies. */
908 if (TREE_ADDRESSABLE (type))
909 return true;
911 /* GCC post 3.4 passes *all* variable sized types by reference. */
912 if (!TYPE_SIZE (type) || !poly_int_tree_p (TYPE_SIZE (type)))
913 return true;
915 /* If a record type should be passed the same as its first (and only)
916 member, use the type and mode of that member. */
917 if (TREE_CODE (type) == RECORD_TYPE && TYPE_TRANSPARENT_AGGR (type))
919 arg.type = TREE_TYPE (first_field (type));
920 arg.mode = TYPE_MODE (arg.type);
924 return targetm.calls.pass_by_reference (pack_cumulative_args (ca), arg);
927 /* Return true if TYPE should be passed by reference when passed to
928 the "..." arguments of a function. */
930 bool
931 pass_va_arg_by_reference (tree type)
933 return pass_by_reference (NULL, function_arg_info (type, /*named=*/false));
936 /* Decide whether ARG, which occurs in the state described by CA,
937 should be passed by reference. Return true if so and update
938 ARG accordingly. */
940 bool
941 apply_pass_by_reference_rules (CUMULATIVE_ARGS *ca, function_arg_info &arg)
943 if (pass_by_reference (ca, arg))
945 arg.type = build_pointer_type (arg.type);
946 arg.mode = TYPE_MODE (arg.type);
947 arg.pass_by_reference = true;
948 return true;
950 return false;
953 /* Return true if ARG, which is passed by reference, should be callee
954 copied instead of caller copied. */
956 bool
957 reference_callee_copied (CUMULATIVE_ARGS *ca, const function_arg_info &arg)
959 if (arg.type && TREE_ADDRESSABLE (arg.type))
960 return false;
961 return targetm.calls.callee_copies (pack_cumulative_args (ca), arg);
965 /* Precompute all register parameters as described by ARGS, storing values
966 into fields within the ARGS array.
968 NUM_ACTUALS indicates the total number of elements in the ARGS array.
970 Set REG_PARM_SEEN if we encounter a register parameter. */
972 static void
973 precompute_register_parameters (int num_actuals, struct arg_data *args,
974 int *reg_parm_seen)
976 int i;
978 *reg_parm_seen = 0;
980 for (i = 0; i < num_actuals; i++)
981 if (args[i].reg != 0 && ! args[i].pass_on_stack)
983 *reg_parm_seen = 1;
985 if (args[i].value == 0)
987 push_temp_slots ();
988 args[i].value = expand_normal (args[i].tree_value);
989 preserve_temp_slots (args[i].value);
990 pop_temp_slots ();
993 /* If we are to promote the function arg to a wider mode,
994 do it now. */
996 if (args[i].mode != TYPE_MODE (TREE_TYPE (args[i].tree_value)))
997 args[i].value
998 = convert_modes (args[i].mode,
999 TYPE_MODE (TREE_TYPE (args[i].tree_value)),
1000 args[i].value, args[i].unsignedp);
1002 /* If the value is a non-legitimate constant, force it into a
1003 pseudo now. TLS symbols sometimes need a call to resolve. */
1004 if (CONSTANT_P (args[i].value)
1005 && !targetm.legitimate_constant_p (args[i].mode, args[i].value))
1006 args[i].value = force_reg (args[i].mode, args[i].value);
1008 /* If we're going to have to load the value by parts, pull the
1009 parts into pseudos. The part extraction process can involve
1010 non-trivial computation. */
1011 if (GET_CODE (args[i].reg) == PARALLEL)
1013 tree type = TREE_TYPE (args[i].tree_value);
1014 args[i].parallel_value
1015 = emit_group_load_into_temps (args[i].reg, args[i].value,
1016 type, int_size_in_bytes (type));
1019 /* If the value is expensive, and we are inside an appropriately
1020 short loop, put the value into a pseudo and then put the pseudo
1021 into the hard reg.
1023 For small register classes, also do this if this call uses
1024 register parameters. This is to avoid reload conflicts while
1025 loading the parameter registers. */
1027 else if ((! (REG_P (args[i].value)
1028 || (GET_CODE (args[i].value) == SUBREG
1029 && REG_P (SUBREG_REG (args[i].value)))))
1030 && args[i].mode != BLKmode
1031 && (set_src_cost (args[i].value, args[i].mode,
1032 optimize_insn_for_speed_p ())
1033 > COSTS_N_INSNS (1))
1034 && ((*reg_parm_seen
1035 && targetm.small_register_classes_for_mode_p (args[i].mode))
1036 || optimize))
1037 args[i].value = copy_to_mode_reg (args[i].mode, args[i].value);
1041 #ifdef REG_PARM_STACK_SPACE
1043 /* The argument list is the property of the called routine and it
1044 may clobber it. If the fixed area has been used for previous
1045 parameters, we must save and restore it. */
1047 static rtx
1048 save_fixed_argument_area (int reg_parm_stack_space, rtx argblock, int *low_to_save, int *high_to_save)
1050 unsigned int low;
1051 unsigned int high;
1053 /* Compute the boundary of the area that needs to be saved, if any. */
1054 high = reg_parm_stack_space;
1055 if (ARGS_GROW_DOWNWARD)
1056 high += 1;
1058 if (high > highest_outgoing_arg_in_use)
1059 high = highest_outgoing_arg_in_use;
1061 for (low = 0; low < high; low++)
1062 if (stack_usage_map[low] != 0 || low >= stack_usage_watermark)
1064 int num_to_save;
1065 machine_mode save_mode;
1066 int delta;
1067 rtx addr;
1068 rtx stack_area;
1069 rtx save_area;
1071 while (stack_usage_map[--high] == 0)
1074 *low_to_save = low;
1075 *high_to_save = high;
1077 num_to_save = high - low + 1;
1079 /* If we don't have the required alignment, we must do this
1080 in BLKmode. */
1081 scalar_int_mode imode;
1082 if (int_mode_for_size (num_to_save * BITS_PER_UNIT, 1).exists (&imode)
1083 && (low & (MIN (GET_MODE_SIZE (imode),
1084 BIGGEST_ALIGNMENT / UNITS_PER_WORD) - 1)) == 0)
1085 save_mode = imode;
1086 else
1087 save_mode = BLKmode;
1089 if (ARGS_GROW_DOWNWARD)
1090 delta = -high;
1091 else
1092 delta = low;
1094 addr = plus_constant (Pmode, argblock, delta);
1095 stack_area = gen_rtx_MEM (save_mode, memory_address (save_mode, addr));
1097 set_mem_align (stack_area, PARM_BOUNDARY);
1098 if (save_mode == BLKmode)
1100 save_area = assign_stack_temp (BLKmode, num_to_save);
1101 emit_block_move (validize_mem (save_area), stack_area,
1102 GEN_INT (num_to_save), BLOCK_OP_CALL_PARM);
1104 else
1106 save_area = gen_reg_rtx (save_mode);
1107 emit_move_insn (save_area, stack_area);
1110 return save_area;
1113 return NULL_RTX;
1116 static void
1117 restore_fixed_argument_area (rtx save_area, rtx argblock, int high_to_save, int low_to_save)
1119 machine_mode save_mode = GET_MODE (save_area);
1120 int delta;
1121 rtx addr, stack_area;
1123 if (ARGS_GROW_DOWNWARD)
1124 delta = -high_to_save;
1125 else
1126 delta = low_to_save;
1128 addr = plus_constant (Pmode, argblock, delta);
1129 stack_area = gen_rtx_MEM (save_mode, memory_address (save_mode, addr));
1130 set_mem_align (stack_area, PARM_BOUNDARY);
1132 if (save_mode != BLKmode)
1133 emit_move_insn (stack_area, save_area);
1134 else
1135 emit_block_move (stack_area, validize_mem (save_area),
1136 GEN_INT (high_to_save - low_to_save + 1),
1137 BLOCK_OP_CALL_PARM);
1139 #endif /* REG_PARM_STACK_SPACE */
1141 /* If any elements in ARGS refer to parameters that are to be passed in
1142 registers, but not in memory, and whose alignment does not permit a
1143 direct copy into registers, copy the values into a group of pseudos
1144 which we will later copy into the appropriate hard registers.
1146 Pseudos for each unaligned argument will be stored into the array
1147 args[argnum].aligned_regs. The caller is responsible for deallocating
1148 the aligned_regs array if it is nonzero. */
1150 static void
1151 store_unaligned_arguments_into_pseudos (struct arg_data *args, int num_actuals)
1153 int i, j;
1155 for (i = 0; i < num_actuals; i++)
1156 if (args[i].reg != 0 && ! args[i].pass_on_stack
1157 && GET_CODE (args[i].reg) != PARALLEL
1158 && args[i].mode == BLKmode
1159 && MEM_P (args[i].value)
1160 && (MEM_ALIGN (args[i].value)
1161 < (unsigned int) MIN (BIGGEST_ALIGNMENT, BITS_PER_WORD)))
1163 int bytes = int_size_in_bytes (TREE_TYPE (args[i].tree_value));
1164 int endian_correction = 0;
1166 if (args[i].partial)
1168 gcc_assert (args[i].partial % UNITS_PER_WORD == 0);
1169 args[i].n_aligned_regs = args[i].partial / UNITS_PER_WORD;
1171 else
1173 args[i].n_aligned_regs
1174 = (bytes + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
1177 args[i].aligned_regs = XNEWVEC (rtx, args[i].n_aligned_regs);
1179 /* Structures smaller than a word are normally aligned to the
1180 least significant byte. On a BYTES_BIG_ENDIAN machine,
1181 this means we must skip the empty high order bytes when
1182 calculating the bit offset. */
1183 if (bytes < UNITS_PER_WORD
1184 #ifdef BLOCK_REG_PADDING
1185 && (BLOCK_REG_PADDING (args[i].mode,
1186 TREE_TYPE (args[i].tree_value), 1)
1187 == PAD_DOWNWARD)
1188 #else
1189 && BYTES_BIG_ENDIAN
1190 #endif
1192 endian_correction = BITS_PER_WORD - bytes * BITS_PER_UNIT;
1194 for (j = 0; j < args[i].n_aligned_regs; j++)
1196 rtx reg = gen_reg_rtx (word_mode);
1197 rtx word = operand_subword_force (args[i].value, j, BLKmode);
1198 int bitsize = MIN (bytes * BITS_PER_UNIT, BITS_PER_WORD);
1200 args[i].aligned_regs[j] = reg;
1201 word = extract_bit_field (word, bitsize, 0, 1, NULL_RTX,
1202 word_mode, word_mode, false, NULL);
1204 /* There is no need to restrict this code to loading items
1205 in TYPE_ALIGN sized hunks. The bitfield instructions can
1206 load up entire word sized registers efficiently.
1208 ??? This may not be needed anymore.
1209 We used to emit a clobber here but that doesn't let later
1210 passes optimize the instructions we emit. By storing 0 into
1211 the register later passes know the first AND to zero out the
1212 bitfield being set in the register is unnecessary. The store
1213 of 0 will be deleted as will at least the first AND. */
1215 emit_move_insn (reg, const0_rtx);
1217 bytes -= bitsize / BITS_PER_UNIT;
1218 store_bit_field (reg, bitsize, endian_correction, 0, 0,
1219 word_mode, word, false);
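/* Illustration (not part of calls.c, target-dependent): one way to reach
   the path above is a BLKmode aggregate whose alignment is below the word
   size, e.g. a packed struct passed by value on a target that passes such
   small aggregates in registers. Its words are then extracted bitwise
   into pseudos as done above. */

struct __attribute__ ((packed)) tag { char c; int i; };

extern int takes (struct tag);

int
pass_packed (void)
{
  struct tag x = { 1, 2 };
  return takes (x);
}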
1224 /* The limit set by -Walloc-size-larger-than=. */
1225 static GTY(()) tree alloc_object_size_limit;
1227 /* Initialize ALLOC_OBJECT_SIZE_LIMIT based on the -Walloc-size-larger-than=
1228 setting if the option is specified, or to the maximum object size if it
1229 is not. Return the initialized value. */
1231 static tree
1232 alloc_max_size (void)
1234 if (alloc_object_size_limit)
1235 return alloc_object_size_limit;
1237 HOST_WIDE_INT limit = warn_alloc_size_limit;
1238 if (limit == HOST_WIDE_INT_MAX)
1239 limit = tree_to_shwi (TYPE_MAX_VALUE (ptrdiff_type_node));
1241 alloc_object_size_limit = build_int_cst (size_type_node, limit);
1243 return alloc_object_size_limit;
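/* Illustration (not part of calls.c): effect of the limit initialized
   above. With -Walloc-size-larger-than=65536 the limit is 65536; with no
   option it defaults to PTRDIFF_MAX. */

void *malloc (unsigned long);

void *
too_big (void)
{
  /* warning: argument 1 value '1048576' exceeds maximum object size 65536
     (with -Walloc-size-larger-than=65536).  */
  return malloc (1024UL * 1024);
}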
1246 /* Return true when EXP's range can be determined and set RANGE[] to it
1247 after adjusting it if necessary to make EXP represent a valid size
1248 of an object, or a valid size argument to an allocation function declared
1249 with attribute alloc_size (whose argument may be signed), or to a string
1250 manipulation function like memset.
1251 When ALLOW_ZERO is set in FLAGS, allow returning a range of [0, 0] for
1252 a size in an anti-range [1, N] where N > PTRDIFF_MAX. A zero range is
1253 a (nearly) invalid argument to allocation functions like malloc but it
1254 is a valid argument to functions like memset.
1255 When USE_LARGEST is set in FLAGS set RANGE to the largest valid subrange
1256 in a multi-range, otherwise to the smallest valid subrange. */
1258 bool
1259 get_size_range (range_query *query, tree exp, gimple *stmt, tree range[2],
1260 int flags /* = 0 */)
1262 if (!exp)
1263 return false;
1265 if (tree_fits_uhwi_p (exp))
1267 /* EXP is a constant. */
1268 range[0] = range[1] = exp;
1269 return true;
1272 tree exptype = TREE_TYPE (exp);
1273 bool integral = INTEGRAL_TYPE_P (exptype);
1275 wide_int min, max;
1276 enum value_range_kind range_type;
1278 if (integral)
1280 value_range vr;
1281 if (query && query->range_of_expr (vr, exp, stmt))
1283 if (vr.undefined_p ())
1284 vr.set_varying (TREE_TYPE (exp));
1285 range_type = vr.kind ();
1286 min = wi::to_wide (vr.min ());
1287 max = wi::to_wide (vr.max ());
1289 else
1290 range_type = determine_value_range (exp, &min, &max);
1292 else
1293 range_type = VR_VARYING;
1295 if (range_type == VR_VARYING)
1297 if (integral)
1299 /* Use the full range of the type of the expression when
1300 no value range information is available. */
1301 range[0] = TYPE_MIN_VALUE (exptype);
1302 range[1] = TYPE_MAX_VALUE (exptype);
1303 return true;
1306 range[0] = NULL_TREE;
1307 range[1] = NULL_TREE;
1308 return false;
1311 unsigned expprec = TYPE_PRECISION (exptype);
1313 bool signed_p = !TYPE_UNSIGNED (exptype);
1315 if (range_type == VR_ANTI_RANGE)
1317 if (signed_p)
1319 if (wi::les_p (max, 0))
1321 /* EXP is not in a strictly negative range. That means
1322 it must be in some (not necessarily strictly) positive
1323 range which includes zero. Since in signed to unsigned
1324 conversions negative values end up converted to large
1325 positive values, and otherwise they are not valid sizes,
1326 the resulting range is in both cases [0, TYPE_MAX]. */
1327 min = wi::zero (expprec);
1328 max = wi::to_wide (TYPE_MAX_VALUE (exptype));
1330 else if (wi::les_p (min - 1, 0))
1332 /* EXP is not in a negative-positive range. That means EXP
1333 is either negative, or greater than max. Since negative
1334 sizes are invalid make the range [MAX + 1, TYPE_MAX]. */
1335 min = max + 1;
1336 max = wi::to_wide (TYPE_MAX_VALUE (exptype));
1338 else
1340 max = min - 1;
1341 min = wi::zero (expprec);
1344 else
1346 wide_int maxsize = wi::to_wide (max_object_size ());
1347 min = wide_int::from (min, maxsize.get_precision (), UNSIGNED);
1348 max = wide_int::from (max, maxsize.get_precision (), UNSIGNED);
1349 if (wi::eq_p (0, min - 1))
1351 /* EXP is unsigned and not in the range [1, MAX]. That means
1352 it's either zero or greater than MAX. Even though 0 would
1353 normally be detected by -Walloc-zero, unless ALLOW_ZERO
1354 is set, set the range to [MAX, TYPE_MAX] so that when MAX
1355 is greater than the limit the whole range is diagnosed. */
1356 wide_int maxsize = wi::to_wide (max_object_size ());
1357 if (flags & SR_ALLOW_ZERO)
1359 if (wi::leu_p (maxsize, max + 1)
1360 || !(flags & SR_USE_LARGEST))
1361 min = max = wi::zero (expprec);
1362 else
1364 min = max + 1;
1365 max = wi::to_wide (TYPE_MAX_VALUE (exptype));
1368 else
1370 min = max + 1;
1371 max = wi::to_wide (TYPE_MAX_VALUE (exptype));
1374 else if ((flags & SR_USE_LARGEST)
1375 && wi::ltu_p (max + 1, maxsize))
1377 /* When USE_LARGEST is set and the larger of the two subranges
1378 is a valid size, use it... */
1379 min = max + 1;
1380 max = maxsize;
1382 else
1384 /* ...otherwise use the smaller subrange. */
1385 max = min - 1;
1386 min = wi::zero (expprec);
1391 range[0] = wide_int_to_tree (exptype, min);
1392 range[1] = wide_int_to_tree (exptype, max);
1394 return true;
1397 bool
1398 get_size_range (tree exp, tree range[2], int flags /* = 0 */)
1400 return get_size_range (/*query=*/NULL, exp, /*stmt=*/NULL, range, flags);
1403 /* Diagnose a call EXP to function FN decorated with attribute alloc_size
1404 whose argument numbers given by IDX with values given by ARGS exceed
1405 the maximum object size or cause an unsigned overflow (wrapping) when
1406 multiplied. FN is null when EXP is a call via a function pointer.
1407 When ARGS[0] is null the function does nothing. ARGS[1] may be null
1408 for functions like malloc, and non-null for those like calloc that
1409 are decorated with a two-argument attribute alloc_size. */
1411 void
1412 maybe_warn_alloc_args_overflow (tree fn, tree exp, tree args[2], int idx[2])
1414 /* The range each of the (up to) two arguments is known to be in. */
1415 tree argrange[2][2] = { { NULL_TREE, NULL_TREE }, { NULL_TREE, NULL_TREE } };
1417 /* Maximum object size set by -Walloc-size-larger-than= or SIZE_MAX / 2. */
1418 tree maxobjsize = alloc_max_size ();
1420 location_t loc = EXPR_LOCATION (exp);
1422 tree fntype = fn ? TREE_TYPE (fn) : TREE_TYPE (TREE_TYPE (exp));
1423 bool warned = false;
1425 /* Validate each argument individually. */
1426 for (unsigned i = 0; i != 2 && args[i]; ++i)
1428 if (TREE_CODE (args[i]) == INTEGER_CST)
1430 argrange[i][0] = args[i];
1431 argrange[i][1] = args[i];
1433 if (tree_int_cst_lt (args[i], integer_zero_node))
1435 warned = warning_at (loc, OPT_Walloc_size_larger_than_,
1436 "%Kargument %i value %qE is negative",
1437 exp, idx[i] + 1, args[i]);
1439 else if (integer_zerop (args[i]))
1441 /* Avoid issuing -Walloc-zero for allocation functions other
1442 than __builtin_alloca that are declared with attribute
1443 returns_nonnull because there's no portability risk. This
1444 avoids warning for such calls to libiberty's xmalloc and
1445 friends.
1446 Also avoid issuing the warning for calls to a function named
1447 "alloca". */
1448 if (fn && fndecl_built_in_p (fn, BUILT_IN_ALLOCA)
1449 ? IDENTIFIER_LENGTH (DECL_NAME (fn)) != 6
1450 : !lookup_attribute ("returns_nonnull",
1451 TYPE_ATTRIBUTES (fntype)))
1452 warned = warning_at (loc, OPT_Walloc_zero,
1453 "%Kargument %i value is zero",
1454 exp, idx[i] + 1);
1456 else if (tree_int_cst_lt (maxobjsize, args[i]))
1458 /* G++ emits calls to ::operator new[](SIZE_MAX) in C++98
1459 mode and with -fno-exceptions as a way to indicate array
1460 size overflow. There's no good way to detect C++98 here
1461 so avoid diagnosing these calls for all C++ modes. */
1462 if (i == 0
1463 && fn
1464 && !args[1]
1465 && lang_GNU_CXX ()
1466 && DECL_IS_OPERATOR_NEW_P (fn)
1467 && integer_all_onesp (args[i]))
1468 continue;
1470 warned = warning_at (loc, OPT_Walloc_size_larger_than_,
1471 "%Kargument %i value %qE exceeds "
1472 "maximum object size %E",
1473 exp, idx[i] + 1, args[i], maxobjsize);
1476 else if (TREE_CODE (args[i]) == SSA_NAME
1477 && get_size_range (args[i], argrange[i]))
1479 /* Verify that the argument's range is not negative (including
1480 upper bound of zero). */
1481 if (tree_int_cst_lt (argrange[i][0], integer_zero_node)
1482 && tree_int_cst_le (argrange[i][1], integer_zero_node))
1484 warned = warning_at (loc, OPT_Walloc_size_larger_than_,
1485 "%Kargument %i range [%E, %E] is negative",
1486 exp, idx[i] + 1,
1487 argrange[i][0], argrange[i][1]);
1489 else if (tree_int_cst_lt (maxobjsize, argrange[i][0]))
1491 warned = warning_at (loc, OPT_Walloc_size_larger_than_,
1492 "%Kargument %i range [%E, %E] exceeds "
1493 "maximum object size %E",
1494 exp, idx[i] + 1,
1495 argrange[i][0], argrange[i][1],
1496 maxobjsize);
1501 if (!argrange[0][0])
1502 return;
1504 /* For a two-argument alloc_size, validate the product of the two
1505 arguments if both of their values or ranges are known. */
1506 if (!warned && tree_fits_uhwi_p (argrange[0][0])
1507 && argrange[1][0] && tree_fits_uhwi_p (argrange[1][0])
1508 && !integer_onep (argrange[0][0])
1509 && !integer_onep (argrange[1][0]))
1511 /* Check for overflow in the product of a function decorated with
1512 attribute alloc_size (X, Y). */
1513 unsigned szprec = TYPE_PRECISION (size_type_node);
1514 wide_int x = wi::to_wide (argrange[0][0], szprec);
1515 wide_int y = wi::to_wide (argrange[1][0], szprec);
1517 wi::overflow_type vflow;
1518 wide_int prod = wi::umul (x, y, &vflow);
1520 if (vflow)
1521 warned = warning_at (loc, OPT_Walloc_size_larger_than_,
1522 "%Kproduct %<%E * %E%> of arguments %i and %i "
1523 "exceeds %<SIZE_MAX%>",
1524 exp, argrange[0][0], argrange[1][0],
1525 idx[0] + 1, idx[1] + 1);
1526 else if (wi::ltu_p (wi::to_wide (maxobjsize, szprec), prod))
1527 warned = warning_at (loc, OPT_Walloc_size_larger_than_,
1528 "%Kproduct %<%E * %E%> of arguments %i and %i "
1529 "exceeds maximum object size %E",
1530 exp, argrange[0][0], argrange[1][0],
1531 idx[0] + 1, idx[1] + 1,
1532 maxobjsize);
1534 if (warned)
1536 /* Print the full range of each of the two arguments to make
1537 it clear when it is, in fact, in a range and not constant. */
1538 if (argrange[0][0] != argrange [0][1])
1539 inform (loc, "argument %i in the range [%E, %E]",
1540 idx[0] + 1, argrange[0][0], argrange[0][1]);
1541 if (argrange[1][0] != argrange [1][1])
1542 inform (loc, "argument %i in the range [%E, %E]",
1543 idx[1] + 1, argrange[1][0], argrange[1][1]);
1547 if (warned && fn)
1549 location_t fnloc = DECL_SOURCE_LOCATION (fn);
1551 if (DECL_IS_BUILTIN (fn))
1552 inform (loc,
1553 "in a call to built-in allocation function %qD", fn);
1554 else
1555 inform (fnloc,
1556 "in a call to allocation function %qD declared here", fn);
1560 /* If EXPR refers to a character array or pointer declared with attribute
1561 nonstring, return a decl for that array or pointer and set *REF to
1562 the referenced enclosing object or pointer. Otherwise returns
1563 null. */
1565 tree
1566 get_attr_nonstring_decl (tree expr, tree *ref)
1568 tree decl = expr;
1569 tree var = NULL_TREE;
1570 if (TREE_CODE (decl) == SSA_NAME)
1572 gimple *def = SSA_NAME_DEF_STMT (decl);
1574 if (is_gimple_assign (def))
1576 tree_code code = gimple_assign_rhs_code (def);
1577 if (code == ADDR_EXPR
1578 || code == COMPONENT_REF
1579 || code == VAR_DECL)
1580 decl = gimple_assign_rhs1 (def);
1582 else
1583 var = SSA_NAME_VAR (decl);
1586 if (TREE_CODE (decl) == ADDR_EXPR)
1587 decl = TREE_OPERAND (decl, 0);
1589 /* To simplify calling code, store the referenced DECL regardless of
1590 the attribute determined below, but avoid storing the SSA_NAME_VAR
1591 obtained above (it's not useful for dataflow purposes). */
1592 if (ref)
1593 *ref = decl;
1595 /* Use the SSA_NAME_VAR that was determined above to see if it's
1596 declared nonstring. Otherwise drill down into the referenced
1597 DECL. */
1598 if (var)
1599 decl = var;
1600 else if (TREE_CODE (decl) == ARRAY_REF)
1601 decl = TREE_OPERAND (decl, 0);
1602 else if (TREE_CODE (decl) == COMPONENT_REF)
1603 decl = TREE_OPERAND (decl, 1);
1604 else if (TREE_CODE (decl) == MEM_REF)
1605 return get_attr_nonstring_decl (TREE_OPERAND (decl, 0), ref);
1607 if (DECL_P (decl)
1608 && lookup_attribute ("nonstring", DECL_ATTRIBUTES (decl)))
1609 return decl;
1611 return NULL_TREE;
1614 /* Warn about passing a non-string array/pointer to a built-in function
1615 that expects a nul-terminated string argument. Returns true if
1616 a warning has been issued. */
1618 bool
1619 maybe_warn_nonstring_arg (tree fndecl, tree exp)
1621 if (!fndecl || !fndecl_built_in_p (fndecl, BUILT_IN_NORMAL))
1622 return false;
1624 if (TREE_NO_WARNING (exp) || !warn_stringop_overread)
1625 return false;
1627 /* Avoid clearly invalid calls (more checking done below). */
1628 unsigned nargs = call_expr_nargs (exp);
1629 if (!nargs)
1630 return false;
1632 /* The bound argument to a bounded string function like strncpy. */
1633 tree bound = NULL_TREE;
1635 /* The longest known or possible string argument to one of the comparison
1636 functions. If the length is less than the bound it is used instead.
1637 Since the length is only used for warnings and not for code generation,
1638 disable strict mode in the calls to get_range_strlen below. */
1639 tree maxlen = NULL_TREE;
1641 /* It's safe to call "bounded" string functions with a non-string
1642 argument since the functions provide an explicit bound for this
1643 purpose. The exception is strncat where the bound may refer to
1644 either the destination or the source. */
1645 int fncode = DECL_FUNCTION_CODE (fndecl);
1646 switch (fncode)
1648 case BUILT_IN_STRCMP:
1649 case BUILT_IN_STRNCMP:
1650 case BUILT_IN_STRNCASECMP:
1652 /* For these, if one argument refers to one or more of a set
1653 of string constants or arrays of known size, determine
1654 the range of their known or possible lengths and use it
1655 conservatively as the bound for the unbounded function,
1656 and to adjust the range of the bound of the bounded ones. */
1657 for (unsigned argno = 0;
1658 argno < MIN (nargs, 2)
1659 && !(maxlen && TREE_CODE (maxlen) == INTEGER_CST); argno++)
1661 tree arg = CALL_EXPR_ARG (exp, argno);
1662 if (!get_attr_nonstring_decl (arg))
1664 c_strlen_data lendata = { };
1665 /* Set MAXBOUND to an arbitrary non-null non-integer
1666 node as a request to have it set to the length of
1667 the longest string in a PHI. */
1668 lendata.maxbound = arg;
1669 get_range_strlen (arg, &lendata, /* eltsize = */ 1);
1670 maxlen = lendata.maxbound;
1674 /* Fall through. */
1676 case BUILT_IN_STRNCAT:
1677 case BUILT_IN_STPNCPY:
1678 case BUILT_IN_STRNCPY:
1679 if (nargs > 2)
1680 bound = CALL_EXPR_ARG (exp, 2);
1681 break;
1683 case BUILT_IN_STRNDUP:
1684 if (nargs > 1)
1685 bound = CALL_EXPR_ARG (exp, 1);
1686 break;
1688 case BUILT_IN_STRNLEN:
1690 tree arg = CALL_EXPR_ARG (exp, 0);
1691 if (!get_attr_nonstring_decl (arg))
1693 c_strlen_data lendata = { };
1694 /* Set MAXBOUND to an arbitrary non-null non-integer
1695 node as a request to have it set to the length of
1696 the longest string in a PHI. */
1697 lendata.maxbound = arg;
1698 get_range_strlen (arg, &lendata, /* eltsize = */ 1);
1699 maxlen = lendata.maxbound;
1701 if (nargs > 1)
1702 bound = CALL_EXPR_ARG (exp, 1);
1703 break;
1706 default:
1707 break;
1710 /* Determine the range of the bound argument (if specified). */
1711 tree bndrng[2] = { NULL_TREE, NULL_TREE };
1712 if (bound)
1714 STRIP_NOPS (bound);
1715 get_size_range (bound, bndrng);
1718 location_t loc = EXPR_LOCATION (exp);
1720 if (bndrng[0])
1722 /* Diagnose excessive bound prior to the adjustment below and
1723 regardless of attribute nonstring. */
1724 tree maxobjsize = max_object_size ();
1725 if (tree_int_cst_lt (maxobjsize, bndrng[0]))
1727 bool warned = false;
1728 if (tree_int_cst_equal (bndrng[0], bndrng[1]))
1729 warned = warning_at (loc, OPT_Wstringop_overread,
1730 "%K%qD specified bound %E "
1731 "exceeds maximum object size %E",
1732 exp, fndecl, bndrng[0], maxobjsize);
1733 else
1734 warned = warning_at (loc, OPT_Wstringop_overread,
1735 "%K%qD specified bound [%E, %E] "
1736 "exceeds maximum object size %E",
1737 exp, fndecl, bndrng[0], bndrng[1],
1738 maxobjsize);
1739 if (warned)
1740 TREE_NO_WARNING (exp) = true;
1742 return warned;
1746 if (maxlen && !integer_all_onesp (maxlen))
1748 /* Add one for the nul. */
1749 maxlen = const_binop (PLUS_EXPR, TREE_TYPE (maxlen), maxlen,
1750 size_one_node);
1752 if (!bndrng[0])
1754 /* Conservatively use the upper bound of the lengths for
1755 both the lower and the upper bound of the operation. */
1756 bndrng[0] = maxlen;
1757 bndrng[1] = maxlen;
1758 bound = void_type_node;
1760 else if (maxlen)
1762 /* Replace the bound on the operation with the upper bound
1763 of the length of the string if the latter is smaller. */
1764 if (tree_int_cst_lt (maxlen, bndrng[0]))
1765 bndrng[0] = maxlen;
1766 else if (tree_int_cst_lt (maxlen, bndrng[1]))
1767 bndrng[1] = maxlen;
1771 bool any_arg_warned = false;
1772 /* Iterate over the built-in function's formal arguments and check
1773 each const char* against the actual argument. If the actual
1774 argument is declared attribute non-string issue a warning unless
1775 the argument's maximum length is bounded. */
1776 function_args_iterator it;
1777 function_args_iter_init (&it, TREE_TYPE (fndecl));
1779 for (unsigned argno = 0; ; ++argno, function_args_iter_next (&it))
1781 /* Avoid iterating past the declared argument in a call
1782 to function declared without a prototype. */
1783 if (argno >= nargs)
1784 break;
1786 tree argtype = function_args_iter_cond (&it);
1787 if (!argtype)
1788 break;
1790 if (TREE_CODE (argtype) != POINTER_TYPE)
1791 continue;
1793 argtype = TREE_TYPE (argtype);
1795 if (TREE_CODE (argtype) != INTEGER_TYPE
1796 || !TYPE_READONLY (argtype))
1797 continue;
1799 argtype = TYPE_MAIN_VARIANT (argtype);
1800 if (argtype != char_type_node)
1801 continue;
1803 tree callarg = CALL_EXPR_ARG (exp, argno);
1804 if (TREE_CODE (callarg) == ADDR_EXPR)
1805 callarg = TREE_OPERAND (callarg, 0);
1807 /* See if the destination is declared with attribute "nonstring". */
1808 tree decl = get_attr_nonstring_decl (callarg);
1809 if (!decl)
1810 continue;
1812 /* The maximum number of array elements accessed. */
1813 offset_int wibnd = 0;
1815 if (argno && fncode == BUILT_IN_STRNCAT)
1817 /* See if the bound in strncat is derived from the strlen of the
1818 destination (as it's expected to be).
1819 If so, reset BOUND and FNCODE to trigger a warning. */
1820 tree dstarg = CALL_EXPR_ARG (exp, 0);
1821 if (is_strlen_related_p (dstarg, bound))
1823 /* The bound applies to the destination, not to the source,
1824 so reset these to trigger a warning without mentioning
1825 the bound. */
1826 bound = NULL;
1827 fncode = 0;
1829 else if (bndrng[1])
1830 /* Use the upper bound of the range for strncat. */
1831 wibnd = wi::to_offset (bndrng[1]);
1833 else if (bndrng[0])
1834 /* Use the lower bound of the range for functions other than
1835 strncat. */
1836 wibnd = wi::to_offset (bndrng[0]);
1838 /* Determine the size of the argument array if it is one. */
1839 offset_int asize = wibnd;
1840 bool known_size = false;
1841 tree type = TREE_TYPE (decl);
1843 /* Determine the array size. For arrays of unknown bound and
1844 for pointers, reset BOUND to trigger the appropriate warning. */
1845 if (TREE_CODE (type) == ARRAY_TYPE)
1847 if (tree arrbnd = TYPE_DOMAIN (type))
1849 if ((arrbnd = TYPE_MAX_VALUE (arrbnd)))
1851 asize = wi::to_offset (arrbnd) + 1;
1852 known_size = true;
1855 else if (bound == void_type_node)
1856 bound = NULL_TREE;
1858 else if (bound == void_type_node)
1859 bound = NULL_TREE;
1861 /* In a call to strncat with a bound in a range whose lower but
1862 not upper bound is less than the array size, reset ASIZE to
1863 be the same as the bound and the other variable to trigger
1864 the appropriate warning below. */
1865 if (fncode == BUILT_IN_STRNCAT
1866 && bndrng[0] != bndrng[1]
1867 && wi::ltu_p (wi::to_offset (bndrng[0]), asize)
1868 && (!known_size
1869 || wi::ltu_p (asize, wibnd)))
1871 asize = wibnd;
1872 bound = NULL_TREE;
1873 fncode = 0;
1876 bool warned = false;
1878 auto_diagnostic_group d;
1879 if (wi::ltu_p (asize, wibnd))
1881 if (bndrng[0] == bndrng[1])
1882 warned = warning_at (loc, OPT_Wstringop_overread,
1883 "%qD argument %i declared attribute "
1884 "%<nonstring%> is smaller than the specified "
1885 "bound %wu",
1886 fndecl, argno + 1, wibnd.to_uhwi ());
1887 else if (wi::ltu_p (asize, wi::to_offset (bndrng[0])))
1888 warned = warning_at (loc, OPT_Wstringop_overread,
1889 "%qD argument %i declared attribute "
1890 "%<nonstring%> is smaller than "
1891 "the specified bound [%E, %E]",
1892 fndecl, argno + 1, bndrng[0], bndrng[1]);
1893 else
1894 warned = warning_at (loc, OPT_Wstringop_overread,
1895 "%qD argument %i declared attribute "
1896 "%<nonstring%> may be smaller than "
1897 "the specified bound [%E, %E]",
1898 fndecl, argno + 1, bndrng[0], bndrng[1]);
1900 else if (fncode == BUILT_IN_STRNCAT)
1901 ; /* Avoid warning for calls to strncat() when the bound
1902 is equal to the size of the non-string argument. */
1903 else if (!bound)
1904 warned = warning_at (loc, OPT_Wstringop_overread,
1905 "%qD argument %i declared attribute %<nonstring%>",
1906 fndecl, argno + 1);
1908 if (warned)
1910 inform (DECL_SOURCE_LOCATION (decl),
1911 "argument %qD declared here", decl);
1912 any_arg_warned = true;
1916 if (any_arg_warned)
1917 TREE_NO_WARNING (exp) = true;
1919 return any_arg_warned;
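/* Illustrative example (hypothetical declarations, for exposition only):
   given

     extern char a[4] __attribute__ ((nonstring));

   a bounded call such as strnlen (a, 16) is diagnosed by the logic
   above with -Wstringop-overread, roughly: "'strnlen' argument 1
   declared attribute 'nonstring' is smaller than the specified
   bound 16".  */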
1922 /* Issue an error if CALL_EXPR was flagged as requiring
1923 tail-call optimization. */
1925 static void
1926 maybe_complain_about_tail_call (tree call_expr, const char *reason)
1928 gcc_assert (TREE_CODE (call_expr) == CALL_EXPR);
1929 if (!CALL_EXPR_MUST_TAIL_CALL (call_expr))
1930 return;
1932 error_at (EXPR_LOCATION (call_expr), "cannot tail-call: %s", reason);
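/* For example, when a call flagged CALL_EXPR_MUST_TAIL_CALL cannot be
   emitted as a sibcall because the target lacks a sibcall_epilogue
   pattern, the user sees "cannot tail-call: machine description does
   not have a sibcall_epilogue instruction pattern" (see
   can_implement_as_sibling_call_p below).  */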
1935 /* Returns the type of the argument ARGNO to function with type FNTYPE
1936 or null when the type cannot be determined or no such argument exists. */
1938 static tree
1939 fntype_argno_type (tree fntype, unsigned argno)
1941 if (!prototype_p (fntype))
1942 return NULL_TREE;
1944 tree argtype;
1945 function_args_iterator it;
1946 FOREACH_FUNCTION_ARGS (fntype, argtype, it)
1947 if (argno-- == 0)
1948 return argtype;
1950 return NULL_TREE;
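/* For example, for a prototype "void f (int, char *);",
   fntype_argno_type (fntype, 1) yields the type "char *", while for
   the unprototyped "void g ();" it yields NULL_TREE.  */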
1953 /* Helper to append the "human readable" attribute access specification
1954 described by ACCESS to the array ATTRSTR with size STRSIZE. Used in
1955 diagnostics. */
1957 static inline void
1958 append_attrname (const std::pair<int, attr_access> &access,
1959 char *attrstr, size_t strsize)
1961 if (access.second.internal_p)
1962 return;
1964 tree str = access.second.to_external_string ();
1965 gcc_assert (strsize >= (size_t) TREE_STRING_LENGTH (str));
1966 strcpy (attrstr, TREE_STRING_POINTER (str));
1969 /* Iterate over attribute access read-only, read-write, and write-only
1970 arguments and diagnose past-the-end accesses and related problems
1971 in the function call EXP. */
1973 static void
1974 maybe_warn_rdwr_sizes (rdwr_map *rwm, tree fndecl, tree fntype, tree exp)
1976 auto_diagnostic_group adg;
1978 /* Set if a warning has been issued for any argument (used to decide
1979 whether to emit an informational note at the end). */
1980 bool any_warned = false;
1982 /* A string describing the attributes that the warnings issued by this
1983 function apply to. Used to print one informational note per function
1984 call, rather than one per warning. That reduces clutter. */
1985 char attrstr[80];
1986 attrstr[0] = 0;
1988 for (rdwr_map::iterator it = rwm->begin (); it != rwm->end (); ++it)
1990 std::pair<int, attr_access> access = *it;
1992 /* Get the function call arguments corresponding to the attribute's
1993 positional arguments. When both arguments have been specified
1994 there will be two entries in *RWM, one for each. They are
1995 cross-referenced by their respective argument numbers in
1996 ACCESS.PTRARG and ACCESS.SIZARG. */
1997 const int ptridx = access.second.ptrarg;
1998 const int sizidx = access.second.sizarg;
2000 gcc_assert (ptridx != -1);
2001 gcc_assert (access.first == ptridx || access.first == sizidx);
2003 /* The pointer is set to null for the entry corresponding to
2004 the size argument. Skip it. It's handled when the entry
2005 corresponding to the pointer argument comes up. */
2006 if (!access.second.ptr)
2007 continue;
2009 tree ptrtype = fntype_argno_type (fntype, ptridx);
2010 tree argtype = TREE_TYPE (ptrtype);
2012 /* The size of the access by the call. */
2013 tree access_size;
2014 if (sizidx == -1)
2016 /* If only the pointer attribute operand was specified and
2017 not the size, set ACCESS_SIZE to the greater of MINSIZE or the size
2018 of one element of the pointed-to type to detect smaller objects
2019 (null pointers are diagnosed in this case only if the pointer is
2020 also declared with attribute nonnull). */
2021 if (access.second.minsize
2022 && access.second.minsize != HOST_WIDE_INT_M1U)
2023 access_size = build_int_cstu (sizetype, access.second.minsize);
2024 else
2025 access_size = size_one_node;
2027 else
2028 access_size = rwm->get (sizidx)->size;
2030 /* Format the value or range to avoid an explosion of messages. */
2031 char sizstr[80];
2032 tree sizrng[2] = { size_zero_node, build_all_ones_cst (sizetype) };
2033 if (get_size_range (access_size, sizrng, true))
2035 const char *s0 = print_generic_expr_to_str (sizrng[0]);
2036 if (tree_int_cst_equal (sizrng[0], sizrng[1]))
2038 gcc_checking_assert (strlen (s0) < sizeof sizstr);
2039 strcpy (sizstr, s0);
2041 else
2043 const char *s1 = print_generic_expr_to_str (sizrng[1]);
2044 gcc_checking_assert (strlen (s0) + strlen (s1)
2045 < sizeof sizstr - 4);
2046 sprintf (sizstr, "[%s, %s]", s0, s1);
2049 else
2050 *sizstr = '\0';
2052 /* Set if a warning has been issued for the current argument. */
2053 bool arg_warned = false;
2054 location_t loc = EXPR_LOCATION (exp);
2055 tree ptr = access.second.ptr;
2056 if (*sizstr
2057 && tree_int_cst_sgn (sizrng[0]) < 0
2058 && tree_int_cst_sgn (sizrng[1]) < 0)
2060 /* Warn about negative sizes. */
2061 if (access.second.internal_p)
2063 const std::string argtypestr
2064 = access.second.array_as_string (ptrtype);
2066 arg_warned = warning_at (loc, OPT_Wstringop_overflow_,
2067 "%Kbound argument %i value %s is "
2068 "negative for a variable length array "
2069 "argument %i of type %s",
2070 exp, sizidx + 1, sizstr,
2071 ptridx + 1, argtypestr.c_str ());
2073 else
2074 arg_warned = warning_at (loc, OPT_Wstringop_overflow_,
2075 "%Kargument %i value %s is negative",
2076 exp, sizidx + 1, sizstr);
2078 if (arg_warned)
2080 append_attrname (access, attrstr, sizeof attrstr);
2081 /* Remember a warning has been issued and avoid warning
2082 again below for the same attribute. */
2083 any_warned = true;
2084 continue;
2088 if (tree_int_cst_sgn (sizrng[0]) >= 0)
2090 if (COMPLETE_TYPE_P (argtype))
2092 /* Multiply ACCESS_SIZE by the size of the type the pointer
2093 argument points to. If it's incomplete the size is used
2094 as is. */
2095 if (tree argsize = TYPE_SIZE_UNIT (argtype))
2096 if (TREE_CODE (argsize) == INTEGER_CST)
2098 const int prec = TYPE_PRECISION (sizetype);
2099 wide_int minsize = wi::to_wide (sizrng[0], prec);
2100 minsize *= wi::to_wide (argsize, prec);
2101 access_size = wide_int_to_tree (sizetype, minsize);
2105 else
2106 access_size = NULL_TREE;
2108 if (integer_zerop (ptr))
2110 if (sizidx >= 0 && tree_int_cst_sgn (sizrng[0]) > 0)
2112 /* Warn about null pointers with positive sizes. This is
2113 different from also declaring the pointer argument with
2114 attribute nonnull when the function accepts null pointers
2115 only when the corresponding size is zero. */
2116 if (access.second.internal_p)
2118 const std::string argtypestr
2119 = access.second.array_as_string (ptrtype);
2121 arg_warned = warning_at (loc, OPT_Wnonnull,
2122 "%Kargument %i of variable length "
2123 "array %s is null but "
2124 "the corresponding bound argument "
2125 "%i value is %s",
2126 exp, sizidx + 1, argtypestr.c_str (),
2127 ptridx + 1, sizstr);
2129 else
2130 arg_warned = warning_at (loc, OPT_Wnonnull,
2131 "%Kargument %i is null but "
2132 "the corresponding size argument "
2133 "%i value is %s",
2134 exp, ptridx + 1, sizidx + 1,
2135 sizstr);
2137 else if (access_size && access.second.static_p)
2139 /* Warn about null pointers for [static N] array arguments
2140 but do not warn for ordinary (i.e., nonstatic) arrays. */
2141 arg_warned = warning_at (loc, OPT_Wnonnull,
2142 "%Kargument %i to %<%T[static %E]%> "
2143 "is null where non-null expected",
2144 exp, ptridx + 1, argtype,
2145 access_size);
2148 if (arg_warned)
2150 append_attrname (access, attrstr, sizeof attrstr);
2151 /* Remember a warning has been issued and avoid warning
2152 again below for the same attribute. */
2153 any_warned = true;
2154 continue;
2158 access_data data (ptr, access.second.mode, NULL_TREE, false,
2159 NULL_TREE, false);
2160 access_ref* const pobj = (access.second.mode == access_write_only
2161 ? &data.dst : &data.src);
2162 tree objsize = compute_objsize (ptr, 1, pobj);
2164 /* The size of the destination or source object. */
2165 tree dstsize = NULL_TREE, srcsize = NULL_TREE;
2166 if (access.second.mode == access_read_only
2167 || access.second.mode == access_none)
2169 /* For a read-only argument there is no destination. For
2170 no access, set the source as well and differentiate via
2171 the access flag below. */
2172 srcsize = objsize;
2173 if (access.second.mode == access_read_only
2174 || access.second.mode == access_none)
2176 /* For a read-only attribute there is no destination so
2177 clear OBJSIZE. This emits "reading N bytes" kind of
2178 diagnostics instead of the "writing N bytes" kind,
2179 unless MODE is none. */
2180 objsize = NULL_TREE;
2183 else
2184 dstsize = objsize;
2186 /* Clear the no-warning bit in case it was set by check_access
2187 in a prior iteration so that accesses via different arguments
2188 are diagnosed. */
2189 TREE_NO_WARNING (exp) = false;
2190 access_mode mode = data.mode;
2191 if (mode == access_deferred)
2192 mode = TYPE_READONLY (argtype) ? access_read_only : access_read_write;
2193 check_access (exp, access_size, /*maxread=*/ NULL_TREE, srcsize,
2194 dstsize, mode, &data);
2196 if (TREE_NO_WARNING (exp))
2198 any_warned = true;
2200 if (access.second.internal_p)
2201 inform (loc, "referencing argument %u of type %qT",
2202 ptridx + 1, ptrtype);
2203 else
2204 /* If check_access issued a warning above, append the relevant
2205 attribute to the string. */
2206 append_attrname (access, attrstr, sizeof attrstr);
2210 if (*attrstr)
2212 if (fndecl)
2213 inform (DECL_SOURCE_LOCATION (fndecl),
2214 "in a call to function %qD declared with attribute %qs",
2215 fndecl, attrstr);
2216 else
2217 inform (EXPR_LOCATION (exp),
2218 "in a call with type %qT and attribute %qs",
2219 fntype, attrstr);
2221 else if (any_warned)
2223 if (fndecl)
2224 inform (DECL_SOURCE_LOCATION (fndecl),
2225 "in a call to function %qD", fndecl);
2226 else
2227 inform (EXPR_LOCATION (exp),
2228 "in a call with type %qT", fntype);
2231 /* Set the bit in case it was cleared and not set above. */
2232 TREE_NO_WARNING (exp) = true;
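/* Illustrative example (F and BUF are hypothetical): given

     __attribute__ ((access (write_only, 1, 2))) void f (char *, int);

   the checks above diagnose f (0, 4) with -Wnonnull (null pointer with
   a positive size) and f (buf, -1) with -Wstringop-overflow (negative
   size), while check_access diagnoses accesses past the end of BUF.  */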
2235 /* Fill in ARGS_SIZE and ARGS array based on the parameters found in
2236 CALL_EXPR EXP.
2238 NUM_ACTUALS is the total number of parameters.
2240 N_NAMED_ARGS is the total number of named arguments.
2242 STRUCT_VALUE_ADDR_VALUE is the implicit argument for a struct return
2243 value, or null.
2245 FNDECL is the tree node for the target of this call (if known)
2247 ARGS_SO_FAR holds state needed by the target to know where to place
2248 the next argument.
2250 REG_PARM_STACK_SPACE is the number of bytes of stack space reserved
2251 for arguments which are passed in registers.
2253 OLD_STACK_LEVEL is a pointer to an rtx which holds the old stack level
2254 and may be modified by this routine.
2256 OLD_PENDING_ADJ, MUST_PREALLOCATE and FLAGS are pointers to integer
2257 flags which may be modified by this routine.
2259 MAY_TAILCALL is cleared if we encounter an invisible pass-by-reference
2260 that requires allocation of stack space.
2262 CALL_FROM_THUNK_P is true if this call is the jump from a thunk to
2263 the thunked-to function. */
2265 static void
2266 initialize_argument_information (int num_actuals ATTRIBUTE_UNUSED,
2267 struct arg_data *args,
2268 struct args_size *args_size,
2269 int n_named_args ATTRIBUTE_UNUSED,
2270 tree exp, tree struct_value_addr_value,
2271 tree fndecl, tree fntype,
2272 cumulative_args_t args_so_far,
2273 int reg_parm_stack_space,
2274 rtx *old_stack_level,
2275 poly_int64_pod *old_pending_adj,
2276 int *must_preallocate, int *ecf_flags,
2277 bool *may_tailcall, bool call_from_thunk_p)
2279 CUMULATIVE_ARGS *args_so_far_pnt = get_cumulative_args (args_so_far);
2280 location_t loc = EXPR_LOCATION (exp);
2282 /* Count arg position in order args appear. */
2283 int argpos;
2285 int i;
2287 args_size->constant = 0;
2288 args_size->var = 0;
2290 bitmap_obstack_initialize (NULL);
2292 /* In this loop, we consider args in the order they are written.
2293 We fill up ARGS from the back. */
2295 i = num_actuals - 1;
2297 int j = i;
2298 call_expr_arg_iterator iter;
2299 tree arg;
2300 bitmap slots = NULL;
2302 if (struct_value_addr_value)
2304 args[j].tree_value = struct_value_addr_value;
2305 j--;
2307 argpos = 0;
2308 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
2310 tree argtype = TREE_TYPE (arg);
2312 if (targetm.calls.split_complex_arg
2313 && argtype
2314 && TREE_CODE (argtype) == COMPLEX_TYPE
2315 && targetm.calls.split_complex_arg (argtype))
2317 tree subtype = TREE_TYPE (argtype);
2318 args[j].tree_value = build1 (REALPART_EXPR, subtype, arg);
2319 j--;
2320 args[j].tree_value = build1 (IMAGPART_EXPR, subtype, arg);
2322 else
2323 args[j].tree_value = arg;
2324 j--;
2325 argpos++;
2328 if (slots)
2329 BITMAP_FREE (slots);
2332 bitmap_obstack_release (NULL);
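/* Illustrative note: on targets whose ABI splits complex arguments
   (targetm.calls.split_complex_arg), a call f (z) with _Complex double
   Z has been laid out above as if it were f (__real__ z, __imag__ z),
   with each part occupying its own ARGS slot.  */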
2334 tree fntypeattrs = TYPE_ATTRIBUTES (fntype);
2335 /* Extract attribute alloc_size from the type of the called expression
2336 (which could be a function or a function pointer) and if set, store
2337 the indices of the corresponding arguments in ALLOC_IDX, and then
2338 the actual argument(s) at those indices in ALLOC_ARGS. */
2339 int alloc_idx[2] = { -1, -1 };
2340 if (tree alloc_size = lookup_attribute ("alloc_size", fntypeattrs))
2342 tree args = TREE_VALUE (alloc_size);
2343 alloc_idx[0] = TREE_INT_CST_LOW (TREE_VALUE (args)) - 1;
2344 if (TREE_CHAIN (args))
2345 alloc_idx[1] = TREE_INT_CST_LOW (TREE_VALUE (TREE_CHAIN (args))) - 1;
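/* For example, given a hypothetical declaration such as

     __attribute__ ((alloc_size (1, 2))) void *my_calloc (size_t, size_t);

   ALLOC_IDX becomes {0, 1}, and the two actual arguments are collected
   in ALLOC_ARGS below for maybe_warn_alloc_args_overflow.  */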
2348 /* Array for up to the two attribute alloc_size arguments. */
2349 tree alloc_args[] = { NULL_TREE, NULL_TREE };
2351 /* Map of attribute access specifications for function arguments. */
2352 rdwr_map rdwr_idx;
2353 init_attr_rdwr_indices (&rdwr_idx, fntypeattrs);
2355 /* I counts args in order (to be) pushed; ARGPOS counts in order written. */
2356 for (argpos = 0; argpos < num_actuals; i--, argpos++)
2358 tree type = TREE_TYPE (args[i].tree_value);
2359 int unsignedp;
2361 /* Replace erroneous argument with constant zero. */
2362 if (type == error_mark_node || !COMPLETE_TYPE_P (type))
2363 args[i].tree_value = integer_zero_node, type = integer_type_node;
2365 /* If TYPE is a transparent union or record, pass things the way
2366 we would pass the first field of the union or record. We have
2367 already verified that the modes are the same. */
2368 if (RECORD_OR_UNION_TYPE_P (type) && TYPE_TRANSPARENT_AGGR (type))
2369 type = TREE_TYPE (first_field (type));
2371 /* Decide where to pass this arg.
2373 args[i].reg is nonzero if all or part is passed in registers.
2375 args[i].partial is nonzero if part but not all is passed in registers,
2376 and the exact value says how many bytes are passed in registers.
2378 args[i].pass_on_stack is nonzero if the argument must at least be
2379 computed on the stack. It may then be loaded back into registers
2380 if args[i].reg is nonzero.
2382 These decisions are driven by the FUNCTION_... macros and must agree
2383 with those made by function.c. */
2385 /* See if this argument should be passed by invisible reference. */
2386 function_arg_info arg (type, argpos < n_named_args);
2387 if (pass_by_reference (args_so_far_pnt, arg))
2389 bool callee_copies;
2390 tree base = NULL_TREE;
2392 callee_copies = reference_callee_copied (args_so_far_pnt, arg);
2394 /* If we're compiling a thunk, pass through invisible references
2395 instead of making a copy. */
2396 if (call_from_thunk_p
2397 || (callee_copies
2398 && !TREE_ADDRESSABLE (type)
2399 && (base = get_base_address (args[i].tree_value))
2400 && TREE_CODE (base) != SSA_NAME
2401 && (!DECL_P (base) || MEM_P (DECL_RTL (base)))))
2403 /* We may have turned the parameter value into an SSA name.
2404 Go back to the original parameter so we can take the
2405 address. */
2406 if (TREE_CODE (args[i].tree_value) == SSA_NAME)
2408 gcc_assert (SSA_NAME_IS_DEFAULT_DEF (args[i].tree_value));
2409 args[i].tree_value = SSA_NAME_VAR (args[i].tree_value);
2410 gcc_assert (TREE_CODE (args[i].tree_value) == PARM_DECL);
2412 /* Argument setup code may have copied the value to register. We
2413 revert that optimization now because the tail call code must
2414 use the original location. */
2415 if (TREE_CODE (args[i].tree_value) == PARM_DECL
2416 && !MEM_P (DECL_RTL (args[i].tree_value))
2417 && DECL_INCOMING_RTL (args[i].tree_value)
2418 && MEM_P (DECL_INCOMING_RTL (args[i].tree_value)))
2419 set_decl_rtl (args[i].tree_value,
2420 DECL_INCOMING_RTL (args[i].tree_value));
2422 mark_addressable (args[i].tree_value);
2424 /* We can't use sibcalls if a callee-copied argument is
2425 stored in the current function's frame. */
2426 if (!call_from_thunk_p && DECL_P (base) && !TREE_STATIC (base))
2428 *may_tailcall = false;
2429 maybe_complain_about_tail_call (exp,
2430 "a callee-copied argument is"
2431 " stored in the current"
2432 " function's frame");
2435 args[i].tree_value = build_fold_addr_expr_loc (loc,
2436 args[i].tree_value);
2437 type = TREE_TYPE (args[i].tree_value);
2439 if (*ecf_flags & ECF_CONST)
2440 *ecf_flags &= ~(ECF_CONST | ECF_LOOPING_CONST_OR_PURE);
2442 else
2444 /* We make a copy of the object and pass the address to the
2445 function being called. */
2446 rtx copy;
2448 if (!COMPLETE_TYPE_P (type)
2449 || TREE_CODE (TYPE_SIZE_UNIT (type)) != INTEGER_CST
2450 || (flag_stack_check == GENERIC_STACK_CHECK
2451 && compare_tree_int (TYPE_SIZE_UNIT (type),
2452 STACK_CHECK_MAX_VAR_SIZE) > 0))
2454 /* This is a variable-sized object. Make space on the stack
2455 for it. */
2456 rtx size_rtx = expr_size (args[i].tree_value);
2458 if (*old_stack_level == 0)
2460 emit_stack_save (SAVE_BLOCK, old_stack_level);
2461 *old_pending_adj = pending_stack_adjust;
2462 pending_stack_adjust = 0;
2465 /* We can pass TRUE as the 4th argument because we just
2466 saved the stack pointer and will restore it right after
2467 the call. */
2468 copy = allocate_dynamic_stack_space (size_rtx,
2469 TYPE_ALIGN (type),
2470 TYPE_ALIGN (type),
2471 max_int_size_in_bytes
2472 (type),
2473 true);
2474 copy = gen_rtx_MEM (BLKmode, copy);
2475 set_mem_attributes (copy, type, 1);
2477 else
2478 copy = assign_temp (type, 1, 0);
2480 store_expr (args[i].tree_value, copy, 0, false, false);
2482 /* Just change the const function to pure and then let
2483 the next test clear the pure based on
2484 callee_copies. */
2485 if (*ecf_flags & ECF_CONST)
2487 *ecf_flags &= ~ECF_CONST;
2488 *ecf_flags |= ECF_PURE;
2491 if (!callee_copies && *ecf_flags & ECF_PURE)
2492 *ecf_flags &= ~(ECF_PURE | ECF_LOOPING_CONST_OR_PURE);
2494 args[i].tree_value
2495 = build_fold_addr_expr_loc (loc, make_tree (type, copy));
2496 type = TREE_TYPE (args[i].tree_value);
2497 *may_tailcall = false;
2498 maybe_complain_about_tail_call (exp,
2499 "argument must be passed"
2500 " by copying");
2502 arg.pass_by_reference = true;
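/* Illustrative note: when a large aggregate is passed by invisible
   reference and the callee does not make its own copy, the code above
   copies the object into a temporary, rewrites the argument to the
   temporary's address, and disables sibcall optimization ("argument
   must be passed by copying").  */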
2505 unsignedp = TYPE_UNSIGNED (type);
2506 arg.type = type;
2507 arg.mode
2508 = promote_function_mode (type, TYPE_MODE (type), &unsignedp,
2509 fndecl ? TREE_TYPE (fndecl) : fntype, 0);
2511 args[i].unsignedp = unsignedp;
2512 args[i].mode = arg.mode;
2514 targetm.calls.warn_parameter_passing_abi (args_so_far, type);
2516 args[i].reg = targetm.calls.function_arg (args_so_far, arg);
2518 if (args[i].reg && CONST_INT_P (args[i].reg))
2519 args[i].reg = NULL;
2521 /* If this is a sibling call and the machine has register windows, the
2522 register window has to be unwound before calling the routine, so
2523 arguments have to go into the incoming registers. */
2524 if (targetm.calls.function_incoming_arg != targetm.calls.function_arg)
2525 args[i].tail_call_reg
2526 = targetm.calls.function_incoming_arg (args_so_far, arg);
2527 else
2528 args[i].tail_call_reg = args[i].reg;
2530 if (args[i].reg)
2531 args[i].partial = targetm.calls.arg_partial_bytes (args_so_far, arg);
2533 args[i].pass_on_stack = targetm.calls.must_pass_in_stack (arg);
2535 /* If FUNCTION_ARG returned a (parallel [(expr_list (nil) ...) ...]),
2536 it means that we are to pass this arg in the register(s) designated
2537 by the PARALLEL, but also to pass it in the stack. */
2538 if (args[i].reg && GET_CODE (args[i].reg) == PARALLEL
2539 && XEXP (XVECEXP (args[i].reg, 0, 0), 0) == 0)
2540 args[i].pass_on_stack = 1;
2542 /* If this is an addressable type, we must preallocate the stack
2543 since we must evaluate the object into its final location.
2545 If this is to be passed in both registers and the stack, it is simpler
2546 to preallocate. */
2547 if (TREE_ADDRESSABLE (type)
2548 || (args[i].pass_on_stack && args[i].reg != 0))
2549 *must_preallocate = 1;
2551 /* Compute the stack-size of this argument. */
2552 if (args[i].reg == 0 || args[i].partial != 0
2553 || reg_parm_stack_space > 0
2554 || args[i].pass_on_stack)
2555 locate_and_pad_parm (arg.mode, type,
2556 #ifdef STACK_PARMS_IN_REG_PARM_AREA
2558 #else
2559 args[i].reg != 0,
2560 #endif
2561 reg_parm_stack_space,
2562 args[i].pass_on_stack ? 0 : args[i].partial,
2563 fndecl, args_size, &args[i].locate);
2564 #ifdef BLOCK_REG_PADDING
2565 else
2566 /* The argument is passed entirely in registers. See at which
2567 end it should be padded. */
2568 args[i].locate.where_pad =
2569 BLOCK_REG_PADDING (arg.mode, type,
2570 int_size_in_bytes (type) <= UNITS_PER_WORD);
2571 #endif
2573 /* Update ARGS_SIZE, the total stack space for args so far. */
2575 args_size->constant += args[i].locate.size.constant;
2576 if (args[i].locate.size.var)
2577 ADD_PARM_SIZE (*args_size, args[i].locate.size.var);
2579 /* Increment ARGS_SO_FAR, which has info about which arg-registers
2580 have been used, etc. */
2582 /* ??? Traditionally we've passed TYPE_MODE here, instead of the
2583 promoted_mode used for function_arg above. However, the
2584 corresponding handling of incoming arguments in function.c
2585 does pass the promoted mode. */
2586 arg.mode = TYPE_MODE (type);
2587 targetm.calls.function_arg_advance (args_so_far, arg);
2589 /* Store argument values for functions decorated with attribute
2590 alloc_size. */
2591 if (argpos == alloc_idx[0])
2592 alloc_args[0] = args[i].tree_value;
2593 else if (argpos == alloc_idx[1])
2594 alloc_args[1] = args[i].tree_value;
2596 /* Save the actual argument that corresponds to the access attribute
2597 operand for later processing. */
2598 if (attr_access *access = rdwr_idx.get (argpos))
2600 if (POINTER_TYPE_P (type))
2602 access->ptr = args[i].tree_value;
2603 /* A nonnull ACCESS->SIZE contains VLA bounds. */
2605 else
2607 access->size = args[i].tree_value;
2608 gcc_assert (access->ptr == NULL_TREE);
2613 if (alloc_args[0])
2615 /* Check the arguments of functions decorated with attribute
2616 alloc_size. */
2617 maybe_warn_alloc_args_overflow (fndecl, exp, alloc_args, alloc_idx);
2620 /* Detect passing non-string arguments to functions expecting
2621 nul-terminated strings. */
2622 maybe_warn_nonstring_arg (fndecl, exp);
2624 /* Check attribute access arguments. */
2625 maybe_warn_rdwr_sizes (&rdwr_idx, fndecl, fntype, exp);
2628 /* Update ARGS_SIZE to contain the total size for the argument block.
2629 Return the original constant component of the argument block's size.
2631 REG_PARM_STACK_SPACE holds the number of bytes of stack space reserved
2632 for arguments passed in registers. */
2634 static poly_int64
2635 compute_argument_block_size (int reg_parm_stack_space,
2636 struct args_size *args_size,
2637 tree fndecl ATTRIBUTE_UNUSED,
2638 tree fntype ATTRIBUTE_UNUSED,
2639 int preferred_stack_boundary ATTRIBUTE_UNUSED)
2641 poly_int64 unadjusted_args_size = args_size->constant;
2643 /* For accumulate outgoing args mode we don't need to align, since the frame
2644 will be already aligned. Align to STACK_BOUNDARY in order to prevent
2645 backends from generating misaligned frame sizes. */
2646 if (ACCUMULATE_OUTGOING_ARGS && preferred_stack_boundary > STACK_BOUNDARY)
2647 preferred_stack_boundary = STACK_BOUNDARY;
2649 /* Compute the actual size of the argument block required. The variable
2650 and constant sizes must be combined, the size may have to be rounded,
2651 and there may be a minimum required size. */
2653 if (args_size->var)
2655 args_size->var = ARGS_SIZE_TREE (*args_size);
2656 args_size->constant = 0;
2658 preferred_stack_boundary /= BITS_PER_UNIT;
2659 if (preferred_stack_boundary > 1)
2661 /* We don't handle this case yet. To handle it correctly we have
2662 to add the delta, round and subtract the delta.
2663 Currently no machine description requires this support. */
2664 gcc_assert (multiple_p (stack_pointer_delta,
2665 preferred_stack_boundary));
2666 args_size->var = round_up (args_size->var, preferred_stack_boundary);
2669 if (reg_parm_stack_space > 0)
2671 args_size->var
2672 = size_binop (MAX_EXPR, args_size->var,
2673 ssize_int (reg_parm_stack_space));
2675 /* The area corresponding to register parameters is not to count in
2676 the size of the block we need. So make the adjustment. */
2677 if (! OUTGOING_REG_PARM_STACK_SPACE ((!fndecl ? fntype : TREE_TYPE (fndecl))))
2678 args_size->var
2679 = size_binop (MINUS_EXPR, args_size->var,
2680 ssize_int (reg_parm_stack_space));
2683 else
2685 preferred_stack_boundary /= BITS_PER_UNIT;
2686 if (preferred_stack_boundary < 1)
2687 preferred_stack_boundary = 1;
2688 args_size->constant = (aligned_upper_bound (args_size->constant
2689 + stack_pointer_delta,
2690 preferred_stack_boundary)
2691 - stack_pointer_delta);
2693 args_size->constant = upper_bound (args_size->constant,
2694 reg_parm_stack_space);
2696 if (! OUTGOING_REG_PARM_STACK_SPACE ((!fndecl ? fntype : TREE_TYPE (fndecl))))
2697 args_size->constant -= reg_parm_stack_space;
2699 return unadjusted_args_size;
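/* Worked example (illustrative numbers): with a 16-byte preferred
   boundary, no variable-sized component, args_size->constant == 20,
   stack_pointer_delta == 0 and reg_parm_stack_space == 0, the constant
   size is rounded up to 32 while the returned unadjusted size remains
   20.  */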
2702 /* Precompute parameters as needed for a function call.
2704 FLAGS is mask of ECF_* constants.
2706 NUM_ACTUALS is the number of arguments.
2708 ARGS is an array containing information for each argument; this
2709 routine fills in the INITIAL_VALUE and VALUE fields for each
2710 precomputed argument. */
2712 static void
2713 precompute_arguments (int num_actuals, struct arg_data *args)
2715 int i;
2717 /* If this is a libcall, then precompute all arguments so that we do not
2718 get extraneous instructions emitted as part of the libcall sequence. */
2720 /* If we preallocated the stack space, and some arguments must be passed
2721 on the stack, then we must precompute any parameter which contains a
2722 function call which will store arguments on the stack.
2723 Otherwise, evaluating the parameter may clobber previous parameters
2724 which have already been stored into the stack. (we have code to avoid
2725 such a case by saving the outgoing stack arguments, but it results in
2726 worse code) */
2727 if (!ACCUMULATE_OUTGOING_ARGS)
2728 return;
2730 for (i = 0; i < num_actuals; i++)
2732 tree type;
2733 machine_mode mode;
2735 if (TREE_CODE (args[i].tree_value) != CALL_EXPR)
2736 continue;
2738 /* If this is an addressable type, we cannot pre-evaluate it. */
2739 type = TREE_TYPE (args[i].tree_value);
2740 gcc_assert (!TREE_ADDRESSABLE (type));
2742 args[i].initial_value = args[i].value
2743 = expand_normal (args[i].tree_value);
2745 mode = TYPE_MODE (type);
2746 if (mode != args[i].mode)
2748 int unsignedp = args[i].unsignedp;
2749 args[i].value
2750 = convert_modes (args[i].mode, mode,
2751 args[i].value, args[i].unsignedp);
2753 /* CSE will replace this only if it contains args[i].value
2754 pseudo, so convert it down to the declared mode using
2755 a SUBREG. */
2756 if (REG_P (args[i].value)
2757 && GET_MODE_CLASS (args[i].mode) == MODE_INT
2758 && promote_mode (type, mode, &unsignedp) != args[i].mode)
2760 args[i].initial_value
2761 = gen_lowpart_SUBREG (mode, args[i].value);
2762 SUBREG_PROMOTED_VAR_P (args[i].initial_value) = 1;
2763 SUBREG_PROMOTED_SET (args[i].initial_value, args[i].unsignedp);
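/* For example, in a call f (g (), i) compiled with
   accumulate-outgoing-args, the nested call g () is expanded here,
   before any of f's arguments are stored into the argument block, so
   the stores made for g's own arguments cannot clobber those of f.  */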
2769 /* Given the current state of MUST_PREALLOCATE and information about
2770 arguments to a function call in NUM_ACTUALS, ARGS and ARGS_SIZE,
2771 compute and return the final value for MUST_PREALLOCATE. */
2773 static int
2774 finalize_must_preallocate (int must_preallocate, int num_actuals,
2775 struct arg_data *args, struct args_size *args_size)
2777 /* See if we have or want to preallocate stack space.
2779 If we would have to push a partially-in-regs parm
2780 before other stack parms, preallocate stack space instead.
2782 If the size of some parm is not a multiple of the required stack
2783 alignment, we must preallocate.
2785 If the total size of arguments that would otherwise create a copy in
2786 a temporary (such as a CALL) is more than half the total argument list
2787 size, preallocation is faster.
2789 Another reason to preallocate is if we have a machine (like the m88k)
2790 where stack alignment is required to be maintained between every
2791 pair of insns, not just when the call is made. However, we assume here
2792 that such machines either do not have push insns (and hence preallocation
2793 would occur anyway) or the problem is taken care of with
2794 PUSH_ROUNDING. */
2796 if (! must_preallocate)
2798 int partial_seen = 0;
2799 poly_int64 copy_to_evaluate_size = 0;
2800 int i;
2802 for (i = 0; i < num_actuals && ! must_preallocate; i++)
2804 if (args[i].partial > 0 && ! args[i].pass_on_stack)
2805 partial_seen = 1;
2806 else if (partial_seen && args[i].reg == 0)
2807 must_preallocate = 1;
2809 if (TYPE_MODE (TREE_TYPE (args[i].tree_value)) == BLKmode
2810 && (TREE_CODE (args[i].tree_value) == CALL_EXPR
2811 || TREE_CODE (args[i].tree_value) == TARGET_EXPR
2812 || TREE_CODE (args[i].tree_value) == COND_EXPR
2813 || TREE_ADDRESSABLE (TREE_TYPE (args[i].tree_value))))
2814 copy_to_evaluate_size
2815 += int_size_in_bytes (TREE_TYPE (args[i].tree_value));
2818 if (maybe_ne (args_size->constant, 0)
2819 && maybe_ge (copy_to_evaluate_size * 2, args_size->constant))
2820 must_preallocate = 1;
2822 return must_preallocate;
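/* Illustrative note: if BLKmode arguments that must be evaluated into
   temporaries (nested calls, TARGET_EXPRs, COND_EXPRs or addressable
   types) account for at least half the total argument size, the last
   test above forces preallocation.  */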
2825 /* If we preallocated stack space, compute the address of each argument
2826 and store it into the ARGS array.
2828 We need not ensure it is a valid memory address here; it will be
2829 validized when it is used.
2831 ARGBLOCK is an rtx for the address of the outgoing arguments. */
2833 static void
2834 compute_argument_addresses (struct arg_data *args, rtx argblock, int num_actuals)
2836 if (argblock)
2838 rtx arg_reg = argblock;
2839 int i;
2840 poly_int64 arg_offset = 0;
2842 if (GET_CODE (argblock) == PLUS)
2844 arg_reg = XEXP (argblock, 0);
2845 arg_offset = rtx_to_poly_int64 (XEXP (argblock, 1));
2848 for (i = 0; i < num_actuals; i++)
2850 rtx offset = ARGS_SIZE_RTX (args[i].locate.offset);
2851 rtx slot_offset = ARGS_SIZE_RTX (args[i].locate.slot_offset);
2852 rtx addr;
2853 unsigned int align, boundary;
2854 poly_uint64 units_on_stack = 0;
2855 machine_mode partial_mode = VOIDmode;
2857 /* Skip this parm if it will not be passed on the stack. */
2858 if (! args[i].pass_on_stack
2859 && args[i].reg != 0
2860 && args[i].partial == 0)
2861 continue;
2863 if (TYPE_EMPTY_P (TREE_TYPE (args[i].tree_value)))
2864 continue;
2866 addr = simplify_gen_binary (PLUS, Pmode, arg_reg, offset);
2867 addr = plus_constant (Pmode, addr, arg_offset);
2869 if (args[i].partial != 0)
2871 /* Only part of the parameter is being passed on the stack.
2872 Generate a simple memory reference of the correct size. */
2873 units_on_stack = args[i].locate.size.constant;
2874 poly_uint64 bits_on_stack = units_on_stack * BITS_PER_UNIT;
2875 partial_mode = int_mode_for_size (bits_on_stack, 1).else_blk ();
2876 args[i].stack = gen_rtx_MEM (partial_mode, addr);
2877 set_mem_size (args[i].stack, units_on_stack);
2879 else
2881 args[i].stack = gen_rtx_MEM (args[i].mode, addr);
2882 set_mem_attributes (args[i].stack,
2883 TREE_TYPE (args[i].tree_value), 1);
2885 align = BITS_PER_UNIT;
2886 boundary = args[i].locate.boundary;
2887 poly_int64 offset_val;
2888 if (args[i].locate.where_pad != PAD_DOWNWARD)
2889 align = boundary;
2890 else if (poly_int_rtx_p (offset, &offset_val))
2892 align = least_bit_hwi (boundary);
2893 unsigned int offset_align
2894 = known_alignment (offset_val) * BITS_PER_UNIT;
2895 if (offset_align != 0)
2896 align = MIN (align, offset_align);
2898 set_mem_align (args[i].stack, align);
2900 addr = simplify_gen_binary (PLUS, Pmode, arg_reg, slot_offset);
2901 addr = plus_constant (Pmode, addr, arg_offset);
2903 if (args[i].partial != 0)
2905 /* Only part of the parameter is being passed on the stack.
2906 Generate a simple memory reference of the correct size.
2908 args[i].stack_slot = gen_rtx_MEM (partial_mode, addr);
2909 set_mem_size (args[i].stack_slot, units_on_stack);
2911 else
2913 args[i].stack_slot = gen_rtx_MEM (args[i].mode, addr);
2914 set_mem_attributes (args[i].stack_slot,
2915 TREE_TYPE (args[i].tree_value), 1);
2917 set_mem_align (args[i].stack_slot, args[i].locate.boundary);
2919 /* Function incoming arguments may overlap with sibling call
2920 outgoing arguments and we cannot allow reordering of reads
2921 from function arguments with stores to outgoing arguments
2922 of sibling calls. */
2923 set_mem_alias_set (args[i].stack, 0);
2924 set_mem_alias_set (args[i].stack_slot, 0);
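/* Illustrative note: with ARGBLOCK == (plus (reg R) (const_int 16))
   and a parameter whose locate.offset is 8, the code above computes
   the address (plus (reg R) (const_int 24)) for args[i].stack, and
   likewise uses locate.slot_offset for args[i].stack_slot.  */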
2929 /* Given a FNDECL and EXP, return an rtx suitable for use as a target address
2930 in a call instruction.
2932 FNDECL is the tree node for the target function. For an indirect call
2933 FNDECL will be NULL_TREE.
2935 ADDR is the operand 0 of CALL_EXPR for this call. */
2937 static rtx
2938 rtx_for_function_call (tree fndecl, tree addr)
2940 rtx funexp;
2942 /* Get the function to call, in the form of RTL. */
2943 if (fndecl)
2945 if (!TREE_USED (fndecl) && fndecl != current_function_decl)
2946 TREE_USED (fndecl) = 1;
2948 /* Get a SYMBOL_REF rtx for the function address. */
2949 funexp = XEXP (DECL_RTL (fndecl), 0);
2951 else
2952 /* Generate an rtx (probably a pseudo-register) for the address. */
2954 push_temp_slots ();
2955 funexp = expand_normal (addr);
2956 pop_temp_slots (); /* FUNEXP can't be BLKmode. */
2958 return funexp;
2961 /* Return the static chain for this function, if any. */
2964 rtx_for_static_chain (const_tree fndecl_or_type, bool incoming_p)
2966 if (DECL_P (fndecl_or_type) && !DECL_STATIC_CHAIN (fndecl_or_type))
2967 return NULL;
2969 return targetm.calls.static_chain (fndecl_or_type, incoming_p);
2972 /* Internal state for internal_arg_pointer_based_exp and its helpers. */
2973 static struct
2975 /* Last insn that has been scanned by internal_arg_pointer_based_exp_scan,
2976 or NULL_RTX if none has been scanned yet. */
2977 rtx_insn *scan_start;
2978 /* Vector indexed by REGNO - FIRST_PSEUDO_REGISTER, recording if a pseudo is
2979 based on crtl->args.internal_arg_pointer. The element is NULL_RTX if the
2980 pseudo isn't based on it, a CONST_INT offset if the pseudo is based on it
2981 with fixed offset, or PC if this is with variable or unknown offset. */
2982 vec<rtx> cache;
2983 } internal_arg_pointer_exp_state;
2985 static rtx internal_arg_pointer_based_exp (const_rtx, bool);
2987 /* Helper function for internal_arg_pointer_based_exp. Scan insns in
2988 the tail call sequence, starting with the first insn that hasn't been
2989 scanned yet, and note for each pseudo on the LHS whether it is based
2990 on crtl->args.internal_arg_pointer or not, and what offset from
2991 that pointer it has. */
2993 static void
2994 internal_arg_pointer_based_exp_scan (void)
2996 rtx_insn *insn, *scan_start = internal_arg_pointer_exp_state.scan_start;
2998 if (scan_start == NULL_RTX)
2999 insn = get_insns ();
3000 else
3001 insn = NEXT_INSN (scan_start);
3003 while (insn)
3005 rtx set = single_set (insn);
3006 if (set && REG_P (SET_DEST (set)) && !HARD_REGISTER_P (SET_DEST (set)))
3008 rtx val = NULL_RTX;
3009 unsigned int idx = REGNO (SET_DEST (set)) - FIRST_PSEUDO_REGISTER;
3010 /* Punt on pseudos set multiple times. */
3011 if (idx < internal_arg_pointer_exp_state.cache.length ()
3012 && (internal_arg_pointer_exp_state.cache[idx]
3013 != NULL_RTX))
3014 val = pc_rtx;
3015 else
3016 val = internal_arg_pointer_based_exp (SET_SRC (set), false);
3017 if (val != NULL_RTX)
3019 if (idx >= internal_arg_pointer_exp_state.cache.length ())
3020 internal_arg_pointer_exp_state.cache
3021 .safe_grow_cleared (idx + 1, true);
3022 internal_arg_pointer_exp_state.cache[idx] = val;
3025 if (NEXT_INSN (insn) == NULL_RTX)
3026 scan_start = insn;
3027 insn = NEXT_INSN (insn);
3030 internal_arg_pointer_exp_state.scan_start = scan_start;
3033 /* Compute whether RTL is based on crtl->args.internal_arg_pointer. Return
3034 NULL_RTX if RTL isn't based on it, a CONST_INT offset if RTL is based on
3035 it with fixed offset, or PC if this is with variable or unknown offset.
3036 TOPLEVEL is true if the function is invoked at the topmost level. */
3038 static rtx
3039 internal_arg_pointer_based_exp (const_rtx rtl, bool toplevel)
3041 if (CONSTANT_P (rtl))
3042 return NULL_RTX;
3044 if (rtl == crtl->args.internal_arg_pointer)
3045 return const0_rtx;
3047 if (REG_P (rtl) && HARD_REGISTER_P (rtl))
3048 return NULL_RTX;
3050 poly_int64 offset;
3051 if (GET_CODE (rtl) == PLUS && poly_int_rtx_p (XEXP (rtl, 1), &offset))
3053 rtx val = internal_arg_pointer_based_exp (XEXP (rtl, 0), toplevel);
3054 if (val == NULL_RTX || val == pc_rtx)
3055 return val;
3056 return plus_constant (Pmode, val, offset);
3059 /* When called at the topmost level, scan pseudo assignments in between the
3060 last scanned instruction in the tail call sequence and the latest insn
3061 in that sequence. */
3062 if (toplevel)
3063 internal_arg_pointer_based_exp_scan ();
3065 if (REG_P (rtl))
3067 unsigned int idx = REGNO (rtl) - FIRST_PSEUDO_REGISTER;
3068 if (idx < internal_arg_pointer_exp_state.cache.length ())
3069 return internal_arg_pointer_exp_state.cache[idx];
3071 return NULL_RTX;
3074 subrtx_iterator::array_type array;
3075 FOR_EACH_SUBRTX (iter, array, rtl, NONCONST)
3077 const_rtx x = *iter;
3078 if (REG_P (x) && internal_arg_pointer_based_exp (x, false) != NULL_RTX)
3079 return pc_rtx;
3080 if (MEM_P (x))
3081 iter.skip_subrtxes ();
3084 return NULL_RTX;
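/* For example, if pseudo R was set from (plus internal_arg_pointer
   (const_int 8)), its cache entry is (const_int 8), and querying
   (plus (reg R) (const_int 4)) returns (const_int 12); a pseudo set
   from the pointer in a non-constant way is cached as PC instead.  */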
3087 /* Return true if SIZE bytes starting from address ADDR might overlap an
3088 already-clobbered argument area. This function is used to determine
3089 if we should give up a sibcall. */
3091 static bool
3092 mem_might_overlap_already_clobbered_arg_p (rtx addr, poly_uint64 size)
3094 poly_int64 i;
3095 unsigned HOST_WIDE_INT start, end;
3096 rtx val;
3098 if (bitmap_empty_p (stored_args_map)
3099 && stored_args_watermark == HOST_WIDE_INT_M1U)
3100 return false;
3101 val = internal_arg_pointer_based_exp (addr, true);
3102 if (val == NULL_RTX)
3103 return false;
3104 else if (!poly_int_rtx_p (val, &i))
3105 return true;
3107 if (known_eq (size, 0U))
3108 return false;
3110 if (STACK_GROWS_DOWNWARD)
3111 i -= crtl->args.pretend_args_size;
3112 else
3113 i += crtl->args.pretend_args_size;
3115 if (ARGS_GROW_DOWNWARD)
3116 i = -i - size;
3118 /* We can ignore any references to the function's pretend args,
3119 which at this point would manifest as negative values of I. */
3120 if (known_le (i, 0) && known_le (size, poly_uint64 (-i)))
3121 return false;
3123 start = maybe_lt (i, 0) ? 0 : constant_lower_bound (i);
3124 if (!(i + size).is_constant (&end))
3125 end = HOST_WIDE_INT_M1U;
3127 if (end > stored_args_watermark)
3128 return true;
3130 end = MIN (end, SBITMAP_SIZE (stored_args_map));
3131 for (unsigned HOST_WIDE_INT k = start; k < end; ++k)
3132 if (bitmap_bit_p (stored_args_map, k))
3133 return true;
3135 return false;
3138 /* Do the register loads required for any wholly-register parms or any
3139 parms which are passed both on the stack and in a register. Their
3140 expressions were already evaluated.
3142 Mark all register-parms as living through the call, putting these USE
3143 insns in the CALL_INSN_FUNCTION_USAGE field.
3145 When IS_SIBCALL, perform the check_sibcall_argument_overlap
3146 checking, setting *SIBCALL_FAILURE if appropriate. */
3148 static void
3149 load_register_parameters (struct arg_data *args, int num_actuals,
3150 rtx *call_fusage, int flags, int is_sibcall,
3151 int *sibcall_failure)
3153 int i, j;
3155 for (i = 0; i < num_actuals; i++)
3157 rtx reg = ((flags & ECF_SIBCALL)
3158 ? args[i].tail_call_reg : args[i].reg);
3159 if (reg)
3161 int partial = args[i].partial;
3162 int nregs;
3163 poly_int64 size = 0;
3164 HOST_WIDE_INT const_size = 0;
3165 rtx_insn *before_arg = get_last_insn ();
3166 tree type = TREE_TYPE (args[i].tree_value);
3167 if (RECORD_OR_UNION_TYPE_P (type) && TYPE_TRANSPARENT_AGGR (type))
3168 type = TREE_TYPE (first_field (type));
3169 /* Set non-negative if we must move a word at a time, even if
3170 just one word (e.g., partial == 4 && mode == DFmode). Set
3171 to -1 if we just use a normal move insn. This value can be
3172 zero if the argument is a zero size structure. */
3173 nregs = -1;
3174 if (GET_CODE (reg) == PARALLEL)
3176 else if (partial)
3178 gcc_assert (partial % UNITS_PER_WORD == 0);
3179 nregs = partial / UNITS_PER_WORD;
3181 else if (TYPE_MODE (type) == BLKmode)
3183 /* Variable-sized parameters should be described by a
3184 PARALLEL instead. */
3185 const_size = int_size_in_bytes (type);
3186 gcc_assert (const_size >= 0);
3187 nregs = (const_size + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
3188 size = const_size;
3190 else
3191 size = GET_MODE_SIZE (args[i].mode);
3193 /* Handle calls that pass values in multiple non-contiguous
3194 locations. The Irix 6 ABI has examples of this. */
3196 if (GET_CODE (reg) == PARALLEL)
3197 emit_group_move (reg, args[i].parallel_value);
3199 /* If simple case, just do move. If normal partial, store_one_arg
3200 has already loaded the register for us. In all other cases,
3201 load the register(s) from memory. */
3203 else if (nregs == -1)
3205 emit_move_insn (reg, args[i].value);
3206 #ifdef BLOCK_REG_PADDING
3207 /* Handle the case where we have a value that needs shifting
3208 up to the msb, e.g. a QImode value when we're padding
3209 upward on a BYTES_BIG_ENDIAN machine. */
3210 if (args[i].locate.where_pad
3211 == (BYTES_BIG_ENDIAN ? PAD_UPWARD : PAD_DOWNWARD))
3213 gcc_checking_assert (ordered_p (size, UNITS_PER_WORD));
3214 if (maybe_lt (size, UNITS_PER_WORD))
3216 rtx x;
3217 poly_int64 shift
3218 = (UNITS_PER_WORD - size) * BITS_PER_UNIT;
3220 /* Assigning REG here rather than a temp makes
3221 CALL_FUSAGE report the whole reg as used.
3222 Strictly speaking, the call only uses SIZE
3223 bytes at the msb end, but it doesn't seem worth
3224 generating rtl to say that. */
3225 reg = gen_rtx_REG (word_mode, REGNO (reg));
3226 x = expand_shift (LSHIFT_EXPR, word_mode,
3227 reg, shift, reg, 1);
3228 if (x != reg)
3229 emit_move_insn (reg, x);
3232 #endif
3235 /* If we have pre-computed the values to put in the registers in
3236 the case of non-aligned structures, copy them in now. */
3238 else if (args[i].n_aligned_regs != 0)
3239 for (j = 0; j < args[i].n_aligned_regs; j++)
3240 emit_move_insn (gen_rtx_REG (word_mode, REGNO (reg) + j),
3241 args[i].aligned_regs[j]);
3243 else if (partial == 0 || args[i].pass_on_stack)
3245 /* SIZE and CONST_SIZE are 0 for partial arguments and
3246 the size of a BLKmode type otherwise. */
3247 gcc_checking_assert (known_eq (size, const_size));
3248 rtx mem = validize_mem (copy_rtx (args[i].value));
3250 /* Check for overlap with already clobbered argument area,
3251 providing that this has non-zero size. */
3252 if (is_sibcall
3253 && const_size != 0
3254 && (mem_might_overlap_already_clobbered_arg_p
3255 (XEXP (args[i].value, 0), const_size)))
3256 *sibcall_failure = 1;
3258 if (const_size % UNITS_PER_WORD == 0
3259 || MEM_ALIGN (mem) % BITS_PER_WORD == 0)
3260 move_block_to_reg (REGNO (reg), mem, nregs, args[i].mode);
3261 else
3263 if (nregs > 1)
3264 move_block_to_reg (REGNO (reg), mem, nregs - 1,
3265 args[i].mode);
3266 rtx dest = gen_rtx_REG (word_mode, REGNO (reg) + nregs - 1);
3267 unsigned int bitoff = (nregs - 1) * BITS_PER_WORD;
3268 unsigned int bitsize = const_size * BITS_PER_UNIT - bitoff;
3269 rtx x = extract_bit_field (mem, bitsize, bitoff, 1, dest,
3270 word_mode, word_mode, false,
3271 NULL);
3272 if (BYTES_BIG_ENDIAN)
3273 x = expand_shift (LSHIFT_EXPR, word_mode, x,
3274 BITS_PER_WORD - bitsize, dest, 1);
3275 if (x != dest)
3276 emit_move_insn (dest, x);
3279 /* Handle a BLKmode that needs shifting. */
3280 if (nregs == 1 && const_size < UNITS_PER_WORD
3281 #ifdef BLOCK_REG_PADDING
3282 && args[i].locate.where_pad == PAD_DOWNWARD
3283 #else
3284 && BYTES_BIG_ENDIAN
3285 #endif
3288 rtx dest = gen_rtx_REG (word_mode, REGNO (reg));
3289 int shift = (UNITS_PER_WORD - const_size) * BITS_PER_UNIT;
3290 enum tree_code dir = (BYTES_BIG_ENDIAN
3291 ? RSHIFT_EXPR : LSHIFT_EXPR);
3292 rtx x;
3294 x = expand_shift (dir, word_mode, dest, shift, dest, 1);
3295 if (x != dest)
3296 emit_move_insn (dest, x);
3300 /* When a parameter is a block, and perhaps in other cases, it is
3301 possible that it did a load from an argument slot that was
3302 already clobbered. */
3303 if (is_sibcall
3304 && check_sibcall_argument_overlap (before_arg, &args[i], 0))
3305 *sibcall_failure = 1;
3307 /* Handle calls that pass values in multiple non-contiguous
3308 locations. The Irix 6 ABI has examples of this. */
3309 if (GET_CODE (reg) == PARALLEL)
3310 use_group_regs (call_fusage, reg);
3311 else if (nregs == -1)
3312 use_reg_mode (call_fusage, reg, TYPE_MODE (type));
3313 else if (nregs > 0)
3314 use_regs (call_fusage, REGNO (reg), nregs);
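/* Illustrative note: an argument with args[i].partial == 8 on a target
   with 4-byte words occupies two registers (NREGS == 2); in the normal
   partial case store_one_arg has already loaded them, and the loop
   above only marks them as used in CALL_FUSAGE.  */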
3319 /* We need to pop PENDING_STACK_ADJUST bytes. But, if the arguments
3320 wouldn't fill up an even multiple of PREFERRED_UNIT_STACK_BOUNDARY
3321 bytes, then we would need to push some additional bytes to pad the
3322 arguments. So, we try to compute an adjustment to the stack pointer for an
3323 amount that will leave the stack under-aligned by UNADJUSTED_ARGS_SIZE
3324 bytes. Then, when the arguments are pushed the stack will be perfectly
3325 aligned.
3327 Return true if this optimization is possible, storing the adjustment
3328 in ADJUSTMENT_OUT and setting ARGS_SIZE->CONSTANT to the number of
3329 bytes that should be popped after the call. */
3331 static bool
3332 combine_pending_stack_adjustment_and_call (poly_int64_pod *adjustment_out,
3333 poly_int64 unadjusted_args_size,
3334 struct args_size *args_size,
3335 unsigned int preferred_unit_stack_boundary)
3337 /* The number of bytes to pop so that the stack will be
3338 under-aligned by UNADJUSTED_ARGS_SIZE bytes. */
3339 poly_int64 adjustment;
3340 /* The alignment of the stack after the arguments are pushed, if we
3341 just pushed the arguments without adjusting the stack here. */
3342 unsigned HOST_WIDE_INT unadjusted_alignment;
3344 if (!known_misalignment (stack_pointer_delta + unadjusted_args_size,
3345 preferred_unit_stack_boundary,
3346 &unadjusted_alignment))
3347 return false;
3349 /* We want to get rid of as many of the PENDING_STACK_ADJUST bytes
3350 as possible -- leaving just enough left to cancel out the
3351 UNADJUSTED_ALIGNMENT. In other words, we want to ensure that the
3352 PENDING_STACK_ADJUST is non-negative, and congruent to
3353 -UNADJUSTED_ALIGNMENT modulo the PREFERRED_UNIT_STACK_BOUNDARY. */
3355 /* Begin by trying to pop all the bytes. */
3356 unsigned HOST_WIDE_INT tmp_misalignment;
3357 if (!known_misalignment (pending_stack_adjust,
3358 preferred_unit_stack_boundary,
3359 &tmp_misalignment))
3360 return false;
3361 unadjusted_alignment -= tmp_misalignment;
3362 adjustment = pending_stack_adjust;
3363 /* Push enough additional bytes that the stack will be aligned
3364 after the arguments are pushed. */
3365 if (preferred_unit_stack_boundary > 1 && unadjusted_alignment)
3366 adjustment -= preferred_unit_stack_boundary - unadjusted_alignment;
3368 /* We need to know whether the adjusted argument size
3369 (UNADJUSTED_ARGS_SIZE - ADJUSTMENT) constitutes an allocation
3370 or a deallocation. */
3371 if (!ordered_p (adjustment, unadjusted_args_size))
3372 return false;
3374 /* Now, set ARGS_SIZE->CONSTANT so that we pop the right number of
3375 bytes after the call. The right number is the entire
3376 PENDING_STACK_ADJUST less our ADJUSTMENT plus the amount required
3377 by the arguments in the first place. */
3378 args_size->constant
3379 = pending_stack_adjust - adjustment + unadjusted_args_size;
3381 *adjustment_out = adjustment;
3382 return true;
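/* Worked example (illustrative numbers): with a 16-byte boundary,
   stack_pointer_delta == 12, UNADJUSTED_ARGS_SIZE == 4 and
   pending_stack_adjust == 32, both misalignments are 0, so all 32
   pending bytes are popped (ADJUSTMENT == 32) and ARGS_SIZE->CONSTANT
   becomes 32 - 32 + 4 == 4; after the 4 bytes of arguments are pushed
   the stack is 16-byte aligned again.  */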
3385 /* Scan expression X to see whether it dereferences any argument slots
3386 we already clobbered by tail call arguments (as noted in the
3387 stored_args_map bitmap).
3388 Return nonzero if X dereferences such an argument slot,
3389 zero otherwise. */
3391 static int
3392 check_sibcall_argument_overlap_1 (rtx x)
3394 RTX_CODE code;
3395 int i, j;
3396 const char *fmt;
3398 if (x == NULL_RTX)
3399 return 0;
3401 code = GET_CODE (x);
3403 /* We need not check the operands of the CALL expression itself. */
3404 if (code == CALL)
3405 return 0;
3407 if (code == MEM)
3408 return (mem_might_overlap_already_clobbered_arg_p
3409 (XEXP (x, 0), GET_MODE_SIZE (GET_MODE (x))));
3411 /* Scan all subexpressions. */
3412 fmt = GET_RTX_FORMAT (code);
3413 for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
3415 if (*fmt == 'e')
3417 if (check_sibcall_argument_overlap_1 (XEXP (x, i)))
3418 return 1;
3420 else if (*fmt == 'E')
3422 for (j = 0; j < XVECLEN (x, i); j++)
3423 if (check_sibcall_argument_overlap_1 (XVECEXP (x, i, j)))
3424 return 1;
3427 return 0;
3430 /* Scan the sequence after INSN to see whether it dereferences any
3431 argument slots we already clobbered by tail call arguments (as noted
3432 in the stored_args_map bitmap). If MARK_STORED_ARGS_MAP, afterwards
3433 add the stack slots for ARG to the stored_args_map bitmap (when ARG
3434 is a register, MARK_STORED_ARGS_MAP should be 0). Return nonzero if
3435 the sequence after INSN dereferences such an argument slot, zero otherwise. */
3437 static int
3438 check_sibcall_argument_overlap (rtx_insn *insn, struct arg_data *arg,
3439 int mark_stored_args_map)
3441 poly_uint64 low, high;
3442 unsigned HOST_WIDE_INT const_low, const_high;
3444 if (insn == NULL_RTX)
3445 insn = get_insns ();
3446 else
3447 insn = NEXT_INSN (insn);
3449 for (; insn; insn = NEXT_INSN (insn))
3450 if (INSN_P (insn)
3451 && check_sibcall_argument_overlap_1 (PATTERN (insn)))
3452 break;
3454 if (mark_stored_args_map)
3456 if (ARGS_GROW_DOWNWARD)
3457 low = -arg->locate.slot_offset.constant - arg->locate.size.constant;
3458 else
3459 low = arg->locate.slot_offset.constant;
3460 high = low + arg->locate.size.constant;
3462 const_low = constant_lower_bound (low);
3463 if (high.is_constant (&const_high))
3464 for (unsigned HOST_WIDE_INT i = const_low; i < const_high; ++i)
3465 bitmap_set_bit (stored_args_map, i);
3466 else
3467 stored_args_watermark = MIN (stored_args_watermark, const_low);
3469 return insn != NULL_RTX;
3472 /* Given that a function returns a value of mode MODE at the most
3473 significant end of hard register VALUE, shift VALUE left or right
3474 as specified by LEFT_P. Return true if some action was needed. */
3476 bool
3477 shift_return_value (machine_mode mode, bool left_p, rtx value)
3479 gcc_assert (REG_P (value) && HARD_REGISTER_P (value));
3480 machine_mode value_mode = GET_MODE (value);
3481 poly_int64 shift = GET_MODE_BITSIZE (value_mode) - GET_MODE_BITSIZE (mode);
3483 if (known_eq (shift, 0))
3484 return false;
3486 /* Use ashr rather than lshr for right shifts. This is for the benefit
3487 of the MIPS port, which requires SImode values to be sign-extended
3488 when stored in 64-bit registers. */
3489 if (!force_expand_binop (value_mode, left_p ? ashl_optab : ashr_optab,
3490 value, gen_int_shift_amount (value_mode, shift),
3491 value, 1, OPTAB_WIDEN))
3492 gcc_unreachable ();
3493 return true;
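/* For example, an SImode value returned at the most significant end
   of a DImode register yields SHIFT == 32; the value is then shifted
   down arithmetically (or up, if LEFT_P) by 32 bits.  */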
3496 /* If X is a likely-spilled register value, copy it to a pseudo
3497 register and return that register. Return X otherwise. */
3499 static rtx
3500 avoid_likely_spilled_reg (rtx x)
3502 rtx new_rtx;
3504 if (REG_P (x)
3505 && HARD_REGISTER_P (x)
3506 && targetm.class_likely_spilled_p (REGNO_REG_CLASS (REGNO (x))))
3508 /* Make sure that we generate a REG rather than a CONCAT.
3509 Moves into CONCATs can need nontrivial instructions,
3510 and the whole point of this function is to avoid
3511 using the hard register directly in such a situation. */
3512 generating_concat_p = 0;
3513 new_rtx = gen_reg_rtx (GET_MODE (x));
3514 generating_concat_p = 1;
3515 emit_move_insn (new_rtx, x);
3516 return new_rtx;
3518 return x;
3521 /* Helper function for expand_call.
3522 Return false if EXP is not implementable as a sibling call. */
3524 static bool
3525 can_implement_as_sibling_call_p (tree exp,
3526 rtx structure_value_addr,
3527 tree funtype,
3528 int reg_parm_stack_space ATTRIBUTE_UNUSED,
3529 tree fndecl,
3530 int flags,
3531 tree addr,
3532 const args_size &args_size)
3534 if (!targetm.have_sibcall_epilogue ())
3536 maybe_complain_about_tail_call
3537 (exp,
3538 "machine description does not have"
3539 " a sibcall_epilogue instruction pattern");
3540 return false;
3543 /* Doing sibling call optimization needs some work, since
3544 structure_value_addr can be allocated on the stack.
3545 It does not seem worth the effort since few optimizable
3546 sibling calls will return a structure. */
3547 if (structure_value_addr != NULL_RTX)
3549 maybe_complain_about_tail_call (exp, "callee returns a structure");
3550 return false;
3553 #ifdef REG_PARM_STACK_SPACE
3554 /* If outgoing reg parm stack space changes, we cannot do sibcall. */
3555 if (OUTGOING_REG_PARM_STACK_SPACE (funtype)
3556 != OUTGOING_REG_PARM_STACK_SPACE (TREE_TYPE (current_function_decl))
3557 || (reg_parm_stack_space != REG_PARM_STACK_SPACE (current_function_decl)))
3559 maybe_complain_about_tail_call (exp,
3560 "inconsistent size of stack space"
3561 " allocated for arguments which are"
3562 " passed in registers");
3563 return false;
3565 #endif
3567 /* Check whether the target is able to optimize the call
3568 into a sibcall. */
3569 if (!targetm.function_ok_for_sibcall (fndecl, exp))
3571 maybe_complain_about_tail_call (exp,
3572 "target is not able to optimize the"
3573 " call into a sibling call");
3574 return false;
3577 /* Functions that do not return exactly once may not be sibcall
3578 optimized. */
3579 if (flags & ECF_RETURNS_TWICE)
3581 maybe_complain_about_tail_call (exp, "callee returns twice");
3582 return false;
3584 if (flags & ECF_NORETURN)
3586 maybe_complain_about_tail_call (exp, "callee does not return");
3587 return false;
3590 if (TYPE_VOLATILE (TREE_TYPE (TREE_TYPE (addr))))
3592 maybe_complain_about_tail_call (exp, "volatile function type");
3593 return false;
3596 /* If the called function is nested in the current one, it might access
3597 some of the caller's arguments, but could clobber them beforehand if
3598 the argument areas are shared. */
3599 if (fndecl && decl_function_context (fndecl) == current_function_decl)
3601 maybe_complain_about_tail_call (exp, "nested function");
3602 return false;
3605 /* If this function requires more stack slots than the current
3606 function, we cannot change it into a sibling call.
3607 crtl->args.pretend_args_size is not part of the
3608 stack allocated by our caller. */
3609 if (maybe_gt (args_size.constant,
3610 crtl->args.size - crtl->args.pretend_args_size))
3612 maybe_complain_about_tail_call (exp,
3613 "callee required more stack slots"
3614 " than the caller");
3615 return false;
3618 /* If the callee pops its own arguments, then it must pop exactly
3619 the same number of arguments as the current function. */
3620 if (maybe_ne (targetm.calls.return_pops_args (fndecl, funtype,
3621 args_size.constant),
3622 targetm.calls.return_pops_args (current_function_decl,
3623 TREE_TYPE
3624 (current_function_decl),
3625 crtl->args.size)))
3627 maybe_complain_about_tail_call (exp,
3628 "inconsistent number of"
3629 " popped arguments");
3630 return false;
3633 if (!lang_hooks.decls.ok_for_sibcall (fndecl))
3635 maybe_complain_about_tail_call (exp, "frontend does not support"
3636 " sibling call");
3637 return false;
3640 /* All checks passed. */
3641 return true;
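/* For instance (an illustrative source-level case, not from this
   file), with

     struct big { int x[8]; };
     extern struct big helper (void);
     struct big caller (void) { return helper (); }

   the call to helper passes most checks above but is rejected by the
   structure_value_addr test whenever the ABI returns struct big via a
   hidden pointer, so it is expanded as a normal call instead.  */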
3644 /* Update stack alignment when the parameter is passed on the stack
3645 since the outgoing parameter requires extra alignment on the calling
3646 function side. */
3648 static void
3649 update_stack_alignment_for_call (struct locate_and_pad_arg_data *locate)
3651 if (crtl->stack_alignment_needed < locate->boundary)
3652 crtl->stack_alignment_needed = locate->boundary;
3653 if (crtl->preferred_stack_boundary < locate->boundary)
3654 crtl->preferred_stack_boundary = locate->boundary;
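/* Illustrative numbers: a stack-passed argument whose type demands
   32-byte alignment arrives with LOCATE->boundary == 256, which
   raises both crtl->stack_alignment_needed and
   crtl->preferred_stack_boundary to 256 bits if they were lower.  */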
3657 /* Generate all the code for a CALL_EXPR exp
3658 and return an rtx for its value.
3659 Store the value in TARGET (specified as an rtx) if convenient.
3660 If the value is stored in TARGET then TARGET is returned.
3661 If IGNORE is nonzero, then we ignore the value of the function call. */
3663 rtx
3664 expand_call (tree exp, rtx target, int ignore)
3666 /* Nonzero if we are currently expanding a call. */
3667 static int currently_expanding_call = 0;
3669 /* RTX for the function to be called. */
3670 rtx funexp;
3671 /* Sequence of insns to perform a normal "call". */
3672 rtx_insn *normal_call_insns = NULL;
3673 /* Sequence of insns to perform a tail "call". */
3674 rtx_insn *tail_call_insns = NULL;
3675 /* Data type of the function. */
3676 tree funtype;
3677 tree type_arg_types;
3678 tree rettype;
3679 /* Declaration of the function being called,
3680 or 0 if the function is computed (not known by name). */
3681 tree fndecl = 0;
3682 /* The type of the function being called. */
3683 tree fntype;
3684 bool try_tail_call = CALL_EXPR_TAILCALL (exp);
3685 bool must_tail_call = CALL_EXPR_MUST_TAIL_CALL (exp);
3686 int pass;
3688 /* Register in which non-BLKmode value will be returned,
3689 or 0 if no value or if value is BLKmode. */
3690 rtx valreg;
3691 /* Address where we should return a BLKmode value;
3692 0 if value not BLKmode. */
3693 rtx structure_value_addr = 0;
3694 /* Nonzero if that address is being passed by treating it as
3695 an extra, implicit first parameter. Otherwise,
3696 it is passed by being copied directly into struct_value_rtx. */
3697 int structure_value_addr_parm = 0;
3698 /* Holds the value of the implicit argument for the struct value. */
3699 tree structure_value_addr_value = NULL_TREE;
3700 /* Size of aggregate value wanted, or zero if none wanted
3701 or if we are using the non-reentrant PCC calling convention
3702 or expecting the value in registers. */
3703 poly_int64 struct_value_size = 0;
3704 /* Nonzero if called function returns an aggregate in memory PCC style,
3705 by returning the address of where to find it. */
3706 int pcc_struct_value = 0;
3707 rtx struct_value = 0;
3709 /* Number of actual parameters in this call, including struct value addr. */
3710 int num_actuals;
3711 /* Number of named args. Args after this are anonymous ones
3712 and they must all go on the stack. */
3713 int n_named_args;
3714 /* Number of complex actual arguments that need to be split. */
3715 int num_complex_actuals = 0;
3717 /* Vector of information about each argument.
3718 Arguments are numbered in the order they will be pushed,
3719 not the order they are written. */
3720 struct arg_data *args;
3722 /* Total size in bytes of all the stack-parms scanned so far. */
3723 struct args_size args_size;
3724 struct args_size adjusted_args_size;
3725 /* Size of arguments before any adjustments (such as rounding). */
3726 poly_int64 unadjusted_args_size;
3727 /* Data on reg parms scanned so far. */
3728 CUMULATIVE_ARGS args_so_far_v;
3729 cumulative_args_t args_so_far;
3730 /* Nonzero if a reg parm has been scanned. */
3731 int reg_parm_seen;
3732 /* Nonzero if this is an indirect function call. */
3734 /* Nonzero if we must avoid push-insns in the args for this call.
3735 If stack space is allocated for register parameters, but not by the
3736 caller, then it is preallocated in the fixed part of the stack frame.
3737 So the entire argument block must then be preallocated (i.e., we
3738 ignore PUSH_ROUNDING in that case). */
3740 int must_preallocate = !PUSH_ARGS;
3742 /* Size of the stack reserved for parameter registers. */
3743 int reg_parm_stack_space = 0;
3745 /* Address of space preallocated for stack parms
3746 (on machines that lack push insns), or 0 if space not preallocated. */
3747 rtx argblock = 0;
3749 /* Mask of ECF_ and ERF_ flags. */
3750 int flags = 0;
3751 int return_flags = 0;
3752 #ifdef REG_PARM_STACK_SPACE
3753 /* Define the boundary of the register parm stack space that needs to be
3754 saved, if any. */
3755 int low_to_save, high_to_save;
3756 rtx save_area = 0; /* Place that it is saved */
3757 #endif
3759 unsigned int initial_highest_arg_in_use = highest_outgoing_arg_in_use;
3760 char *initial_stack_usage_map = stack_usage_map;
3761 unsigned HOST_WIDE_INT initial_stack_usage_watermark = stack_usage_watermark;
3762 char *stack_usage_map_buf = NULL;
3764 poly_int64 old_stack_allocated;
3766 /* State variables to track stack modifications. */
3767 rtx old_stack_level = 0;
3768 int old_stack_arg_under_construction = 0;
3769 poly_int64 old_pending_adj = 0;
3770 int old_inhibit_defer_pop = inhibit_defer_pop;
3772 /* Some stack pointer alterations we make are performed via
3773 allocate_dynamic_stack_space. This modifies the stack_pointer_delta,
3774 which we then also need to save/restore along the way. */
3775 poly_int64 old_stack_pointer_delta = 0;
3777 rtx call_fusage;
3778 tree addr = CALL_EXPR_FN (exp);
3779 int i;
3780 /* The alignment of the stack, in bits. */
3781 unsigned HOST_WIDE_INT preferred_stack_boundary;
3782 /* The alignment of the stack, in bytes. */
3783 unsigned HOST_WIDE_INT preferred_unit_stack_boundary;
3784 /* The static chain value to use for this call. */
3785 rtx static_chain_value;
3786 /* See if this is a "nothrow" function call. */
3787 if (TREE_NOTHROW (exp))
3788 flags |= ECF_NOTHROW;
3790 /* See if we can find a DECL-node for the actual function, and get the
3791 function attributes (flags) from the function decl or type node. */
3792 fndecl = get_callee_fndecl (exp);
3793 if (fndecl)
3795 fntype = TREE_TYPE (fndecl);
3796 flags |= flags_from_decl_or_type (fndecl);
3797 return_flags |= decl_return_flags (fndecl);
3799 else
3801 fntype = TREE_TYPE (TREE_TYPE (addr));
3802 flags |= flags_from_decl_or_type (fntype);
3803 if (CALL_EXPR_BY_DESCRIPTOR (exp))
3804 flags |= ECF_BY_DESCRIPTOR;
3806 rettype = TREE_TYPE (exp);
3808 struct_value = targetm.calls.struct_value_rtx (fntype, 0);
3810 /* Warn if this value is an aggregate type,
3811 regardless of which calling convention we are using for it. */
3812 if (AGGREGATE_TYPE_P (rettype))
3813 warning (OPT_Waggregate_return, "function call has aggregate value");
3815 /* If the result of a non-looping pure or const function call is
3816 ignored (or void), and none of its arguments are volatile, we can
3817 avoid expanding the call and just evaluate the arguments for
3818 side-effects. */
3819 if ((flags & (ECF_CONST | ECF_PURE))
3820 && (!(flags & ECF_LOOPING_CONST_OR_PURE))
3821 && (ignore || target == const0_rtx
3822 || TYPE_MODE (rettype) == VOIDmode))
3824 bool volatilep = false;
3825 tree arg;
3826 call_expr_arg_iterator iter;
3828 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
3829 if (TREE_THIS_VOLATILE (arg))
3831 volatilep = true;
3832 break;
3835 if (! volatilep)
3837 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
3838 expand_expr (arg, const0_rtx, VOIDmode, EXPAND_NORMAL);
3839 return const0_rtx;
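/* A hedged source-level example of this shortcut:

     extern int f (int *) __attribute__ ((pure));
     void g (int *p) { f (p); }

   The result of f is unused and no argument is volatile, so no call
   is emitted for g's body; the argument is expanded only for its side
   effects and const0_rtx stands in for the ignored value.  */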
3843 #ifdef REG_PARM_STACK_SPACE
3844 reg_parm_stack_space = REG_PARM_STACK_SPACE (!fndecl ? fntype : fndecl);
3845 #endif
3847 if (! OUTGOING_REG_PARM_STACK_SPACE ((!fndecl ? fntype : TREE_TYPE (fndecl)))
3848 && reg_parm_stack_space > 0 && PUSH_ARGS)
3849 must_preallocate = 1;
3851 /* Set up a place to return a structure. */
3853 /* Cater to broken compilers. */
3854 if (aggregate_value_p (exp, fntype))
3856 /* This call returns a big structure. */
3857 flags &= ~(ECF_CONST | ECF_PURE | ECF_LOOPING_CONST_OR_PURE);
3859 #ifdef PCC_STATIC_STRUCT_RETURN
3861 pcc_struct_value = 1;
3863 #else /* not PCC_STATIC_STRUCT_RETURN */
3865 if (!poly_int_tree_p (TYPE_SIZE_UNIT (rettype), &struct_value_size))
3866 struct_value_size = -1;
3868 /* Even if it is semantically safe to use the target as the return
3869 slot, it may not be sufficiently aligned for the return type. */
3870 if (CALL_EXPR_RETURN_SLOT_OPT (exp)
3871 && target
3872 && MEM_P (target)
3873 /* If rettype is addressable, we may not create a temporary.
3874 If target is properly aligned at runtime and the compiler
3875 just doesn't know about it, it will work fine, otherwise it
3876 will be UB. */
3877 && (TREE_ADDRESSABLE (rettype)
3878 || !(MEM_ALIGN (target) < TYPE_ALIGN (rettype)
3879 && targetm.slow_unaligned_access (TYPE_MODE (rettype),
3880 MEM_ALIGN (target)))))
3881 structure_value_addr = XEXP (target, 0);
3882 else
3884 /* For variable-sized objects, we must be called with a target
3885 specified. If we were to allocate space on the stack here,
3886 we would have no way of knowing when to free it. */
3887 rtx d = assign_temp (rettype, 1, 1);
3888 structure_value_addr = XEXP (d, 0);
3889 target = 0;
3892 #endif /* not PCC_STATIC_STRUCT_RETURN */
3895 /* Figure out the amount to which the stack should be aligned. */
3896 preferred_stack_boundary = PREFERRED_STACK_BOUNDARY;
3897 if (fndecl)
3899 struct cgraph_rtl_info *i = cgraph_node::rtl_info (fndecl);
3900 /* Without automatic stack alignment, we can't increase preferred
3901 stack boundary. With automatic stack alignment, it is
3902 unnecessary since unless we can guarantee that all callers will
3903 align the outgoing stack properly, callee has to align its
3904 stack anyway. */
3905 if (i
3906 && i->preferred_incoming_stack_boundary
3907 && i->preferred_incoming_stack_boundary < preferred_stack_boundary)
3908 preferred_stack_boundary = i->preferred_incoming_stack_boundary;
3911 /* Operand 0 is a pointer-to-function; get the type of the function. */
3912 funtype = TREE_TYPE (addr);
3913 gcc_assert (POINTER_TYPE_P (funtype));
3914 funtype = TREE_TYPE (funtype);
3916 /* Count whether there are actual complex arguments that need to be split
3917 into their real and imaginary parts. Munge the type_arg_types
3918 appropriately here as well. */
3919 if (targetm.calls.split_complex_arg)
3921 call_expr_arg_iterator iter;
3922 tree arg;
3923 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
3925 tree type = TREE_TYPE (arg);
3926 if (type && TREE_CODE (type) == COMPLEX_TYPE
3927 && targetm.calls.split_complex_arg (type))
3928 num_complex_actuals++;
3930 type_arg_types = split_complex_types (TYPE_ARG_TYPES (funtype));
3932 else
3933 type_arg_types = TYPE_ARG_TYPES (funtype);
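/* E.g. if the target's split_complex_arg hook accepts _Complex double,
   a prototype (int, _Complex double) makes num_complex_actuals 1 for
   a matching call and munges type_arg_types into (int, double,
   double), one entry each for the real and imaginary parts.
   (Illustrative; whether a complex type is split is entirely
   target-specific.)  */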
3935 if (flags & ECF_MAY_BE_ALLOCA)
3936 cfun->calls_alloca = 1;
3938 /* If struct_value_rtx is 0, it means pass the address
3939 as if it were an extra parameter. Put the argument expression
3940 in structure_value_addr_value. */
3941 if (structure_value_addr && struct_value == 0)
3943 /* If structure_value_addr is a REG other than
3944 virtual_outgoing_args_rtx, we can always use it. If it
3945 is not a REG, we must always copy it into a register.
3946 If it is virtual_outgoing_args_rtx, we must copy it to another
3947 register in some cases. */
3948 rtx temp = (!REG_P (structure_value_addr)
3949 || (ACCUMULATE_OUTGOING_ARGS
3950 && stack_arg_under_construction
3951 && structure_value_addr == virtual_outgoing_args_rtx)
3952 ? copy_addr_to_reg (convert_memory_address
3953 (Pmode, structure_value_addr))
3954 : structure_value_addr);
3956 structure_value_addr_value =
3957 make_tree (build_pointer_type (TREE_TYPE (funtype)), temp);
3958 structure_value_addr_parm = 1;
3961 /* Count the arguments and set NUM_ACTUALS. */
3962 num_actuals =
3963 call_expr_nargs (exp) + num_complex_actuals + structure_value_addr_parm;
3965 /* Compute number of named args.
3966 First, do a raw count of the args for INIT_CUMULATIVE_ARGS. */
3968 if (type_arg_types != 0)
3969 n_named_args
3970 = (list_length (type_arg_types)
3971 /* Count the struct value address, if it is passed as a parm. */
3972 + structure_value_addr_parm);
3973 else
3974 /* If we know nothing, treat all args as named. */
3975 n_named_args = num_actuals;
3977 /* Start updating where the next arg would go.
3979 On some machines (such as the PA) indirect calls have a different
3980 calling convention than normal calls. The fourth argument in
3981 INIT_CUMULATIVE_ARGS tells the backend if this is an indirect call
3982 or not. */
3983 INIT_CUMULATIVE_ARGS (args_so_far_v, funtype, NULL_RTX, fndecl, n_named_args);
3984 args_so_far = pack_cumulative_args (&args_so_far_v);
3986 /* Now possibly adjust the number of named args.
3987 Normally, don't include the last named arg if anonymous args follow.
3988 We do include the last named arg if
3989 targetm.calls.strict_argument_naming() returns nonzero.
3990 (If no anonymous args follow, the result of list_length is actually
3991 one too large. This is harmless.)
3993 If targetm.calls.pretend_outgoing_varargs_named() returns
3994 nonzero, and targetm.calls.strict_argument_naming() returns zero,
3995 this machine will be able to place unnamed args that were passed
3996 in registers into the stack. So treat all args as named. This
3997 allows the insns emitted for a specific argument list to be
3998 independent of the function declaration.
4000 If targetm.calls.pretend_outgoing_varargs_named() returns zero,
4001 we do not have any reliable way to pass unnamed args in
4002 registers, so we must force them into memory. */
4004 if (type_arg_types != 0
4005 && targetm.calls.strict_argument_naming (args_so_far))
4006 ;
4007 else if (type_arg_types != 0
4008 && ! targetm.calls.pretend_outgoing_varargs_named (args_so_far))
4009 /* Don't include the last named arg. */
4010 --n_named_args;
4011 else
4012 /* Treat all args as named. */
4013 n_named_args = num_actuals;
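/* A hedged illustration: for a call to int printf (const char *, ...)
   the raw count above gives n_named_args == 1.  With strict argument
   naming that count stands; if unnamed args cannot be pretended
   named, the last named arg is dropped from the count; otherwise
   every actual, including the anonymous ones, is treated as named.  */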
4015 /* Make a vector to hold all the information about each arg. */
4016 args = XCNEWVEC (struct arg_data, num_actuals);
4018 /* Build up entries in the ARGS array, compute the size of the
4019 arguments into ARGS_SIZE, etc. */
4020 initialize_argument_information (num_actuals, args, &args_size,
4021 n_named_args, exp,
4022 structure_value_addr_value, fndecl, fntype,
4023 args_so_far, reg_parm_stack_space,
4024 &old_stack_level, &old_pending_adj,
4025 &must_preallocate, &flags,
4026 &try_tail_call, CALL_FROM_THUNK_P (exp));
4028 if (args_size.var)
4029 must_preallocate = 1;
4031 /* Now make final decision about preallocating stack space. */
4032 must_preallocate = finalize_must_preallocate (must_preallocate,
4033 num_actuals, args,
4034 &args_size);
4036 /* If the structure value address will reference the stack pointer, we
4037 must stabilize it. We don't need to do this if we know that we are
4038 not going to adjust the stack pointer in processing this call. */
4040 if (structure_value_addr
4041 && (reg_mentioned_p (virtual_stack_dynamic_rtx, structure_value_addr)
4042 || reg_mentioned_p (virtual_outgoing_args_rtx,
4043 structure_value_addr))
4044 && (args_size.var
4045 || (!ACCUMULATE_OUTGOING_ARGS
4046 && maybe_ne (args_size.constant, 0))))
4047 structure_value_addr = copy_to_reg (structure_value_addr);
4049 /* Tail calls can make things harder to debug, and we've traditionally
4050 pushed these optimizations into -O2. Don't try if we're already
4051 expanding a call, as that means we're an argument. Don't try if
4052 there's cleanups, as we know there's code to follow the call. */
4053 if (currently_expanding_call++ != 0
4054 || (!flag_optimize_sibling_calls && !CALL_FROM_THUNK_P (exp))
4055 || args_size.var
4056 || dbg_cnt (tail_call) == false)
4057 try_tail_call = 0;
4059 /* Workaround buggy C/C++ wrappers around Fortran routines with
4060 character(len=constant) arguments if the hidden string length arguments
4061 are passed on the stack; if the callers forget to pass those arguments,
4062 attempting to tail call in such routines leads to stack corruption.
4063 Avoid tail calls in functions where at least one such hidden string
4064 length argument is passed (partially or fully) on the stack in the
4065 caller and the callee needs to pass any arguments on the stack.
4066 See PR90329. */
4067 if (try_tail_call && maybe_ne (args_size.constant, 0))
4068 for (tree arg = DECL_ARGUMENTS (current_function_decl);
4069 arg; arg = DECL_CHAIN (arg))
4070 if (DECL_HIDDEN_STRING_LENGTH (arg) && DECL_INCOMING_RTL (arg))
4072 subrtx_iterator::array_type array;
4073 FOR_EACH_SUBRTX (iter, array, DECL_INCOMING_RTL (arg), NONCONST)
4074 if (MEM_P (*iter))
4076 try_tail_call = 0;
4077 break;
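/* The scenario (sketched from PR90329): Fortran's SUBROUTINE F(S)
   with CHARACTER(LEN=10) S receives a hidden string-length argument.
   A C caller declaring only void f_ (char *) never writes that stack
   slot, so if f_ were to tail call with stack-passed arguments it
   could read or overwrite stack it does not own.  */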
4081 /* If the user has marked the function as requiring tail-call
4082 optimization, attempt it. */
4083 if (must_tail_call)
4084 try_tail_call = 1;
4086 /* Remaining reasons for tail call optimization to fail. */
4087 if (try_tail_call)
4088 try_tail_call = can_implement_as_sibling_call_p (exp,
4089 structure_value_addr,
4090 funtype,
4091 reg_parm_stack_space,
4092 fndecl,
4093 flags, addr, args_size);
4095 /* Check if caller and callee disagree in promotion of function
4096 return value. */
4097 if (try_tail_call)
4099 machine_mode caller_mode, caller_promoted_mode;
4100 machine_mode callee_mode, callee_promoted_mode;
4101 int caller_unsignedp, callee_unsignedp;
4102 tree caller_res = DECL_RESULT (current_function_decl);
4104 caller_unsignedp = TYPE_UNSIGNED (TREE_TYPE (caller_res));
4105 caller_mode = DECL_MODE (caller_res);
4106 callee_unsignedp = TYPE_UNSIGNED (TREE_TYPE (funtype));
4107 callee_mode = TYPE_MODE (TREE_TYPE (funtype));
4108 caller_promoted_mode
4109 = promote_function_mode (TREE_TYPE (caller_res), caller_mode,
4110 &caller_unsignedp,
4111 TREE_TYPE (current_function_decl), 1);
4112 callee_promoted_mode
4113 = promote_function_mode (TREE_TYPE (funtype), callee_mode,
4114 &callee_unsignedp,
4115 funtype, 1);
4116 if (caller_mode != VOIDmode
4117 && (caller_promoted_mode != callee_promoted_mode
4118 || ((caller_mode != caller_promoted_mode
4119 || callee_mode != callee_promoted_mode)
4120 && (caller_unsignedp != callee_unsignedp
4121 || partial_subreg_p (caller_mode, callee_mode)))))
4123 try_tail_call = 0;
4124 maybe_complain_about_tail_call (exp,
4125 "caller and callee disagree in"
4126 " promotion of function"
4127 " return value");
4131 /* Ensure current function's preferred stack boundary is at least
4132 what we need. Stack alignment may also increase preferred stack
4133 boundary. */
4134 for (i = 0; i < num_actuals; i++)
4135 if (reg_parm_stack_space > 0
4136 || args[i].reg == 0
4137 || args[i].partial != 0
4138 || args[i].pass_on_stack)
4139 update_stack_alignment_for_call (&args[i].locate);
4140 if (crtl->preferred_stack_boundary < preferred_stack_boundary)
4141 crtl->preferred_stack_boundary = preferred_stack_boundary;
4142 else
4143 preferred_stack_boundary = crtl->preferred_stack_boundary;
4145 preferred_unit_stack_boundary = preferred_stack_boundary / BITS_PER_UNIT;
4147 if (flag_callgraph_info)
4148 record_final_call (fndecl, EXPR_LOCATION (exp));
4150 /* We want to make two insn chains; one for a sibling call, the other
4151 for a normal call. We will select one of the two chains after
4152 initial RTL generation is complete. */
4153 for (pass = try_tail_call ? 0 : 1; pass < 2; pass++)
4155 int sibcall_failure = 0;
4156 /* We want to emit any pending stack adjustments before the tail
4157 recursion "call". That way we know any adjustment after the tail
4158 recursion call can be ignored if we indeed use the tail
4159 call expansion. */
4160 saved_pending_stack_adjust save;
4161 rtx_insn *insns, *before_call, *after_args;
4162 rtx next_arg_reg;
4164 if (pass == 0)
4166 /* State variables we need to save and restore between
4167 iterations. */
4168 save_pending_stack_adjust (&save);
4170 if (pass)
4171 flags &= ~ECF_SIBCALL;
4172 else
4173 flags |= ECF_SIBCALL;
4175 /* Other state variables that we must reinitialize each time
4176 through the loop (that are not initialized by the loop itself). */
4177 argblock = 0;
4178 call_fusage = 0;
4180 /* Start a new sequence for the normal call case.
4182 From this point on, if the sibling call fails, we want to set
4183 sibcall_failure instead of continuing the loop. */
4184 start_sequence ();
4186 /* Don't let pending stack adjusts add up to too much.
4187 Also, do all pending adjustments now if there is any chance
4188 this might be a call to alloca or if we are expanding a sibling
4189 call sequence.
4190 Also do the adjustments before a throwing call, otherwise
4191 exception handling can fail; PR 19225. */
4192 if (maybe_ge (pending_stack_adjust, 32)
4193 || (maybe_ne (pending_stack_adjust, 0)
4194 && (flags & ECF_MAY_BE_ALLOCA))
4195 || (maybe_ne (pending_stack_adjust, 0)
4196 && flag_exceptions && !(flags & ECF_NOTHROW))
4197 || pass == 0)
4198 do_pending_stack_adjust ();
4200 /* Precompute any arguments as needed. */
4201 if (pass)
4202 precompute_arguments (num_actuals, args);
4204 /* Now we are about to start emitting insns that can be deleted
4205 if a libcall is deleted. */
4206 if (pass && (flags & ECF_MALLOC))
4207 start_sequence ();
4209 if (pass == 0
4210 && crtl->stack_protect_guard
4211 && targetm.stack_protect_runtime_enabled_p ())
4212 stack_protect_epilogue ();
4214 adjusted_args_size = args_size;
4215 /* Compute the actual size of the argument block required. The variable
4216 and constant sizes must be combined, the size may have to be rounded,
4217 and there may be a minimum required size. When generating a sibcall
4218 pattern, do not round up, since we'll be re-using whatever space our
4219 caller provided. */
4220 unadjusted_args_size
4221 = compute_argument_block_size (reg_parm_stack_space,
4222 &adjusted_args_size,
4223 fndecl, fntype,
4224 (pass == 0 ? 0
4225 : preferred_stack_boundary));
4227 old_stack_allocated = stack_pointer_delta - pending_stack_adjust;
4229 /* The argument block when performing a sibling call is the
4230 incoming argument block. */
4231 if (pass == 0)
4233 argblock = crtl->args.internal_arg_pointer;
4234 if (STACK_GROWS_DOWNWARD)
4235 argblock
4236 = plus_constant (Pmode, argblock, crtl->args.pretend_args_size);
4237 else
4238 argblock
4239 = plus_constant (Pmode, argblock, -crtl->args.pretend_args_size);
4241 HOST_WIDE_INT map_size = constant_lower_bound (args_size.constant);
4242 stored_args_map = sbitmap_alloc (map_size);
4243 bitmap_clear (stored_args_map);
4244 stored_args_watermark = HOST_WIDE_INT_M1U;
4247 /* If we have no actual push instructions, or shouldn't use them,
4248 make space for all args right now. */
4249 else if (adjusted_args_size.var != 0)
4251 if (old_stack_level == 0)
4253 emit_stack_save (SAVE_BLOCK, &old_stack_level);
4254 old_stack_pointer_delta = stack_pointer_delta;
4255 old_pending_adj = pending_stack_adjust;
4256 pending_stack_adjust = 0;
4257 /* stack_arg_under_construction says whether a stack arg is
4258 being constructed at the old stack level. Pushing the stack
4259 gets a clean outgoing argument block. */
4260 old_stack_arg_under_construction = stack_arg_under_construction;
4261 stack_arg_under_construction = 0;
4263 argblock = push_block (ARGS_SIZE_RTX (adjusted_args_size), 0, 0);
4264 if (flag_stack_usage_info)
4265 current_function_has_unbounded_dynamic_stack_size = 1;
4267 else
4269 /* Note that we must go through the motions of allocating an argument
4270 block even if the size is zero because we may be storing args
4271 in the area reserved for register arguments, which may be part of
4272 the stack frame. */
4274 poly_int64 needed = adjusted_args_size.constant;
4276 /* Store the maximum argument space used. It will be pushed by
4277 the prologue (if ACCUMULATE_OUTGOING_ARGS, or stack overflow
4278 checking). */
4280 crtl->outgoing_args_size = upper_bound (crtl->outgoing_args_size,
4281 needed);
4283 if (must_preallocate)
4285 if (ACCUMULATE_OUTGOING_ARGS)
4287 /* Since the stack pointer will never be pushed, it is
4288 possible for the evaluation of a parm to clobber
4289 something we have already written to the stack.
4290 Since most function calls on RISC machines do not use
4291 the stack, this is uncommon, but must work correctly.
4293 Therefore, we save any area of the stack that was already
4294 written and that we are using. Here we set up to do this
4295 by making a new stack usage map from the old one. The
4296 actual save will be done by store_one_arg.
4298 Another approach might be to try to reorder the argument
4299 evaluations to avoid this conflicting stack usage. */
4301 /* Since we will be writing into the entire argument area,
4302 the map must be allocated for its entire size, not just
4303 the part that is the responsibility of the caller. */
4304 if (! OUTGOING_REG_PARM_STACK_SPACE ((!fndecl ? fntype : TREE_TYPE (fndecl))))
4305 needed += reg_parm_stack_space;
4307 poly_int64 limit = needed;
4308 if (ARGS_GROW_DOWNWARD)
4309 limit += 1;
4311 /* For polynomial sizes, this is the maximum possible
4312 size needed for arguments with a constant size
4313 and offset. */
4314 HOST_WIDE_INT const_limit = constant_lower_bound (limit);
4315 highest_outgoing_arg_in_use
4316 = MAX (initial_highest_arg_in_use, const_limit);
4318 free (stack_usage_map_buf);
4319 stack_usage_map_buf = XNEWVEC (char, highest_outgoing_arg_in_use);
4320 stack_usage_map = stack_usage_map_buf;
4322 if (initial_highest_arg_in_use)
4323 memcpy (stack_usage_map, initial_stack_usage_map,
4324 initial_highest_arg_in_use);
4326 if (initial_highest_arg_in_use != highest_outgoing_arg_in_use)
4327 memset (&stack_usage_map[initial_highest_arg_in_use], 0,
4328 (highest_outgoing_arg_in_use
4329 - initial_highest_arg_in_use));
4330 needed = 0;
4332 /* The address of the outgoing argument list must not be
4333 copied to a register here, because argblock would be left
4334 pointing to the wrong place after the call to
4335 allocate_dynamic_stack_space below. */
4337 argblock = virtual_outgoing_args_rtx;
4339 else
4341 /* Try to reuse some or all of the pending_stack_adjust
4342 to get this space. */
4343 if (inhibit_defer_pop == 0
4344 && (combine_pending_stack_adjustment_and_call
4345 (&needed,
4346 unadjusted_args_size,
4347 &adjusted_args_size,
4348 preferred_unit_stack_boundary)))
4350 /* combine_pending_stack_adjustment_and_call computes
4351 an adjustment before the arguments are allocated.
4352 Account for them and see whether or not the stack
4353 needs to go up or down. */
4354 needed = unadjusted_args_size - needed;
4356 /* Checked by
4357 combine_pending_stack_adjustment_and_call. */
4358 gcc_checking_assert (ordered_p (needed, 0));
4359 if (maybe_lt (needed, 0))
4361 /* We're releasing stack space. */
4362 /* ??? We can avoid any adjustment at all if we're
4363 already aligned. FIXME. */
4364 pending_stack_adjust = -needed;
4365 do_pending_stack_adjust ();
4366 needed = 0;
4368 else
4369 /* We need to allocate space. We'll do that in
4370 push_block below. */
4371 pending_stack_adjust = 0;
4374 /* Special case this because the overhead of `push_block' in
4375 this case is non-trivial. */
4376 if (known_eq (needed, 0))
4377 argblock = virtual_outgoing_args_rtx;
4378 else
4380 rtx needed_rtx = gen_int_mode (needed, Pmode);
4381 argblock = push_block (needed_rtx, 0, 0);
4382 if (ARGS_GROW_DOWNWARD)
4383 argblock = plus_constant (Pmode, argblock, needed);
4386 /* We only really need to call `copy_to_reg' in the case
4387 where push insns are going to be used to pass ARGBLOCK
4388 to a function call in ARGS. In that case, the stack
4389 pointer changes value from the allocation point to the
4390 call point, and hence the value of
4391 VIRTUAL_OUTGOING_ARGS_RTX changes as well. But might
4392 as well always do it. */
4393 argblock = copy_to_reg (argblock);
4398 if (ACCUMULATE_OUTGOING_ARGS)
4400 /* The save/restore code in store_one_arg handles all
4401 cases except one: a constructor call (including a C
4402 function returning a BLKmode struct) to initialize
4403 an argument. */
4404 if (stack_arg_under_construction)
4406 rtx push_size
4407 = (gen_int_mode
4408 (adjusted_args_size.constant
4409 + (OUTGOING_REG_PARM_STACK_SPACE (!fndecl ? fntype
4410 : TREE_TYPE (fndecl))
4411 ? 0 : reg_parm_stack_space), Pmode));
4412 if (old_stack_level == 0)
4414 emit_stack_save (SAVE_BLOCK, &old_stack_level);
4415 old_stack_pointer_delta = stack_pointer_delta;
4416 old_pending_adj = pending_stack_adjust;
4417 pending_stack_adjust = 0;
4418 /* stack_arg_under_construction says whether a stack
4419 arg is being constructed at the old stack level.
4420 Pushing the stack gets a clean outgoing argument
4421 block. */
4422 old_stack_arg_under_construction
4423 = stack_arg_under_construction;
4424 stack_arg_under_construction = 0;
4425 /* Make a new map for the new argument list. */
4426 free (stack_usage_map_buf);
4427 stack_usage_map_buf = XCNEWVEC (char, highest_outgoing_arg_in_use);
4428 stack_usage_map = stack_usage_map_buf;
4429 highest_outgoing_arg_in_use = 0;
4430 stack_usage_watermark = HOST_WIDE_INT_M1U;
4432 /* We can pass TRUE as the 4th argument because we just
4433 saved the stack pointer and will restore it right after
4434 the call. */
4435 allocate_dynamic_stack_space (push_size, 0, BIGGEST_ALIGNMENT,
4436 -1, true);
4439 /* If argument evaluation might modify the stack pointer,
4440 copy the address of the argument list to a register. */
4441 for (i = 0; i < num_actuals; i++)
4442 if (args[i].pass_on_stack)
4444 argblock = copy_addr_to_reg (argblock);
4445 break;
4449 compute_argument_addresses (args, argblock, num_actuals);
4451 /* Stack is properly aligned, pops can't safely be deferred during
4452 the evaluation of the arguments. */
4453 NO_DEFER_POP;
4455 /* Precompute all register parameters. It isn't safe to compute
4456 anything once we have started filling any specific hard regs.
4457 TLS symbols sometimes need a call to resolve. Precompute
4458 register parameters before any stack pointer manipulation
4459 to avoid unaligned stack in the called function. */
4460 precompute_register_parameters (num_actuals, args, &reg_parm_seen);
4462 OK_DEFER_POP;
4464 /* Perform stack alignment before the first push (the last arg). */
4465 if (argblock == 0
4466 && maybe_gt (adjusted_args_size.constant, reg_parm_stack_space)
4467 && maybe_ne (adjusted_args_size.constant, unadjusted_args_size))
4469 /* When the stack adjustment is pending, we get better code
4470 by combining the adjustments. */
4471 if (maybe_ne (pending_stack_adjust, 0)
4472 && ! inhibit_defer_pop
4473 && (combine_pending_stack_adjustment_and_call
4474 (&pending_stack_adjust,
4475 unadjusted_args_size,
4476 &adjusted_args_size,
4477 preferred_unit_stack_boundary)))
4478 do_pending_stack_adjust ();
4479 else if (argblock == 0)
4480 anti_adjust_stack (gen_int_mode (adjusted_args_size.constant
4481 - unadjusted_args_size,
4482 Pmode));
4484 /* Now that the stack is properly aligned, pops can't safely
4485 be deferred during the evaluation of the arguments. */
4486 NO_DEFER_POP;
4488 /* Record the maximum pushed stack space size. We need to delay
4489 doing it this far to take into account the optimization done
4490 by combine_pending_stack_adjustment_and_call. */
4491 if (flag_stack_usage_info
4492 && !ACCUMULATE_OUTGOING_ARGS
4493 && pass
4494 && adjusted_args_size.var == 0)
4496 poly_int64 pushed = (adjusted_args_size.constant
4497 + pending_stack_adjust);
4498 current_function_pushed_stack_size
4499 = upper_bound (current_function_pushed_stack_size, pushed);
4502 funexp = rtx_for_function_call (fndecl, addr);
4504 if (CALL_EXPR_STATIC_CHAIN (exp))
4505 static_chain_value = expand_normal (CALL_EXPR_STATIC_CHAIN (exp));
4506 else
4507 static_chain_value = 0;
4509 #ifdef REG_PARM_STACK_SPACE
4510 /* Save the fixed argument area if it's part of the caller's frame and
4511 is clobbered by argument setup for this call. */
4512 if (ACCUMULATE_OUTGOING_ARGS && pass)
4513 save_area = save_fixed_argument_area (reg_parm_stack_space, argblock,
4514 &low_to_save, &high_to_save);
4515 #endif
4517 /* Now store (and compute if necessary) all non-register parms.
4518 These come before register parms, since they can require block-moves,
4519 which could clobber the registers used for register parms.
4520 Parms which have partial registers are not stored here,
4521 but we do preallocate space here if they want that. */
4523 for (i = 0; i < num_actuals; i++)
4525 if (args[i].reg == 0 || args[i].pass_on_stack)
4527 rtx_insn *before_arg = get_last_insn ();
4529 /* We don't allow passing huge (> 2^30 B) arguments
4530 by value. It would cause an overflow later on. */
4531 if (constant_lower_bound (adjusted_args_size.constant)
4532 >= (1 << (HOST_BITS_PER_INT - 2)))
4534 sorry ("passing too large argument on stack");
4535 continue;
4538 if (store_one_arg (&args[i], argblock, flags,
4539 adjusted_args_size.var != 0,
4540 reg_parm_stack_space)
4541 || (pass == 0
4542 && check_sibcall_argument_overlap (before_arg,
4543 &args[i], 1)))
4544 sibcall_failure = 1;
4547 if (args[i].stack)
4548 call_fusage
4549 = gen_rtx_EXPR_LIST (TYPE_MODE (TREE_TYPE (args[i].tree_value)),
4550 gen_rtx_USE (VOIDmode, args[i].stack),
4551 call_fusage);
4554 /* If we have a parm that is passed in registers but not in memory
4555 and whose alignment does not permit a direct copy into registers,
4556 make a group of pseudos that correspond to each register that we
4557 will later fill. */
4558 if (STRICT_ALIGNMENT)
4559 store_unaligned_arguments_into_pseudos (args, num_actuals);
4561 /* Now store any partially-in-registers parm.
4562 This is the last place a block-move can happen. */
4563 if (reg_parm_seen)
4564 for (i = 0; i < num_actuals; i++)
4565 if (args[i].partial != 0 && ! args[i].pass_on_stack)
4567 rtx_insn *before_arg = get_last_insn ();
4569 /* On targets with weird calling conventions (e.g. PA) it's
4570 hard to ensure that all cases of argument overlap between
4571 stack and registers work. Play it safe and bail out. */
4572 if (ARGS_GROW_DOWNWARD && !STACK_GROWS_DOWNWARD)
4574 sibcall_failure = 1;
4575 break;
4578 if (store_one_arg (&args[i], argblock, flags,
4579 adjusted_args_size.var != 0,
4580 reg_parm_stack_space)
4581 || (pass == 0
4582 && check_sibcall_argument_overlap (before_arg,
4583 &args[i], 1)))
4584 sibcall_failure = 1;
4587 bool any_regs = false;
4588 for (i = 0; i < num_actuals; i++)
4589 if (args[i].reg != NULL_RTX)
4591 any_regs = true;
4592 targetm.calls.call_args (args[i].reg, funtype);
4594 if (!any_regs)
4595 targetm.calls.call_args (pc_rtx, funtype);
4597 /* Figure out the register where the value, if any, will come back. */
4598 valreg = 0;
4599 if (TYPE_MODE (rettype) != VOIDmode
4600 && ! structure_value_addr)
4602 if (pcc_struct_value)
4603 valreg = hard_function_value (build_pointer_type (rettype),
4604 fndecl, NULL, (pass == 0));
4605 else
4606 valreg = hard_function_value (rettype, fndecl, fntype,
4607 (pass == 0));
4609 /* If VALREG is a PARALLEL whose first member has a zero
4610 offset, use that. This is for targets such as m68k that
4611 return the same value in multiple places. */
4612 if (GET_CODE (valreg) == PARALLEL)
4614 rtx elem = XVECEXP (valreg, 0, 0);
4615 rtx where = XEXP (elem, 0);
4616 rtx offset = XEXP (elem, 1);
4617 if (offset == const0_rtx
4618 && GET_MODE (where) == GET_MODE (valreg))
4619 valreg = where;
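/* Illustrative RTL for the m68k-style case:

     (parallel [(expr_list (reg:SI %d0) (const_int 0))
                (expr_list (reg:SI %a0) (const_int 0))])

   Both registers hold the same value at offset zero, so VALREG is
   narrowed to plain %d0 and the rest of the expander need not care
   about the duplicate.  (Hedged sketch of the shape involved.)  */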
4623 /* If register arguments require space on the stack and stack space
4624 was not preallocated, allocate stack space here for arguments
4625 passed in registers. */
4626 if (OUTGOING_REG_PARM_STACK_SPACE ((!fndecl ? fntype : TREE_TYPE (fndecl)))
4627 && !ACCUMULATE_OUTGOING_ARGS
4628 && must_preallocate == 0 && reg_parm_stack_space > 0)
4629 anti_adjust_stack (GEN_INT (reg_parm_stack_space));
4631 /* Pass the function the address in which to return a
4632 structure value. */
4633 if (pass != 0 && structure_value_addr && ! structure_value_addr_parm)
4635 structure_value_addr
4636 = convert_memory_address (Pmode, structure_value_addr);
4637 emit_move_insn (struct_value,
4638 force_reg (Pmode,
4639 force_operand (structure_value_addr,
4640 NULL_RTX)));
4642 if (REG_P (struct_value))
4643 use_reg (&call_fusage, struct_value);
4646 after_args = get_last_insn ();
4647 funexp = prepare_call_address (fndecl ? fndecl : fntype, funexp,
4648 static_chain_value, &call_fusage,
4649 reg_parm_seen, flags);
4651 load_register_parameters (args, num_actuals, &call_fusage, flags,
4652 pass == 0, &sibcall_failure);
4654 /* Save a pointer to the last insn before the call, so that we can
4655 later safely search backwards to find the CALL_INSN. */
4656 before_call = get_last_insn ();
4658 /* Set up next argument register. For sibling calls on machines
4659 with register windows this should be the incoming register. */
4660 if (pass == 0)
4661 next_arg_reg = targetm.calls.function_incoming_arg
4662 (args_so_far, function_arg_info::end_marker ());
4663 else
4664 next_arg_reg = targetm.calls.function_arg
4665 (args_so_far, function_arg_info::end_marker ());
4667 if (pass == 1 && (return_flags & ERF_RETURNS_ARG))
4669 int arg_nr = return_flags & ERF_RETURN_ARG_MASK;
4670 arg_nr = num_actuals - arg_nr - 1;
4671 if (arg_nr >= 0
4672 && arg_nr < num_actuals
4673 && args[arg_nr].reg
4674 && valreg
4675 && REG_P (valreg)
4676 && GET_MODE (args[arg_nr].reg) == GET_MODE (valreg))
4677 call_fusage
4678 = gen_rtx_EXPR_LIST (TYPE_MODE (TREE_TYPE (args[arg_nr].tree_value)),
4679 gen_rtx_SET (valreg, args[arg_nr].reg),
4680 call_fusage);
4682 /* All arguments and registers used for the call must be set up by
4683 now! */
4685 /* Stack must be properly aligned now. */
4686 gcc_assert (!pass
4687 || multiple_p (stack_pointer_delta,
4688 preferred_unit_stack_boundary));
4690 /* Generate the actual call instruction. */
4691 emit_call_1 (funexp, exp, fndecl, funtype, unadjusted_args_size,
4692 adjusted_args_size.constant, struct_value_size,
4693 next_arg_reg, valreg, old_inhibit_defer_pop, call_fusage,
4694 flags, args_so_far);
4696 if (flag_ipa_ra)
4698 rtx_call_insn *last;
4699 rtx datum = NULL_RTX;
4700 if (fndecl != NULL_TREE)
4702 datum = XEXP (DECL_RTL (fndecl), 0);
4703 gcc_assert (datum != NULL_RTX
4704 && GET_CODE (datum) == SYMBOL_REF);
4706 last = last_call_insn ();
4707 add_reg_note (last, REG_CALL_DECL, datum);
4710 /* If the call setup or the call itself overlaps with anything
4711 of the argument setup we probably clobbered our call address.
4712 In that case we can't do sibcalls. */
4713 if (pass == 0
4714 && check_sibcall_argument_overlap (after_args, 0, 0))
4715 sibcall_failure = 1;
4717 /* If a non-BLKmode value is returned at the most significant end
4718 of a register, shift the register right by the appropriate amount
4719 and update VALREG accordingly. BLKmode values are handled by the
4720 group load/store machinery below. */
4721 if (!structure_value_addr
4722 && !pcc_struct_value
4723 && TYPE_MODE (rettype) != VOIDmode
4724 && TYPE_MODE (rettype) != BLKmode
4725 && REG_P (valreg)
4726 && targetm.calls.return_in_msb (rettype))
4728 if (shift_return_value (TYPE_MODE (rettype), false, valreg))
4729 sibcall_failure = 1;
4730 valreg = gen_rtx_REG (TYPE_MODE (rettype), REGNO (valreg));
4733 if (pass && (flags & ECF_MALLOC))
4735 rtx temp = gen_reg_rtx (GET_MODE (valreg));
4736 rtx_insn *last, *insns;
4738 /* The return value from a malloc-like function is a pointer. */
4739 if (TREE_CODE (rettype) == POINTER_TYPE)
4740 mark_reg_pointer (temp, MALLOC_ABI_ALIGNMENT);
4742 emit_move_insn (temp, valreg);
4744 /* The return value from a malloc-like function cannot alias
4745 anything else. */
4746 last = get_last_insn ();
4747 add_reg_note (last, REG_NOALIAS, temp);
4749 /* Write out the sequence. */
4750 insns = get_insns ();
4751 end_sequence ();
4752 emit_insn (insns);
4753 valreg = temp;
4756 /* For calls to `setjmp', etc., inform
4757 function.c:setjmp_warnings that it should complain if
4758 nonvolatile values are live. For functions that cannot
4759 return, inform flow that control does not fall through. */
4761 if ((flags & ECF_NORETURN) || pass == 0)
4763 /* The barrier must be emitted
4764 immediately after the CALL_INSN. Some ports emit more
4765 than just a CALL_INSN above, so we must search for it here. */
4767 rtx_insn *last = get_last_insn ();
4768 while (!CALL_P (last))
4770 last = PREV_INSN (last);
4771 /* There was no CALL_INSN? */
4772 gcc_assert (last != before_call);
4775 emit_barrier_after (last);
4777 /* Stack adjustments after a noreturn call are dead code.
4778 However when NO_DEFER_POP is in effect, we must preserve
4779 stack_pointer_delta. */
4780 if (inhibit_defer_pop == 0)
4782 stack_pointer_delta = old_stack_allocated;
4783 pending_stack_adjust = 0;
4787 /* If value type not void, return an rtx for the value. */
4789 if (TYPE_MODE (rettype) == VOIDmode
4790 || ignore)
4791 target = const0_rtx;
4792 else if (structure_value_addr)
4794 if (target == 0 || !MEM_P (target))
4796 target
4797 = gen_rtx_MEM (TYPE_MODE (rettype),
4798 memory_address (TYPE_MODE (rettype),
4799 structure_value_addr));
4800 set_mem_attributes (target, rettype, 1);
4803 else if (pcc_struct_value)
4805 /* This is the special C++ case where we need to
4806 know what the true target was. We take care to
4807 never use this value more than once in one expression. */
4808 target = gen_rtx_MEM (TYPE_MODE (rettype),
4809 copy_to_reg (valreg));
4810 set_mem_attributes (target, rettype, 1);
4812 /* Handle calls that return values in multiple non-contiguous locations.
4813 The Irix 6 ABI has examples of this. */
4814 else if (GET_CODE (valreg) == PARALLEL)
4816 if (target == 0)
4817 target = emit_group_move_into_temps (valreg);
4818 else if (rtx_equal_p (target, valreg))
4819 ;
4820 else if (GET_CODE (target) == PARALLEL)
4821 /* Handle the result of an emit_group_move_into_temps
4822 call in the previous pass. */
4823 emit_group_move (target, valreg);
4824 else
4825 emit_group_store (target, valreg, rettype,
4826 int_size_in_bytes (rettype));
4828 else if (target
4829 && GET_MODE (target) == TYPE_MODE (rettype)
4830 && GET_MODE (target) == GET_MODE (valreg))
4832 bool may_overlap = false;
4834 /* We have to copy a return value in a CLASS_LIKELY_SPILLED hard
4835 reg to a plain register. */
4836 if (!REG_P (target) || HARD_REGISTER_P (target))
4837 valreg = avoid_likely_spilled_reg (valreg);
4839 /* If TARGET is a MEM in the argument area, and we have
4840 saved part of the argument area, then we can't store
4841 directly into TARGET as it may get overwritten when we
4842 restore the argument save area below. Don't work too
4843 hard though and simply force TARGET to a register if it
4844 is a MEM; the optimizer is quite likely to sort it out. */
4845 if (ACCUMULATE_OUTGOING_ARGS && pass && MEM_P (target))
4846 for (i = 0; i < num_actuals; i++)
4847 if (args[i].save_area)
4849 may_overlap = true;
4850 break;
4853 if (may_overlap)
4854 target = copy_to_reg (valreg);
4855 else
4857 /* TARGET and VALREG cannot be equal at this point
4858 because the latter would not have
4859 REG_FUNCTION_VALUE_P true, while the former would if
4860 it were referring to the same register.
4862 If they refer to the same register, this move will be
4863 a no-op, except when function inlining is being
4864 done. */
4865 emit_move_insn (target, valreg);
4867 /* If we are setting a MEM, this code must be executed.
4868 Since it is emitted after the call insn, sibcall
4869 optimization cannot be performed in that case. */
4870 if (MEM_P (target))
4871 sibcall_failure = 1;
4874 else
4875 target = copy_to_reg (avoid_likely_spilled_reg (valreg));
4877 /* If we promoted this return value, make the proper SUBREG.
4878 TARGET might be const0_rtx here, so be careful. */
4879 if (REG_P (target)
4880 && TYPE_MODE (rettype) != BLKmode
4881 && GET_MODE (target) != TYPE_MODE (rettype))
4883 tree type = rettype;
4884 int unsignedp = TYPE_UNSIGNED (type);
4885 machine_mode pmode;
4887 /* Ensure we promote as expected, and get the new unsignedness. */
4888 pmode = promote_function_mode (type, TYPE_MODE (type), &unsignedp,
4889 funtype, 1);
4890 gcc_assert (GET_MODE (target) == pmode);
4892 poly_uint64 offset = subreg_lowpart_offset (TYPE_MODE (type),
4893 GET_MODE (target));
4894 target = gen_rtx_SUBREG (TYPE_MODE (type), target, offset);
4895 SUBREG_PROMOTED_VAR_P (target) = 1;
4896 SUBREG_PROMOTED_SET (target, unsignedp);
4899 /* If size of args is variable or this was a constructor call for a stack
4900 argument, restore saved stack-pointer value. */
4902 if (old_stack_level)
4904 rtx_insn *prev = get_last_insn ();
4906 emit_stack_restore (SAVE_BLOCK, old_stack_level);
4907 stack_pointer_delta = old_stack_pointer_delta;
4909 fixup_args_size_notes (prev, get_last_insn (), stack_pointer_delta);
4911 pending_stack_adjust = old_pending_adj;
4912 old_stack_allocated = stack_pointer_delta - pending_stack_adjust;
4913 stack_arg_under_construction = old_stack_arg_under_construction;
4914 highest_outgoing_arg_in_use = initial_highest_arg_in_use;
4915 stack_usage_map = initial_stack_usage_map;
4916 stack_usage_watermark = initial_stack_usage_watermark;
4917 sibcall_failure = 1;
4919 else if (ACCUMULATE_OUTGOING_ARGS && pass)
4921 #ifdef REG_PARM_STACK_SPACE
4922 if (save_area)
4923 restore_fixed_argument_area (save_area, argblock,
4924 high_to_save, low_to_save);
4925 #endif
4927 /* If we saved any argument areas, restore them. */
4928 for (i = 0; i < num_actuals; i++)
4929 if (args[i].save_area)
4931 machine_mode save_mode = GET_MODE (args[i].save_area);
4932 rtx stack_area
4933 = gen_rtx_MEM (save_mode,
4934 memory_address (save_mode,
4935 XEXP (args[i].stack_slot, 0)));
4937 if (save_mode != BLKmode)
4938 emit_move_insn (stack_area, args[i].save_area);
4939 else
4940 emit_block_move (stack_area, args[i].save_area,
4941 (gen_int_mode
4942 (args[i].locate.size.constant, Pmode)),
4943 BLOCK_OP_CALL_PARM);
4946 highest_outgoing_arg_in_use = initial_highest_arg_in_use;
4947 stack_usage_map = initial_stack_usage_map;
4948 stack_usage_watermark = initial_stack_usage_watermark;
4951 /* If this was alloca, record the new stack level. */
4952 if (flags & ECF_MAY_BE_ALLOCA)
4953 record_new_stack_level ();
4955 /* Free up storage we no longer need. */
4956 for (i = 0; i < num_actuals; ++i)
4957 free (args[i].aligned_regs);
4959 targetm.calls.end_call_args ();
4961 insns = get_insns ();
4962 end_sequence ();
4964 if (pass == 0)
4966 tail_call_insns = insns;
4968 /* Restore the pending stack adjustment now that we have
4969 finished generating the sibling call sequence. */
4971 restore_pending_stack_adjust (&save);
4973 /* Prepare arg structure for next iteration. */
4974 for (i = 0; i < num_actuals; i++)
4976 args[i].value = 0;
4977 args[i].aligned_regs = 0;
4978 args[i].stack = 0;
4981 sbitmap_free (stored_args_map);
4982 internal_arg_pointer_exp_state.scan_start = NULL;
4983 internal_arg_pointer_exp_state.cache.release ();
4985 else
4987 normal_call_insns = insns;
4989 /* Verify that we've deallocated all the stack we used. */
4990 gcc_assert ((flags & ECF_NORETURN)
4991 || known_eq (old_stack_allocated,
4992 stack_pointer_delta
4993 - pending_stack_adjust));
4996 /* If something prevents making this a sibling call,
4997 zero out the sequence. */
4998 if (sibcall_failure)
4999 tail_call_insns = NULL;
5000 else
5001 break;
5004 /* If tail call production succeeded, we need to remove REG_EQUIV notes on
5005 arguments too, as the argument area is now clobbered by the call. */
5006 if (tail_call_insns)
5008 emit_insn (tail_call_insns);
5009 crtl->tail_call_emit = true;
5011 else
5013 emit_insn (normal_call_insns);
5014 if (try_tail_call)
5015 /* Ideally we'd emit a message for all of the ways that it could
5016 have failed. */
5017 maybe_complain_about_tail_call (exp, "tail call production failed");
5020 currently_expanding_call--;
5022 free (stack_usage_map_buf);
5023 free (args);
5024 return target;
5027 /* A sibling call sequence invalidates any REG_EQUIV notes made for
5028 this function's incoming arguments.
5030 At the start of RTL generation we know the only REG_EQUIV notes
5031 in the rtl chain are those for incoming arguments, so we can look
5032 for REG_EQUIV notes between the start of the function and the
5033 NOTE_INSN_FUNCTION_BEG.
5035 This is (slight) overkill. We could keep track of the highest
5036 argument we clobber and be more selective in removing notes, but it
5037 does not seem to be worth the effort. */
5039 void
5040 fixup_tail_calls (void)
5042 rtx_insn *insn;
5044 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
5046 rtx note;
5048 /* There are never REG_EQUIV notes for the incoming arguments
5049 after the NOTE_INSN_FUNCTION_BEG note, so stop if we see it. */
5050 if (NOTE_P (insn)
5051 && NOTE_KIND (insn) == NOTE_INSN_FUNCTION_BEG)
5052 break;
5054 note = find_reg_note (insn, REG_EQUIV, 0);
5055 if (note)
5056 remove_note (insn, note);
5057 note = find_reg_note (insn, REG_EQUIV, 0);
5058 gcc_assert (!note);
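/* Why this matters, sketched: an incoming argument pseudo may carry a
   note such as

     (expr_list:REG_EQUIV (mem:SI (plus (reg fp) (const_int 8))))

   licensing later passes to rematerialize the pseudo from its stack
   slot.  After a sibling call has stored outgoing arguments over that
   slot, doing so would read clobbered memory -- hence the wholesale
   removal above.  */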
5062 /* Traverse a list of TYPES and expand all complex types into their
5063 components. */
5064 static tree
5065 split_complex_types (tree types)
5067 tree p;
5069 /* Before allocating memory, check for the common case of no complex types. */
5070 for (p = types; p; p = TREE_CHAIN (p))
5072 tree type = TREE_VALUE (p);
5073 if (TREE_CODE (type) == COMPLEX_TYPE
5074 && targetm.calls.split_complex_arg (type))
5075 goto found;
5077 return types;
5079 found:
5080 types = copy_list (types);
5082 for (p = types; p; p = TREE_CHAIN (p))
5084 tree complex_type = TREE_VALUE (p);
5086 if (TREE_CODE (complex_type) == COMPLEX_TYPE
5087 && targetm.calls.split_complex_arg (complex_type))
5089 tree next, imag;
5091 /* Rewrite complex type with component type. */
5092 TREE_VALUE (p) = TREE_TYPE (complex_type);
5093 next = TREE_CHAIN (p);
5095 /* Add another component type for the imaginary part. */
5096 imag = build_tree_list (NULL_TREE, TREE_VALUE (p));
5097 TREE_CHAIN (p) = imag;
5098 TREE_CHAIN (imag) = next;
5100 /* Skip the newly created node. */
5101 p = TREE_CHAIN (p);
5105 return types;
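/* For example, assuming the target splits complex floats, the list
   (float, _Complex float, void) is rewritten as (float, float, float,
   void): the COMPLEX_TYPE node is overwritten with its component type
   and one extra node is spliced in for the imaginary part.  */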
5108 /* Output a library call to function ORGFUN (a SYMBOL_REF rtx)
5109 for a value of mode OUTMODE,
5110 with NARGS different arguments, passed as ARGS.
5111 Store the return value if RETVAL is nonzero: store it in VALUE if
5112 VALUE is nonnull, otherwise pick a convenient location. In either
5113 case return the location of the stored value.
5115 FN_TYPE should be LCT_NORMAL for `normal' calls, LCT_CONST for
5116 `const' calls, LCT_PURE for `pure' calls, or another LCT_ value for
5117 other types of library calls. */
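/* A typical use goes through the variadic wrappers declared in rtl.h,
   roughly (a hedged sketch; the exact overloads live there):

     rtx res = emit_library_call_value (libfunc, NULL_RTX, LCT_NORMAL,
                                        SImode, op0, SImode, op1, SImode);

   which packs each (rtx, mode) pair into the ARGS array handled
   below.  */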
5119 rtx
5120 emit_library_call_value_1 (int retval, rtx orgfun, rtx value,
5121 enum libcall_type fn_type,
5122 machine_mode outmode, int nargs, rtx_mode_t *args)
5124 /* Total size in bytes of all the stack-parms scanned so far. */
5125 struct args_size args_size;
5126 /* Size of arguments before any adjustments (such as rounding). */
5127 struct args_size original_args_size;
5128 int argnum;
5129 rtx fun;
5130 /* TODO: choose the correct decl type of orgfun. Sadly this information
5131 isn't present here, so we default to the native calling ABI. */
5132 tree fndecl ATTRIBUTE_UNUSED = NULL_TREE; /* Library calls default to the host calling ABI? */
5133 tree fntype ATTRIBUTE_UNUSED = NULL_TREE; /* Library calls default to the host calling ABI? */
5134 int count;
5135 rtx argblock = 0;
5136 CUMULATIVE_ARGS args_so_far_v;
5137 cumulative_args_t args_so_far;
5138 struct arg
5140 rtx value;
5141 machine_mode mode;
5142 rtx reg;
5143 int partial;
5144 struct locate_and_pad_arg_data locate;
5145 rtx save_area;
5147 struct arg *argvec;
5148 int old_inhibit_defer_pop = inhibit_defer_pop;
5149 rtx call_fusage = 0;
5150 rtx mem_value = 0;
5151 rtx valreg;
5152 int pcc_struct_value = 0;
5153 poly_int64 struct_value_size = 0;
5154 int flags;
5155 int reg_parm_stack_space = 0;
5156 poly_int64 needed;
5157 rtx_insn *before_call;
5158 bool have_push_fusage;
5159 tree tfom; /* type_for_mode (outmode, 0) */
5161 #ifdef REG_PARM_STACK_SPACE
5162 /* Define the boundary of the register parm stack space that needs to be
5163 saved, if any. */
5164 int low_to_save = 0, high_to_save = 0;
5165 rtx save_area = 0; /* Place that it is saved. */
5166 #endif
5168 /* Stack-usage bookkeeping state, saved so it can be restored on exit. */
5169 unsigned int initial_highest_arg_in_use = highest_outgoing_arg_in_use;
5170 char *initial_stack_usage_map = stack_usage_map;
5171 unsigned HOST_WIDE_INT initial_stack_usage_watermark = stack_usage_watermark;
5172 char *stack_usage_map_buf = NULL;
5174 rtx struct_value = targetm.calls.struct_value_rtx (0, 0);
5176 #ifdef REG_PARM_STACK_SPACE
5177 reg_parm_stack_space = REG_PARM_STACK_SPACE ((tree) 0);
5178 #endif
5180 /* By default, library functions cannot throw. */
5181 flags = ECF_NOTHROW;
5183 switch (fn_type)
5185 case LCT_NORMAL:
5186 break;
5187 case LCT_CONST:
5188 flags |= ECF_CONST;
5189 break;
5190 case LCT_PURE:
5191 flags |= ECF_PURE;
5192 break;
5193 case LCT_NORETURN:
5194 flags |= ECF_NORETURN;
5195 break;
5196 case LCT_THROW:
5197 flags &= ~ECF_NOTHROW;
5198 break;
5199 case LCT_RETURNS_TWICE:
5200 flags = ECF_RETURNS_TWICE;
5201 break;
5203 fun = orgfun;
5205 /* Ensure current function's preferred stack boundary is at least
5206 what we need. */
5207 if (crtl->preferred_stack_boundary < PREFERRED_STACK_BOUNDARY)
5208 crtl->preferred_stack_boundary = PREFERRED_STACK_BOUNDARY;
5210 /* If this kind of value comes back in memory,
5211 decide where in memory it should come back. */
5212 if (outmode != VOIDmode)
5214 tfom = lang_hooks.types.type_for_mode (outmode, 0);
5215 if (aggregate_value_p (tfom, 0))
5217 #ifdef PCC_STATIC_STRUCT_RETURN
5218 rtx pointer_reg
5219 = hard_function_value (build_pointer_type (tfom), 0, 0, 0);
5220 mem_value = gen_rtx_MEM (outmode, pointer_reg);
5221 pcc_struct_value = 1;
5222 if (value == 0)
5223 value = gen_reg_rtx (outmode);
5224 #else /* not PCC_STATIC_STRUCT_RETURN */
5225 struct_value_size = GET_MODE_SIZE (outmode);
5226 if (value != 0 && MEM_P (value))
5227 mem_value = value;
5228 else
5229 mem_value = assign_temp (tfom, 1, 1);
5230 #endif
5231 /* This call returns a big structure. */
5232 flags &= ~(ECF_CONST | ECF_PURE | ECF_LOOPING_CONST_OR_PURE);
5235 else
5236 tfom = void_type_node;
5238 /* ??? Unfinished: must pass the memory address as an argument. */
5240 /* Copy all the libcall-arguments out of the varargs data
5241 and into a vector ARGVEC.
5243 Compute how to pass each argument. We only support a very small subset
5244 of the full argument passing conventions to limit complexity here since
5245 library functions shouldn't have many args. */
5247 argvec = XALLOCAVEC (struct arg, nargs + 1);
5248 memset (argvec, 0, (nargs + 1) * sizeof (struct arg));
5250 #ifdef INIT_CUMULATIVE_LIBCALL_ARGS
5251 INIT_CUMULATIVE_LIBCALL_ARGS (args_so_far_v, outmode, fun);
5252 #else
5253 INIT_CUMULATIVE_ARGS (args_so_far_v, NULL_TREE, fun, 0, nargs);
5254 #endif
5255 args_so_far = pack_cumulative_args (&args_so_far_v);
5257 args_size.constant = 0;
5258 args_size.var = 0;
5260 count = 0;
5262 push_temp_slots ();
5264 /* If there's a structure value address to be passed,
5265 either pass it in the special place, or pass it as an extra argument. */
5266 if (mem_value && struct_value == 0 && ! pcc_struct_value)
5268 rtx addr = XEXP (mem_value, 0);
5270 nargs++;
5272 /* Make sure it is a reasonable operand for a move or push insn. */
5273 if (!REG_P (addr) && !MEM_P (addr)
5274 && !(CONSTANT_P (addr)
5275 && targetm.legitimate_constant_p (Pmode, addr)))
5276 addr = force_operand (addr, NULL_RTX);
5278 argvec[count].value = addr;
5279 argvec[count].mode = Pmode;
5280 argvec[count].partial = 0;
5282 function_arg_info ptr_arg (Pmode, /*named=*/true);
5283 argvec[count].reg = targetm.calls.function_arg (args_so_far, ptr_arg);
5284 gcc_assert (targetm.calls.arg_partial_bytes (args_so_far, ptr_arg) == 0);
5286 locate_and_pad_parm (Pmode, NULL_TREE,
5287 #ifdef STACK_PARMS_IN_REG_PARM_AREA
5288 1,
5289 #else
5290 argvec[count].reg != 0,
5291 #endif
5292 reg_parm_stack_space, 0,
5293 NULL_TREE, &args_size, &argvec[count].locate);
5295 if (argvec[count].reg == 0 || argvec[count].partial != 0
5296 || reg_parm_stack_space > 0)
5297 args_size.constant += argvec[count].locate.size.constant;
5299 targetm.calls.function_arg_advance (args_so_far, ptr_arg);
5301 count++;
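/* Added annotation (not in the original source): when the target has no
   dedicated struct-value register, the address of MEM_VALUE has just
   been queued as an ordinary leading pointer argument; this is the
   hidden return slot for an aggregate libcall result.  */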
5304 for (unsigned int i = 0; count < nargs; i++, count++)
5306 rtx val = args[i].first;
5307 function_arg_info arg (args[i].second, /*named=*/true);
5308 int unsigned_p = 0;
5310 /* We cannot convert the arg value to the mode the library wants here;
5311 must do it earlier where we know the signedness of the arg. */
5312 gcc_assert (arg.mode != BLKmode
5313 && (GET_MODE (val) == arg.mode
5314 || GET_MODE (val) == VOIDmode));
5316 /* Make sure it is a reasonable operand for a move or push insn. */
5317 if (!REG_P (val) && !MEM_P (val)
5318 && !(CONSTANT_P (val)
5319 && targetm.legitimate_constant_p (arg.mode, val)))
5320 val = force_operand (val, NULL_RTX);
5322 if (pass_by_reference (&args_so_far_v, arg))
5324 rtx slot;
5325 int must_copy = !reference_callee_copied (&args_so_far_v, arg);
5327 /* If this was a CONST function, it is now PURE since it now
5328 reads memory. */
5329 if (flags & ECF_CONST)
5331 flags &= ~ECF_CONST;
5332 flags |= ECF_PURE;
5335 if (MEM_P (val) && !must_copy)
5337 tree val_expr = MEM_EXPR (val);
5338 if (val_expr)
5339 mark_addressable (val_expr);
5340 slot = val;
5342 else
5344 slot = assign_temp (lang_hooks.types.type_for_mode (arg.mode, 0),
5345 1, 1);
5346 emit_move_insn (slot, val);
5349 call_fusage = gen_rtx_EXPR_LIST (VOIDmode,
5350 gen_rtx_USE (VOIDmode, slot),
5351 call_fusage);
5352 if (must_copy)
5353 call_fusage = gen_rtx_EXPR_LIST (VOIDmode,
5354 gen_rtx_CLOBBER (VOIDmode,
5355 slot),
5356 call_fusage);
5358 arg.mode = Pmode;
5359 arg.pass_by_reference = true;
5360 val = force_operand (XEXP (slot, 0), NULL_RTX);
5363 arg.mode = promote_function_mode (NULL_TREE, arg.mode, &unsigned_p,
5364 NULL_TREE, 0);
5365 argvec[count].mode = arg.mode;
5366 argvec[count].value = convert_modes (arg.mode, GET_MODE (val), val,
5367 unsigned_p);
5368 argvec[count].reg = targetm.calls.function_arg (args_so_far, arg);
5370 argvec[count].partial
5371 = targetm.calls.arg_partial_bytes (args_so_far, arg);
5373 if (argvec[count].reg == 0
5374 || argvec[count].partial != 0
5375 || reg_parm_stack_space > 0)
5377 locate_and_pad_parm (arg.mode, NULL_TREE,
5378 #ifdef STACK_PARMS_IN_REG_PARM_AREA
5379 1,
5380 #else
5381 argvec[count].reg != 0,
5382 #endif
5383 reg_parm_stack_space, argvec[count].partial,
5384 NULL_TREE, &args_size, &argvec[count].locate);
5385 args_size.constant += argvec[count].locate.size.constant;
5386 gcc_assert (!argvec[count].locate.size.var);
5388 #ifdef BLOCK_REG_PADDING
5389 else
5390 /* The argument is passed entirely in registers. See at which
5391 end it should be padded. */
5392 argvec[count].locate.where_pad =
5393 BLOCK_REG_PADDING (arg.mode, NULL_TREE,
5394 known_le (GET_MODE_SIZE (arg.mode),
5395 UNITS_PER_WORD));
5396 #endif
5398 targetm.calls.function_arg_advance (args_so_far, arg);
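/* Added annotation (not in the original source): each ARGVEC entry now
   records one argument's (possibly promoted) mode and value, the
   register carrying it (if any), how many bytes of a split argument
   travel in registers (PARTIAL), and its stack-slot geometry
   (LOCATE).  */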
5401 for (int i = 0; i < nargs; i++)
5402 if (reg_parm_stack_space > 0
5403 || argvec[i].reg == 0
5404 || argvec[i].partial != 0)
5405 update_stack_alignment_for_call (&argvec[i].locate);
5407 /* If this machine requires an external definition for library
5408 functions, write one out. */
5409 assemble_external_libcall (fun);
5411 original_args_size = args_size;
5412 args_size.constant = (aligned_upper_bound (args_size.constant
5413 + stack_pointer_delta,
5414 STACK_BYTES)
5415 - stack_pointer_delta);
5417 args_size.constant = upper_bound (args_size.constant,
5418 reg_parm_stack_space);
5420 if (! OUTGOING_REG_PARM_STACK_SPACE ((!fndecl ? fntype : TREE_TYPE (fndecl))))
5421 args_size.constant -= reg_parm_stack_space;
5423 crtl->outgoing_args_size = upper_bound (crtl->outgoing_args_size,
5424 args_size.constant);
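/* Added annotation (not in the original source): a worked instance of
   the rounding above, assuming STACK_BYTES == 16 and a zero
   stack_pointer_delta: 20 bytes of argument data round up to 32, so
   the pushes leave the stack pointer aligned at the call site.  */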
5426 if (flag_stack_usage_info && !ACCUMULATE_OUTGOING_ARGS)
5428 poly_int64 pushed = args_size.constant + pending_stack_adjust;
5429 current_function_pushed_stack_size
5430 = upper_bound (current_function_pushed_stack_size, pushed);
5433 if (ACCUMULATE_OUTGOING_ARGS)
5435 /* Since the stack pointer will never be pushed, it is possible for
5436 the evaluation of a parm to clobber something we have already
5437 written to the stack. Since most function calls on RISC machines
5438 do not use the stack, this is uncommon, but must work correctly.
5440 Therefore, we save any area of the stack that was already written
5441 and that we are using. Here we set up to do this by making a new
5442 stack usage map from the old one.
5444 Another approach might be to try to reorder the argument
5445 evaluations to avoid this conflicting stack usage. */
5447 needed = args_size.constant;
5449 /* Since we will be writing into the entire argument area, the
5450 map must be allocated for its entire size, not just the part that
5451 is the responsibility of the caller. */
5452 if (! OUTGOING_REG_PARM_STACK_SPACE ((!fndecl ? fntype : TREE_TYPE (fndecl))))
5453 needed += reg_parm_stack_space;
5455 poly_int64 limit = needed;
5456 if (ARGS_GROW_DOWNWARD)
5457 limit += 1;
5459 /* For polynomial sizes, this is the maximum possible size needed
5460 for arguments with a constant size and offset. */
5461 HOST_WIDE_INT const_limit = constant_lower_bound (limit);
5462 highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
5463 const_limit);
5465 stack_usage_map_buf = XNEWVEC (char, highest_outgoing_arg_in_use);
5466 stack_usage_map = stack_usage_map_buf;
5468 if (initial_highest_arg_in_use)
5469 memcpy (stack_usage_map, initial_stack_usage_map,
5470 initial_highest_arg_in_use);
5472 if (initial_highest_arg_in_use != highest_outgoing_arg_in_use)
5473 memset (&stack_usage_map[initial_highest_arg_in_use], 0,
5474 highest_outgoing_arg_in_use - initial_highest_arg_in_use);
5475 needed = 0;
5477 /* We must be careful to use virtual regs before they're instantiated,
5478 and real regs afterwards. Loop optimization, for example, can create
5479 new libcalls after we've instantiated the virtual regs, and if we
5480 use virtuals anyway, they won't match the rtl patterns. */
5482 if (virtuals_instantiated)
5483 argblock = plus_constant (Pmode, stack_pointer_rtx,
5484 STACK_POINTER_OFFSET);
5485 else
5486 argblock = virtual_outgoing_args_rtx;
5488 else
5490 if (!PUSH_ARGS)
5491 argblock = push_block (gen_int_mode (args_size.constant, Pmode), 0, 0);
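/* Added annotation (not in the original source): ARGBLOCK is the base
   of the outgoing-argument area.  Under ACCUMULATE_OUTGOING_ARGS it is
   the preallocated block (the virtual outgoing-args register before
   virtual registers are instantiated, an explicit stack-pointer offset
   afterwards); without push instructions it is a block carved out by
   push_block; otherwise it stays 0 and each argument is pushed
   individually.  */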
5494 /* We push args individually in reverse order, perform stack alignment
5495 before the first push (the last arg). */
5496 if (argblock == 0)
5497 anti_adjust_stack (gen_int_mode (args_size.constant
5498 - original_args_size.constant,
5499 Pmode));
5501 argnum = nargs - 1;
5503 #ifdef REG_PARM_STACK_SPACE
5504 if (ACCUMULATE_OUTGOING_ARGS)
5506 /* The argument list is the property of the called routine and it
5507 may clobber it. If the fixed area has been used for previous
5508 parameters, we must save and restore it. */
5509 save_area = save_fixed_argument_area (reg_parm_stack_space, argblock,
5510 &low_to_save, &high_to_save);
5512 #endif
5514 /* When expanding a normal call, args are stored in push order,
5515 which is the reverse of what we have here. */
5516 bool any_regs = false;
5517 for (int i = nargs; i-- > 0; )
5518 if (argvec[i].reg != NULL_RTX)
5520 targetm.calls.call_args (argvec[i].reg, NULL_TREE);
5521 any_regs = true;
5523 if (!any_regs)
5524 targetm.calls.call_args (pc_rtx, NULL_TREE);
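/* Added annotation (not in the original source): the call_args hook
   lets the target inspect each argument register before any argument
   is stored; if no argument is passed in a register, PC_RTX is passed
   once as a stand-in so the hook still fires for the call.  */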
5526 /* Push the args that need to be pushed. */
5528 have_push_fusage = false;
5530 /* ARGNUM indexes the ARGVEC array in the order in which the arguments
5531 are to be pushed. */
5532 for (count = 0; count < nargs; count++, argnum--)
5534 machine_mode mode = argvec[argnum].mode;
5535 rtx val = argvec[argnum].value;
5536 rtx reg = argvec[argnum].reg;
5537 int partial = argvec[argnum].partial;
5538 unsigned int parm_align = argvec[argnum].locate.boundary;
5539 poly_int64 lower_bound = 0, upper_bound = 0;
5541 if (! (reg != 0 && partial == 0))
5543 rtx use;
5545 if (ACCUMULATE_OUTGOING_ARGS)
5547 /* If this is being stored into a pre-allocated, fixed-size,
5548 stack area, save any previous data at that location. */
5550 if (ARGS_GROW_DOWNWARD)
5552 /* stack_slot is negative, but we want to index stack_usage_map
5553 with positive values. */
5554 upper_bound = -argvec[argnum].locate.slot_offset.constant + 1;
5555 lower_bound = upper_bound - argvec[argnum].locate.size.constant;
5557 else
5559 lower_bound = argvec[argnum].locate.slot_offset.constant;
5560 upper_bound = lower_bound + argvec[argnum].locate.size.constant;
5563 if (stack_region_maybe_used_p (lower_bound, upper_bound,
5564 reg_parm_stack_space))
5566 /* We need to make a save area. */
5567 poly_uint64 size
5568 = argvec[argnum].locate.size.constant * BITS_PER_UNIT;
5569 machine_mode save_mode
5570 = int_mode_for_size (size, 1).else_blk ();
5571 rtx adr
5572 = plus_constant (Pmode, argblock,
5573 argvec[argnum].locate.offset.constant);
5574 rtx stack_area
5575 = gen_rtx_MEM (save_mode, memory_address (save_mode, adr));
5577 if (save_mode == BLKmode)
5579 argvec[argnum].save_area
5580 = assign_stack_temp (BLKmode,
5581 argvec[argnum].locate.size.constant);
5584 emit_block_move (validize_mem
5585 (copy_rtx (argvec[argnum].save_area)),
5586 stack_area,
5587 (gen_int_mode
5588 (argvec[argnum].locate.size.constant,
5589 Pmode)),
5590 BLOCK_OP_CALL_PARM);
5592 else
5594 argvec[argnum].save_area = gen_reg_rtx (save_mode);
5596 emit_move_insn (argvec[argnum].save_area, stack_area);
5601 emit_push_insn (val, mode, NULL_TREE, NULL_RTX, parm_align,
5602 partial, reg, 0, argblock,
5603 (gen_int_mode
5604 (argvec[argnum].locate.offset.constant, Pmode)),
5605 reg_parm_stack_space,
5606 ARGS_SIZE_RTX (argvec[argnum].locate.alignment_pad), false);
5608 /* Now mark the segment we just used. */
5609 if (ACCUMULATE_OUTGOING_ARGS)
5610 mark_stack_region_used (lower_bound, upper_bound);
5612 NO_DEFER_POP;
5614 /* Indicate argument access so that alias.c knows that these
5615 values are live. */
5616 if (argblock)
5617 use = plus_constant (Pmode, argblock,
5618 argvec[argnum].locate.offset.constant);
5619 else if (have_push_fusage)
5620 continue;
5621 else
5623 /* When arguments are pushed, trying to tell alias.c where
5624 exactly this argument is won't work, because the
5625 auto-increment causes confusion. So we merely indicate
5626 that we access something with a known mode somewhere on
5627 the stack. */
5628 use = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
5629 gen_rtx_SCRATCH (Pmode));
5630 have_push_fusage = true;
5632 use = gen_rtx_MEM (argvec[argnum].mode, use);
5633 use = gen_rtx_USE (VOIDmode, use);
5634 call_fusage = gen_rtx_EXPR_LIST (VOIDmode, use, call_fusage);
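/* Added annotation (not in the original source): CALL_FUSAGE collects
   the USE (and CLOBBER) expressions that will be attached to the
   CALL_INSN as CALL_INSN_FUNCTION_USAGE, telling dataflow and alias
   analysis which registers and stack slots the callee reads.  */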
5638 argnum = nargs - 1;
5640 fun = prepare_call_address (NULL, fun, NULL, &call_fusage, 0, 0);
5642 /* Now load any reg parms into their regs. */
5644 /* ARGNUM indexes the ARGVEC array in the order in which the arguments
5645 are to be pushed. */
5646 for (count = 0; count < nargs; count++, argnum--)
5648 machine_mode mode = argvec[argnum].mode;
5649 rtx val = argvec[argnum].value;
5650 rtx reg = argvec[argnum].reg;
5651 int partial = argvec[argnum].partial;
5653 /* Handle calls that pass values in multiple non-contiguous
5654 locations. The PA64 has examples of this for library calls. */
5655 if (reg != 0 && GET_CODE (reg) == PARALLEL)
5656 emit_group_load (reg, val, NULL_TREE, GET_MODE_SIZE (mode));
5657 else if (reg != 0 && partial == 0)
5659 emit_move_insn (reg, val);
5660 #ifdef BLOCK_REG_PADDING
5661 poly_int64 size = GET_MODE_SIZE (argvec[argnum].mode);
5663 /* Copied from load_register_parameters. */
5665 /* Handle the case where we have a value that needs shifting
5666 up to the msb, e.g. a QImode value that we're padding
5667 upward on a BYTES_BIG_ENDIAN machine. */
5668 if (known_lt (size, UNITS_PER_WORD)
5669 && (argvec[argnum].locate.where_pad
5670 == (BYTES_BIG_ENDIAN ? PAD_UPWARD : PAD_DOWNWARD)))
5672 rtx x;
5673 poly_int64 shift = (UNITS_PER_WORD - size) * BITS_PER_UNIT;
5675 /* Assigning REG here rather than a temp makes CALL_FUSAGE
5676 report the whole reg as used. Strictly speaking, the
5677 call only uses SIZE bytes at the msb end, but it doesn't
5678 seem worth generating rtl to say that. */
5679 reg = gen_rtx_REG (word_mode, REGNO (reg));
5680 x = expand_shift (LSHIFT_EXPR, word_mode, reg, shift, reg, 1);
5681 if (x != reg)
5682 emit_move_insn (reg, x);
5684 #endif
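/* Added annotation (not in the original source): a concrete instance of
   the shift above, assuming 4-byte words and a BYTES_BIG_ENDIAN target
   that pads small values upward: a QImode argument 0x12 sitting in the
   low byte of its word is shifted left by (4 - 1) * 8 = 24 bits, so
   the callee finds it at the most significant end of the register
   (0x12000000).  */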
5687 NO_DEFER_POP;
5690 /* Any regs containing parms remain in use through the call. */
5691 for (count = 0; count < nargs; count++)
5693 rtx reg = argvec[count].reg;
5694 if (reg != 0 && GET_CODE (reg) == PARALLEL)
5695 use_group_regs (&call_fusage, reg);
5696 else if (reg != 0)
5698 int partial = argvec[count].partial;
5699 if (partial)
5701 int nregs;
5702 gcc_assert (partial % UNITS_PER_WORD == 0);
5703 nregs = partial / UNITS_PER_WORD;
5704 use_regs (&call_fusage, REGNO (reg), nregs);
5706 else
5707 use_reg (&call_fusage, reg);
5711 /* Pass the function the address in which to return a structure value. */
5712 if (mem_value != 0 && struct_value != 0 && ! pcc_struct_value)
5714 emit_move_insn (struct_value,
5715 force_reg (Pmode,
5716 force_operand (XEXP (mem_value, 0),
5717 NULL_RTX)));
5718 if (REG_P (struct_value))
5719 use_reg (&call_fusage, struct_value);
5722 /* Don't allow popping to be deferred, since then
5723 cse'ing of library calls could delete a call and leave the pop. */
5724 NO_DEFER_POP;
5725 valreg = (mem_value == 0 && outmode != VOIDmode
5726 ? hard_libcall_value (outmode, orgfun) : NULL_RTX);
5728 /* Stack must be properly aligned now. */
5729 gcc_assert (multiple_p (stack_pointer_delta,
5730 PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT));
5732 before_call = get_last_insn ();
5734 if (flag_callgraph_info)
5735 record_final_call (SYMBOL_REF_DECL (orgfun), UNKNOWN_LOCATION);
5737 /* We pass the old value of inhibit_defer_pop + 1 to emit_call_1, which
5738 will set inhibit_defer_pop to that value. */
5739 /* The return type is needed to decide how many bytes the function pops.
5740 Signedness plays no role in that, so for simplicity, we pretend it's
5741 always signed. We also assume that the list of arguments passed has
5742 no impact, so we pretend it is unknown. */
5744 emit_call_1 (fun, NULL,
5745 get_identifier (XSTR (orgfun, 0)),
5746 build_function_type (tfom, NULL_TREE),
5747 original_args_size.constant, args_size.constant,
5748 struct_value_size,
5749 targetm.calls.function_arg (args_so_far,
5750 function_arg_info::end_marker ()),
5751 valreg,
5752 old_inhibit_defer_pop + 1, call_fusage, flags, args_so_far);
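/* Added annotation (not in the original source): emit_call_1 is the
   same back end used by expand_call.  A libcall has no FUNCTION_DECL,
   so a function type is fabricated from TFOM above purely so that the
   callee-pops computation sees the correct return type.  */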
5754 if (flag_ipa_ra)
5756 rtx datum = orgfun;
5757 gcc_assert (GET_CODE (datum) == SYMBOL_REF);
5758 rtx_call_insn *last = last_call_insn ();
5759 add_reg_note (last, REG_CALL_DECL, datum);
5762 /* Right-shift returned value if necessary. */
5763 if (!pcc_struct_value
5764 && TYPE_MODE (tfom) != BLKmode
5765 && targetm.calls.return_in_msb (tfom))
5767 shift_return_value (TYPE_MODE (tfom), false, valreg);
5768 valreg = gen_rtx_REG (TYPE_MODE (tfom), REGNO (valreg));
5771 targetm.calls.end_call_args ();
5773 /* For calls to `setjmp', etc., inform function.c:setjmp_warnings
5774 that it should complain if nonvolatile values are live. For
5775 functions that cannot return, inform flow that control does not
5776 fall through. */
5777 if (flags & ECF_NORETURN)
5779 /* The barrier must be emitted
5780 immediately after the CALL_INSN. Some ports emit more than
5781 just a CALL_INSN above, so we must search for it here. */
5782 rtx_insn *last = get_last_insn ();
5783 while (!CALL_P (last))
5785 last = PREV_INSN (last);
5786 /* There was no CALL_INSN? */
5787 gcc_assert (last != before_call);
5790 emit_barrier_after (last);
5793 /* Consider that "regular" libcalls, i.e. all of them except for LCT_THROW
5794 and LCT_RETURNS_TWICE, cannot perform non-local gotos. */
5795 if (flags & ECF_NOTHROW)
5797 rtx_insn *last = get_last_insn ();
5798 while (!CALL_P (last))
5800 last = PREV_INSN (last);
5801 /* There was no CALL_INSN? */
5802 gcc_assert (last != before_call);
5805 make_reg_eh_region_note_nothrow_nononlocal (last);
5808 /* Now restore inhibit_defer_pop to its actual original value. */
5809 OK_DEFER_POP;
5811 pop_temp_slots ();
5813 /* Copy the value to the right place. */
5814 if (outmode != VOIDmode && retval)
5816 if (mem_value)
5818 if (value == 0)
5819 value = mem_value;
5820 if (value != mem_value)
5821 emit_move_insn (value, mem_value);
5823 else if (GET_CODE (valreg) == PARALLEL)
5825 if (value == 0)
5826 value = gen_reg_rtx (outmode);
5827 emit_group_store (value, valreg, NULL_TREE, GET_MODE_SIZE (outmode));
5829 else
5831 /* Convert to the proper mode if a promotion has been active. */
5832 if (GET_MODE (valreg) != outmode)
5834 int unsignedp = TYPE_UNSIGNED (tfom);
5836 gcc_assert (promote_function_mode (tfom, outmode, &unsignedp,
5837 fndecl ? TREE_TYPE (fndecl) : fntype, 1)
5838 == GET_MODE (valreg));
5839 valreg = convert_modes (outmode, GET_MODE (valreg), valreg, 0);
5842 if (value != 0)
5843 emit_move_insn (value, valreg);
5844 else
5845 value = valreg;
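/* Added annotation (not in the original source): the three branches
   above cover an aggregate result delivered through MEM_VALUE, a
   result scattered over several registers (a PARALLEL), and the common
   single-register return, which may still need a mode conversion when
   the ABI promoted the return value.  */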
5849 if (ACCUMULATE_OUTGOING_ARGS)
5851 #ifdef REG_PARM_STACK_SPACE
5852 if (save_area)
5853 restore_fixed_argument_area (save_area, argblock,
5854 high_to_save, low_to_save);
5855 #endif
5857 /* If we saved any argument areas, restore them. */
5858 for (count = 0; count < nargs; count++)
5859 if (argvec[count].save_area)
5861 machine_mode save_mode = GET_MODE (argvec[count].save_area);
5862 rtx adr = plus_constant (Pmode, argblock,
5863 argvec[count].locate.offset.constant);
5864 rtx stack_area = gen_rtx_MEM (save_mode,
5865 memory_address (save_mode, adr));
5867 if (save_mode == BLKmode)
5868 emit_block_move (stack_area,
5869 validize_mem
5870 (copy_rtx (argvec[count].save_area)),
5871 (gen_int_mode
5872 (argvec[count].locate.size.constant, Pmode)),
5873 BLOCK_OP_CALL_PARM);
5874 else
5875 emit_move_insn (stack_area, argvec[count].save_area);
5878 highest_outgoing_arg_in_use = initial_highest_arg_in_use;
5879 stack_usage_map = initial_stack_usage_map;
5880 stack_usage_watermark = initial_stack_usage_watermark;
5883 free (stack_usage_map_buf);
5885 return value;
5890 /* Store a single argument for a function call
5891 into the register or memory area where it must be passed.
5892 *ARG describes the argument value and where to pass it.
5894 ARGBLOCK is the address of the stack-block for all the arguments,
5895 or 0 on a machine where arguments are pushed individually.
5897 MAY_BE_ALLOCA nonzero says this could be a call to `alloca'
5898 so we must be careful about how the stack is used.
5900 VARIABLE_SIZE nonzero says that this was a variable-sized outgoing
5901 argument stack. This is used, if ACCUMULATE_OUTGOING_ARGS is set, to indicate
5902 that we need not worry about saving and restoring the stack.
5904 FNDECL is the declaration of the function we are calling.
5906 Return nonzero if this arg should cause sibcall failure,
5907 zero otherwise. */
5909 static int
5910 store_one_arg (struct arg_data *arg, rtx argblock, int flags,
5911 int variable_size ATTRIBUTE_UNUSED, int reg_parm_stack_space)
5913 tree pval = arg->tree_value;
5914 rtx reg = 0;
5915 int partial = 0;
5916 poly_int64 used = 0;
5917 poly_int64 lower_bound = 0, upper_bound = 0;
5918 int sibcall_failure = 0;
5920 if (TREE_CODE (pval) == ERROR_MARK)
5921 return 1;
5923 /* Push a new temporary level for any temporaries we make for
5924 this argument. */
5925 push_temp_slots ();
5927 if (ACCUMULATE_OUTGOING_ARGS && !(flags & ECF_SIBCALL))
5929 /* If this is being stored into a pre-allocated, fixed-size, stack area,
5930 save any previous data at that location. */
5931 if (argblock && ! variable_size && arg->stack)
5933 if (ARGS_GROW_DOWNWARD)
5935 /* stack_slot is negative, but we want to index stack_usage_map
5936 with positive values. */
5937 if (GET_CODE (XEXP (arg->stack_slot, 0)) == PLUS)
5939 rtx offset = XEXP (XEXP (arg->stack_slot, 0), 1);
5940 upper_bound = -rtx_to_poly_int64 (offset) + 1;
5942 else
5943 upper_bound = 0;
5945 lower_bound = upper_bound - arg->locate.size.constant;
5947 else
5949 if (GET_CODE (XEXP (arg->stack_slot, 0)) == PLUS)
5951 rtx offset = XEXP (XEXP (arg->stack_slot, 0), 1);
5952 lower_bound = rtx_to_poly_int64 (offset);
5954 else
5955 lower_bound = 0;
5957 upper_bound = lower_bound + arg->locate.size.constant;
5960 if (stack_region_maybe_used_p (lower_bound, upper_bound,
5961 reg_parm_stack_space))
5963 /* We need to make a save area. */
5964 poly_uint64 size = arg->locate.size.constant * BITS_PER_UNIT;
5965 machine_mode save_mode
5966 = int_mode_for_size (size, 1).else_blk ();
5967 rtx adr = memory_address (save_mode, XEXP (arg->stack_slot, 0));
5968 rtx stack_area = gen_rtx_MEM (save_mode, adr);
5970 if (save_mode == BLKmode)
5972 arg->save_area
5973 = assign_temp (TREE_TYPE (arg->tree_value), 1, 1);
5974 preserve_temp_slots (arg->save_area);
5975 emit_block_move (validize_mem (copy_rtx (arg->save_area)),
5976 stack_area,
5977 (gen_int_mode
5978 (arg->locate.size.constant, Pmode)),
5979 BLOCK_OP_CALL_PARM);
5981 else
5983 arg->save_area = gen_reg_rtx (save_mode);
5984 emit_move_insn (arg->save_area, stack_area);
5990 /* If this isn't going to be placed on both the stack and in registers,
5991 set up the register and number of words. */
5992 if (! arg->pass_on_stack)
5994 if (flags & ECF_SIBCALL)
5995 reg = arg->tail_call_reg;
5996 else
5997 reg = arg->reg;
5998 partial = arg->partial;
6001 /* Being passed entirely in a register. We shouldn't be called in
6002 this case. */
6003 gcc_assert (reg == 0 || partial != 0);
6005 /* If this arg needs special alignment, don't load the registers
6006 here. */
6007 if (arg->n_aligned_regs != 0)
6008 reg = 0;
6010 /* If this is being passed partially in a register, we can't evaluate
6011 it directly into its stack slot. Otherwise, we can. */
6012 if (arg->value == 0)
6014 /* stack_arg_under_construction is nonzero if a function argument is
6015 being evaluated directly into the outgoing argument list and
6016 expand_call must take special action to preserve the argument list
6017 if it is called recursively.
6019 For scalar function arguments stack_usage_map is sufficient to
6020 determine which stack slots must be saved and restored. Scalar
6021 arguments in general have pass_on_stack == 0.
6023 If this argument is initialized by a function which takes the
6024 address of the argument (a C++ constructor or a C function
6025 returning a BLKmode structure), then stack_usage_map is
6026 insufficient and expand_call must push the stack around the
6027 function call. Such arguments have pass_on_stack == 1.
6029 Note that it is always safe to set stack_arg_under_construction,
6030 but this generates suboptimal code if set when not needed. */
6032 if (arg->pass_on_stack)
6033 stack_arg_under_construction++;
6035 arg->value = expand_expr (pval,
6036 (partial
6037 || TYPE_MODE (TREE_TYPE (pval)) != arg->mode)
6038 ? NULL_RTX : arg->stack,
6039 VOIDmode, EXPAND_STACK_PARM);
6041 /* If we are promoting the object (or if for any other reason the
6042 mode doesn't agree), convert the mode. */
6044 if (arg->mode != TYPE_MODE (TREE_TYPE (pval)))
6045 arg->value = convert_modes (arg->mode, TYPE_MODE (TREE_TYPE (pval)),
6046 arg->value, arg->unsignedp);
6048 if (arg->pass_on_stack)
6049 stack_arg_under_construction--;
6052 /* Check for overlap with already clobbered argument area. */
6053 if ((flags & ECF_SIBCALL)
6054 && MEM_P (arg->value)
6055 && mem_might_overlap_already_clobbered_arg_p (XEXP (arg->value, 0),
6056 arg->locate.size.constant))
6057 sibcall_failure = 1;
6059 /* Don't allow anything left on stack from computation
6060 of argument to alloca. */
6061 if (flags & ECF_MAY_BE_ALLOCA)
6062 do_pending_stack_adjust ();
6064 if (arg->value == arg->stack)
6065 /* If the value is already in the stack slot, we are done. */
6067 else if (arg->mode != BLKmode)
6069 unsigned int parm_align;
6071 /* Argument is a scalar, not entirely passed in registers.
6072 (If part is passed in registers, arg->partial says how much
6073 and emit_push_insn will take care of putting it there.)
6075 Push it, and if its size is less than the
6076 amount of space allocated to it,
6077 also bump stack pointer by the additional space.
6078 Note that in C the default argument promotions
6079 will prevent such mismatches. */
6081 poly_int64 size = (TYPE_EMPTY_P (TREE_TYPE (pval))
6082 ? 0 : GET_MODE_SIZE (arg->mode));
6084 /* Compute how much space the push instruction will push.
6085 On many machines, pushing a byte will advance the stack
6086 pointer by a halfword. */
6087 #ifdef PUSH_ROUNDING
6088 size = PUSH_ROUNDING (size);
6089 #endif
6090 used = size;
6092 /* Compute how much space the argument should get:
6093 round up to a multiple of the alignment for arguments. */
6094 if (targetm.calls.function_arg_padding (arg->mode, TREE_TYPE (pval))
6095 != PAD_NONE)
6096 /* At the moment we don't (need to) support ABIs for which the
6097 padding isn't known at compile time. In principle it should
6098 be easy to add though. */
6099 used = force_align_up (size, PARM_BOUNDARY / BITS_PER_UNIT);
6101 /* Compute the alignment of the pushed argument. */
6102 parm_align = arg->locate.boundary;
6103 if (targetm.calls.function_arg_padding (arg->mode, TREE_TYPE (pval))
6104 == PAD_DOWNWARD)
6106 poly_int64 pad = used - size;
6107 unsigned int pad_align = known_alignment (pad) * BITS_PER_UNIT;
6108 if (pad_align != 0)
6109 parm_align = MIN (parm_align, pad_align);
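/* Added annotation (not in the original source): a worked instance of
   the padding math above, assuming PARM_BOUNDARY == 64: a 4-byte value
   padded downward in an 8-byte slot has PAD == 4, so the data starts
   4 bytes into the slot and PARM_ALIGN is capped at 4 * 8 = 32 bits.  */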
6112 /* This isn't already where we want it on the stack, so put it there.
6113 This can either be done with push or copy insns. */
6114 if (maybe_ne (used, 0)
6115 && !emit_push_insn (arg->value, arg->mode, TREE_TYPE (pval),
6116 NULL_RTX, parm_align, partial, reg, used - size,
6117 argblock, ARGS_SIZE_RTX (arg->locate.offset),
6118 reg_parm_stack_space,
6119 ARGS_SIZE_RTX (arg->locate.alignment_pad), true))
6120 sibcall_failure = 1;
6122 /* Unless this is a partially-in-register argument, the argument is now
6123 in the stack. */
6124 if (partial == 0)
6125 arg->value = arg->stack;
6127 else
6129 /* BLKmode, at least partly to be pushed. */
6131 unsigned int parm_align;
6132 poly_int64 excess;
6133 rtx size_rtx;
6135 /* Pushing a nonscalar.
6136 If part is passed in registers, PARTIAL says how much
6137 and emit_push_insn will take care of putting it there. */
6139 /* Round its size up to a multiple
6140 of the allocation unit for arguments. */
6142 if (arg->locate.size.var != 0)
6144 excess = 0;
6145 size_rtx = ARGS_SIZE_RTX (arg->locate.size);
6147 else
6149 /* PUSH_ROUNDING has no effect on us, because emit_push_insn
6150 for BLKmode is careful to avoid it. */
6151 excess = (arg->locate.size.constant
6152 - arg_int_size_in_bytes (TREE_TYPE (pval))
6153 + partial);
6154 size_rtx = expand_expr (arg_size_in_bytes (TREE_TYPE (pval)),
6155 NULL_RTX, TYPE_MODE (sizetype),
6156 EXPAND_NORMAL);
6159 parm_align = arg->locate.boundary;
6161 /* When an argument is padded down, the block is aligned to
6162 PARM_BOUNDARY, but the actual argument isn't. */
6163 if (targetm.calls.function_arg_padding (arg->mode, TREE_TYPE (pval))
6164 == PAD_DOWNWARD)
6166 if (arg->locate.size.var)
6167 parm_align = BITS_PER_UNIT;
6168 else
6170 unsigned int excess_align
6171 = known_alignment (excess) * BITS_PER_UNIT;
6172 if (excess_align != 0)
6173 parm_align = MIN (parm_align, excess_align);
6177 if ((flags & ECF_SIBCALL) && MEM_P (arg->value))
6179 /* emit_push_insn might not work properly if arg->value and
6180 argblock + arg->locate.offset areas overlap. */
6181 rtx x = arg->value;
6182 poly_int64 i = 0;
6184 if (strip_offset (XEXP (x, 0), &i)
6185 == crtl->args.internal_arg_pointer)
6187 /* arg.locate doesn't contain the pretend_args_size offset;
6188 it's part of argblock. Ensure we don't count it in I. */
6189 if (STACK_GROWS_DOWNWARD)
6190 i -= crtl->args.pretend_args_size;
6191 else
6192 i += crtl->args.pretend_args_size;
6194 /* expand_call should ensure this. */
6195 gcc_assert (!arg->locate.offset.var
6196 && arg->locate.size.var == 0);
6197 poly_int64 size_val = rtx_to_poly_int64 (size_rtx);
6199 if (known_eq (arg->locate.offset.constant, i))
6201 /* Even though they appear to be at the same location,
6202 if part of the outgoing argument is in registers,
6203 they aren't really at the same location. Check for
6204 this by making sure that the incoming size is the
6205 same as the outgoing size. */
6206 if (maybe_ne (arg->locate.size.constant, size_val))
6207 sibcall_failure = 1;
6209 else if (maybe_in_range_p (arg->locate.offset.constant,
6210 i, size_val))
6211 sibcall_failure = 1;
6212 /* Use arg->locate.size.constant instead of size_rtx
6213 because we only care about the part of the argument
6214 on the stack. */
6215 else if (maybe_in_range_p (i, arg->locate.offset.constant,
6216 arg->locate.size.constant))
6217 sibcall_failure = 1;
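/* Added annotation (not in the original source): these overlap checks
   guard the sibcall case in which the outgoing argument is built from
   an incoming argument living in the same stack region; emit_push_insn
   could then read bytes it has already overwritten, so the sibcall is
   abandoned in favour of a normal call.  */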
6221 if (!CONST_INT_P (size_rtx) || INTVAL (size_rtx) != 0)
6222 emit_push_insn (arg->value, arg->mode, TREE_TYPE (pval), size_rtx,
6223 parm_align, partial, reg, excess, argblock,
6224 ARGS_SIZE_RTX (arg->locate.offset),
6225 reg_parm_stack_space,
6226 ARGS_SIZE_RTX (arg->locate.alignment_pad), false);
6228 /* Unless this is a partially-in-register argument, the argument is now
6229 in the stack.
6231 ??? Unlike the case above, in which we want the actual
6232 address of the data, so that we can load it directly into a
6233 register, here we want the address of the stack slot, so that
6234 it's properly aligned for word-by-word copying or something
6235 like that. It's not clear that this is always correct. */
6236 if (partial == 0)
6237 arg->value = arg->stack_slot;
6240 if (arg->reg && GET_CODE (arg->reg) == PARALLEL)
6242 tree type = TREE_TYPE (arg->tree_value);
6243 arg->parallel_value
6244 = emit_group_load_into_temps (arg->reg, arg->value, type,
6245 int_size_in_bytes (type));
6248 /* Mark all slots this store used. */
6249 if (ACCUMULATE_OUTGOING_ARGS && !(flags & ECF_SIBCALL)
6250 && argblock && ! variable_size && arg->stack)
6251 mark_stack_region_used (lower_bound, upper_bound);
6253 /* Once we have pushed something, pops can't safely
6254 be deferred during the rest of the arguments. */
6255 NO_DEFER_POP;
6257 /* Free any temporary slots made in processing this argument. */
6258 pop_temp_slots ();
6260 return sibcall_failure;
6263 /* Nonzero if we do not know how to pass ARG solely in registers. */
6265 bool
6266 must_pass_in_stack_var_size (const function_arg_info &arg)
6268 if (!arg.type)
6269 return false;
6271 /* If the type has variable size... */
6272 if (!poly_int_tree_p (TYPE_SIZE (arg.type)))
6273 return true;
6275 /* If the type is marked as addressable (it is required
6276 to be constructed into the stack)... */
6277 if (TREE_ADDRESSABLE (arg.type))
6278 return true;
6280 return false;
6283 /* Another version of the TARGET_MUST_PASS_IN_STACK hook. This one
6284 takes trailing padding of a structure into account. */
6285 /* ??? Should be able to merge these two by examining BLOCK_REG_PADDING. */
6287 bool
6288 must_pass_in_stack_var_size_or_pad (const function_arg_info &arg)
6290 if (!arg.type)
6291 return false;
6293 /* If the type has variable size... */
6294 if (TREE_CODE (TYPE_SIZE (arg.type)) != INTEGER_CST)
6295 return true;
6297 /* If the type is marked as addressable (it is required
6298 to be constructed into the stack)... */
6299 if (TREE_ADDRESSABLE (arg.type))
6300 return true;
6302 if (TYPE_EMPTY_P (arg.type))
6303 return false;
6305 /* If the padding and mode of the type is such that a copy into
6306 a register would put it into the wrong part of the register. */
6307 if (arg.mode == BLKmode
6308 && int_size_in_bytes (arg.type) % (PARM_BOUNDARY / BITS_PER_UNIT)
6309 && (targetm.calls.function_arg_padding (arg.mode, arg.type)
6310 == (BYTES_BIG_ENDIAN ? PAD_UPWARD : PAD_DOWNWARD)))
6311 return true;
6313 return false;
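/* Added annotation (not in the original source): as an example of the
   final test above, a 3-byte BLKmode structure whose padding is
   PAD_UPWARD on a big-endian target would land in the wrong part of a
   register if copied there, so it has to stay on the stack.  */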
6316 /* Return true if TYPE must be passed on the stack when passed to
6317 the "..." arguments of a function. */
6319 bool
6320 must_pass_va_arg_in_stack (tree type)
6322 function_arg_info arg (type, /*named=*/false);
6323 return targetm.calls.must_pass_in_stack (arg);
6326 /* Return true if FIELD is the C++17 empty base field that should
6327 be ignored for ABI calling convention decisions in order to
6328 maintain ABI compatibility between C++14 and earlier, which don't
6329 add this FIELD to classes with empty bases, and C++17 and later,
6330 which do. */
6332 bool
6333 cxx17_empty_base_field_p (const_tree field)
6335 return (DECL_FIELD_ABI_IGNORED (field)
6336 && DECL_ARTIFICIAL (field)
6337 && RECORD_OR_UNION_TYPE_P (TREE_TYPE (field))
6338 && !lookup_attribute ("no_unique_address", DECL_ATTRIBUTES (field)));
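/* Added annotation (not in the original source): a minimal illustration
   of the field this predicate matches, using hypothetical C++ types:

       struct empty {};
       struct derived : empty { float x, y; };

   C++17 front ends add an artificial zero-size field for the `empty'
   base inside `derived'; ignoring it lets ABIs that classify
   aggregates field by field keep passing `derived' exactly as C++14
   did.  */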
6341 /* Tell the garbage collector about GTY markers in this source file. */
6342 #include "gt-calls.h"