1 /* Convert function calls to rtl insns, for GNU C compiler.
2 Copyright (C) 1989-2020 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
20 #define INCLUDE_STRING
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "backend.h"
25 #include "target.h"
26 #include "rtl.h"
27 #include "tree.h"
28 #include "gimple.h"
29 #include "predict.h"
30 #include "memmodel.h"
31 #include "tm_p.h"
32 #include "stringpool.h"
33 #include "expmed.h"
34 #include "optabs.h"
35 #include "emit-rtl.h"
36 #include "cgraph.h"
37 #include "diagnostic-core.h"
38 #include "fold-const.h"
39 #include "stor-layout.h"
40 #include "varasm.h"
41 #include "internal-fn.h"
42 #include "dojump.h"
43 #include "explow.h"
44 #include "calls.h"
45 #include "expr.h"
46 #include "output.h"
47 #include "langhooks.h"
48 #include "except.h"
49 #include "dbgcnt.h"
50 #include "rtl-iter.h"
51 #include "tree-vrp.h"
52 #include "tree-ssanames.h"
53 #include "tree-ssa-strlen.h"
54 #include "intl.h"
55 #include "stringpool.h"
56 #include "hash-map.h"
57 #include "hash-traits.h"
58 #include "attribs.h"
59 #include "builtins.h"
60 #include "gimple-fold.h"
61 #include "attr-fnspec.h"
62 #include "value-query.h"
64 #include "tree-pretty-print.h"
66 /* Like PREFERRED_STACK_BOUNDARY but in units of bytes, not bits. */
67 #define STACK_BYTES (PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT)
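/* For example, with a PREFERRED_STACK_BOUNDARY of 128 bits and 8-bit
   units, STACK_BYTES works out to 128 / 8 = 16, i.e. 16-byte stack
   alignment. */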
69 /* Data structure and subroutines used within expand_call. */
71 struct arg_data
72 {
73 /* Tree node for this argument. */
74 tree tree_value;
75 /* Mode for value; TYPE_MODE unless promoted. */
76 machine_mode mode;
77 /* Current RTL value for argument, or 0 if it isn't precomputed. */
78 rtx value;
79 /* Initially-computed RTL value for argument; only for const functions. */
80 rtx initial_value;
81 /* Register to pass this argument in, 0 if passed on stack, or a
82 PARALLEL if the arg is to be copied into multiple non-contiguous
83 registers. */
84 rtx reg;
85 /* Register to pass this argument in when generating tail call sequence.
86 This is not the same register as for normal calls on machines with
87 register windows. */
88 rtx tail_call_reg;
89 /* If REG is a PARALLEL, this is a copy of VALUE pulled into the correct
90 form for emit_group_move. */
91 rtx parallel_value;
92 /* If REG was promoted from the actual mode of the argument expression,
93 indicates whether the promotion is sign- or zero-extended. */
94 int unsignedp;
95 /* Number of bytes to put in registers. 0 means put the whole arg
96 in registers. Also 0 if not passed in registers. */
97 int partial;
98 /* Nonzero if argument must be passed on stack.
99 Note that some arguments may be passed on the stack
100 even though pass_on_stack is zero, just because FUNCTION_ARG says so.
101 pass_on_stack identifies arguments that *cannot* go in registers. */
102 int pass_on_stack;
103 /* Some fields packaged up for locate_and_pad_parm. */
104 struct locate_and_pad_arg_data locate;
105 /* Location on the stack at which parameter should be stored. The store
106 has already been done if STACK == VALUE. */
107 rtx stack;
108 /* Location on the stack of the start of this argument slot. This can
109 differ from STACK if this arg pads downward. This location is known
110 to be aligned to TARGET_FUNCTION_ARG_BOUNDARY. */
111 rtx stack_slot;
112 /* Place that this stack area has been saved, if needed. */
113 rtx save_area;
114 /* If an argument's alignment does not permit direct copying into registers,
115 copy in smaller-sized pieces into pseudos. These are stored in a
116 block pointed to by this field. The next field says how many
117 word-sized pseudos we made. */
118 rtx *aligned_regs;
119 int n_aligned_regs;
120 };
122 /* A vector of one char per byte of stack space. A byte is nonzero if
123 the corresponding stack location has been used.
124 This vector is used to prevent a function call within an argument from
125 clobbering any stack already set up. */
126 static char *stack_usage_map;
128 /* Size of STACK_USAGE_MAP. */
129 static unsigned int highest_outgoing_arg_in_use;
131 /* Assume that any stack location at this byte index is used,
132 without checking the contents of stack_usage_map. */
133 static unsigned HOST_WIDE_INT stack_usage_watermark = HOST_WIDE_INT_M1U;
135 /* A bitmap of virtual-incoming stack space. A bit is set if the corresponding
136 stack location's tail call argument has already been stored into the stack.
137 This bitmap is used to prevent sibling call optimization if the function
138 tries to use its parent's incoming argument slots when they have already
139 been overwritten with tail call arguments. */
140 static sbitmap stored_args_map;
142 /* Assume that any virtual-incoming location at this byte index has been
143 stored, without checking the contents of stored_args_map. */
144 static unsigned HOST_WIDE_INT stored_args_watermark;
146 /* stack_arg_under_construction is nonzero when an argument may be
147 initialized with a constructor call (including a C function that
148 returns a BLKmode struct) and expand_call must take special action
149 to make sure the object being constructed does not overlap the
150 argument list for the constructor call. */
151 static int stack_arg_under_construction;
153 static void precompute_register_parameters (int, struct arg_data *, int *);
154 static int store_one_arg (struct arg_data *, rtx, int, int, int);
155 static void store_unaligned_arguments_into_pseudos (struct arg_data *, int);
156 static int finalize_must_preallocate (int, int, struct arg_data *,
157 struct args_size *);
158 static void precompute_arguments (int, struct arg_data *);
159 static void compute_argument_addresses (struct arg_data *, rtx, int);
160 static rtx rtx_for_function_call (tree, tree);
161 static void load_register_parameters (struct arg_data *, int, rtx *, int,
162 int, int *);
163 static int special_function_p (const_tree, int);
164 static int check_sibcall_argument_overlap_1 (rtx);
165 static int check_sibcall_argument_overlap (rtx_insn *, struct arg_data *, int);
167 static tree split_complex_types (tree);
169 #ifdef REG_PARM_STACK_SPACE
170 static rtx save_fixed_argument_area (int, rtx, int *, int *);
171 static void restore_fixed_argument_area (rtx, rtx, int, int);
172 #endif
174 /* Return true if bytes [LOWER_BOUND, UPPER_BOUND) of the outgoing
175 stack region might already be in use. */
177 static bool
178 stack_region_maybe_used_p (poly_uint64 lower_bound, poly_uint64 upper_bound,
179 unsigned int reg_parm_stack_space)
181 unsigned HOST_WIDE_INT const_lower, const_upper;
182 const_lower = constant_lower_bound (lower_bound);
183 if (!upper_bound.is_constant (&const_upper))
184 const_upper = HOST_WIDE_INT_M1U;
186 if (const_upper > stack_usage_watermark)
187 return true;
189 /* Don't worry about things in the fixed argument area;
190 it has already been saved. */
191 const_lower = MAX (const_lower, reg_parm_stack_space);
192 const_upper = MIN (const_upper, highest_outgoing_arg_in_use);
193 for (unsigned HOST_WIDE_INT i = const_lower; i < const_upper; ++i)
194 if (stack_usage_map[i])
195 return true;
196 return false;
199 /* Record that bytes [LOWER_BOUND, UPPER_BOUND) of the outgoing
200 stack region are now in use. */
202 static void
203 mark_stack_region_used (poly_uint64 lower_bound, poly_uint64 upper_bound)
205 unsigned HOST_WIDE_INT const_lower, const_upper;
206 const_lower = constant_lower_bound (lower_bound);
207 if (upper_bound.is_constant (&const_upper))
208 for (unsigned HOST_WIDE_INT i = const_lower; i < const_upper; ++i)
209 stack_usage_map[i] = 1;
210 else
211 stack_usage_watermark = MIN (stack_usage_watermark, const_lower);
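/* A minimal usage sketch (hypothetical caller) of how the two helpers
   above pair up: probe a region before reusing it, then mark it used
   once a store into it has actually been emitted. */

static void
example_probe_then_mark (poly_uint64 lower, poly_uint64 upper,
			 unsigned int reg_parm_stack_space)
{
  if (stack_region_maybe_used_p (lower, upper, reg_parm_stack_space))
    {
      /* A nested call may have clobbered these bytes; a real caller
	 would save and restore them first (see
	 save_fixed_argument_area below). */
    }
  /* ... emit the store into bytes [LOWER, UPPER) here ... */
  mark_stack_region_used (lower, upper);
}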
214 /* Force FUNEXP into a form suitable for the address of a CALL,
215 and return that as an rtx. Also load the static chain register
216 if FNDECL is a nested function.
218 CALL_FUSAGE points to a variable holding the prospective
219 CALL_INSN_FUNCTION_USAGE information. */
221 rtx
222 prepare_call_address (tree fndecl_or_type, rtx funexp, rtx static_chain_value,
223 rtx *call_fusage, int reg_parm_seen, int flags)
225 /* Make a valid memory address and copy constants through pseudo-regs,
226 but not for a constant address if -fno-function-cse. */
227 if (GET_CODE (funexp) != SYMBOL_REF)
229 /* If it's an indirect call by descriptor, generate code to perform
230 runtime identification of the pointer and load the descriptor. */
231 if ((flags & ECF_BY_DESCRIPTOR) && !flag_trampolines)
233 const int bit_val = targetm.calls.custom_function_descriptors;
234 rtx call_lab = gen_label_rtx ();
236 gcc_assert (fndecl_or_type && TYPE_P (fndecl_or_type));
237 fndecl_or_type
238 = build_decl (UNKNOWN_LOCATION, FUNCTION_DECL, NULL_TREE,
239 fndecl_or_type);
240 DECL_STATIC_CHAIN (fndecl_or_type) = 1;
241 rtx chain = targetm.calls.static_chain (fndecl_or_type, false);
243 if (GET_MODE (funexp) != Pmode)
244 funexp = convert_memory_address (Pmode, funexp);
246 /* Avoid long live ranges around function calls. */
247 funexp = copy_to_mode_reg (Pmode, funexp);
249 if (REG_P (chain))
250 emit_insn (gen_rtx_CLOBBER (VOIDmode, chain));
252 /* Emit the runtime identification pattern. */
253 rtx mask = gen_rtx_AND (Pmode, funexp, GEN_INT (bit_val));
254 emit_cmp_and_jump_insns (mask, const0_rtx, EQ, NULL_RTX, Pmode, 1,
255 call_lab);
257 /* Statically predict the branch to very likely taken. */
258 rtx_insn *insn = get_last_insn ();
259 if (JUMP_P (insn))
260 predict_insn_def (insn, PRED_BUILTIN_EXPECT, TAKEN);
262 /* Load the descriptor. */
263 rtx mem = gen_rtx_MEM (ptr_mode,
264 plus_constant (Pmode, funexp, - bit_val));
265 MEM_NOTRAP_P (mem) = 1;
266 mem = convert_memory_address (Pmode, mem);
267 emit_move_insn (chain, mem);
269 mem = gen_rtx_MEM (ptr_mode,
270 plus_constant (Pmode, funexp,
271 POINTER_SIZE / BITS_PER_UNIT
272 - bit_val));
273 MEM_NOTRAP_P (mem) = 1;
274 mem = convert_memory_address (Pmode, mem);
275 emit_move_insn (funexp, mem);
277 emit_label (call_lab);
279 if (REG_P (chain))
281 use_reg (call_fusage, chain);
282 STATIC_CHAIN_REG_P (chain) = 1;
285 /* Make sure we're not going to be overwritten below. */
286 gcc_assert (!static_chain_value);
289 /* If we are using registers for parameters, force the
290 function address into a register now. */
291 funexp = ((reg_parm_seen
292 && targetm.small_register_classes_for_mode_p (FUNCTION_MODE))
293 ? force_not_mem (memory_address (FUNCTION_MODE, funexp))
294 : memory_address (FUNCTION_MODE, funexp));
296 else
298 /* funexp could be a SYMBOL_REF that represents a function pointer of
299 ptr_mode. In that case it should be converted into address mode (Pmode)
300 to be a valid address for a memory rtx pattern. See PR 64971. */
301 if (GET_MODE (funexp) != Pmode)
302 funexp = convert_memory_address (Pmode, funexp);
304 if (!(flags & ECF_SIBCALL))
306 if (!NO_FUNCTION_CSE && optimize && ! flag_no_function_cse)
307 funexp = force_reg (Pmode, funexp);
311 if (static_chain_value != 0
312 && (TREE_CODE (fndecl_or_type) != FUNCTION_DECL
313 || DECL_STATIC_CHAIN (fndecl_or_type)))
315 rtx chain;
317 chain = targetm.calls.static_chain (fndecl_or_type, false);
318 static_chain_value = convert_memory_address (Pmode, static_chain_value);
320 emit_move_insn (chain, static_chain_value);
321 if (REG_P (chain))
323 use_reg (call_fusage, chain);
324 STATIC_CHAIN_REG_P (chain) = 1;
328 return funexp;
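/* The descriptor scheme above can be pictured with a plain-C model.
   This is only a sketch: the tag value of 1 and the two-word layout
   below are assumptions standing in for
   targetm.calls.custom_function_descriptors and the target's pointer
   size. A "function pointer" either addresses code directly or, with
   its low bit set, addresses a descriptor holding the static chain
   and the real entry point. */

struct example_descriptor
{
  void *chain;			/* Static chain value. */
  void (*code) (void);		/* Actual entry point. */
};

static void
example_call_by_descriptor (void *fp, void **chain_reg)
{
  uintptr_t addr = (uintptr_t) fp;
  if (addr & 1)
    {
      /* Runtime identification succeeded: untag and load the
	 descriptor, mirroring the two loads emitted above. */
      struct example_descriptor *d
	= (struct example_descriptor *) (addr - 1);
      *chain_reg = d->chain;
      d->code ();
    }
  else
    /* An untagged pointer is a plain code address. */
    ((void (*) (void)) fp) ();
}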
331 /* Generate instructions to call function FUNEXP,
332 and optionally pop the results.
333 The CALL_INSN is the first insn generated.
335 FNDECL is the declaration node of the function. This is given to the
336 hook TARGET_RETURN_POPS_ARGS to determine whether this function pops
337 its own args.
339 FUNTYPE is the data type of the function. This is given to the hook
340 TARGET_RETURN_POPS_ARGS to determine whether this function pops its
341 own args. We used to allow an identifier for library functions, but
342 that doesn't work when the return type is an aggregate type and the
343 calling convention says that the pointer to this aggregate is to be
344 popped by the callee.
346 STACK_SIZE is the number of bytes of arguments on the stack,
347 ROUNDED_STACK_SIZE is that number rounded up to
348 PREFERRED_STACK_BOUNDARY; zero if the size is variable. This is
349 both to put into the call insn and to generate explicit popping
350 code if necessary.
352 STRUCT_VALUE_SIZE is the number of bytes wanted in a structure value.
353 It is zero if this call doesn't want a structure value.
355 NEXT_ARG_REG is the rtx that results from executing
356 targetm.calls.function_arg (&args_so_far,
357 function_arg_info::end_marker ());
358 just after all the args have had their registers assigned.
359 This could be whatever you like, but normally it is the first
360 arg-register beyond those used for args in this call,
361 or 0 if all the arg-registers are used in this call.
362 It is passed on to `gen_call' so you can put this info in the call insn.
364 VALREG is a hard register in which a value is returned,
365 or 0 if the call does not return a value.
367 OLD_INHIBIT_DEFER_POP is the value that `inhibit_defer_pop' had before
368 the args to this call were processed.
369 We restore `inhibit_defer_pop' to that value.
371 CALL_FUSAGE is either empty or an EXPR_LIST of USE expressions that
372 denote registers used by the called function. */
374 static void
375 emit_call_1 (rtx funexp, tree fntree ATTRIBUTE_UNUSED, tree fndecl ATTRIBUTE_UNUSED,
376 tree funtype ATTRIBUTE_UNUSED,
377 poly_int64 stack_size ATTRIBUTE_UNUSED,
378 poly_int64 rounded_stack_size,
379 poly_int64 struct_value_size ATTRIBUTE_UNUSED,
380 rtx next_arg_reg ATTRIBUTE_UNUSED, rtx valreg,
381 int old_inhibit_defer_pop, rtx call_fusage, int ecf_flags,
382 cumulative_args_t args_so_far ATTRIBUTE_UNUSED)
384 rtx rounded_stack_size_rtx = gen_int_mode (rounded_stack_size, Pmode);
385 rtx call, funmem, pat;
386 int already_popped = 0;
387 poly_int64 n_popped = 0;
389 /* Sibling call patterns never pop arguments (no sibcall(_value)_pop
390 patterns exist). Any popping that the callee does on return will
391 be from our caller's frame rather than ours. */
392 if (!(ecf_flags & ECF_SIBCALL))
394 n_popped += targetm.calls.return_pops_args (fndecl, funtype, stack_size);
396 #ifdef CALL_POPS_ARGS
397 n_popped += CALL_POPS_ARGS (*get_cumulative_args (args_so_far));
398 #endif
401 /* Ensure address is valid. SYMBOL_REF is already valid, so no need,
402 and we don't want to load it into a register as an optimization,
403 because prepare_call_address already did it if it should be done. */
404 if (GET_CODE (funexp) != SYMBOL_REF)
405 funexp = memory_address (FUNCTION_MODE, funexp);
407 funmem = gen_rtx_MEM (FUNCTION_MODE, funexp);
408 if (fndecl && TREE_CODE (fndecl) == FUNCTION_DECL)
410 tree t = fndecl;
412 /* Although a built-in FUNCTION_DECL and its non-__builtin
413 counterpart compare equal and get a shared mem_attrs, they
414 produce different dump output in compare-debug compilations:
415 if an entry gets garbage collected in one compilation and a
416 different (but equivalent) entry is then added, while the other
417 compilation doesn't run the garbage collector at the same spot,
418 it then shares the mem_attr with the equivalent entry. */
419 if (DECL_BUILT_IN_CLASS (t) == BUILT_IN_NORMAL)
421 tree t2 = builtin_decl_explicit (DECL_FUNCTION_CODE (t));
422 if (t2)
423 t = t2;
426 set_mem_expr (funmem, t);
428 else if (fntree)
429 set_mem_expr (funmem, build_simple_mem_ref (CALL_EXPR_FN (fntree)));
431 if (ecf_flags & ECF_SIBCALL)
433 if (valreg)
434 pat = targetm.gen_sibcall_value (valreg, funmem,
435 rounded_stack_size_rtx,
436 next_arg_reg, NULL_RTX);
437 else
438 pat = targetm.gen_sibcall (funmem, rounded_stack_size_rtx,
439 next_arg_reg,
440 gen_int_mode (struct_value_size, Pmode));
442 /* If the target has "call" or "call_value" insns, then prefer them
443 if no arguments are actually popped. If the target does not have
444 "call" or "call_value" insns, then we must use the popping versions
445 even if the call has no arguments to pop. */
446 else if (maybe_ne (n_popped, 0)
447 || !(valreg
448 ? targetm.have_call_value ()
449 : targetm.have_call ()))
451 rtx n_pop = gen_int_mode (n_popped, Pmode);
453 /* If this subroutine pops its own args, record that in the call insn
454 if possible, for the sake of frame pointer elimination. */
456 if (valreg)
457 pat = targetm.gen_call_value_pop (valreg, funmem,
458 rounded_stack_size_rtx,
459 next_arg_reg, n_pop);
460 else
461 pat = targetm.gen_call_pop (funmem, rounded_stack_size_rtx,
462 next_arg_reg, n_pop);
464 already_popped = 1;
466 else
468 if (valreg)
469 pat = targetm.gen_call_value (valreg, funmem, rounded_stack_size_rtx,
470 next_arg_reg, NULL_RTX);
471 else
472 pat = targetm.gen_call (funmem, rounded_stack_size_rtx, next_arg_reg,
473 gen_int_mode (struct_value_size, Pmode));
475 emit_insn (pat);
477 /* Find the call we just emitted. */
478 rtx_call_insn *call_insn = last_call_insn ();
480 /* Some targets create a fresh MEM instead of reusing the one provided
481 above. Set its MEM_EXPR. */
482 call = get_call_rtx_from (call_insn);
483 if (call
484 && MEM_EXPR (XEXP (call, 0)) == NULL_TREE
485 && MEM_EXPR (funmem) != NULL_TREE)
486 set_mem_expr (XEXP (call, 0), MEM_EXPR (funmem));
488 /* Put the register usage information there. */
489 add_function_usage_to (call_insn, call_fusage);
491 /* If this is a const call, then set the insn's unchanging bit. */
492 if (ecf_flags & ECF_CONST)
493 RTL_CONST_CALL_P (call_insn) = 1;
495 /* If this is a pure call, then set the insn's unchanging bit. */
496 if (ecf_flags & ECF_PURE)
497 RTL_PURE_CALL_P (call_insn) = 1;
499 /* If this is a looping const or pure call, then set the corresponding insn bit. */
500 if (ecf_flags & ECF_LOOPING_CONST_OR_PURE)
501 RTL_LOOPING_CONST_OR_PURE_CALL_P (call_insn) = 1;
503 /* Create a nothrow REG_EH_REGION note, if needed. */
504 make_reg_eh_region_note (call_insn, ecf_flags, 0);
506 if (ecf_flags & ECF_NORETURN)
507 add_reg_note (call_insn, REG_NORETURN, const0_rtx);
509 if (ecf_flags & ECF_RETURNS_TWICE)
511 add_reg_note (call_insn, REG_SETJMP, const0_rtx);
512 cfun->calls_setjmp = 1;
515 SIBLING_CALL_P (call_insn) = ((ecf_flags & ECF_SIBCALL) != 0);
517 /* Restore this now, so that we do defer pops for this call's args
518 if the context of the call as a whole permits. */
519 inhibit_defer_pop = old_inhibit_defer_pop;
521 if (maybe_ne (n_popped, 0))
523 if (!already_popped)
524 CALL_INSN_FUNCTION_USAGE (call_insn)
525 = gen_rtx_EXPR_LIST (VOIDmode,
526 gen_rtx_CLOBBER (VOIDmode, stack_pointer_rtx),
527 CALL_INSN_FUNCTION_USAGE (call_insn));
528 rounded_stack_size -= n_popped;
529 rounded_stack_size_rtx = gen_int_mode (rounded_stack_size, Pmode);
530 stack_pointer_delta -= n_popped;
532 add_args_size_note (call_insn, stack_pointer_delta);
534 /* If popping is needed, stack realignment must use DRAP. */
535 if (SUPPORTS_STACK_ALIGNMENT)
536 crtl->need_drap = true;
538 /* For noreturn calls when not accumulating outgoing args force
539 REG_ARGS_SIZE note to prevent crossjumping of calls with different
540 args sizes. */
541 else if (!ACCUMULATE_OUTGOING_ARGS && (ecf_flags & ECF_NORETURN) != 0)
542 add_args_size_note (call_insn, stack_pointer_delta);
544 if (!ACCUMULATE_OUTGOING_ARGS)
546 /* If returning from the subroutine does not automatically pop the args,
547 we need an instruction to pop them sooner or later.
548 Perhaps do it now; perhaps just record how much space to pop later.
550 If returning from the subroutine does pop the args, indicate that the
551 stack pointer will be changed. */
553 if (maybe_ne (rounded_stack_size, 0))
555 if (ecf_flags & ECF_NORETURN)
556 /* Just pretend we did the pop. */
557 stack_pointer_delta -= rounded_stack_size;
558 else if (flag_defer_pop && inhibit_defer_pop == 0
559 && ! (ecf_flags & (ECF_CONST | ECF_PURE)))
560 pending_stack_adjust += rounded_stack_size;
561 else
562 adjust_stack (rounded_stack_size_rtx);
565 /* When we accumulate outgoing args, we must avoid any stack manipulations.
566 Restore the stack pointer to its original value now. Usually
567 ACCUMULATE_OUTGOING_ARGS targets don't get here, but there are exceptions.
568 On i386 ACCUMULATE_OUTGOING_ARGS can be enabled on demand, and
569 popping variants of functions exist as well.
571 ??? We may optimize similarly to defer_pop above, but it is
572 probably not worthwhile.
574 ??? It will be worthwhile to enable combine_stack_adjustments even for
575 such machines. */
576 else if (maybe_ne (n_popped, 0))
577 anti_adjust_stack (gen_int_mode (n_popped, Pmode));
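/* The stack bookkeeping above reduces to one invariant, sketched in
   plain C (illustrative only): bytes the callee pops on return
   (N_POPPED) no longer count against the caller, and only the
   remainder of the argument block is popped now, deferred, or merely
   recorded for noreturn calls. */

static poly_int64
example_bytes_left_for_caller (poly_int64 rounded_stack_size,
			       poly_int64 n_popped)
{
  return rounded_stack_size - n_popped;
}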
580 /* Determine if the function identified by FNDECL is one with
581 special properties we wish to know about. Modify FLAGS accordingly.
583 For example, if the function might return more than one time (setjmp), then
584 set ECF_RETURNS_TWICE.
586 Set ECF_MAY_BE_ALLOCA for any memory allocation function that might allocate
587 space from the stack such as alloca. */
589 static int
590 special_function_p (const_tree fndecl, int flags)
592 tree name_decl = DECL_NAME (fndecl);
594 if (maybe_special_function_p (fndecl)
595 && IDENTIFIER_LENGTH (name_decl) <= 11)
597 const char *name = IDENTIFIER_POINTER (name_decl);
598 const char *tname = name;
600 /* We assume that alloca will always be called by name. It
601 makes no sense to pass it as a pointer-to-function to
602 anything that does not understand its behavior. */
603 if (IDENTIFIER_LENGTH (name_decl) == 6
604 && name[0] == 'a'
605 && ! strcmp (name, "alloca"))
606 flags |= ECF_MAY_BE_ALLOCA;
608 /* Disregard prefix _ or __. */
609 if (name[0] == '_')
611 if (name[1] == '_')
612 tname += 2;
613 else
614 tname += 1;
617 /* ECF_RETURNS_TWICE is safe even for -ffreestanding. */
618 if (! strcmp (tname, "setjmp")
619 || ! strcmp (tname, "sigsetjmp")
620 || ! strcmp (name, "savectx")
621 || ! strcmp (name, "vfork")
622 || ! strcmp (name, "getcontext"))
623 flags |= ECF_RETURNS_TWICE;
626 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
627 && ALLOCA_FUNCTION_CODE_P (DECL_FUNCTION_CODE (fndecl)))
628 flags |= ECF_MAY_BE_ALLOCA;
630 return flags;
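/* A standalone sketch of the name test above (hypothetical helper;
   the real code first filters on IDENTIFIER_LENGTH as a cheap
   screen): strip one or two leading underscores, then compare
   against the known returns-twice names. */

static bool
example_returns_twice_name_p (const char *name)
{
  const char *tname = name;
  if (name[0] == '_')
    tname += (name[1] == '_') ? 2 : 1;
  return (!strcmp (tname, "setjmp")
	  || !strcmp (tname, "sigsetjmp")
	  || !strcmp (name, "savectx")
	  || !strcmp (name, "vfork")
	  || !strcmp (name, "getcontext"));
}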
633 /* Similar to special_function_p; return a set of ERF_ flags for the
634 function FNDECL. */
635 static int
636 decl_return_flags (tree fndecl)
638 tree attr;
639 tree type = TREE_TYPE (fndecl);
640 if (!type)
641 return 0;
643 attr = lookup_attribute ("fn spec", TYPE_ATTRIBUTES (type));
644 if (!attr)
645 return 0;
647 attr_fnspec fnspec (TREE_VALUE (TREE_VALUE (attr)));
649 unsigned int arg;
650 if (fnspec.returns_arg (&arg))
651 return ERF_RETURNS_ARG | arg;
653 if (fnspec.returns_noalias_p ())
654 return ERF_NOALIAS;
655 return 0;
658 /* Return nonzero when FNDECL represents a call to setjmp. */
660 int
661 setjmp_call_p (const_tree fndecl)
663 if (DECL_IS_RETURNS_TWICE (fndecl))
664 return ECF_RETURNS_TWICE;
665 return special_function_p (fndecl, 0) & ECF_RETURNS_TWICE;
669 /* Return true if STMT may be an alloca call. */
671 bool
672 gimple_maybe_alloca_call_p (const gimple *stmt)
674 tree fndecl;
676 if (!is_gimple_call (stmt))
677 return false;
679 fndecl = gimple_call_fndecl (stmt);
680 if (fndecl && (special_function_p (fndecl, 0) & ECF_MAY_BE_ALLOCA))
681 return true;
683 return false;
686 /* Return true if STMT is a builtin alloca call. */
688 bool
689 gimple_alloca_call_p (const gimple *stmt)
691 tree fndecl;
693 if (!is_gimple_call (stmt))
694 return false;
696 fndecl = gimple_call_fndecl (stmt);
697 if (fndecl && fndecl_built_in_p (fndecl, BUILT_IN_NORMAL))
698 switch (DECL_FUNCTION_CODE (fndecl))
700 CASE_BUILT_IN_ALLOCA:
701 return gimple_call_num_args (stmt) > 0;
702 default:
703 break;
706 return false;
709 /* Return true when EXP contains a builtin alloca call. */
711 bool
712 alloca_call_p (const_tree exp)
714 tree fndecl;
715 if (TREE_CODE (exp) == CALL_EXPR
716 && (fndecl = get_callee_fndecl (exp))
717 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
718 switch (DECL_FUNCTION_CODE (fndecl))
720 CASE_BUILT_IN_ALLOCA:
721 return true;
722 default:
723 break;
726 return false;
729 /* Return TRUE if FNDECL is either a TM builtin or a TM cloned
730 function. Return FALSE otherwise. */
732 static bool
733 is_tm_builtin (const_tree fndecl)
735 if (fndecl == NULL)
736 return false;
738 if (decl_is_tm_clone (fndecl))
739 return true;
741 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
743 switch (DECL_FUNCTION_CODE (fndecl))
745 case BUILT_IN_TM_COMMIT:
746 case BUILT_IN_TM_COMMIT_EH:
747 case BUILT_IN_TM_ABORT:
748 case BUILT_IN_TM_IRREVOCABLE:
749 case BUILT_IN_TM_GETTMCLONE_IRR:
750 case BUILT_IN_TM_MEMCPY:
751 case BUILT_IN_TM_MEMMOVE:
752 case BUILT_IN_TM_MEMSET:
753 CASE_BUILT_IN_TM_STORE (1):
754 CASE_BUILT_IN_TM_STORE (2):
755 CASE_BUILT_IN_TM_STORE (4):
756 CASE_BUILT_IN_TM_STORE (8):
757 CASE_BUILT_IN_TM_STORE (FLOAT):
758 CASE_BUILT_IN_TM_STORE (DOUBLE):
759 CASE_BUILT_IN_TM_STORE (LDOUBLE):
760 CASE_BUILT_IN_TM_STORE (M64):
761 CASE_BUILT_IN_TM_STORE (M128):
762 CASE_BUILT_IN_TM_STORE (M256):
763 CASE_BUILT_IN_TM_LOAD (1):
764 CASE_BUILT_IN_TM_LOAD (2):
765 CASE_BUILT_IN_TM_LOAD (4):
766 CASE_BUILT_IN_TM_LOAD (8):
767 CASE_BUILT_IN_TM_LOAD (FLOAT):
768 CASE_BUILT_IN_TM_LOAD (DOUBLE):
769 CASE_BUILT_IN_TM_LOAD (LDOUBLE):
770 CASE_BUILT_IN_TM_LOAD (M64):
771 CASE_BUILT_IN_TM_LOAD (M128):
772 CASE_BUILT_IN_TM_LOAD (M256):
773 case BUILT_IN_TM_LOG:
774 case BUILT_IN_TM_LOG_1:
775 case BUILT_IN_TM_LOG_2:
776 case BUILT_IN_TM_LOG_4:
777 case BUILT_IN_TM_LOG_8:
778 case BUILT_IN_TM_LOG_FLOAT:
779 case BUILT_IN_TM_LOG_DOUBLE:
780 case BUILT_IN_TM_LOG_LDOUBLE:
781 case BUILT_IN_TM_LOG_M64:
782 case BUILT_IN_TM_LOG_M128:
783 case BUILT_IN_TM_LOG_M256:
784 return true;
785 default:
786 break;
789 return false;
792 /* Detect flags (function attributes) from the function decl or type node. */
794 int
795 flags_from_decl_or_type (const_tree exp)
797 int flags = 0;
799 if (DECL_P (exp))
801 /* The function exp may have the `malloc' attribute. */
802 if (DECL_IS_MALLOC (exp))
803 flags |= ECF_MALLOC;
805 /* The function exp may have the `returns_twice' attribute. */
806 if (DECL_IS_RETURNS_TWICE (exp))
807 flags |= ECF_RETURNS_TWICE;
809 /* Process the pure and const attributes. */
810 if (TREE_READONLY (exp))
811 flags |= ECF_CONST;
812 if (DECL_PURE_P (exp))
813 flags |= ECF_PURE;
814 if (DECL_LOOPING_CONST_OR_PURE_P (exp))
815 flags |= ECF_LOOPING_CONST_OR_PURE;
817 if (DECL_IS_NOVOPS (exp))
818 flags |= ECF_NOVOPS;
819 if (lookup_attribute ("leaf", DECL_ATTRIBUTES (exp)))
820 flags |= ECF_LEAF;
821 if (lookup_attribute ("cold", DECL_ATTRIBUTES (exp)))
822 flags |= ECF_COLD;
824 if (TREE_NOTHROW (exp))
825 flags |= ECF_NOTHROW;
827 if (flag_tm)
829 if (is_tm_builtin (exp))
830 flags |= ECF_TM_BUILTIN;
831 else if ((flags & (ECF_CONST|ECF_NOVOPS)) != 0
832 || lookup_attribute ("transaction_pure",
833 TYPE_ATTRIBUTES (TREE_TYPE (exp))))
834 flags |= ECF_TM_PURE;
837 flags = special_function_p (exp, flags);
839 else if (TYPE_P (exp))
841 if (TYPE_READONLY (exp))
842 flags |= ECF_CONST;
844 if (flag_tm
845 && ((flags & ECF_CONST) != 0
846 || lookup_attribute ("transaction_pure", TYPE_ATTRIBUTES (exp))))
847 flags |= ECF_TM_PURE;
849 else
850 gcc_unreachable ();
852 if (TREE_THIS_VOLATILE (exp))
854 flags |= ECF_NORETURN;
855 if (flags & (ECF_CONST|ECF_PURE))
856 flags |= ECF_LOOPING_CONST_OR_PURE;
859 return flags;
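/* A worked instance of the combination above: a function declared
   both "const" and "noreturn" can only run forever, so its flags
   come out as ECF_CONST | ECF_NORETURN | ECF_LOOPING_CONST_OR_PURE
   rather than plain ECF_CONST. */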
862 /* Detect flags from a CALL_EXPR. */
864 int
865 call_expr_flags (const_tree t)
867 int flags;
868 tree decl = get_callee_fndecl (t);
870 if (decl)
871 flags = flags_from_decl_or_type (decl);
872 else if (CALL_EXPR_FN (t) == NULL_TREE)
873 flags = internal_fn_flags (CALL_EXPR_IFN (t));
874 else
876 tree type = TREE_TYPE (CALL_EXPR_FN (t));
877 if (type && TREE_CODE (type) == POINTER_TYPE)
878 flags = flags_from_decl_or_type (TREE_TYPE (type));
879 else
880 flags = 0;
881 if (CALL_EXPR_BY_DESCRIPTOR (t))
882 flags |= ECF_BY_DESCRIPTOR;
885 return flags;
888 /* Return true if ARG should be passed by invisible reference. */
890 bool
891 pass_by_reference (CUMULATIVE_ARGS *ca, function_arg_info arg)
893 if (tree type = arg.type)
895 /* If this type contains non-trivial constructors, then it is
896 forbidden for the middle-end to create any new copies. */
897 if (TREE_ADDRESSABLE (type))
898 return true;
900 /* GCC post 3.4 passes *all* variable sized types by reference. */
901 if (!TYPE_SIZE (type) || !poly_int_tree_p (TYPE_SIZE (type)))
902 return true;
904 /* If a record type should be passed the same as its first (and only)
905 member, use the type and mode of that member. */
906 if (TREE_CODE (type) == RECORD_TYPE && TYPE_TRANSPARENT_AGGR (type))
908 arg.type = TREE_TYPE (first_field (type));
909 arg.mode = TYPE_MODE (arg.type);
913 return targetm.calls.pass_by_reference (pack_cumulative_args (ca), arg);
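/* Concrete instances of the rules above (illustrative only): a C++
   type with a nontrivial copy constructor is TREE_ADDRESSABLE and is
   always passed by reference; a variable-length array type has a
   non-constant TYPE_SIZE and is likewise forced by reference; and a
   TYPE_TRANSPARENT_AGGR record is passed exactly like its single
   member. */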
916 /* Return true if TYPE should be passed by reference when passed to
917 the "..." arguments of a function. */
919 bool
920 pass_va_arg_by_reference (tree type)
922 return pass_by_reference (NULL, function_arg_info (type, /*named=*/false));
925 /* Decide whether ARG, which occurs in the state described by CA,
926 should be passed by reference. Return true if so and update
927 ARG accordingly. */
929 bool
930 apply_pass_by_reference_rules (CUMULATIVE_ARGS *ca, function_arg_info &arg)
932 if (pass_by_reference (ca, arg))
934 arg.type = build_pointer_type (arg.type);
935 arg.mode = TYPE_MODE (arg.type);
936 arg.pass_by_reference = true;
937 return true;
939 return false;
942 /* Return true if ARG, which is passed by reference, should be callee
943 copied instead of caller copied. */
945 bool
946 reference_callee_copied (CUMULATIVE_ARGS *ca, const function_arg_info &arg)
948 if (arg.type && TREE_ADDRESSABLE (arg.type))
949 return false;
950 return targetm.calls.callee_copies (pack_cumulative_args (ca), arg);
954 /* Precompute all register parameters as described by ARGS, storing values
955 into fields within the ARGS array.
957 NUM_ACTUALS indicates the total number of elements in the ARGS array.
959 Set REG_PARM_SEEN if we encounter a register parameter. */
961 static void
962 precompute_register_parameters (int num_actuals, struct arg_data *args,
963 int *reg_parm_seen)
965 int i;
967 *reg_parm_seen = 0;
969 for (i = 0; i < num_actuals; i++)
970 if (args[i].reg != 0 && ! args[i].pass_on_stack)
972 *reg_parm_seen = 1;
974 if (args[i].value == 0)
976 push_temp_slots ();
977 args[i].value = expand_normal (args[i].tree_value);
978 preserve_temp_slots (args[i].value);
979 pop_temp_slots ();
982 /* If we are to promote the function arg to a wider mode,
983 do it now. */
985 if (args[i].mode != TYPE_MODE (TREE_TYPE (args[i].tree_value)))
986 args[i].value
987 = convert_modes (args[i].mode,
988 TYPE_MODE (TREE_TYPE (args[i].tree_value)),
989 args[i].value, args[i].unsignedp);
991 /* If the value is a non-legitimate constant, force it into a
992 pseudo now. TLS symbols sometimes need a call to resolve. */
993 if (CONSTANT_P (args[i].value)
994 && !targetm.legitimate_constant_p (args[i].mode, args[i].value))
995 args[i].value = force_reg (args[i].mode, args[i].value);
997 /* If we're going to have to load the value by parts, pull the
998 parts into pseudos. The part extraction process can involve
999 non-trivial computation. */
1000 if (GET_CODE (args[i].reg) == PARALLEL)
1002 tree type = TREE_TYPE (args[i].tree_value);
1003 args[i].parallel_value
1004 = emit_group_load_into_temps (args[i].reg, args[i].value,
1005 type, int_size_in_bytes (type));
1008 /* If the value is expensive, and we are inside an appropriately
1009 short loop, put the value into a pseudo and then put the pseudo
1010 into the hard reg.
1012 For small register classes, also do this if this call uses
1013 register parameters. This is to avoid reload conflicts while
1014 loading the parameter registers. */
1016 else if ((! (REG_P (args[i].value)
1017 || (GET_CODE (args[i].value) == SUBREG
1018 && REG_P (SUBREG_REG (args[i].value)))))
1019 && args[i].mode != BLKmode
1020 && (set_src_cost (args[i].value, args[i].mode,
1021 optimize_insn_for_speed_p ())
1022 > COSTS_N_INSNS (1))
1023 && ((*reg_parm_seen
1024 && targetm.small_register_classes_for_mode_p (args[i].mode))
1025 || optimize))
1026 args[i].value = copy_to_mode_reg (args[i].mode, args[i].value);
1030 #ifdef REG_PARM_STACK_SPACE
1032 /* The argument list is the property of the called routine and it
1033 may clobber it. If the fixed area has been used for previous
1034 parameters, we must save and restore it. */
1036 static rtx
1037 save_fixed_argument_area (int reg_parm_stack_space, rtx argblock, int *low_to_save, int *high_to_save)
1039 unsigned int low;
1040 unsigned int high;
1042 /* Compute the boundary of the area that needs to be saved, if any. */
1043 high = reg_parm_stack_space;
1044 if (ARGS_GROW_DOWNWARD)
1045 high += 1;
1047 if (high > highest_outgoing_arg_in_use)
1048 high = highest_outgoing_arg_in_use;
1050 for (low = 0; low < high; low++)
1051 if (stack_usage_map[low] != 0 || low >= stack_usage_watermark)
1053 int num_to_save;
1054 machine_mode save_mode;
1055 int delta;
1056 rtx addr;
1057 rtx stack_area;
1058 rtx save_area;
1060 while (stack_usage_map[--high] == 0)
1063 *low_to_save = low;
1064 *high_to_save = high;
1066 num_to_save = high - low + 1;
1068 /* If we don't have the required alignment, we must do this
1069 in BLKmode. */
1070 scalar_int_mode imode;
1071 if (int_mode_for_size (num_to_save * BITS_PER_UNIT, 1).exists (&imode)
1072 && (low & (MIN (GET_MODE_SIZE (imode),
1073 BIGGEST_ALIGNMENT / UNITS_PER_WORD) - 1)) == 0)
1074 save_mode = imode;
1075 else
1076 save_mode = BLKmode;
1078 if (ARGS_GROW_DOWNWARD)
1079 delta = -high;
1080 else
1081 delta = low;
1083 addr = plus_constant (Pmode, argblock, delta);
1084 stack_area = gen_rtx_MEM (save_mode, memory_address (save_mode, addr));
1086 set_mem_align (stack_area, PARM_BOUNDARY);
1087 if (save_mode == BLKmode)
1089 save_area = assign_stack_temp (BLKmode, num_to_save);
1090 emit_block_move (validize_mem (save_area), stack_area,
1091 GEN_INT (num_to_save), BLOCK_OP_CALL_PARM);
1093 else
1095 save_area = gen_reg_rtx (save_mode);
1096 emit_move_insn (save_area, stack_area);
1099 return save_area;
1102 return NULL_RTX;
1105 static void
1106 restore_fixed_argument_area (rtx save_area, rtx argblock, int high_to_save, int low_to_save)
1108 machine_mode save_mode = GET_MODE (save_area);
1109 int delta;
1110 rtx addr, stack_area;
1112 if (ARGS_GROW_DOWNWARD)
1113 delta = -high_to_save;
1114 else
1115 delta = low_to_save;
1117 addr = plus_constant (Pmode, argblock, delta);
1118 stack_area = gen_rtx_MEM (save_mode, memory_address (save_mode, addr));
1119 set_mem_align (stack_area, PARM_BOUNDARY);
1121 if (save_mode != BLKmode)
1122 emit_move_insn (stack_area, save_area);
1123 else
1124 emit_block_move (stack_area, validize_mem (save_area),
1125 GEN_INT (high_to_save - low_to_save + 1),
1126 BLOCK_OP_CALL_PARM);
1128 #endif /* REG_PARM_STACK_SPACE */
1130 /* If any elements in ARGS refer to parameters that are to be passed in
1131 registers, but not in memory, and whose alignment does not permit a
1132 direct copy into registers, copy the values into a group of pseudos
1133 which we will later copy into the appropriate hard registers.
1135 Pseudos for each unaligned argument will be stored into the array
1136 args[argnum].aligned_regs. The caller is responsible for deallocating
1137 the aligned_regs array if it is nonzero. */
1139 static void
1140 store_unaligned_arguments_into_pseudos (struct arg_data *args, int num_actuals)
1142 int i, j;
1144 for (i = 0; i < num_actuals; i++)
1145 if (args[i].reg != 0 && ! args[i].pass_on_stack
1146 && GET_CODE (args[i].reg) != PARALLEL
1147 && args[i].mode == BLKmode
1148 && MEM_P (args[i].value)
1149 && (MEM_ALIGN (args[i].value)
1150 < (unsigned int) MIN (BIGGEST_ALIGNMENT, BITS_PER_WORD)))
1152 int bytes = int_size_in_bytes (TREE_TYPE (args[i].tree_value));
1153 int endian_correction = 0;
1155 if (args[i].partial)
1157 gcc_assert (args[i].partial % UNITS_PER_WORD == 0);
1158 args[i].n_aligned_regs = args[i].partial / UNITS_PER_WORD;
1160 else
1162 args[i].n_aligned_regs
1163 = (bytes + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
1166 args[i].aligned_regs = XNEWVEC (rtx, args[i].n_aligned_regs);
1168 /* Structures smaller than a word are normally aligned to the
1169 least significant byte. On a BYTES_BIG_ENDIAN machine,
1170 this means we must skip the empty high order bytes when
1171 calculating the bit offset. */
1172 if (bytes < UNITS_PER_WORD
1173 #ifdef BLOCK_REG_PADDING
1174 && (BLOCK_REG_PADDING (args[i].mode,
1175 TREE_TYPE (args[i].tree_value), 1)
1176 == PAD_DOWNWARD)
1177 #else
1178 && BYTES_BIG_ENDIAN
1179 #endif
1181 endian_correction = BITS_PER_WORD - bytes * BITS_PER_UNIT;
1183 for (j = 0; j < args[i].n_aligned_regs; j++)
1185 rtx reg = gen_reg_rtx (word_mode);
1186 rtx word = operand_subword_force (args[i].value, j, BLKmode);
1187 int bitsize = MIN (bytes * BITS_PER_UNIT, BITS_PER_WORD);
1189 args[i].aligned_regs[j] = reg;
1190 word = extract_bit_field (word, bitsize, 0, 1, NULL_RTX,
1191 word_mode, word_mode, false, NULL);
1193 /* There is no need to restrict this code to loading items
1194 in TYPE_ALIGN sized hunks. The bitfield instructions can
1195 load up entire word sized registers efficiently.
1197 ??? This may not be needed anymore.
1198 We used to emit a clobber here, but that doesn't let later
1199 passes optimize the instructions we emit. By storing 0 into
1200 the register, later passes know the first AND to zero out the
1201 bitfield being set in the register is unnecessary. The store
1202 of 0 will be deleted, as will at least the first AND. */
1204 emit_move_insn (reg, const0_rtx);
1206 bytes -= bitsize / BITS_PER_UNIT;
1207 store_bit_field (reg, bitsize, endian_correction, 0, 0,
1208 word_mode, word, false);
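/* A worked instance of the endian correction above: for a 3-byte
   argument on a 32-bit big-endian (PAD_DOWNWARD) target,
   endian_correction = BITS_PER_WORD - bytes * BITS_PER_UNIT
		     = 32 - 24 = 8,
   so the store skips the 8 empty high-order bits and the three data
   bytes land aligned to the least significant byte of the word. On
   little-endian targets the correction is 0. */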
1213 /* The limit set by -Walloc-size-larger-than=. */
1214 static GTY(()) tree alloc_object_size_limit;
1216 /* Initialize ALLOC_OBJECT_SIZE_LIMIT based on the -Walloc-size-larger-than=
1217 setting if the option is specified, or to the maximum object size if it
1218 is not. Return the initialized value. */
1220 static tree
1221 alloc_max_size (void)
1223 if (alloc_object_size_limit)
1224 return alloc_object_size_limit;
1226 HOST_WIDE_INT limit = warn_alloc_size_limit;
1227 if (limit == HOST_WIDE_INT_MAX)
1228 limit = tree_to_shwi (TYPE_MAX_VALUE (ptrdiff_type_node));
1230 alloc_object_size_limit = build_int_cst (size_type_node, limit);
1232 return alloc_object_size_limit;
1235 /* Return true when EXP's range can be determined and set RANGE[] to it
1236 after adjusting it if necessary to make EXP represent a valid size
1237 of object, or a valid size argument to an allocation function declared
1238 with attribute alloc_size (whose argument may be signed), or to a string
1239 manipulation function like memset.
1240 When ALLOW_ZERO is set in FLAGS, allow returning a range of [0, 0] for
1241 a size in an anti-range [1, N] where N > PTRDIFF_MAX. A zero range is
1242 a (nearly) invalid argument to allocation functions like malloc but it
1243 is a valid argument to functions like memset.
1244 When USE_LARGEST is set in FLAGS set RANGE to the largest valid subrange
1245 in a multi-range, otherwise to the smallest valid subrange. */
1247 bool
1248 get_size_range (range_query *query, tree exp, gimple *stmt, tree range[2],
1249 int flags /* = 0 */)
1251 if (!exp)
1252 return false;
1254 if (tree_fits_uhwi_p (exp))
1256 /* EXP is a constant. */
1257 range[0] = range[1] = exp;
1258 return true;
1261 tree exptype = TREE_TYPE (exp);
1262 bool integral = INTEGRAL_TYPE_P (exptype);
1264 wide_int min, max;
1265 enum value_range_kind range_type;
1267 if (integral)
1269 value_range vr;
1270 if (query && query->range_of_expr (vr, exp, stmt))
1272 range_type = vr.kind ();
1273 if (!vr.undefined_p ())
1275 min = wi::to_wide (vr.min ());
1276 max = wi::to_wide (vr.max ());
1279 else
1280 range_type = determine_value_range (exp, &min, &max);
1283 else
1284 range_type = VR_VARYING;
1286 if (range_type == VR_VARYING)
1288 if (integral)
1290 /* Use the full range of the type of the expression when
1291 no value range information is available. */
1292 range[0] = TYPE_MIN_VALUE (exptype);
1293 range[1] = TYPE_MAX_VALUE (exptype);
1294 return true;
1297 range[0] = NULL_TREE;
1298 range[1] = NULL_TREE;
1299 return false;
1302 unsigned expprec = TYPE_PRECISION (exptype);
1304 bool signed_p = !TYPE_UNSIGNED (exptype);
1306 if (range_type == VR_ANTI_RANGE)
1308 if (signed_p)
1310 if (wi::les_p (max, 0))
1312 /* EXP is not in a strictly negative range. That means
1313 it must be in some (not necessarily strictly) positive
1314 range which includes zero. Since in signed to unsigned
1315 conversions negative values end up converted to large
1316 positive values, and otherwise they are not valid sizes,
1317 the resulting range is in both cases [0, TYPE_MAX]. */
1318 min = wi::zero (expprec);
1319 max = wi::to_wide (TYPE_MAX_VALUE (exptype));
1321 else if (wi::les_p (min - 1, 0))
1323 /* EXP is not in a negative-positive range. That means EXP
1324 is either negative, or greater than max. Since negative
1325 sizes are invalid make the range [MAX + 1, TYPE_MAX]. */
1326 min = max + 1;
1327 max = wi::to_wide (TYPE_MAX_VALUE (exptype));
1329 else
1331 max = min - 1;
1332 min = wi::zero (expprec);
1335 else
1337 wide_int maxsize = wi::to_wide (max_object_size ());
1338 min = wide_int::from (min, maxsize.get_precision (), UNSIGNED);
1339 max = wide_int::from (max, maxsize.get_precision (), UNSIGNED);
1340 if (wi::eq_p (0, min - 1))
1342 /* EXP is unsigned and not in the range [1, MAX]. That means
1343 it's either zero or greater than MAX. Even though 0 would
1344 normally be detected by -Walloc-zero, unless ALLOW_ZERO
1345 is set, set the range to [MAX, TYPE_MAX] so that when MAX
1346 is greater than the limit the whole range is diagnosed. */
1347 wide_int maxsize = wi::to_wide (max_object_size ());
1348 if (flags & SR_ALLOW_ZERO)
1350 if (wi::leu_p (maxsize, max + 1)
1351 || !(flags & SR_USE_LARGEST))
1352 min = max = wi::zero (expprec);
1353 else
1355 min = max + 1;
1356 max = wi::to_wide (TYPE_MAX_VALUE (exptype));
1359 else
1361 min = max + 1;
1362 max = wi::to_wide (TYPE_MAX_VALUE (exptype));
1365 else if ((flags & SR_USE_LARGEST)
1366 && wi::ltu_p (max + 1, maxsize))
1368 /* When USE_LARGEST is set and the larger of the two subranges
1369 is a valid size, use it... */
1370 min = max + 1;
1371 max = maxsize;
1373 else
1375 /* ...otherwise use the smaller subrange. */
1376 max = min - 1;
1377 min = wi::zero (expprec);
1382 range[0] = wide_int_to_tree (exptype, min);
1383 range[1] = wide_int_to_tree (exptype, max);
1385 return true;
1388 bool
1389 get_size_range (tree exp, tree range[2], int flags /* = 0 */)
1391 return get_size_range (/*query=*/NULL, exp, /*stmt=*/NULL, range, flags);
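/* A minimal usage sketch for the wrapper above (hypothetical caller;
   LIMIT is assumed to be an INTEGER_CST): when the range is known,
   RANGE[] holds INTEGER_CST bounds of EXP's type, so they can be
   compared directly with tree_int_cst_lt. */

static bool
example_size_maybe_exceeds (tree exp, tree limit)
{
  tree range[2];
  if (!get_size_range (exp, range))
    return true;	/* Unknown range: assume the worst. */
  return tree_int_cst_lt (limit, range[1]);
}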
1394 /* Diagnose a call EXP to function FN decorated with attribute alloc_size
1395 whose argument numbers given by IDX with values given by ARGS exceed
1396 the maximum object size or cause an unsigned overflow (wrapping) when
1397 multiplied. FN is null when EXP is a call via a function pointer.
1398 When ARGS[0] is null the function does nothing. ARGS[1] may be null
1399 for functions like malloc, and non-null for those like calloc that
1400 are decorated with a two-argument attribute alloc_size. */
1402 void
1403 maybe_warn_alloc_args_overflow (tree fn, tree exp, tree args[2], int idx[2])
1405 /* The range each of the (up to) two arguments is known to be in. */
1406 tree argrange[2][2] = { { NULL_TREE, NULL_TREE }, { NULL_TREE, NULL_TREE } };
1408 /* Maximum object size set by -Walloc-size-larger-than= or SIZE_MAX / 2. */
1409 tree maxobjsize = alloc_max_size ();
1411 location_t loc = EXPR_LOCATION (exp);
1413 tree fntype = fn ? TREE_TYPE (fn) : TREE_TYPE (TREE_TYPE (exp));
1414 bool warned = false;
1416 /* Validate each argument individually. */
1417 for (unsigned i = 0; i != 2 && args[i]; ++i)
1419 if (TREE_CODE (args[i]) == INTEGER_CST)
1421 argrange[i][0] = args[i];
1422 argrange[i][1] = args[i];
1424 if (tree_int_cst_lt (args[i], integer_zero_node))
1426 warned = warning_at (loc, OPT_Walloc_size_larger_than_,
1427 "%Kargument %i value %qE is negative",
1428 exp, idx[i] + 1, args[i]);
1430 else if (integer_zerop (args[i]))
1432 /* Avoid issuing -Walloc-zero for allocation functions other
1433 than __builtin_alloca that are declared with attribute
1434 returns_nonnull because there's no portability risk. This
1435 avoids warning for such calls to libiberty's xmalloc and
1436 friends.
1437 Also avoid issuing the warning for calls to a function named
1438 "alloca". */
1439 if (fn && fndecl_built_in_p (fn, BUILT_IN_ALLOCA)
1440 ? IDENTIFIER_LENGTH (DECL_NAME (fn)) != 6
1441 : !lookup_attribute ("returns_nonnull",
1442 TYPE_ATTRIBUTES (fntype)))
1443 warned = warning_at (loc, OPT_Walloc_zero,
1444 "%Kargument %i value is zero",
1445 exp, idx[i] + 1);
1447 else if (tree_int_cst_lt (maxobjsize, args[i]))
1449 /* G++ emits calls to ::operator new[](SIZE_MAX) in C++98
1450 mode and with -fno-exceptions as a way to indicate array
1451 size overflow. There's no good way to detect C++98 here
1452 so avoid diagnosing these calls for all C++ modes. */
1453 if (i == 0
1454 && fn
1455 && !args[1]
1456 && lang_GNU_CXX ()
1457 && DECL_IS_OPERATOR_NEW_P (fn)
1458 && integer_all_onesp (args[i]))
1459 continue;
1461 warned = warning_at (loc, OPT_Walloc_size_larger_than_,
1462 "%Kargument %i value %qE exceeds "
1463 "maximum object size %E",
1464 exp, idx[i] + 1, args[i], maxobjsize);
1467 else if (TREE_CODE (args[i]) == SSA_NAME
1468 && get_size_range (args[i], argrange[i]))
1470 /* Verify that the argument's range is not negative (including
1471 upper bound of zero). */
1472 if (tree_int_cst_lt (argrange[i][0], integer_zero_node)
1473 && tree_int_cst_le (argrange[i][1], integer_zero_node))
1475 warned = warning_at (loc, OPT_Walloc_size_larger_than_,
1476 "%Kargument %i range [%E, %E] is negative",
1477 exp, idx[i] + 1,
1478 argrange[i][0], argrange[i][1]);
1480 else if (tree_int_cst_lt (maxobjsize, argrange[i][0]))
1482 warned = warning_at (loc, OPT_Walloc_size_larger_than_,
1483 "%Kargument %i range [%E, %E] exceeds "
1484 "maximum object size %E",
1485 exp, idx[i] + 1,
1486 argrange[i][0], argrange[i][1],
1487 maxobjsize);
1492 if (!argrange[0])
1493 return;
1495 /* For a two-argument alloc_size, validate the product of the two
1496 arguments if both of their values or ranges are known. */
1497 if (!warned && tree_fits_uhwi_p (argrange[0][0])
1498 && argrange[1][0] && tree_fits_uhwi_p (argrange[1][0])
1499 && !integer_onep (argrange[0][0])
1500 && !integer_onep (argrange[1][0]))
1502 /* Check for overflow in the product of a function decorated with
1503 attribute alloc_size (X, Y). */
1504 unsigned szprec = TYPE_PRECISION (size_type_node);
1505 wide_int x = wi::to_wide (argrange[0][0], szprec);
1506 wide_int y = wi::to_wide (argrange[1][0], szprec);
1508 wi::overflow_type vflow;
1509 wide_int prod = wi::umul (x, y, &vflow);
1511 if (vflow)
1512 warned = warning_at (loc, OPT_Walloc_size_larger_than_,
1513 "%Kproduct %<%E * %E%> of arguments %i and %i "
1514 "exceeds %<SIZE_MAX%>",
1515 exp, argrange[0][0], argrange[1][0],
1516 idx[0] + 1, idx[1] + 1);
1517 else if (wi::ltu_p (wi::to_wide (maxobjsize, szprec), prod))
1518 warned = warning_at (loc, OPT_Walloc_size_larger_than_,
1519 "%Kproduct %<%E * %E%> of arguments %i and %i "
1520 "exceeds maximum object size %E",
1521 exp, argrange[0][0], argrange[1][0],
1522 idx[0] + 1, idx[1] + 1,
1523 maxobjsize);
1525 if (warned)
1527 /* Print the full range of each of the two arguments to make
1528 it clear when it is, in fact, in a range and not constant. */
1529 if (argrange[0][0] != argrange [0][1])
1530 inform (loc, "argument %i in the range [%E, %E]",
1531 idx[0] + 1, argrange[0][0], argrange[0][1]);
1532 if (argrange[1][0] != argrange [1][1])
1533 inform (loc, "argument %i in the range [%E, %E]",
1534 idx[1] + 1, argrange[1][0], argrange[1][1]);
1538 if (warned && fn)
1540 location_t fnloc = DECL_SOURCE_LOCATION (fn);
1542 if (DECL_IS_BUILTIN (fn))
1543 inform (loc,
1544 "in a call to built-in allocation function %qD", fn);
1545 else
1546 inform (fnloc,
1547 "in a call to allocation function %qD declared here", fn);
1551 /* If EXPR refers to a character array or pointer declared with attribute
1552 nonstring, return a decl for that array or pointer and set *REF to
1553 the referenced enclosing object or pointer. Otherwise returns
1554 null. */
1556 tree
1557 get_attr_nonstring_decl (tree expr, tree *ref)
1559 tree decl = expr;
1560 tree var = NULL_TREE;
1561 if (TREE_CODE (decl) == SSA_NAME)
1563 gimple *def = SSA_NAME_DEF_STMT (decl);
1565 if (is_gimple_assign (def))
1567 tree_code code = gimple_assign_rhs_code (def);
1568 if (code == ADDR_EXPR
1569 || code == COMPONENT_REF
1570 || code == VAR_DECL)
1571 decl = gimple_assign_rhs1 (def);
1573 else
1574 var = SSA_NAME_VAR (decl);
1577 if (TREE_CODE (decl) == ADDR_EXPR)
1578 decl = TREE_OPERAND (decl, 0);
1580 /* To simplify calling code, store the referenced DECL regardless of
1581 the attribute determined below, but avoid storing the SSA_NAME_VAR
1582 obtained above (it's not useful for dataflow purposes). */
1583 if (ref)
1584 *ref = decl;
1586 /* Use the SSA_NAME_VAR that was determined above to see if it's
1587 declared nonstring. Otherwise drill down into the referenced
1588 DECL. */
1589 if (var)
1590 decl = var;
1591 else if (TREE_CODE (decl) == ARRAY_REF)
1592 decl = TREE_OPERAND (decl, 0);
1593 else if (TREE_CODE (decl) == COMPONENT_REF)
1594 decl = TREE_OPERAND (decl, 1);
1595 else if (TREE_CODE (decl) == MEM_REF)
1596 return get_attr_nonstring_decl (TREE_OPERAND (decl, 0), ref);
1598 if (DECL_P (decl)
1599 && lookup_attribute ("nonstring", DECL_ATTRIBUTES (decl)))
1600 return decl;
1602 return NULL_TREE;
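/* For reference, the kind of declaration the lookup above matches
   (the attribute is documented GCC syntax; the variable name is just
   an example):

     char fixed_id[8] __attribute__ ((nonstring));

   A strncpy that may leave FIXED_ID unterminated is then accepted
   without warning, while passing it to a function that expects a
   nul-terminated string draws -Wstringop-overread (see
   maybe_warn_nonstring_arg below). */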
1605 /* Warn about passing a non-string array/pointer to a built-in function
1606 that expects a nul-terminated string argument. Returns true if
1607 a warning has been issued. */
1609 bool
1610 maybe_warn_nonstring_arg (tree fndecl, tree exp)
1612 if (!fndecl || !fndecl_built_in_p (fndecl, BUILT_IN_NORMAL))
1613 return false;
1615 if (TREE_NO_WARNING (exp) || !warn_stringop_overread)
1616 return false;
1618 /* Avoid clearly invalid calls (more checking done below). */
1619 unsigned nargs = call_expr_nargs (exp);
1620 if (!nargs)
1621 return false;
1623 /* The bound argument to a bounded string function like strncpy. */
1624 tree bound = NULL_TREE;
1626 /* The longest known or possible string argument to one of the comparison
1627 functions. If the length is less than the bound it is used instead.
1628 Since the length is only used for warning and not for code generation
1629 disable strict mode in the calls to get_range_strlen below. */
1630 tree maxlen = NULL_TREE;
1632 /* It's safe to call "bounded" string functions with a non-string
1633 argument since the functions provide an explicit bound for this
1634 purpose. The exception is strncat where the bound may refer to
1635 either the destination or the source. */
1636 int fncode = DECL_FUNCTION_CODE (fndecl);
1637 switch (fncode)
1639 case BUILT_IN_STRCMP:
1640 case BUILT_IN_STRNCMP:
1641 case BUILT_IN_STRNCASECMP:
1643 /* For these, if one argument refers to one or more of a set
1644 of string constants or arrays of known size, determine
1645 the range of their known or possible lengths and use it
1646 conservatively as the bound for the unbounded function,
1647 and to adjust the range of the bound of the bounded ones. */
1648 for (unsigned argno = 0;
1649 argno < MIN (nargs, 2)
1650 && !(maxlen && TREE_CODE (maxlen) == INTEGER_CST); argno++)
1652 tree arg = CALL_EXPR_ARG (exp, argno);
1653 if (!get_attr_nonstring_decl (arg))
1655 c_strlen_data lendata = { };
1656 /* Set MAXBOUND to an arbitrary non-null non-integer
1657 node as a request to have it set to the length of
1658 the longest string in a PHI. */
1659 lendata.maxbound = arg;
1660 get_range_strlen (arg, &lendata, /* eltsize = */ 1);
1661 maxlen = lendata.maxbound;
1665 /* Fall through. */
1667 case BUILT_IN_STRNCAT:
1668 case BUILT_IN_STPNCPY:
1669 case BUILT_IN_STRNCPY:
1670 if (nargs > 2)
1671 bound = CALL_EXPR_ARG (exp, 2);
1672 break;
1674 case BUILT_IN_STRNDUP:
1675 if (nargs > 1)
1676 bound = CALL_EXPR_ARG (exp, 1);
1677 break;
1679 case BUILT_IN_STRNLEN:
1681 tree arg = CALL_EXPR_ARG (exp, 0);
1682 if (!get_attr_nonstring_decl (arg))
1684 c_strlen_data lendata = { };
1685 /* Set MAXBOUND to an arbitrary non-null non-integer
1686 node as a request to have it set to the length of
1687 the longest string in a PHI. */
1688 lendata.maxbound = arg;
1689 get_range_strlen (arg, &lendata, /* eltsize = */ 1);
1690 maxlen = lendata.maxbound;
1692 if (nargs > 1)
1693 bound = CALL_EXPR_ARG (exp, 1);
1694 break;
1697 default:
1698 break;
1701 /* Determine the range of the bound argument (if specified). */
1702 tree bndrng[2] = { NULL_TREE, NULL_TREE };
1703 if (bound)
1705 STRIP_NOPS (bound);
1706 get_size_range (bound, bndrng);
1709 location_t loc = EXPR_LOCATION (exp);
1711 if (bndrng[0])
1713 /* Diagnose excessive bound prior to the adjustment below and
1714 regardless of attribute nonstring. */
1715 tree maxobjsize = max_object_size ();
1716 if (tree_int_cst_lt (maxobjsize, bndrng[0]))
1718 bool warned = false;
1719 if (tree_int_cst_equal (bndrng[0], bndrng[1]))
1720 warned = warning_at (loc, OPT_Wstringop_overread,
1721 "%K%qD specified bound %E "
1722 "exceeds maximum object size %E",
1723 exp, fndecl, bndrng[0], maxobjsize);
1724 else
1725 warned = warning_at (loc, OPT_Wstringop_overread,
1726 "%K%qD specified bound [%E, %E] "
1727 "exceeds maximum object size %E",
1728 exp, fndecl, bndrng[0], bndrng[1],
1729 maxobjsize);
1730 if (warned)
1731 TREE_NO_WARNING (exp) = true;
1733 return warned;
1737 if (maxlen && !integer_all_onesp (maxlen))
1739 /* Add one for the nul. */
1740 maxlen = const_binop (PLUS_EXPR, TREE_TYPE (maxlen), maxlen,
1741 size_one_node);
1743 if (!bndrng[0])
1745 /* Conservatively use the upper bound of the lengths for
1746 both the lower and the upper bound of the operation. */
1747 bndrng[0] = maxlen;
1748 bndrng[1] = maxlen;
1749 bound = void_type_node;
1751 else if (maxlen)
1753 /* Replace the bound on the operation with the upper bound
1754 of the length of the string if the latter is smaller. */
1755 if (tree_int_cst_lt (maxlen, bndrng[0]))
1756 bndrng[0] = maxlen;
1757 else if (tree_int_cst_lt (maxlen, bndrng[1]))
1758 bndrng[1] = maxlen;
1762 bool any_arg_warned = false;
1763 /* Iterate over the built-in function's formal arguments and check
1764 each const char* against the actual argument. If the actual
1765 argument is declared attribute non-string issue a warning unless
1766 the argument's maximum length is bounded. */
1767 function_args_iterator it;
1768 function_args_iter_init (&it, TREE_TYPE (fndecl));
1770 for (unsigned argno = 0; ; ++argno, function_args_iter_next (&it))
1772 /* Avoid iterating past the declared argument in a call
1773 to a function declared without a prototype. */
1774 if (argno >= nargs)
1775 break;
1777 tree argtype = function_args_iter_cond (&it);
1778 if (!argtype)
1779 break;
1781 if (TREE_CODE (argtype) != POINTER_TYPE)
1782 continue;
1784 argtype = TREE_TYPE (argtype);
1786 if (TREE_CODE (argtype) != INTEGER_TYPE
1787 || !TYPE_READONLY (argtype))
1788 continue;
1790 argtype = TYPE_MAIN_VARIANT (argtype);
1791 if (argtype != char_type_node)
1792 continue;
1794 tree callarg = CALL_EXPR_ARG (exp, argno);
1795 if (TREE_CODE (callarg) == ADDR_EXPR)
1796 callarg = TREE_OPERAND (callarg, 0);
1798 /* See if the destination is declared with attribute "nonstring". */
1799 tree decl = get_attr_nonstring_decl (callarg);
1800 if (!decl)
1801 continue;
1803 /* The maximum number of array elements accessed. */
1804 offset_int wibnd = 0;
1806 if (argno && fncode == BUILT_IN_STRNCAT)
1808 /* See if the bound in strncat is derived from the strlen
1809 of the destination (as it's expected to be).
1810 If so, reset BOUND and FNCODE to trigger a warning. */
1811 tree dstarg = CALL_EXPR_ARG (exp, 0);
1812 if (is_strlen_related_p (dstarg, bound))
1814 /* The bound applies to the destination, not to the source,
1815 so reset these to trigger a warning without mentioning
1816 the bound. */
1817 bound = NULL;
1818 fncode = 0;
1820 else if (bndrng[1])
1821 /* Use the upper bound of the range for strncat. */
1822 wibnd = wi::to_offset (bndrng[1]);
1824 else if (bndrng[0])
1825 /* Use the lower bound of the range for functions other than
1826 strncat. */
1827 wibnd = wi::to_offset (bndrng[0]);
1829 /* Determine the size of the argument array if it is one. */
1830 offset_int asize = wibnd;
1831 bool known_size = false;
1832 tree type = TREE_TYPE (decl);
1834 /* Determine the array size. For arrays of unknown bound and
1835 pointers reset BOUND to trigger the appropriate warning. */
1836 if (TREE_CODE (type) == ARRAY_TYPE)
1838 if (tree arrbnd = TYPE_DOMAIN (type))
1840 if ((arrbnd = TYPE_MAX_VALUE (arrbnd)))
1842 asize = wi::to_offset (arrbnd) + 1;
1843 known_size = true;
1846 else if (bound == void_type_node)
1847 bound = NULL_TREE;
1849 else if (bound == void_type_node)
1850 bound = NULL_TREE;
1852 /* In a call to strncat with a bound in a range whose lower but
1853 not upper bound is less than the array size, reset ASIZE to
1854 be the same as the bound, and clear BOUND and FNCODE, to trigger
1855 the appropriate warning below. */
1856 if (fncode == BUILT_IN_STRNCAT
1857 && bndrng[0] != bndrng[1]
1858 && wi::ltu_p (wi::to_offset (bndrng[0]), asize)
1859 && (!known_size
1860 || wi::ltu_p (asize, wibnd)))
1862 asize = wibnd;
1863 bound = NULL_TREE;
1864 fncode = 0;
1867 bool warned = false;
1869 auto_diagnostic_group d;
1870 if (wi::ltu_p (asize, wibnd))
1872 if (bndrng[0] == bndrng[1])
1873 warned = warning_at (loc, OPT_Wstringop_overread,
1874 "%qD argument %i declared attribute "
1875 "%<nonstring%> is smaller than the specified "
1876 "bound %wu",
1877 fndecl, argno + 1, wibnd.to_uhwi ());
1878 else if (wi::ltu_p (asize, wi::to_offset (bndrng[0])))
1879 warned = warning_at (loc, OPT_Wstringop_overread,
1880 "%qD argument %i declared attribute "
1881 "%<nonstring%> is smaller than "
1882 "the specified bound [%E, %E]",
1883 fndecl, argno + 1, bndrng[0], bndrng[1]);
1884 else
1885 warned = warning_at (loc, OPT_Wstringop_overread,
1886 "%qD argument %i declared attribute "
1887 "%<nonstring%> may be smaller than "
1888 "the specified bound [%E, %E]",
1889 fndecl, argno + 1, bndrng[0], bndrng[1]);
1891 else if (fncode == BUILT_IN_STRNCAT)
1892 ; /* Avoid warning for calls to strncat() when the bound
1893 is equal to the size of the non-string argument. */
1894 else if (!bound)
1895 warned = warning_at (loc, OPT_Wstringop_overread,
1896 "%qD argument %i declared attribute %<nonstring%>",
1897 fndecl, argno + 1);
1899 if (warned)
1901 inform (DECL_SOURCE_LOCATION (decl),
1902 "argument %qD declared here", decl);
1903 any_arg_warned = true;
1907 if (any_arg_warned)
1908 TREE_NO_WARNING (exp) = true;
1910 return any_arg_warned;
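/* Editorial note (not in the original source): a minimal sketch of code
   the checks above diagnose, assuming the usual <string.h> declaration
   of strncpy:

     char src[3] __attribute__ ((nonstring));

     void f (char *d)
     {
       strncpy (d, src, 4);   // reads up to 4 bytes from 3-byte SRC
     }

   The second strncpy parameter has type const char*, SRC is declared
   with attribute nonstring, and its size (3) is smaller than the
   specified bound (4), so the loop above issues -Wstringop-overread.  */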
1913 /* Issue an error if CALL_EXPR was flagged as requiring
1914 tail-call optimization. */
1916 static void
1917 maybe_complain_about_tail_call (tree call_expr, const char *reason)
1919 gcc_assert (TREE_CODE (call_expr) == CALL_EXPR);
1920 if (!CALL_EXPR_MUST_TAIL_CALL (call_expr))
1921 return;
1923 error_at (EXPR_LOCATION (call_expr), "cannot tail-call: %s", reason);
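/* Editorial note (illustrative): when a call flagged with
   CALL_EXPR_MUST_TAIL_CALL fails one of the checks in
   can_implement_as_sibling_call_p below, this produces e.g.

     error: cannot tail-call: callee returns twice  */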
1926 /* Returns the type of the argument ARGNO to function with type FNTYPE
1927 or null when the type cannot be determined or no such argument exists. */
1929 static tree
1930 fntype_argno_type (tree fntype, unsigned argno)
1932 if (!prototype_p (fntype))
1933 return NULL_TREE;
1935 tree argtype;
1936 function_args_iterator it;
1937 FOREACH_FUNCTION_ARGS (fntype, argtype, it)
1938 if (argno-- == 0)
1939 return argtype;
1941 return NULL_TREE;
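/* Editorial note (illustrative, hypothetical declaration): for a
   prototyped function such as

     void f (int, const char *);

   fntype_argno_type (TREE_TYPE (f), 1) yields the const char* type,
   while any ARGNO for an unprototyped function yields NULL_TREE.  */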
1944 /* Helper to append the "human readable" attribute access specification
1945 described by ACCESS to the array ATTRSTR with size STRSIZE. Used in
1946 diagnostics. */
1948 static inline void
1949 append_attrname (const std::pair<int, attr_access> &access,
1950 char *attrstr, size_t strsize)
1952 if (access.second.internal_p)
1953 return;
1955 tree str = access.second.to_external_string ();
1956 gcc_assert (strsize >= (size_t) TREE_STRING_LENGTH (str));
1957 strcpy (attrstr, TREE_STRING_POINTER (str));
1960 /* Iterate over attribute access read-only, read-write, and write-only
1961 arguments and diagnose past-the-end accesses and related problems
1962 in the function call EXP. */
1964 static void
1965 maybe_warn_rdwr_sizes (rdwr_map *rwm, tree fndecl, tree fntype, tree exp)
1967 auto_diagnostic_group adg;
1969 /* Set if a warning has been issued for any argument (used to decide
1970 whether to emit an informational note at the end). */
1971 bool any_warned = false;
1973 /* A string describing the attributes that the warnings issued by this
1974 function apply to. Used to print one informational note per function
1975 call, rather than one per warning. That reduces clutter. */
1976 char attrstr[80];
1977 attrstr[0] = 0;
1979 for (rdwr_map::iterator it = rwm->begin (); it != rwm->end (); ++it)
1981 std::pair<int, attr_access> access = *it;
1983 /* Get the function call arguments corresponding to the attribute's
1984 positional arguments. When both arguments have been specified
1985 there will be two entries in *RWM, one for each. They are
1986 cross-referenced by their respective argument numbers in
1987 ACCESS.PTRARG and ACCESS.SIZARG. */
1988 const int ptridx = access.second.ptrarg;
1989 const int sizidx = access.second.sizarg;
1991 gcc_assert (ptridx != -1);
1992 gcc_assert (access.first == ptridx || access.first == sizidx);
1994 /* The pointer is set to null for the entry corresponding to
1995 the size argument. Skip it. It's handled when the entry
1996 corresponding to the pointer argument comes up. */
1997 if (!access.second.ptr)
1998 continue;
2000 tree ptrtype = fntype_argno_type (fntype, ptridx);
2001 tree argtype = TREE_TYPE (ptrtype);
2003 /* The size of the access by the call. */
2004 tree access_size;
2005 if (sizidx == -1)
2007 /* If only the pointer attribute operand was specified but
2008 not the size, set SIZE to the greater of MINSIZE or the size
2009 of one element of the pointed-to type to detect smaller
2010 objects (null pointers are diagnosed in this case only
2011 if the pointer is also declared with attribute nonnull). */
2012 if (access.second.minsize
2013 && access.second.minsize != HOST_WIDE_INT_M1U)
2014 access_size = build_int_cstu (sizetype, access.second.minsize);
2015 else
2016 access_size = size_one_node;
2018 else
2019 access_size = rwm->get (sizidx)->size;
2021 /* Format the value or range to avoid an explosion of messages. */
2022 char sizstr[80];
2023 tree sizrng[2] = { size_zero_node, build_all_ones_cst (sizetype) };
2024 if (get_size_range (access_size, sizrng, true))
2026 const char *s0 = print_generic_expr_to_str (sizrng[0]);
2027 if (tree_int_cst_equal (sizrng[0], sizrng[1]))
2029 gcc_checking_assert (strlen (s0) < sizeof sizstr);
2030 strcpy (sizstr, s0);
2032 else
2034 const char *s1 = print_generic_expr_to_str (sizrng[1]);
2035 gcc_checking_assert (strlen (s0) + strlen (s1)
2036 < sizeof sizstr - 4);
2037 sprintf (sizstr, "[%s, %s]", s0, s1);
2040 else
2041 *sizstr = '\0';
2043 /* Set if a warning has been issued for the current argument. */
2044 bool arg_warned = false;
2045 location_t loc = EXPR_LOCATION (exp);
2046 tree ptr = access.second.ptr;
2047 if (*sizstr
2048 && tree_int_cst_sgn (sizrng[0]) < 0
2049 && tree_int_cst_sgn (sizrng[1]) < 0)
2051 /* Warn about negative sizes. */
2052 if (access.second.internal_p)
2054 const std::string argtypestr
2055 = access.second.array_as_string (ptrtype);
2057 arg_warned = warning_at (loc, OPT_Wstringop_overflow_,
2058 "%Kbound argument %i value %s is "
2059 "negative for a variable length array "
2060 "argument %i of type %s",
2061 exp, sizidx + 1, sizstr,
2062 ptridx + 1, argtypestr.c_str ());
2064 else
2065 arg_warned = warning_at (loc, OPT_Wstringop_overflow_,
2066 "%Kargument %i value %s is negative",
2067 exp, sizidx + 1, sizstr);
2069 if (arg_warned)
2071 append_attrname (access, attrstr, sizeof attrstr);
2072 /* Remember a warning has been issued and avoid warning
2073 again below for the same attribute. */
2074 any_warned = true;
2075 continue;
2079 if (tree_int_cst_sgn (sizrng[0]) >= 0)
2081 if (COMPLETE_TYPE_P (argtype))
2083 /* Multiply ACCESS_SIZE by the size of the type the pointer
2084 argument points to. If it's incomplete the size is used
2085 as is. */
2086 if (tree argsize = TYPE_SIZE_UNIT (argtype))
2087 if (TREE_CODE (argsize) == INTEGER_CST)
2089 const int prec = TYPE_PRECISION (sizetype);
2090 wide_int minsize = wi::to_wide (sizrng[0], prec);
2091 minsize *= wi::to_wide (argsize, prec);
2092 access_size = wide_int_to_tree (sizetype, minsize);
2096 else
2097 access_size = NULL_TREE;
2099 if (integer_zerop (ptr))
2101 if (sizidx >= 0 && tree_int_cst_sgn (sizrng[0]) > 0)
2103 /* Warn about null pointers with positive sizes. This is
2104 different from also declaring the pointer argument with
2105 attribute nonnull when the function accepts null pointers
2106 only when the corresponding size is zero. */
2107 if (access.second.internal_p)
2109 const std::string argtypestr
2110 = access.second.array_as_string (ptrtype);
2112 arg_warned = warning_at (loc, OPT_Wnonnull,
2113 "%Kargument %i of variable length "
2114 "array %s is null but "
2115 "the corresponding bound argument "
2116 "%i value is %s",
2117 exp, sizidx + 1, argtypestr.c_str (),
2118 ptridx + 1, sizstr);
2120 else
2121 arg_warned = warning_at (loc, OPT_Wnonnull,
2122 "%Kargument %i is null but "
2123 "the corresponding size argument "
2124 "%i value is %s",
2125 exp, ptridx + 1, sizidx + 1,
2126 sizstr);
2128 else if (access_size && access.second.static_p)
2130 /* Warn about null pointers for [static N] array arguments
2131 but do not warn for ordinary (i.e., nonstatic) arrays. */
2132 arg_warned = warning_at (loc, OPT_Wnonnull,
2133 "%Kargument %i to %<%T[static %E]%> "
2134 "is null where non-null expected",
2135 exp, ptridx + 1, argtype,
2136 access_size);
2139 if (arg_warned)
2141 append_attrname (access, attrstr, sizeof attrstr);
2142 /* Remember a warning has been issued and avoid warning
2143 again below for the same attribute. */
2144 any_warned = true;
2145 continue;
2149 access_data data (ptr, access.second.mode, NULL_TREE, false,
2150 NULL_TREE, false);
2151 access_ref* const pobj = (access.second.mode == access_write_only
2152 ? &data.dst : &data.src);
2153 tree objsize = compute_objsize (ptr, 1, pobj);
2155 /* The size of the destination or source object. */
2156 tree dstsize = NULL_TREE, srcsize = NULL_TREE;
2157 if (access.second.mode == access_read_only
2158 || access.second.mode == access_none)
2160 /* For a read-only argument there is no destination. For
2161 no access, set the source as well and differentiate via
2162 the access flag below. */
2163 srcsize = objsize;
2164 if (access.second.mode == access_read_only
2165 || access.second.mode == access_none)
2167 /* For a read-only attribute there is no destination so
2168 clear OBJSIZE. This emits "reading N bytes" kind of
2169 diagnostics instead of the "writing N bytes" kind,
2170 unless MODE is none. */
2171 objsize = NULL_TREE;
2174 else
2175 dstsize = objsize;
2177 /* Clear the no-warning bit in case it was set by check_access
2178 in a prior iteration so that accesses via different arguments
2179 are diagnosed. */
2180 TREE_NO_WARNING (exp) = false;
2181 access_mode mode = data.mode;
2182 if (mode == access_deferred)
2183 mode = TYPE_READONLY (argtype) ? access_read_only : access_read_write;
2184 check_access (exp, access_size, /*maxread=*/ NULL_TREE, srcsize,
2185 dstsize, mode, &data);
2187 if (TREE_NO_WARNING (exp))
2189 any_warned = true;
2191 if (access.second.internal_p)
2192 inform (loc, "referencing argument %u of type %qT",
2193 ptridx + 1, ptrtype);
2194 else
2195 /* If check_access issued a warning above, append the relevant
2196 attribute to the string. */
2197 append_attrname (access, attrstr, sizeof attrstr);
2201 if (*attrstr)
2203 if (fndecl)
2204 inform (DECL_SOURCE_LOCATION (fndecl),
2205 "in a call to function %qD declared with attribute %qs",
2206 fndecl, attrstr);
2207 else
2208 inform (EXPR_LOCATION (exp),
2209 "in a call with type %qT and attribute %qs",
2210 fntype, attrstr);
2212 else if (any_warned)
2214 if (fndecl)
2215 inform (DECL_SOURCE_LOCATION (fndecl),
2216 "in a call to function %qD", fndecl);
2217 else
2218 inform (EXPR_LOCATION (exp),
2219 "in a call with type %qT", fntype);
2222 /* Set the bit in case it was cleared and not set above. */
2223 TREE_NO_WARNING (exp) = true;
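/* Editorial note (not in the original source): a hypothetical
   declaration exercising the checks above:

     void fill (void *dst, int n)
       __attribute__ ((access (write_only, 1, 2)));

     char buf[4];
     fill ((void *) 0, 4);   // -Wnonnull: null DST with positive N
     fill (buf, -1);         // -Wstringop-overflow: negative size

   The informational note naming the attribute is then emitted once
   for the whole call rather than once per warning.  */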
2226 /* Fill in ARGS_SIZE and ARGS array based on the parameters found in
2227 CALL_EXPR EXP.
2229 NUM_ACTUALS is the total number of parameters.
2231 N_NAMED_ARGS is the total number of named arguments.
2233 STRUCT_VALUE_ADDR_VALUE is the implicit argument for a struct return
2234 value, or null.
2236 FNDECL is the tree code for the target of this call (if known)
2238 ARGS_SO_FAR holds state needed by the target to know where to place
2239 the next argument.
2241 REG_PARM_STACK_SPACE is the number of bytes of stack space reserved
2242 for arguments which are passed in registers.
2244 OLD_STACK_LEVEL is a pointer to an rtx which holds the old stack level
2245 and may be modified by this routine.
2247 OLD_PENDING_ADJ, MUST_PREALLOCATE and FLAGS are pointers to integer
2248 flags which may be modified by this routine.
2250 MAY_TAILCALL is cleared if we encounter an invisible pass-by-reference
2251 that requires allocation of stack space.
2253 CALL_FROM_THUNK_P is true if this call is the jump from a thunk to
2254 the thunked-to function. */
2256 static void
2257 initialize_argument_information (int num_actuals ATTRIBUTE_UNUSED,
2258 struct arg_data *args,
2259 struct args_size *args_size,
2260 int n_named_args ATTRIBUTE_UNUSED,
2261 tree exp, tree struct_value_addr_value,
2262 tree fndecl, tree fntype,
2263 cumulative_args_t args_so_far,
2264 int reg_parm_stack_space,
2265 rtx *old_stack_level,
2266 poly_int64_pod *old_pending_adj,
2267 int *must_preallocate, int *ecf_flags,
2268 bool *may_tailcall, bool call_from_thunk_p)
2270 CUMULATIVE_ARGS *args_so_far_pnt = get_cumulative_args (args_so_far);
2271 location_t loc = EXPR_LOCATION (exp);
2273 /* Count arg position in order args appear. */
2274 int argpos;
2276 int i;
2278 args_size->constant = 0;
2279 args_size->var = 0;
2281 bitmap_obstack_initialize (NULL);
2283 /* In this loop, we consider args in the order they are written.
2284 We fill up ARGS from the back. */
2286 i = num_actuals - 1;
2288 int j = i;
2289 call_expr_arg_iterator iter;
2290 tree arg;
2291 bitmap slots = NULL;
2293 if (struct_value_addr_value)
2295 args[j].tree_value = struct_value_addr_value;
2296 j--;
2298 argpos = 0;
2299 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
2301 tree argtype = TREE_TYPE (arg);
2303 if (targetm.calls.split_complex_arg
2304 && argtype
2305 && TREE_CODE (argtype) == COMPLEX_TYPE
2306 && targetm.calls.split_complex_arg (argtype))
2308 tree subtype = TREE_TYPE (argtype);
2309 args[j].tree_value = build1 (REALPART_EXPR, subtype, arg);
2310 j--;
2311 args[j].tree_value = build1 (IMAGPART_EXPR, subtype, arg);
2313 else
2314 args[j].tree_value = arg;
2315 j--;
2316 argpos++;
2319 if (slots)
2320 BITMAP_FREE (slots);
2323 bitmap_obstack_release (NULL);
2325 tree fntypeattrs = TYPE_ATTRIBUTES (fntype);
2326 /* Extract attribute alloc_size from the type of the called expression
2327 (which could be a function or a function pointer) and if set, store
2328 the indices of the corresponding arguments in ALLOC_IDX, and then
2329 the actual argument(s) at those indices in ALLOC_ARGS. */
2330 int alloc_idx[2] = { -1, -1 };
2331 if (tree alloc_size = lookup_attribute ("alloc_size", fntypeattrs))
2333 tree args = TREE_VALUE (alloc_size);
2334 alloc_idx[0] = TREE_INT_CST_LOW (TREE_VALUE (args)) - 1;
2335 if (TREE_CHAIN (args))
2336 alloc_idx[1] = TREE_INT_CST_LOW (TREE_VALUE (TREE_CHAIN (args))) - 1;
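/* Editorial note (illustrative, hypothetical declaration): for

     void *my_calloc (size_t n, size_t sz)
       __attribute__ ((alloc_size (1, 2)));

   the attribute operands are 1-based, so ALLOC_IDX becomes {0, 1} and
   the loop below records the actual N and SZ arguments in ALLOC_ARGS
   for maybe_warn_alloc_args_overflow.  */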
2339 /* Array for up to the two attribute alloc_size arguments. */
2340 tree alloc_args[] = { NULL_TREE, NULL_TREE };
2342 /* Map of attribute access specifications for function arguments. */
2343 rdwr_map rdwr_idx;
2344 init_attr_rdwr_indices (&rdwr_idx, fntypeattrs);
2346 /* I counts args in order (to be) pushed; ARGPOS counts in order written. */
2347 for (argpos = 0; argpos < num_actuals; i--, argpos++)
2349 tree type = TREE_TYPE (args[i].tree_value);
2350 int unsignedp;
2352 /* Replace erroneous argument with constant zero. */
2353 if (type == error_mark_node || !COMPLETE_TYPE_P (type))
2354 args[i].tree_value = integer_zero_node, type = integer_type_node;
2356 /* If TYPE is a transparent union or record, pass things the way
2357 we would pass the first field of the union or record. We have
2358 already verified that the modes are the same. */
2359 if (RECORD_OR_UNION_TYPE_P (type) && TYPE_TRANSPARENT_AGGR (type))
2360 type = TREE_TYPE (first_field (type));
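/* Editorial note (illustrative): e.g. a transparent union such as

     union u { int *p; const int *q; } __attribute__ ((transparent_union));

   is passed the way its first member (INT *P here) would be passed.  */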
2362 /* Decide where to pass this arg.
2364 args[i].reg is nonzero if all or part is passed in registers.
2366 args[i].partial is nonzero if part but not all is passed in registers,
2367 and the exact value says how many bytes are passed in registers.
2369 args[i].pass_on_stack is nonzero if the argument must at least be
2370 computed on the stack. It may then be loaded back into registers
2371 if args[i].reg is nonzero.
2373 These decisions are driven by the FUNCTION_... macros and must agree
2374 with those made by function.c. */
2376 /* See if this argument should be passed by invisible reference. */
2377 function_arg_info arg (type, argpos < n_named_args);
2378 if (pass_by_reference (args_so_far_pnt, arg))
2380 bool callee_copies;
2381 tree base = NULL_TREE;
2383 callee_copies = reference_callee_copied (args_so_far_pnt, arg);
2385 /* If we're compiling a thunk, pass through invisible references
2386 instead of making a copy. */
2387 if (call_from_thunk_p
2388 || (callee_copies
2389 && !TREE_ADDRESSABLE (type)
2390 && (base = get_base_address (args[i].tree_value))
2391 && TREE_CODE (base) != SSA_NAME
2392 && (!DECL_P (base) || MEM_P (DECL_RTL (base)))))
2394 /* We may have turned the parameter value into an SSA name.
2395 Go back to the original parameter so we can take the
2396 address. */
2397 if (TREE_CODE (args[i].tree_value) == SSA_NAME)
2399 gcc_assert (SSA_NAME_IS_DEFAULT_DEF (args[i].tree_value));
2400 args[i].tree_value = SSA_NAME_VAR (args[i].tree_value);
2401 gcc_assert (TREE_CODE (args[i].tree_value) == PARM_DECL);
2403 /* Argument setup code may have copied the value to a register. We
2404 revert that optimization now because the tail call code must
2405 use the original location. */
2406 if (TREE_CODE (args[i].tree_value) == PARM_DECL
2407 && !MEM_P (DECL_RTL (args[i].tree_value))
2408 && DECL_INCOMING_RTL (args[i].tree_value)
2409 && MEM_P (DECL_INCOMING_RTL (args[i].tree_value)))
2410 set_decl_rtl (args[i].tree_value,
2411 DECL_INCOMING_RTL (args[i].tree_value));
2413 mark_addressable (args[i].tree_value);
2415 /* We can't use sibcalls if a callee-copied argument is
2416 stored in the current function's frame. */
2417 if (!call_from_thunk_p && DECL_P (base) && !TREE_STATIC (base))
2419 *may_tailcall = false;
2420 maybe_complain_about_tail_call (exp,
2421 "a callee-copied argument is"
2422 " stored in the current"
2423 " function's frame");
2426 args[i].tree_value = build_fold_addr_expr_loc (loc,
2427 args[i].tree_value);
2428 type = TREE_TYPE (args[i].tree_value);
2430 if (*ecf_flags & ECF_CONST)
2431 *ecf_flags &= ~(ECF_CONST | ECF_LOOPING_CONST_OR_PURE);
2433 else
2435 /* We make a copy of the object and pass the address to the
2436 function being called. */
2437 rtx copy;
2439 if (!COMPLETE_TYPE_P (type)
2440 || TREE_CODE (TYPE_SIZE_UNIT (type)) != INTEGER_CST
2441 || (flag_stack_check == GENERIC_STACK_CHECK
2442 && compare_tree_int (TYPE_SIZE_UNIT (type),
2443 STACK_CHECK_MAX_VAR_SIZE) > 0))
2445 /* This is a variable-sized object. Make space on the stack
2446 for it. */
2447 rtx size_rtx = expr_size (args[i].tree_value);
2449 if (*old_stack_level == 0)
2451 emit_stack_save (SAVE_BLOCK, old_stack_level);
2452 *old_pending_adj = pending_stack_adjust;
2453 pending_stack_adjust = 0;
2456 /* We can pass TRUE as the 4th argument because we just
2457 saved the stack pointer and will restore it right after
2458 the call. */
2459 copy = allocate_dynamic_stack_space (size_rtx,
2460 TYPE_ALIGN (type),
2461 TYPE_ALIGN (type),
2462 max_int_size_in_bytes
2463 (type),
2464 true);
2465 copy = gen_rtx_MEM (BLKmode, copy);
2466 set_mem_attributes (copy, type, 1);
2468 else
2469 copy = assign_temp (type, 1, 0);
2471 store_expr (args[i].tree_value, copy, 0, false, false);
2473 /* Just change the const function to pure and then let
2474 the next test clear the pure based on
2475 callee_copies. */
2476 if (*ecf_flags & ECF_CONST)
2478 *ecf_flags &= ~ECF_CONST;
2479 *ecf_flags |= ECF_PURE;
2482 if (!callee_copies && *ecf_flags & ECF_PURE)
2483 *ecf_flags &= ~(ECF_PURE | ECF_LOOPING_CONST_OR_PURE);
2485 args[i].tree_value
2486 = build_fold_addr_expr_loc (loc, make_tree (type, copy));
2487 type = TREE_TYPE (args[i].tree_value);
2488 *may_tailcall = false;
2489 maybe_complain_about_tail_call (exp,
2490 "argument must be passed"
2491 " by copying");
2493 arg.pass_by_reference = true;
2496 unsignedp = TYPE_UNSIGNED (type);
2497 arg.type = type;
2498 arg.mode
2499 = promote_function_mode (type, TYPE_MODE (type), &unsignedp,
2500 fndecl ? TREE_TYPE (fndecl) : fntype, 0);
2502 args[i].unsignedp = unsignedp;
2503 args[i].mode = arg.mode;
2505 targetm.calls.warn_parameter_passing_abi (args_so_far, type);
2507 args[i].reg = targetm.calls.function_arg (args_so_far, arg);
2509 if (args[i].reg && CONST_INT_P (args[i].reg))
2510 args[i].reg = NULL;
2512 /* If this is a sibling call and the machine has register windows, the
2513 register window has to be unwound before calling the routine, so
2514 arguments have to go into the incoming registers. */
2515 if (targetm.calls.function_incoming_arg != targetm.calls.function_arg)
2516 args[i].tail_call_reg
2517 = targetm.calls.function_incoming_arg (args_so_far, arg);
2518 else
2519 args[i].tail_call_reg = args[i].reg;
2521 if (args[i].reg)
2522 args[i].partial = targetm.calls.arg_partial_bytes (args_so_far, arg);
2524 args[i].pass_on_stack = targetm.calls.must_pass_in_stack (arg);
2526 /* If FUNCTION_ARG returned a (parallel [(expr_list (nil) ...) ...]),
2527 it means that we are to pass this arg in the register(s) designated
2528 by the PARALLEL, but also to pass it in the stack. */
2529 if (args[i].reg && GET_CODE (args[i].reg) == PARALLEL
2530 && XEXP (XVECEXP (args[i].reg, 0, 0), 0) == 0)
2531 args[i].pass_on_stack = 1;
2533 /* If this is an addressable type, we must preallocate the stack
2534 since we must evaluate the object into its final location.
2536 If this is to be passed in both registers and the stack, it is simpler
2537 to preallocate. */
2538 if (TREE_ADDRESSABLE (type)
2539 || (args[i].pass_on_stack && args[i].reg != 0))
2540 *must_preallocate = 1;
2542 /* Compute the stack-size of this argument. */
2543 if (args[i].reg == 0 || args[i].partial != 0
2544 || reg_parm_stack_space > 0
2545 || args[i].pass_on_stack)
2546 locate_and_pad_parm (arg.mode, type,
2547 #ifdef STACK_PARMS_IN_REG_PARM_AREA
2549 #else
2550 args[i].reg != 0,
2551 #endif
2552 reg_parm_stack_space,
2553 args[i].pass_on_stack ? 0 : args[i].partial,
2554 fndecl, args_size, &args[i].locate);
2555 #ifdef BLOCK_REG_PADDING
2556 else
2557 /* The argument is passed entirely in registers. See at which
2558 end it should be padded. */
2559 args[i].locate.where_pad =
2560 BLOCK_REG_PADDING (arg.mode, type,
2561 int_size_in_bytes (type) <= UNITS_PER_WORD);
2562 #endif
2564 /* Update ARGS_SIZE, the total stack space for args so far. */
2566 args_size->constant += args[i].locate.size.constant;
2567 if (args[i].locate.size.var)
2568 ADD_PARM_SIZE (*args_size, args[i].locate.size.var);
2570 /* Increment ARGS_SO_FAR, which has info about which arg-registers
2571 have been used, etc. */
2573 /* ??? Traditionally we've passed TYPE_MODE here, instead of the
2574 promoted_mode used for function_arg above. However, the
2575 corresponding handling of incoming arguments in function.c
2576 does pass the promoted mode. */
2577 arg.mode = TYPE_MODE (type);
2578 targetm.calls.function_arg_advance (args_so_far, arg);
2580 /* Store argument values for functions decorated with attribute
2581 alloc_size. */
2582 if (argpos == alloc_idx[0])
2583 alloc_args[0] = args[i].tree_value;
2584 else if (argpos == alloc_idx[1])
2585 alloc_args[1] = args[i].tree_value;
2587 /* Save the actual argument that corresponds to the access attribute
2588 operand for later processing. */
2589 if (attr_access *access = rdwr_idx.get (argpos))
2591 if (POINTER_TYPE_P (type))
2593 access->ptr = args[i].tree_value;
2594 /* A nonnull ACCESS->SIZE contains VLA bounds. */
2596 else
2598 access->size = args[i].tree_value;
2599 gcc_assert (access->ptr == NULL_TREE);
2604 if (alloc_args[0])
2606 /* Check the arguments of functions decorated with attribute
2607 alloc_size. */
2608 maybe_warn_alloc_args_overflow (fndecl, exp, alloc_args, alloc_idx);
2611 /* Detect passing non-string arguments to functions expecting
2612 nul-terminated strings. */
2613 maybe_warn_nonstring_arg (fndecl, exp);
2615 /* Check attribute access arguments. */
2616 maybe_warn_rdwr_sizes (&rdwr_idx, fndecl, fntype, exp);
2619 /* Update ARGS_SIZE to contain the total size for the argument block.
2620 Return the original constant component of the argument block's size.
2622 REG_PARM_STACK_SPACE holds the number of bytes of stack space reserved
2623 for arguments passed in registers. */
2625 static poly_int64
2626 compute_argument_block_size (int reg_parm_stack_space,
2627 struct args_size *args_size,
2628 tree fndecl ATTRIBUTE_UNUSED,
2629 tree fntype ATTRIBUTE_UNUSED,
2630 int preferred_stack_boundary ATTRIBUTE_UNUSED)
2632 poly_int64 unadjusted_args_size = args_size->constant;
2634 /* For accumulate outgoing args mode we don't need to align, since the frame
2635 will be already aligned. Align to STACK_BOUNDARY in order to prevent
2636 backends from generating misaligned frame sizes. */
2637 if (ACCUMULATE_OUTGOING_ARGS && preferred_stack_boundary > STACK_BOUNDARY)
2638 preferred_stack_boundary = STACK_BOUNDARY;
2640 /* Compute the actual size of the argument block required. The variable
2641 and constant sizes must be combined, the size may have to be rounded,
2642 and there may be a minimum required size. */
2644 if (args_size->var)
2646 args_size->var = ARGS_SIZE_TREE (*args_size);
2647 args_size->constant = 0;
2649 preferred_stack_boundary /= BITS_PER_UNIT;
2650 if (preferred_stack_boundary > 1)
2652 /* We don't handle this case yet. To handle it correctly we have
2653 to add the delta, round and subtract the delta.
2654 Currently no machine description requires this support. */
2655 gcc_assert (multiple_p (stack_pointer_delta,
2656 preferred_stack_boundary));
2657 args_size->var = round_up (args_size->var, preferred_stack_boundary);
2660 if (reg_parm_stack_space > 0)
2662 args_size->var
2663 = size_binop (MAX_EXPR, args_size->var,
2664 ssize_int (reg_parm_stack_space));
2666 /* The area corresponding to register parameters is not to count in
2667 the size of the block we need. So make the adjustment. */
2668 if (! OUTGOING_REG_PARM_STACK_SPACE ((!fndecl ? fntype : TREE_TYPE (fndecl))))
2669 args_size->var
2670 = size_binop (MINUS_EXPR, args_size->var,
2671 ssize_int (reg_parm_stack_space));
2674 else
2676 preferred_stack_boundary /= BITS_PER_UNIT;
2677 if (preferred_stack_boundary < 1)
2678 preferred_stack_boundary = 1;
2679 args_size->constant = (aligned_upper_bound (args_size->constant
2680 + stack_pointer_delta,
2681 preferred_stack_boundary)
2682 - stack_pointer_delta);
2684 args_size->constant = upper_bound (args_size->constant,
2685 reg_parm_stack_space);
2687 if (! OUTGOING_REG_PARM_STACK_SPACE ((!fndecl ? fntype : TREE_TYPE (fndecl))))
2688 args_size->constant -= reg_parm_stack_space;
2690 return unadjusted_args_size;
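/* Editorial note (a worked example under assumed values): with a
   16-byte preferred boundary, a zero STACK_POINTER_DELTA and no
   register parameter stack space, a constant ARGS_SIZE of 20 bytes is
   rounded up to 32, while the returned unadjusted size stays 20 so the
   caller still knows how much of the block the arguments occupy.  */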
2693 /* Precompute parameters as needed for a function call.
2695 FLAGS is mask of ECF_* constants.
2697 NUM_ACTUALS is the number of arguments.
2699 ARGS is an array containing information for each argument; this
2700 routine fills in the INITIAL_VALUE and VALUE fields for each
2701 precomputed argument. */
2703 static void
2704 precompute_arguments (int num_actuals, struct arg_data *args)
2706 int i;
2708 /* If this is a libcall, then precompute all arguments so that we do not
2709 get extraneous instructions emitted as part of the libcall sequence. */
2711 /* If we preallocated the stack space, and some arguments must be passed
2712 on the stack, then we must precompute any parameter which contains a
2713 function call which will store arguments on the stack.
2714 Otherwise, evaluating the parameter may clobber previous parameters
2715 which have already been stored into the stack. (we have code to avoid
2716 such a case by saving the outgoing stack arguments, but it results in
2717 worse code) */
2718 if (!ACCUMULATE_OUTGOING_ARGS)
2719 return;
2721 for (i = 0; i < num_actuals; i++)
2723 tree type;
2724 machine_mode mode;
2726 if (TREE_CODE (args[i].tree_value) != CALL_EXPR)
2727 continue;
2729 /* If this is an addressable type, we cannot pre-evaluate it. */
2730 type = TREE_TYPE (args[i].tree_value);
2731 gcc_assert (!TREE_ADDRESSABLE (type));
2733 args[i].initial_value = args[i].value
2734 = expand_normal (args[i].tree_value);
2736 mode = TYPE_MODE (type);
2737 if (mode != args[i].mode)
2739 int unsignedp = args[i].unsignedp;
2740 args[i].value
2741 = convert_modes (args[i].mode, mode,
2742 args[i].value, args[i].unsignedp);
2744 /* CSE will replace this only if it contains args[i].value
2745 pseudo, so convert it down to the declared mode using
2746 a SUBREG. */
2747 if (REG_P (args[i].value)
2748 && GET_MODE_CLASS (args[i].mode) == MODE_INT
2749 && promote_mode (type, mode, &unsignedp) != args[i].mode)
2751 args[i].initial_value
2752 = gen_lowpart_SUBREG (mode, args[i].value);
2753 SUBREG_PROMOTED_VAR_P (args[i].initial_value) = 1;
2754 SUBREG_PROMOTED_SET (args[i].initial_value, args[i].unsignedp);
2760 /* Given the current state of MUST_PREALLOCATE and information about
2761 arguments to a function call in NUM_ACTUALS, ARGS and ARGS_SIZE,
2762 compute and return the final value for MUST_PREALLOCATE. */
2764 static int
2765 finalize_must_preallocate (int must_preallocate, int num_actuals,
2766 struct arg_data *args, struct args_size *args_size)
2768 /* See if we have or want to preallocate stack space.
2770 If we would have to push a partially-in-regs parm
2771 before other stack parms, preallocate stack space instead.
2773 If the size of some parm is not a multiple of the required stack
2774 alignment, we must preallocate.
2776 If the total size of arguments that would otherwise create a copy in
2777 a temporary (such as a CALL) is more than half the total argument list
2778 size, preallocation is faster.
2780 Another reason to preallocate is if we have a machine (like the m88k)
2781 where stack alignment is required to be maintained between every
2782 pair of insns, not just when the call is made. However, we assume here
2783 that such machines either do not have push insns (and hence preallocation
2784 would occur anyway) or the problem is taken care of with
2785 PUSH_ROUNDING. */
2787 if (! must_preallocate)
2789 int partial_seen = 0;
2790 poly_int64 copy_to_evaluate_size = 0;
2791 int i;
2793 for (i = 0; i < num_actuals && ! must_preallocate; i++)
2795 if (args[i].partial > 0 && ! args[i].pass_on_stack)
2796 partial_seen = 1;
2797 else if (partial_seen && args[i].reg == 0)
2798 must_preallocate = 1;
2800 if (TYPE_MODE (TREE_TYPE (args[i].tree_value)) == BLKmode
2801 && (TREE_CODE (args[i].tree_value) == CALL_EXPR
2802 || TREE_CODE (args[i].tree_value) == TARGET_EXPR
2803 || TREE_CODE (args[i].tree_value) == COND_EXPR
2804 || TREE_ADDRESSABLE (TREE_TYPE (args[i].tree_value))))
2805 copy_to_evaluate_size
2806 += int_size_in_bytes (TREE_TYPE (args[i].tree_value));
2809 if (maybe_ne (args_size->constant, 0)
2810 && maybe_ge (copy_to_evaluate_size * 2, args_size->constant))
2811 must_preallocate = 1;
2813 return must_preallocate;
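/* Editorial note (illustrative): if ARGS[0] is passed partially in
   registers (ARGS[0].PARTIAL > 0, not PASS_ON_STACK) and a later
   ARGS[1] goes purely on the stack (ARGS[1].REG == 0), pushing the
   arguments one at a time would interleave badly with the partial
   stores, so the loop above forces MUST_PREALLOCATE to 1.  */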
2816 /* If we preallocated stack space, compute the address of each argument
2817 and store it into the ARGS array.
2819 We need not ensure it is a valid memory address here; it will be
2820 validized when it is used.
2822 ARGBLOCK is an rtx for the address of the outgoing arguments. */
2824 static void
2825 compute_argument_addresses (struct arg_data *args, rtx argblock, int num_actuals)
2827 if (argblock)
2829 rtx arg_reg = argblock;
2830 int i;
2831 poly_int64 arg_offset = 0;
2833 if (GET_CODE (argblock) == PLUS)
2835 arg_reg = XEXP (argblock, 0);
2836 arg_offset = rtx_to_poly_int64 (XEXP (argblock, 1));
2839 for (i = 0; i < num_actuals; i++)
2841 rtx offset = ARGS_SIZE_RTX (args[i].locate.offset);
2842 rtx slot_offset = ARGS_SIZE_RTX (args[i].locate.slot_offset);
2843 rtx addr;
2844 unsigned int align, boundary;
2845 poly_uint64 units_on_stack = 0;
2846 machine_mode partial_mode = VOIDmode;
2848 /* Skip this parm if it will not be passed on the stack. */
2849 if (! args[i].pass_on_stack
2850 && args[i].reg != 0
2851 && args[i].partial == 0)
2852 continue;
2854 if (TYPE_EMPTY_P (TREE_TYPE (args[i].tree_value)))
2855 continue;
2857 addr = simplify_gen_binary (PLUS, Pmode, arg_reg, offset);
2858 addr = plus_constant (Pmode, addr, arg_offset);
2860 if (args[i].partial != 0)
2862 /* Only part of the parameter is being passed on the stack.
2863 Generate a simple memory reference of the correct size. */
2864 units_on_stack = args[i].locate.size.constant;
2865 poly_uint64 bits_on_stack = units_on_stack * BITS_PER_UNIT;
2866 partial_mode = int_mode_for_size (bits_on_stack, 1).else_blk ();
2867 args[i].stack = gen_rtx_MEM (partial_mode, addr);
2868 set_mem_size (args[i].stack, units_on_stack);
2870 else
2872 args[i].stack = gen_rtx_MEM (args[i].mode, addr);
2873 set_mem_attributes (args[i].stack,
2874 TREE_TYPE (args[i].tree_value), 1);
2876 align = BITS_PER_UNIT;
2877 boundary = args[i].locate.boundary;
2878 poly_int64 offset_val;
2879 if (args[i].locate.where_pad != PAD_DOWNWARD)
2880 align = boundary;
2881 else if (poly_int_rtx_p (offset, &offset_val))
2883 align = least_bit_hwi (boundary);
2884 unsigned int offset_align
2885 = known_alignment (offset_val) * BITS_PER_UNIT;
2886 if (offset_align != 0)
2887 align = MIN (align, offset_align);
2889 set_mem_align (args[i].stack, align);
2891 addr = simplify_gen_binary (PLUS, Pmode, arg_reg, slot_offset);
2892 addr = plus_constant (Pmode, addr, arg_offset);
2894 if (args[i].partial != 0)
2896 /* Only part of the parameter is being passed on the stack.
2897 Generate a simple memory reference of the correct size. */
2899 args[i].stack_slot = gen_rtx_MEM (partial_mode, addr);
2900 set_mem_size (args[i].stack_slot, units_on_stack);
2902 else
2904 args[i].stack_slot = gen_rtx_MEM (args[i].mode, addr);
2905 set_mem_attributes (args[i].stack_slot,
2906 TREE_TYPE (args[i].tree_value), 1);
2908 set_mem_align (args[i].stack_slot, args[i].locate.boundary);
2910 /* Function incoming arguments may overlap with sibling call
2911 outgoing arguments and we cannot allow reordering of reads
2912 from function arguments with stores to outgoing arguments
2913 of sibling calls. */
2914 set_mem_alias_set (args[i].stack, 0);
2915 set_mem_alias_set (args[i].stack_slot, 0);
2920 /* Given a FNDECL and EXP, return an rtx suitable for use as a target address
2921 in a call instruction.
2923 FNDECL is the tree node for the target function. For an indirect call
2924 FNDECL will be NULL_TREE.
2926 ADDR is the operand 0 of CALL_EXPR for this call. */
2928 static rtx
2929 rtx_for_function_call (tree fndecl, tree addr)
2931 rtx funexp;
2933 /* Get the function to call, in the form of RTL. */
2934 if (fndecl)
2936 if (!TREE_USED (fndecl) && fndecl != current_function_decl)
2937 TREE_USED (fndecl) = 1;
2939 /* Get a SYMBOL_REF rtx for the function address. */
2940 funexp = XEXP (DECL_RTL (fndecl), 0);
2942 else
2943 /* Generate an rtx (probably a pseudo-register) for the address. */
2945 push_temp_slots ();
2946 funexp = expand_normal (addr);
2947 pop_temp_slots (); /* FUNEXP can't be BLKmode. */
2949 return funexp;
2952 /* Return the static chain for this function, if any. */
2954 rtx
2955 rtx_for_static_chain (const_tree fndecl_or_type, bool incoming_p)
2957 if (DECL_P (fndecl_or_type) && !DECL_STATIC_CHAIN (fndecl_or_type))
2958 return NULL;
2960 return targetm.calls.static_chain (fndecl_or_type, incoming_p);
2963 /* Internal state for internal_arg_pointer_based_exp and its helpers. */
2964 static struct
2966 /* Last insn that has been scanned by internal_arg_pointer_based_exp_scan,
2967 or NULL_RTX if none has been scanned yet. */
2968 rtx_insn *scan_start;
2969 /* Vector indexed by REGNO - FIRST_PSEUDO_REGISTER, recording if a pseudo is
2970 based on crtl->args.internal_arg_pointer. The element is NULL_RTX if the
2971 pseudo isn't based on it, a CONST_INT offset if the pseudo is based on it
2972 with fixed offset, or PC if this is with variable or unknown offset. */
2973 vec<rtx> cache;
2974 } internal_arg_pointer_exp_state;
2976 static rtx internal_arg_pointer_based_exp (const_rtx, bool);
2978 /* Helper function for internal_arg_pointer_based_exp. Scan insns in
2979 the tail call sequence, starting with first insn that hasn't been
2980 scanned yet, and note for each pseudo on the LHS whether it is based
2981 on crtl->args.internal_arg_pointer or not, and what offset from
2982 that pointer it has. */
2984 static void
2985 internal_arg_pointer_based_exp_scan (void)
2987 rtx_insn *insn, *scan_start = internal_arg_pointer_exp_state.scan_start;
2989 if (scan_start == NULL_RTX)
2990 insn = get_insns ();
2991 else
2992 insn = NEXT_INSN (scan_start);
2994 while (insn)
2996 rtx set = single_set (insn);
2997 if (set && REG_P (SET_DEST (set)) && !HARD_REGISTER_P (SET_DEST (set)))
2999 rtx val = NULL_RTX;
3000 unsigned int idx = REGNO (SET_DEST (set)) - FIRST_PSEUDO_REGISTER;
3001 /* Punt on pseudos set multiple times. */
3002 if (idx < internal_arg_pointer_exp_state.cache.length ()
3003 && (internal_arg_pointer_exp_state.cache[idx]
3004 != NULL_RTX))
3005 val = pc_rtx;
3006 else
3007 val = internal_arg_pointer_based_exp (SET_SRC (set), false);
3008 if (val != NULL_RTX)
3010 if (idx >= internal_arg_pointer_exp_state.cache.length ())
3011 internal_arg_pointer_exp_state.cache
3012 .safe_grow_cleared (idx + 1, true);
3013 internal_arg_pointer_exp_state.cache[idx] = val;
3016 if (NEXT_INSN (insn) == NULL_RTX)
3017 scan_start = insn;
3018 insn = NEXT_INSN (insn);
3021 internal_arg_pointer_exp_state.scan_start = scan_start;
3024 /* Compute whether RTL is based on crtl->args.internal_arg_pointer. Return
3025 NULL_RTX if RTL isn't based on it, a CONST_INT offset if RTL is based on
3026 it with fixed offset, or PC if this is with variable or unknown offset.
3027 TOPLEVEL is true if the function is invoked at the topmost level. */
3029 static rtx
3030 internal_arg_pointer_based_exp (const_rtx rtl, bool toplevel)
3032 if (CONSTANT_P (rtl))
3033 return NULL_RTX;
3035 if (rtl == crtl->args.internal_arg_pointer)
3036 return const0_rtx;
3038 if (REG_P (rtl) && HARD_REGISTER_P (rtl))
3039 return NULL_RTX;
3041 poly_int64 offset;
3042 if (GET_CODE (rtl) == PLUS && poly_int_rtx_p (XEXP (rtl, 1), &offset))
3044 rtx val = internal_arg_pointer_based_exp (XEXP (rtl, 0), toplevel);
3045 if (val == NULL_RTX || val == pc_rtx)
3046 return val;
3047 return plus_constant (Pmode, val, offset);
3050 /* When called at the topmost level, scan pseudo assignments in between the
3051 last scanned instruction in the tail call sequence and the latest insn
3052 in that sequence. */
3053 if (toplevel)
3054 internal_arg_pointer_based_exp_scan ();
3056 if (REG_P (rtl))
3058 unsigned int idx = REGNO (rtl) - FIRST_PSEUDO_REGISTER;
3059 if (idx < internal_arg_pointer_exp_state.cache.length ())
3060 return internal_arg_pointer_exp_state.cache[idx];
3062 return NULL_RTX;
3065 subrtx_iterator::array_type array;
3066 FOR_EACH_SUBRTX (iter, array, rtl, NONCONST)
3068 const_rtx x = *iter;
3069 if (REG_P (x) && internal_arg_pointer_based_exp (x, false) != NULL_RTX)
3070 return pc_rtx;
3071 if (MEM_P (x))
3072 iter.skip_subrtxes ();
3075 return NULL_RTX;
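/* Editorial sketch of the possible results (not in the original
   source), assuming PSEUDO was cached with offset 4:

     crtl->args.internal_arg_pointer              => (const_int 0)
     (plus internal_arg_pointer (const_int 16))   => (const_int 16)
     (plus PSEUDO (const_int 8))                  => (const_int 12)
     a pseudo set from a variable offset          => pc_rtx
     anything unrelated                           => NULL_RTX  */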
3078 /* Return true if SIZE bytes starting from address ADDR might overlap an
3079 already-clobbered argument area. This function is used to determine
3080 if we should give up a sibcall. */
3082 static bool
3083 mem_might_overlap_already_clobbered_arg_p (rtx addr, poly_uint64 size)
3085 poly_int64 i;
3086 unsigned HOST_WIDE_INT start, end;
3087 rtx val;
3089 if (bitmap_empty_p (stored_args_map)
3090 && stored_args_watermark == HOST_WIDE_INT_M1U)
3091 return false;
3092 val = internal_arg_pointer_based_exp (addr, true);
3093 if (val == NULL_RTX)
3094 return false;
3095 else if (!poly_int_rtx_p (val, &i))
3096 return true;
3098 if (known_eq (size, 0U))
3099 return false;
3101 if (STACK_GROWS_DOWNWARD)
3102 i -= crtl->args.pretend_args_size;
3103 else
3104 i += crtl->args.pretend_args_size;
3106 if (ARGS_GROW_DOWNWARD)
3107 i = -i - size;
3109 /* We can ignore any references to the function's pretend args,
3110 which at this point would manifest as negative values of I. */
3111 if (known_le (i, 0) && known_le (size, poly_uint64 (-i)))
3112 return false;
3114 start = maybe_lt (i, 0) ? 0 : constant_lower_bound (i);
3115 if (!(i + size).is_constant (&end))
3116 end = HOST_WIDE_INT_M1U;
3118 if (end > stored_args_watermark)
3119 return true;
3121 end = MIN (end, SBITMAP_SIZE (stored_args_map));
3122 for (unsigned HOST_WIDE_INT k = start; k < end; ++k)
3123 if (bitmap_bit_p (stored_args_map, k))
3124 return true;
3126 return false;
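/* Editorial note (illustrative): if a tail call has already stored an
   outgoing argument into bytes [8, 16) of the argument area (bits 8-15
   of STORED_ARGS_MAP are set), then a load of SIZE 8 starting at offset
   12 from the incoming argument pointer overlaps the clobbered range
   and the sibcall is given up.  */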
3129 /* Do the register loads required for any wholly-register parms or any
3130 parms which are passed both on the stack and in a register. Their
3131 expressions were already evaluated.
3133 Mark all register-parms as living through the call, putting these USE
3134 insns in the CALL_INSN_FUNCTION_USAGE field.
3136 When IS_SIBCALL, perform the check_sibcall_argument_overlap
3137 checking, setting *SIBCALL_FAILURE if appropriate. */
3139 static void
3140 load_register_parameters (struct arg_data *args, int num_actuals,
3141 rtx *call_fusage, int flags, int is_sibcall,
3142 int *sibcall_failure)
3144 int i, j;
3146 for (i = 0; i < num_actuals; i++)
3148 rtx reg = ((flags & ECF_SIBCALL)
3149 ? args[i].tail_call_reg : args[i].reg);
3150 if (reg)
3152 int partial = args[i].partial;
3153 int nregs;
3154 poly_int64 size = 0;
3155 HOST_WIDE_INT const_size = 0;
3156 rtx_insn *before_arg = get_last_insn ();
3157 tree type = TREE_TYPE (args[i].tree_value);
3158 if (RECORD_OR_UNION_TYPE_P (type) && TYPE_TRANSPARENT_AGGR (type))
3159 type = TREE_TYPE (first_field (type));
3160 /* Set non-negative if we must move a word at a time, even if
3161 just one word (e.g., partial == 4 && mode == DFmode). Set
3162 to -1 if we just use a normal move insn. This value can be
3163 zero if the argument is a zero size structure. */
3164 nregs = -1;
3165 if (GET_CODE (reg) == PARALLEL)
3167 else if (partial)
3169 gcc_assert (partial % UNITS_PER_WORD == 0);
3170 nregs = partial / UNITS_PER_WORD;
3172 else if (TYPE_MODE (type) == BLKmode)
3174 /* Variable-sized parameters should be described by a
3175 PARALLEL instead. */
3176 const_size = int_size_in_bytes (type);
3177 gcc_assert (const_size >= 0);
3178 nregs = (const_size + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
3179 size = const_size;
3181 else
3182 size = GET_MODE_SIZE (args[i].mode);
3184 /* Handle calls that pass values in multiple non-contiguous
3185 locations. The Irix 6 ABI has examples of this. */
3187 if (GET_CODE (reg) == PARALLEL)
3188 emit_group_move (reg, args[i].parallel_value);
3190 /* If simple case, just do move. If normal partial, store_one_arg
3191 has already loaded the register for us. In all other cases,
3192 load the register(s) from memory. */
3194 else if (nregs == -1)
3196 emit_move_insn (reg, args[i].value);
3197 #ifdef BLOCK_REG_PADDING
3198 /* Handle case where we have a value that needs shifting
3199 up to the msb. E.g. a QImode value and we're padding
3200 upward on a BYTES_BIG_ENDIAN machine. */
3201 if (args[i].locate.where_pad
3202 == (BYTES_BIG_ENDIAN ? PAD_UPWARD : PAD_DOWNWARD))
3204 gcc_checking_assert (ordered_p (size, UNITS_PER_WORD));
3205 if (maybe_lt (size, UNITS_PER_WORD))
3207 rtx x;
3208 poly_int64 shift
3209 = (UNITS_PER_WORD - size) * BITS_PER_UNIT;
3211 /* Assigning REG here rather than a temp makes
3212 CALL_FUSAGE report the whole reg as used.
3213 Strictly speaking, the call only uses SIZE
3214 bytes at the msb end, but it doesn't seem worth
3215 generating rtl to say that. */
3216 reg = gen_rtx_REG (word_mode, REGNO (reg));
3217 x = expand_shift (LSHIFT_EXPR, word_mode,
3218 reg, shift, reg, 1);
3219 if (x != reg)
3220 emit_move_insn (reg, x);
3223 #endif
3226 /* If we have pre-computed the values to put in the registers in
3227 the case of non-aligned structures, copy them in now. */
3229 else if (args[i].n_aligned_regs != 0)
3230 for (j = 0; j < args[i].n_aligned_regs; j++)
3231 emit_move_insn (gen_rtx_REG (word_mode, REGNO (reg) + j),
3232 args[i].aligned_regs[j]);
3234 else if (partial == 0 || args[i].pass_on_stack)
3236 /* SIZE and CONST_SIZE are 0 for partial arguments and
3237 the size of a BLKmode type otherwise. */
3238 gcc_checking_assert (known_eq (size, const_size));
3239 rtx mem = validize_mem (copy_rtx (args[i].value));
3241 /* Check for overlap with already clobbered argument area,
3242 providing that this has non-zero size. */
3243 if (is_sibcall
3244 && const_size != 0
3245 && (mem_might_overlap_already_clobbered_arg_p
3246 (XEXP (args[i].value, 0), const_size)))
3247 *sibcall_failure = 1;
3249 if (const_size % UNITS_PER_WORD == 0
3250 || MEM_ALIGN (mem) % BITS_PER_WORD == 0)
3251 move_block_to_reg (REGNO (reg), mem, nregs, args[i].mode);
3252 else
3254 if (nregs > 1)
3255 move_block_to_reg (REGNO (reg), mem, nregs - 1,
3256 args[i].mode);
3257 rtx dest = gen_rtx_REG (word_mode, REGNO (reg) + nregs - 1);
3258 unsigned int bitoff = (nregs - 1) * BITS_PER_WORD;
3259 unsigned int bitsize = const_size * BITS_PER_UNIT - bitoff;
3260 rtx x = extract_bit_field (mem, bitsize, bitoff, 1, dest,
3261 word_mode, word_mode, false,
3262 NULL);
3263 if (BYTES_BIG_ENDIAN)
3264 x = expand_shift (LSHIFT_EXPR, word_mode, x,
3265 BITS_PER_WORD - bitsize, dest, 1);
3266 if (x != dest)
3267 emit_move_insn (dest, x);
3270 /* Handle a BLKmode that needs shifting. */
3271 if (nregs == 1 && const_size < UNITS_PER_WORD
3272 #ifdef BLOCK_REG_PADDING
3273 && args[i].locate.where_pad == PAD_DOWNWARD
3274 #else
3275 && BYTES_BIG_ENDIAN
3276 #endif
3279 rtx dest = gen_rtx_REG (word_mode, REGNO (reg));
3280 int shift = (UNITS_PER_WORD - const_size) * BITS_PER_UNIT;
3281 enum tree_code dir = (BYTES_BIG_ENDIAN
3282 ? RSHIFT_EXPR : LSHIFT_EXPR);
3283 rtx x;
3285 x = expand_shift (dir, word_mode, dest, shift, dest, 1);
3286 if (x != dest)
3287 emit_move_insn (dest, x);
3291 /* When a parameter is a block, and perhaps in other cases, it is
3292 possible that it did a load from an argument slot that was
3293 already clobbered. */
3294 if (is_sibcall
3295 && check_sibcall_argument_overlap (before_arg, &args[i], 0))
3296 *sibcall_failure = 1;
3298 /* Handle calls that pass values in multiple non-contiguous
3299 locations. The Irix 6 ABI has examples of this. */
3300 if (GET_CODE (reg) == PARALLEL)
3301 use_group_regs (call_fusage, reg);
3302 else if (nregs == -1)
3303 use_reg_mode (call_fusage, reg, TYPE_MODE (type));
3304 else if (nregs > 0)
3305 use_regs (call_fusage, REGNO (reg), nregs);
3310 /* We need to pop PENDING_STACK_ADJUST bytes. But, if the arguments
3311 wouldn't fill up an even multiple of PREFERRED_UNIT_STACK_BOUNDARY
3312 bytes, then we would need to push some additional bytes to pad the
3313 arguments. So, we try to compute an adjustment to the stack pointer for an
3314 amount that will leave the stack under-aligned by UNADJUSTED_ARGS_SIZE
3315 bytes. Then, when the arguments are pushed the stack will be perfectly
3316 aligned.
3318 Return true if this optimization is possible, storing the adjustment
3319 in ADJUSTMENT_OUT and setting ARGS_SIZE->CONSTANT to the number of
3320 bytes that should be popped after the call. */
3322 static bool
3323 combine_pending_stack_adjustment_and_call (poly_int64_pod *adjustment_out,
3324 poly_int64 unadjusted_args_size,
3325 struct args_size *args_size,
3326 unsigned int preferred_unit_stack_boundary)
3328 /* The number of bytes to pop so that the stack will be
3329 under-aligned by UNADJUSTED_ARGS_SIZE bytes. */
3330 poly_int64 adjustment;
3331 /* The alignment of the stack after the arguments are pushed, if we
3332 just pushed the arguments without adjusting the stack here.
3333 unsigned HOST_WIDE_INT unadjusted_alignment;
3335 if (!known_misalignment (stack_pointer_delta + unadjusted_args_size,
3336 preferred_unit_stack_boundary,
3337 &unadjusted_alignment))
3338 return false;
3340 /* We want to get rid of as many of the PENDING_STACK_ADJUST bytes
3341 as possible -- leaving just enough left to cancel out the
3342 UNADJUSTED_ALIGNMENT. In other words, we want to ensure that the
3343 PENDING_STACK_ADJUST is non-negative, and congruent to
3344 -UNADJUSTED_ALIGNMENT modulo the PREFERRED_UNIT_STACK_BOUNDARY. */
3346 /* Begin by trying to pop all the bytes. */
3347 unsigned HOST_WIDE_INT tmp_misalignment;
3348 if (!known_misalignment (pending_stack_adjust,
3349 preferred_unit_stack_boundary,
3350 &tmp_misalignment))
3351 return false;
3352 unadjusted_alignment -= tmp_misalignment;
3353 adjustment = pending_stack_adjust;
3354 /* Push enough additional bytes that the stack will be aligned
3355 after the arguments are pushed. */
3356 if (preferred_unit_stack_boundary > 1 && unadjusted_alignment)
3357 adjustment -= preferred_unit_stack_boundary - unadjusted_alignment;
3359 /* We need to know whether the adjusted argument size
3360 (UNADJUSTED_ARGS_SIZE - ADJUSTMENT) constitutes an allocation
3361 or a deallocation. */
3362 if (!ordered_p (adjustment, unadjusted_args_size))
3363 return false;
3365 /* Now, set ARGS_SIZE->CONSTANT so that we pop the right number of
3366 bytes after the call. The right number is the entire
3367 PENDING_STACK_ADJUST less our ADJUSTMENT plus the amount required
3368 by the arguments in the first place. */
3369 args_size->constant
3370 = pending_stack_adjust - adjustment + unadjusted_args_size;
3372 *adjustment_out = adjustment;
3373 return true;
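/* Editorial note (a worked example under assumed values): with
   PREFERRED_UNIT_STACK_BOUNDARY == 16, STACK_POINTER_DELTA == 0,
   UNADJUSTED_ARGS_SIZE == 12 and PENDING_STACK_ADJUST == 20, the two
   misalignments are 12 and 4, so ADJUSTMENT = 20 - (16 - 8) = 12 bytes
   are popped now; pushing the 12 bytes of arguments then leaves the
   stack 16-byte aligned, and ARGS_SIZE->CONSTANT becomes
   20 - 12 + 12 = 20 bytes to pop after the call.  */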
3376 /* Scan expression X to see whether it dereferences any argument slots
3377 already clobbered by tail call arguments (as noted in the
3378 stored_args_map bitmap).
3379 Return nonzero if X dereferences such an argument slot,
3380 zero otherwise. */
3382 static int
3383 check_sibcall_argument_overlap_1 (rtx x)
3385 RTX_CODE code;
3386 int i, j;
3387 const char *fmt;
3389 if (x == NULL_RTX)
3390 return 0;
3392 code = GET_CODE (x);
3394 /* We need not check the operands of the CALL expression itself. */
3395 if (code == CALL)
3396 return 0;
3398 if (code == MEM)
3399 return (mem_might_overlap_already_clobbered_arg_p
3400 (XEXP (x, 0), GET_MODE_SIZE (GET_MODE (x))));
3402 /* Scan all subexpressions. */
3403 fmt = GET_RTX_FORMAT (code);
3404 for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
3406 if (*fmt == 'e')
3408 if (check_sibcall_argument_overlap_1 (XEXP (x, i)))
3409 return 1;
3411 else if (*fmt == 'E')
3413 for (j = 0; j < XVECLEN (x, i); j++)
3414 if (check_sibcall_argument_overlap_1 (XVECEXP (x, i, j)))
3415 return 1;
3418 return 0;
3421 /* Scan the sequence after INSN to see whether it dereferences any
3422 argument slots already clobbered by tail call arguments (as noted in
3423 the stored_args_map bitmap). If MARK_STORED_ARGS_MAP, add the stack
3424 slots for ARG to the stored_args_map bitmap afterwards (when ARG is a
3425 register MARK_STORED_ARGS_MAP should be 0). Return nonzero if the
3426 sequence after INSN dereferences such argument slots, zero otherwise. */
3428 static int
3429 check_sibcall_argument_overlap (rtx_insn *insn, struct arg_data *arg,
3430 int mark_stored_args_map)
3432 poly_uint64 low, high;
3433 unsigned HOST_WIDE_INT const_low, const_high;
3435 if (insn == NULL_RTX)
3436 insn = get_insns ();
3437 else
3438 insn = NEXT_INSN (insn);
3440 for (; insn; insn = NEXT_INSN (insn))
3441 if (INSN_P (insn)
3442 && check_sibcall_argument_overlap_1 (PATTERN (insn)))
3443 break;
3445 if (mark_stored_args_map)
3447 if (ARGS_GROW_DOWNWARD)
3448 low = -arg->locate.slot_offset.constant - arg->locate.size.constant;
3449 else
3450 low = arg->locate.slot_offset.constant;
3451 high = low + arg->locate.size.constant;
3453 const_low = constant_lower_bound (low);
3454 if (high.is_constant (&const_high))
3455 for (unsigned HOST_WIDE_INT i = const_low; i < const_high; ++i)
3456 bitmap_set_bit (stored_args_map, i);
3457 else
3458 stored_args_watermark = MIN (stored_args_watermark, const_low);
3460 return insn != NULL_RTX;
3463 /* Given that a function returns a value of mode MODE at the most
3464 significant end of hard register VALUE, shift VALUE left or right
3465 as specified by LEFT_P. Return true if some action was needed. */
3467 bool
3468 shift_return_value (machine_mode mode, bool left_p, rtx value)
3470 gcc_assert (REG_P (value) && HARD_REGISTER_P (value));
3471 machine_mode value_mode = GET_MODE (value);
3472 poly_int64 shift = GET_MODE_BITSIZE (value_mode) - GET_MODE_BITSIZE (mode);
3474 if (known_eq (shift, 0))
3475 return false;
3477 /* Use ashr rather than lshr for right shifts. This is for the benefit
3478 of the MIPS port, which requires SImode values to be sign-extended
3479 when stored in 64-bit registers. */
3480 if (!force_expand_binop (value_mode, left_p ? ashl_optab : ashr_optab,
3481 value, gen_int_shift_amount (value_mode, shift),
3482 value, 1, OPTAB_WIDEN))
3483 gcc_unreachable ();
3484 return true;
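/* Editorial note (illustrative): e.g. for an SImode value returned at
   the most significant end of a 64-bit register, the shift amount is
   64 - 32 = 32 bits; LEFT_P selects ashl, otherwise ashr is used so
   that sign extension is preserved (see the MIPS comment above).  */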
3487 /* If X is a likely-spilled register value, copy it to a pseudo
3488 register and return that register. Return X otherwise. */
3490 static rtx
3491 avoid_likely_spilled_reg (rtx x)
3493 rtx new_rtx;
3495 if (REG_P (x)
3496 && HARD_REGISTER_P (x)
3497 && targetm.class_likely_spilled_p (REGNO_REG_CLASS (REGNO (x))))
3499 /* Make sure that we generate a REG rather than a CONCAT.
3500 Moves into CONCATs can need nontrivial instructions,
3501 and the whole point of this function is to avoid
3502 using the hard register directly in such a situation. */
3503 generating_concat_p = 0;
3504 new_rtx = gen_reg_rtx (GET_MODE (x));
3505 generating_concat_p = 1;
3506 emit_move_insn (new_rtx, x);
3507 return new_rtx;
3509 return x;
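/* Editorial note (illustrative): e.g. if a target returns values in a
   hard register whose class is likely spilled, copying the value to a
   fresh pseudo here keeps later passes from extending that hard
   register's lifetime across unrelated code.  */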
3512 /* Helper function for expand_call.
3513 Return false if EXP is not implementable as a sibling call. */
3515 static bool
3516 can_implement_as_sibling_call_p (tree exp,
3517 rtx structure_value_addr,
3518 tree funtype,
3519 int reg_parm_stack_space ATTRIBUTE_UNUSED,
3520 tree fndecl,
3521 int flags,
3522 tree addr,
3523 const args_size &args_size)
3525 if (!targetm.have_sibcall_epilogue ())
3527 maybe_complain_about_tail_call
3528 (exp,
3529 "machine description does not have"
3530 " a sibcall_epilogue instruction pattern");
3531 return false;
3534 /* Doing sibling call optimization needs some work, since
3535 structure_value_addr can be allocated on the stack.
3536 It does not seem worth the effort since few optimizable
3537 sibling calls will return a structure. */
3538 if (structure_value_addr != NULL_RTX)
3540 maybe_complain_about_tail_call (exp, "callee returns a structure");
3541 return false;
3544 #ifdef REG_PARM_STACK_SPACE
3545 /* If outgoing reg parm stack space changes, we cannot do sibcall. */
3546 if (OUTGOING_REG_PARM_STACK_SPACE (funtype)
3547 != OUTGOING_REG_PARM_STACK_SPACE (TREE_TYPE (current_function_decl))
3548 || (reg_parm_stack_space != REG_PARM_STACK_SPACE (current_function_decl)))
3550 maybe_complain_about_tail_call (exp,
3551 "inconsistent size of stack space"
3552 " allocated for arguments which are"
3553 " passed in registers");
3554 return false;
3556 #endif
3558 /* Check whether the target is able to optimize the call
3559 into a sibcall. */
3560 if (!targetm.function_ok_for_sibcall (fndecl, exp))
3562 maybe_complain_about_tail_call (exp,
3563 "target is not able to optimize the"
3564 " call into a sibling call");
3565 return false;
3568 /* Functions that do not return exactly once may not be sibcall
3569 optimized. */
3570 if (flags & ECF_RETURNS_TWICE)
3572 maybe_complain_about_tail_call (exp, "callee returns twice");
3573 return false;
3575 if (flags & ECF_NORETURN)
3577 maybe_complain_about_tail_call (exp, "callee does not return");
3578 return false;
3581 if (TYPE_VOLATILE (TREE_TYPE (TREE_TYPE (addr))))
3583 maybe_complain_about_tail_call (exp, "volatile function type");
3584 return false;
3587 /* If the called function is nested in the current one, it might access
3588 some of the caller's arguments, but could clobber them beforehand if
3589 the argument areas are shared. */
3590 if (fndecl && decl_function_context (fndecl) == current_function_decl)
3592 maybe_complain_about_tail_call (exp, "nested function");
3593 return false;
3596 /* If this function requires more stack slots than the current
3597 function, we cannot change it into a sibling call.
3598 crtl->args.pretend_args_size is not part of the
3599 stack allocated by our caller. */
3600 if (maybe_gt (args_size.constant,
3601 crtl->args.size - crtl->args.pretend_args_size))
3603 maybe_complain_about_tail_call (exp,
3604 "callee required more stack slots"
3605 " than the caller");
3606 return false;
3609 /* If the callee pops its own arguments, then it must pop exactly
3610 the same number of arguments as the current function. */
3611 if (maybe_ne (targetm.calls.return_pops_args (fndecl, funtype,
3612 args_size.constant),
3613 targetm.calls.return_pops_args (current_function_decl,
3614 TREE_TYPE
3615 (current_function_decl),
3616 crtl->args.size)))
3618 maybe_complain_about_tail_call (exp,
3619 "inconsistent number of"
3620 " popped arguments");
3621 return false;
3624 if (!lang_hooks.decls.ok_for_sibcall (fndecl))
3626 maybe_complain_about_tail_call (exp, "frontend does not support"
3627 " sibling call");
3628 return false;
3631 /* All checks passed. */
3632 return true;
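/* Editorial sketch (not part of the original source): source-level
   shapes that the checks above accept or reject.  All declarations
   here are hypothetical examples.  */

#include <setjmp.h>

struct wide_result { char buf[64]; };

extern int work (int);
extern struct wide_result make_wide (int);
static jmp_buf toy_env;

static int
tail_ok (int x)
{
  return work (x + 1);		/* Call in tail position, scalar return:
				   a sibcall candidate.  */
}

static struct wide_result
tail_returns_struct (int x)
{
  return make_wide (x);		/* On ABIs where wide_result is returned
				   in memory, structure_value_addr is set
				   and the sibcall is refused above.  */
}

static int
calls_setjmp (void)
{
  if (setjmp (toy_env))		/* setjmp returns twice; this call is
				   rejected by the ECF_RETURNS_TWICE
				   check above.  */
    return 1;
  return work (0);
}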
3635 /* Update stack alignment when the parameter is passed in the stack
3636 since the outgoing parameter requires extra alignment on the calling
3637 function side. */
3639 static void
3640 update_stack_alignment_for_call (struct locate_and_pad_arg_data *locate)
3642 if (crtl->stack_alignment_needed < locate->boundary)
3643 crtl->stack_alignment_needed = locate->boundary;
3644 if (crtl->preferred_stack_boundary < locate->boundary)
3645 crtl->preferred_stack_boundary = locate->boundary;
3648 /* Generate all the code for a CALL_EXPR exp
3649 and return an rtx for its value.
3650 Store the value in TARGET (specified as an rtx) if convenient.
3651 If the value is stored in TARGET then TARGET is returned.
3652 If IGNORE is nonzero, then we ignore the value of the function call. */
3654 rtx
3655 expand_call (tree exp, rtx target, int ignore)
3657 /* Nonzero if we are currently expanding a call. */
3658 static int currently_expanding_call = 0;
3660 /* RTX for the function to be called. */
3661 rtx funexp;
3662 /* Sequence of insns to perform a normal "call". */
3663 rtx_insn *normal_call_insns = NULL;
3664 /* Sequence of insns to perform a tail "call". */
3665 rtx_insn *tail_call_insns = NULL;
3666 /* Data type of the function. */
3667 tree funtype;
3668 tree type_arg_types;
3669 tree rettype;
3670 /* Declaration of the function being called,
3671 or 0 if the function is computed (not known by name). */
3672 tree fndecl = 0;
3673 /* The type of the function being called. */
3674 tree fntype;
3675 bool try_tail_call = CALL_EXPR_TAILCALL (exp);
3676 bool must_tail_call = CALL_EXPR_MUST_TAIL_CALL (exp);
3677 int pass;
3679 /* Register in which non-BLKmode value will be returned,
3680 or 0 if no value or if value is BLKmode. */
3681 rtx valreg;
3682 /* Address where we should return a BLKmode value;
3683 0 if value not BLKmode. */
3684 rtx structure_value_addr = 0;
3685 /* Nonzero if that address is being passed by treating it as
3686 an extra, implicit first parameter. Otherwise,
3687 it is passed by being copied directly into struct_value_rtx. */
3688 int structure_value_addr_parm = 0;
3689 /* Holds the value of implicit argument for the struct value. */
3690 tree structure_value_addr_value = NULL_TREE;
3691 /* Size of aggregate value wanted, or zero if none wanted
3692 or if we are using the non-reentrant PCC calling convention
3693 or expecting the value in registers. */
3694 poly_int64 struct_value_size = 0;
3695 /* Nonzero if called function returns an aggregate in memory PCC style,
3696 by returning the address of where to find it. */
3697 int pcc_struct_value = 0;
3698 rtx struct_value = 0;
3700 /* Number of actual parameters in this call, including struct value addr. */
3701 int num_actuals;
3702 /* Number of named args. Args after this are anonymous ones
3703 and they must all go on the stack. */
3704 int n_named_args;
3705 /* Number of complex actual arguments that need to be split. */
3706 int num_complex_actuals = 0;
3708 /* Vector of information about each argument.
3709 Arguments are numbered in the order they will be pushed,
3710 not the order they are written. */
3711 struct arg_data *args;
3713 /* Total size in bytes of all the stack-parms scanned so far. */
3714 struct args_size args_size;
3715 struct args_size adjusted_args_size;
3716 /* Size of arguments before any adjustments (such as rounding). */
3717 poly_int64 unadjusted_args_size;
3718 /* Data on reg parms scanned so far. */
3719 CUMULATIVE_ARGS args_so_far_v;
3720 cumulative_args_t args_so_far;
3721 /* Nonzero if a reg parm has been scanned. */
3722 int reg_parm_seen;
3723 /* Nonzero if this is an indirect function call. */
3725 /* Nonzero if we must avoid push-insns in the args for this call.
3726 If stack space is allocated for register parameters, but not by the
3727 caller, then it is preallocated in the fixed part of the stack frame.
3728 So the entire argument block must then be preallocated (i.e., we
3729 ignore PUSH_ROUNDING in that case). */
3731 int must_preallocate = !PUSH_ARGS;
3733 /* Size of the stack reserved for parameter registers. */
3734 int reg_parm_stack_space = 0;
3736 /* Address of space preallocated for stack parms
3737 (on machines that lack push insns), or 0 if space not preallocated. */
3738 rtx argblock = 0;
3740 /* Mask of ECF_ and ERF_ flags. */
3741 int flags = 0;
3742 int return_flags = 0;
3743 #ifdef REG_PARM_STACK_SPACE
3744 /* Define the boundary of the register parm stack space that needs to be
3745 saved, if any. */
3746 int low_to_save, high_to_save;
3747 rtx save_area = 0; /* Place that it is saved */
3748 #endif
3750 unsigned int initial_highest_arg_in_use = highest_outgoing_arg_in_use;
3751 char *initial_stack_usage_map = stack_usage_map;
3752 unsigned HOST_WIDE_INT initial_stack_usage_watermark = stack_usage_watermark;
3753 char *stack_usage_map_buf = NULL;
3755 poly_int64 old_stack_allocated;
3757 /* State variables to track stack modifications. */
3758 rtx old_stack_level = 0;
3759 int old_stack_arg_under_construction = 0;
3760 poly_int64 old_pending_adj = 0;
3761 int old_inhibit_defer_pop = inhibit_defer_pop;
3763 /* Some stack pointer alterations we make are performed via
3764 allocate_dynamic_stack_space. This modifies the stack_pointer_delta,
3765 which we then also need to save/restore along the way. */
3766 poly_int64 old_stack_pointer_delta = 0;
3768 rtx call_fusage;
3769 tree addr = CALL_EXPR_FN (exp);
3770 int i;
3771 /* The alignment of the stack, in bits. */
3772 unsigned HOST_WIDE_INT preferred_stack_boundary;
3773 /* The alignment of the stack, in bytes. */
3774 unsigned HOST_WIDE_INT preferred_unit_stack_boundary;
3775 /* The static chain value to use for this call. */
3776 rtx static_chain_value;
3777 /* See if this is a "nothrow" function call. */
3778 if (TREE_NOTHROW (exp))
3779 flags |= ECF_NOTHROW;
3781 /* See if we can find a DECL-node for the actual function, and get the
3782 function attributes (flags) from the function decl or type node. */
3783 fndecl = get_callee_fndecl (exp);
3784 if (fndecl)
3786 fntype = TREE_TYPE (fndecl);
3787 flags |= flags_from_decl_or_type (fndecl);
3788 return_flags |= decl_return_flags (fndecl);
3790 else
3792 fntype = TREE_TYPE (TREE_TYPE (addr));
3793 flags |= flags_from_decl_or_type (fntype);
3794 if (CALL_EXPR_BY_DESCRIPTOR (exp))
3795 flags |= ECF_BY_DESCRIPTOR;
3797 rettype = TREE_TYPE (exp);
3799 struct_value = targetm.calls.struct_value_rtx (fntype, 0);
3801 /* Warn if this value is an aggregate type,
3802 regardless of which calling convention we are using for it. */
3803 if (AGGREGATE_TYPE_P (rettype))
3804 warning (OPT_Waggregate_return, "function call has aggregate value");
3806 /* If the result of a non-looping pure or const function call is
3807 ignored (or void), and none of its arguments are volatile, we can
3808 avoid expanding the call and just evaluate the arguments for
3809 side-effects. */
3810 if ((flags & (ECF_CONST | ECF_PURE))
3811 && (!(flags & ECF_LOOPING_CONST_OR_PURE))
3812 && (ignore || target == const0_rtx
3813 || TYPE_MODE (rettype) == VOIDmode))
3815 bool volatilep = false;
3816 tree arg;
3817 call_expr_arg_iterator iter;
3819 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
3820 if (TREE_THIS_VOLATILE (arg))
3822 volatilep = true;
3823 break;
3826 if (! volatilep)
3828 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
3829 expand_expr (arg, const0_rtx, VOIDmode, EXPAND_NORMAL);
3830 return const0_rtx;
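/* Editorial sketch (not part of the original source): what the early
   exit above means at the source level.  pure_add and tick are
   hypothetical.  */

extern int pure_add (int, int) __attribute__ ((pure));
extern volatile int tick;

static void
ignored_pure_calls (void)
{
  pure_add (1, 2);	/* Pure, result ignored, no volatile arguments:
			   the call is not expanded at all.  */
  pure_add (tick, 2);	/* A volatile argument defeats the shortcut;
			   the call is expanded normally so the read of
			   'tick' keeps its ordering.  */
}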
3834 #ifdef REG_PARM_STACK_SPACE
3835 reg_parm_stack_space = REG_PARM_STACK_SPACE (!fndecl ? fntype : fndecl);
3836 #endif
3838 if (! OUTGOING_REG_PARM_STACK_SPACE ((!fndecl ? fntype : TREE_TYPE (fndecl)))
3839 && reg_parm_stack_space > 0 && PUSH_ARGS)
3840 must_preallocate = 1;
3842 /* Set up a place to return a structure. */
3844 /* Cater to broken compilers. */
3845 if (aggregate_value_p (exp, fntype))
3847 /* This call returns a big structure. */
3848 flags &= ~(ECF_CONST | ECF_PURE | ECF_LOOPING_CONST_OR_PURE);
3850 #ifdef PCC_STATIC_STRUCT_RETURN
3852 pcc_struct_value = 1;
3854 #else /* not PCC_STATIC_STRUCT_RETURN */
3856 if (!poly_int_tree_p (TYPE_SIZE_UNIT (rettype), &struct_value_size))
3857 struct_value_size = -1;
3859 /* Even if it is semantically safe to use the target as the return
3860 slot, it may not be sufficiently aligned for the return type. */
3861 if (CALL_EXPR_RETURN_SLOT_OPT (exp)
3862 && target
3863 && MEM_P (target)
3864 /* If rettype is addressable, we may not create a temporary.
3865 If target is properly aligned at runtime and the compiler
3866 just doesn't know about it, it will work fine, otherwise it
3867 will be UB. */
3868 && (TREE_ADDRESSABLE (rettype)
3869 || !(MEM_ALIGN (target) < TYPE_ALIGN (rettype)
3870 && targetm.slow_unaligned_access (TYPE_MODE (rettype),
3871 MEM_ALIGN (target)))))
3872 structure_value_addr = XEXP (target, 0);
3873 else
3875 /* For variable-sized objects, we must be called with a target
3876 specified. If we were to allocate space on the stack here,
3877 we would have no way of knowing when to free it. */
3878 rtx d = assign_temp (rettype, 1, 1);
3879 structure_value_addr = XEXP (d, 0);
3880 target = 0;
3883 #endif /* not PCC_STATIC_STRUCT_RETURN */
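/* Editorial sketch (not part of the original source): the lowering the
   code above sets up.  A call returning an aggregate in memory gets a
   caller-provided return slot whose address travels either as a hidden
   first argument or in the target's struct_value register.  Names are
   hypothetical.  */

struct payload { char buf[64]; };

/* What the programmer writes ...  */
extern struct payload make_payload (int);

/* ... behaves roughly as if it were declared: */
extern void make_payload_lowered (struct payload *hidden_ret, int);

static void
payload_user (void)
{
  struct payload p;		   /* Return slot; when no usable TARGET
				      exists, assign_temp above creates
				      an equivalent temporary.  */
  make_payload_lowered (&p, 42);   /* structure_value_addr passed as the
				      implicit argument.  */
}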
3886 /* Figure out the amount to which the stack should be aligned. */
3887 preferred_stack_boundary = PREFERRED_STACK_BOUNDARY;
3888 if (fndecl)
3890 struct cgraph_rtl_info *i = cgraph_node::rtl_info (fndecl);
3891 /* Without automatic stack alignment, we can't increase preferred
3892 stack boundary. With automatic stack alignment, it is
3893 unnecessary since unless we can guarantee that all callers will
3894 align the outgoing stack properly, the callee has to align its
3895 stack anyway. */
3896 if (i
3897 && i->preferred_incoming_stack_boundary
3898 && i->preferred_incoming_stack_boundary < preferred_stack_boundary)
3899 preferred_stack_boundary = i->preferred_incoming_stack_boundary;
3902 /* Operand 0 is a pointer-to-function; get the type of the function. */
3903 funtype = TREE_TYPE (addr);
3904 gcc_assert (POINTER_TYPE_P (funtype));
3905 funtype = TREE_TYPE (funtype);
3907 /* Count whether there are actual complex arguments that need to be split
3908 into their real and imaginary parts. Munge the type_arg_types
3909 appropriately here as well. */
3910 if (targetm.calls.split_complex_arg)
3912 call_expr_arg_iterator iter;
3913 tree arg;
3914 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
3916 tree type = TREE_TYPE (arg);
3917 if (type && TREE_CODE (type) == COMPLEX_TYPE
3918 && targetm.calls.split_complex_arg (type))
3919 num_complex_actuals++;
3921 type_arg_types = split_complex_types (TYPE_ARG_TYPES (funtype));
3923 else
3924 type_arg_types = TYPE_ARG_TYPES (funtype);
3926 if (flags & ECF_MAY_BE_ALLOCA)
3927 cfun->calls_alloca = 1;
3929 /* If struct_value_rtx is 0, it means pass the address
3930 as if it were an extra parameter. Put the argument expression
3931 in structure_value_addr_value. */
3932 if (structure_value_addr && struct_value == 0)
3934 /* If structure_value_addr is a REG other than
3935 virtual_outgoing_args_rtx, we can always use it. If it
3936 is not a REG, we must always copy it into a register.
3937 If it is virtual_outgoing_args_rtx, we must copy it to another
3938 register in some cases. */
3939 rtx temp = (!REG_P (structure_value_addr)
3940 || (ACCUMULATE_OUTGOING_ARGS
3941 && stack_arg_under_construction
3942 && structure_value_addr == virtual_outgoing_args_rtx)
3943 ? copy_addr_to_reg (convert_memory_address
3944 (Pmode, structure_value_addr))
3945 : structure_value_addr);
3947 structure_value_addr_value =
3948 make_tree (build_pointer_type (TREE_TYPE (funtype)), temp);
3949 structure_value_addr_parm = 1;
3952 /* Count the arguments and set NUM_ACTUALS. */
3953 num_actuals =
3954 call_expr_nargs (exp) + num_complex_actuals + structure_value_addr_parm;
3956 /* Compute number of named args.
3957 First, do a raw count of the args for INIT_CUMULATIVE_ARGS. */
3959 if (type_arg_types != 0)
3960 n_named_args
3961 = (list_length (type_arg_types)
3962 /* Count the struct value address, if it is passed as a parm. */
3963 + structure_value_addr_parm);
3964 else
3965 /* If we know nothing, treat all args as named. */
3966 n_named_args = num_actuals;
3968 /* Start updating where the next arg would go.
3970 On some machines (such as the PA) indirect calls have a different
3971 calling convention than normal calls. The fourth argument in
3972 INIT_CUMULATIVE_ARGS tells the backend if this is an indirect call
3973 or not. */
3974 INIT_CUMULATIVE_ARGS (args_so_far_v, funtype, NULL_RTX, fndecl, n_named_args);
3975 args_so_far = pack_cumulative_args (&args_so_far_v);
3977 /* Now possibly adjust the number of named args.
3978 Normally, don't include the last named arg if anonymous args follow.
3979 We do include the last named arg if
3980 targetm.calls.strict_argument_naming() returns nonzero.
3981 (If no anonymous args follow, the result of list_length is actually
3982 one too large. This is harmless.)
3984 If targetm.calls.pretend_outgoing_varargs_named() returns
3985 nonzero, and targetm.calls.strict_argument_naming() returns zero,
3986 this machine will be able to place unnamed args that were passed
3987 in registers into the stack. So treat all args as named. This
3988 allows the insns emitted for a specific argument list to be
3989 independent of the function declaration.
3991 If targetm.calls.pretend_outgoing_varargs_named() returns zero,
3992 we do not have any reliable way to pass unnamed args in
3993 registers, so we must force them into memory. */
3995 if (type_arg_types != 0
3996 && targetm.calls.strict_argument_naming (args_so_far))
3998 else if (type_arg_types != 0
3999 && ! targetm.calls.pretend_outgoing_varargs_named (args_so_far))
4000 /* Don't include the last named arg. */
4001 --n_named_args;
4002 else
4003 /* Treat all args as named. */
4004 n_named_args = num_actuals;
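/* Editorial sketch (not part of the original source): named versus
   anonymous arguments as counted above.  For a prototype with a
   trailing ellipsis, parameters before the ellipsis are named and the
   rest are anonymous.  sum_toy is hypothetical.  */

#include <stdarg.h>

static int
sum_toy (int n, ...)		/* One named argument; the rest anonymous. */
{
  va_list ap;
  int total = 0;
  va_start (ap, n);		/* 'n' is the last named argument.  */
  while (n-- > 0)
    total += va_arg (ap, int);
  va_end (ap);
  return total;
}

/* sum_toy (3, 10, 20, 30) == 60.  On targets where neither
   strict_argument_naming nor pretend_outgoing_varargs_named holds,
   even 'n' is excluded from n_named_args by the --n_named_args branch
   above, since it is the last named arg and anonymous args follow.  */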
4006 /* Make a vector to hold all the information about each arg. */
4007 args = XCNEWVEC (struct arg_data, num_actuals);
4009 /* Build up entries in the ARGS array, compute the size of the
4010 arguments into ARGS_SIZE, etc. */
4011 initialize_argument_information (num_actuals, args, &args_size,
4012 n_named_args, exp,
4013 structure_value_addr_value, fndecl, fntype,
4014 args_so_far, reg_parm_stack_space,
4015 &old_stack_level, &old_pending_adj,
4016 &must_preallocate, &flags,
4017 &try_tail_call, CALL_FROM_THUNK_P (exp));
4019 if (args_size.var)
4020 must_preallocate = 1;
4022 /* Now make final decision about preallocating stack space. */
4023 must_preallocate = finalize_must_preallocate (must_preallocate,
4024 num_actuals, args,
4025 &args_size);
4027 /* If the structure value address will reference the stack pointer, we
4028 must stabilize it. We don't need to do this if we know that we are
4029 not going to adjust the stack pointer in processing this call. */
4031 if (structure_value_addr
4032 && (reg_mentioned_p (virtual_stack_dynamic_rtx, structure_value_addr)
4033 || reg_mentioned_p (virtual_outgoing_args_rtx,
4034 structure_value_addr))
4035 && (args_size.var
4036 || (!ACCUMULATE_OUTGOING_ARGS
4037 && maybe_ne (args_size.constant, 0))))
4038 structure_value_addr = copy_to_reg (structure_value_addr);
4040 /* Tail calls can make things harder to debug, and we've traditionally
4041 pushed these optimizations into -O2. Don't try if we're already
4042 expanding a call, as that means we're an argument. Don't try if
4043 there are cleanups, as we know there's code to follow the call. */
4044 if (currently_expanding_call++ != 0
4045 || (!flag_optimize_sibling_calls && !CALL_FROM_THUNK_P (exp))
4046 || args_size.var
4047 || dbg_cnt (tail_call) == false)
4048 try_tail_call = 0;
4050 /* Workaround buggy C/C++ wrappers around Fortran routines with
4051 character(len=constant) arguments if the hidden string length arguments
4052 are passed on the stack; if the callers forget to pass those arguments,
4053 attempting to tail call in such routines leads to stack corruption.
4054 Avoid tail calls in functions where at least one such hidden string
4055 length argument is passed (partially or fully) on the stack in the
4056 caller and the callee needs to pass any arguments on the stack.
4057 See PR90329. */
4058 if (try_tail_call && maybe_ne (args_size.constant, 0))
4059 for (tree arg = DECL_ARGUMENTS (current_function_decl);
4060 arg; arg = DECL_CHAIN (arg))
4061 if (DECL_HIDDEN_STRING_LENGTH (arg) && DECL_INCOMING_RTL (arg))
4063 subrtx_iterator::array_type array;
4064 FOR_EACH_SUBRTX (iter, array, DECL_INCOMING_RTL (arg), NONCONST)
4065 if (MEM_P (*iter))
4067 try_tail_call = 0;
4068 break;
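/* Editorial sketch (not part of the original source): the PR90329
   situation guarded against above, seen from the C side.  A Fortran
   dummy of type character(len=10) carries a hidden string-length
   argument; a C prototype that omits it under-declares the callee.
   If that hidden length lives on the stack and the C caller never
   pushed it, tail-calling from such a routine would read past the
   caller's argument area.  Names are hypothetical, and the hidden
   argument convention is compiler- and target-dependent.  */

/* Fortran side (conceptually):  subroutine sub(s)
                                 character(len=10) :: s        */

/* Faithful C declaration, hidden length spelled out: */
extern void sub_ (char *s, int s_len);

/* Buggy wrapper declaration with the hidden length forgotten: */
extern void sub_buggy_ (char *s);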
4072 /* If the user has marked the function as requiring tail-call
4073 optimization, attempt it. */
4074 if (must_tail_call)
4075 try_tail_call = 1;
4077 /* Remaining reasons for tail call optimization to fail. */
4078 if (try_tail_call)
4079 try_tail_call = can_implement_as_sibling_call_p (exp,
4080 structure_value_addr,
4081 funtype,
4082 reg_parm_stack_space,
4083 fndecl,
4084 flags, addr, args_size);
4086 /* Check if caller and callee disagree in promotion of function
4087 return value. */
4088 if (try_tail_call)
4090 machine_mode caller_mode, caller_promoted_mode;
4091 machine_mode callee_mode, callee_promoted_mode;
4092 int caller_unsignedp, callee_unsignedp;
4093 tree caller_res = DECL_RESULT (current_function_decl);
4095 caller_unsignedp = TYPE_UNSIGNED (TREE_TYPE (caller_res));
4096 caller_mode = DECL_MODE (caller_res);
4097 callee_unsignedp = TYPE_UNSIGNED (TREE_TYPE (funtype));
4098 callee_mode = TYPE_MODE (TREE_TYPE (funtype));
4099 caller_promoted_mode
4100 = promote_function_mode (TREE_TYPE (caller_res), caller_mode,
4101 &caller_unsignedp,
4102 TREE_TYPE (current_function_decl), 1);
4103 callee_promoted_mode
4104 = promote_function_mode (TREE_TYPE (funtype), callee_mode,
4105 &callee_unsignedp,
4106 funtype, 1);
4107 if (caller_mode != VOIDmode
4108 && (caller_promoted_mode != callee_promoted_mode
4109 || ((caller_mode != caller_promoted_mode
4110 || callee_mode != callee_promoted_mode)
4111 && (caller_unsignedp != callee_unsignedp
4112 || partial_subreg_p (caller_mode, callee_mode)))))
4114 try_tail_call = 0;
4115 maybe_complain_about_tail_call (exp,
4116 "caller and callee disagree in"
4117 " promotion of function"
4118 " return value");
4122 /* Ensure current function's preferred stack boundary is at least
4123 what we need. Stack alignment may also increase preferred stack
4124 boundary. */
4125 for (i = 0; i < num_actuals; i++)
4126 if (reg_parm_stack_space > 0
4127 || args[i].reg == 0
4128 || args[i].partial != 0
4129 || args[i].pass_on_stack)
4130 update_stack_alignment_for_call (&args[i].locate);
4131 if (crtl->preferred_stack_boundary < preferred_stack_boundary)
4132 crtl->preferred_stack_boundary = preferred_stack_boundary;
4133 else
4134 preferred_stack_boundary = crtl->preferred_stack_boundary;
4136 preferred_unit_stack_boundary = preferred_stack_boundary / BITS_PER_UNIT;
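/* Editorial sketch (not part of the original source): the bit-to-byte
   conversion above and the rounding it enables.  align_up_toy is
   hypothetical; 8 stands in for BITS_PER_UNIT.  */

static unsigned long
align_up_toy (unsigned long size, unsigned long boundary_bits)
{
  unsigned long unit = boundary_bits / 8;	/* Bits to bytes.  */
  return (size + unit - 1) & ~(unit - 1);	/* Round up; unit must be
						   a power of two.  */
}

/* align_up_toy (20, 128) == 32: a 20-byte argument block occupies two
   16-byte slots on a stack kept 128-bit aligned.  */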
4138 if (flag_callgraph_info)
4139 record_final_call (fndecl, EXPR_LOCATION (exp));
4141 /* We want to make two insn chains; one for a sibling call, the other
4142 for a normal call. We will select one of the two chains after
4143 initial RTL generation is complete. */
4144 for (pass = try_tail_call ? 0 : 1; pass < 2; pass++)
4146 int sibcall_failure = 0;
4147 /* We want to emit any pending stack adjustments before the tail
4148 recursion "call". That way we know any adjustment after the tail
4149 recursion call can be ignored if we indeed use the tail
4150 call expansion. */
4151 saved_pending_stack_adjust save;
4152 rtx_insn *insns, *before_call, *after_args;
4153 rtx next_arg_reg;
4155 if (pass == 0)
4157 /* State variables we need to save and restore between
4158 iterations. */
4159 save_pending_stack_adjust (&save);
4161 if (pass)
4162 flags &= ~ECF_SIBCALL;
4163 else
4164 flags |= ECF_SIBCALL;
4166 /* Other state variables that we must reinitialize each time
4167 through the loop (that are not initialized by the loop itself). */
4168 argblock = 0;
4169 call_fusage = 0;
4171 /* Start a new sequence for the normal call case.
4173 From this point on, if the sibling call fails, we want to set
4174 sibcall_failure instead of continuing the loop. */
4175 start_sequence ();
4177 /* Don't let pending stack adjusts add up to too much.
4178 Also, do all pending adjustments now if there is any chance
4179 this might be a call to alloca or if we are expanding a sibling
4180 call sequence.
4181 Also do the adjustments before a throwing call, otherwise
4182 exception handling can fail; PR 19225. */
4183 if (maybe_ge (pending_stack_adjust, 32)
4184 || (maybe_ne (pending_stack_adjust, 0)
4185 && (flags & ECF_MAY_BE_ALLOCA))
4186 || (maybe_ne (pending_stack_adjust, 0)
4187 && flag_exceptions && !(flags & ECF_NOTHROW))
4188 || pass == 0)
4189 do_pending_stack_adjust ();
4191 /* Precompute any arguments as needed. */
4192 if (pass)
4193 precompute_arguments (num_actuals, args);
4195 /* Now we are about to start emitting insns that can be deleted
4196 if a libcall is deleted. */
4197 if (pass && (flags & ECF_MALLOC))
4198 start_sequence ();
4200 if (pass == 0
4201 && crtl->stack_protect_guard
4202 && targetm.stack_protect_runtime_enabled_p ())
4203 stack_protect_epilogue ();
4205 adjusted_args_size = args_size;
4206 /* Compute the actual size of the argument block required. The variable
4207 and constant sizes must be combined, the size may have to be rounded,
4208 and there may be a minimum required size. When generating a sibcall
4209 pattern, do not round up, since we'll be re-using whatever space our
4210 caller provided. */
4211 unadjusted_args_size
4212 = compute_argument_block_size (reg_parm_stack_space,
4213 &adjusted_args_size,
4214 fndecl, fntype,
4215 (pass == 0 ? 0
4216 : preferred_stack_boundary));
4218 old_stack_allocated = stack_pointer_delta - pending_stack_adjust;
4220 /* The argument block when performing a sibling call is the
4221 incoming argument block. */
4222 if (pass == 0)
4224 argblock = crtl->args.internal_arg_pointer;
4225 if (STACK_GROWS_DOWNWARD)
4226 argblock
4227 = plus_constant (Pmode, argblock, crtl->args.pretend_args_size);
4228 else
4229 argblock
4230 = plus_constant (Pmode, argblock, -crtl->args.pretend_args_size);
4232 HOST_WIDE_INT map_size = constant_lower_bound (args_size.constant);
4233 stored_args_map = sbitmap_alloc (map_size);
4234 bitmap_clear (stored_args_map);
4235 stored_args_watermark = HOST_WIDE_INT_M1U;
4238 /* If we have no actual push instructions, or shouldn't use them,
4239 make space for all args right now. */
4240 else if (adjusted_args_size.var != 0)
4242 if (old_stack_level == 0)
4244 emit_stack_save (SAVE_BLOCK, &old_stack_level);
4245 old_stack_pointer_delta = stack_pointer_delta;
4246 old_pending_adj = pending_stack_adjust;
4247 pending_stack_adjust = 0;
4248 /* stack_arg_under_construction says whether a stack arg is
4249 being constructed at the old stack level. Pushing the stack
4250 gets a clean outgoing argument block. */
4251 old_stack_arg_under_construction = stack_arg_under_construction;
4252 stack_arg_under_construction = 0;
4254 argblock = push_block (ARGS_SIZE_RTX (adjusted_args_size), 0, 0);
4255 if (flag_stack_usage_info)
4256 current_function_has_unbounded_dynamic_stack_size = 1;
4258 else
4260 /* Note that we must go through the motions of allocating an argument
4261 block even if the size is zero because we may be storing args
4262 in the area reserved for register arguments, which may be part of
4263 the stack frame. */
4265 poly_int64 needed = adjusted_args_size.constant;
4267 /* Store the maximum argument space used. It will be pushed by
4268 the prologue (if ACCUMULATE_OUTGOING_ARGS, or stack overflow
4269 checking). */
4271 crtl->outgoing_args_size = upper_bound (crtl->outgoing_args_size,
4272 needed);
4274 if (must_preallocate)
4276 if (ACCUMULATE_OUTGOING_ARGS)
4278 /* Since the stack pointer will never be pushed, it is
4279 possible for the evaluation of a parm to clobber
4280 something we have already written to the stack.
4281 Since most function calls on RISC machines do not use
4282 the stack, this is uncommon, but must work correctly.
4284 Therefore, we save any area of the stack that was already
4285 written and that we are using. Here we set up to do this
4286 by making a new stack usage map from the old one. The
4287 actual save will be done by store_one_arg.
4289 Another approach might be to try to reorder the argument
4290 evaluations to avoid this conflicting stack usage. */
4292 /* Since we will be writing into the entire argument area,
4293 the map must be allocated for its entire size, not just
4294 the part that is the responsibility of the caller. */
4295 if (! OUTGOING_REG_PARM_STACK_SPACE ((!fndecl ? fntype : TREE_TYPE (fndecl))))
4296 needed += reg_parm_stack_space;
4298 poly_int64 limit = needed;
4299 if (ARGS_GROW_DOWNWARD)
4300 limit += 1;
4302 /* For polynomial sizes, this is the maximum possible
4303 size needed for arguments with a constant size
4304 and offset. */
4305 HOST_WIDE_INT const_limit = constant_lower_bound (limit);
4306 highest_outgoing_arg_in_use
4307 = MAX (initial_highest_arg_in_use, const_limit);
4309 free (stack_usage_map_buf);
4310 stack_usage_map_buf = XNEWVEC (char, highest_outgoing_arg_in_use);
4311 stack_usage_map = stack_usage_map_buf;
4313 if (initial_highest_arg_in_use)
4314 memcpy (stack_usage_map, initial_stack_usage_map,
4315 initial_highest_arg_in_use);
4317 if (initial_highest_arg_in_use != highest_outgoing_arg_in_use)
4318 memset (&stack_usage_map[initial_highest_arg_in_use], 0,
4319 (highest_outgoing_arg_in_use
4320 - initial_highest_arg_in_use));
4321 needed = 0;
4323 /* The address of the outgoing argument list must not be
4324 copied to a register here, because argblock would be left
4325 pointing to the wrong place after the call to
4326 allocate_dynamic_stack_space below. */
4328 argblock = virtual_outgoing_args_rtx;
4330 else
4332 /* Try to reuse some or all of the pending_stack_adjust
4333 to get this space. */
4334 if (inhibit_defer_pop == 0
4335 && (combine_pending_stack_adjustment_and_call
4336 (&needed,
4337 unadjusted_args_size,
4338 &adjusted_args_size,
4339 preferred_unit_stack_boundary)))
4341 /* combine_pending_stack_adjustment_and_call computes
4342 an adjustment before the arguments are allocated.
4343 Account for them and see whether or not the stack
4344 needs to go up or down. */
4345 needed = unadjusted_args_size - needed;
4347 /* Checked by
4348 combine_pending_stack_adjustment_and_call. */
4349 gcc_checking_assert (ordered_p (needed, 0));
4350 if (maybe_lt (needed, 0))
4352 /* We're releasing stack space. */
4353 /* ??? We can avoid any adjustment at all if we're
4354 already aligned. FIXME. */
4355 pending_stack_adjust = -needed;
4356 do_pending_stack_adjust ();
4357 needed = 0;
4359 else
4360 /* We need to allocate space. We'll do that in
4361 push_block below. */
4362 pending_stack_adjust = 0;
4365 /* Special case this because the overhead of `push_block' in
4366 this case is non-trivial. */
4367 if (known_eq (needed, 0))
4368 argblock = virtual_outgoing_args_rtx;
4369 else
4371 rtx needed_rtx = gen_int_mode (needed, Pmode);
4372 argblock = push_block (needed_rtx, 0, 0);
4373 if (ARGS_GROW_DOWNWARD)
4374 argblock = plus_constant (Pmode, argblock, needed);
4377 /* We only really need to call `copy_to_reg' in the case
4378 where push insns are going to be used to pass ARGBLOCK
4379 to a function call in ARGS. In that case, the stack
4380 pointer changes value from the allocation point to the
4381 call point, and hence the value of
4382 VIRTUAL_OUTGOING_ARGS_RTX changes as well. But might
4383 as well always do it. */
4384 argblock = copy_to_reg (argblock);
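/* Editorial sketch (not part of the original source): the arithmetic
   behind the pending-stack-adjustment reuse above, with plain signed
   integers and simplified signs.  reuse_toy is hypothetical.  */

static long
reuse_toy (long pending_pop, long args_needed)
{
  /* A pending pop of pending_pop bytes and a need for args_needed
     bytes of argument space merge into one adjustment.  A negative
     result means release that much now (the maybe_lt (needed, 0)
     branch); a positive result means push_block must allocate it.  */
  return args_needed - pending_pop;
}

/* reuse_toy (32, 16) == -16: half the pending pop is retained as
   argument space and the rest is released.  reuse_toy (8, 24) == 16:
   sixteen more bytes must still be allocated.  */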
4389 if (ACCUMULATE_OUTGOING_ARGS)
4391 /* The save/restore code in store_one_arg handles all
4392 cases except one: a constructor call (including a C
4393 function returning a BLKmode struct) to initialize
4394 an argument. */
4395 if (stack_arg_under_construction)
4397 rtx push_size
4398 = (gen_int_mode
4399 (adjusted_args_size.constant
4400 + (OUTGOING_REG_PARM_STACK_SPACE (!fndecl ? fntype
4401 : TREE_TYPE (fndecl))
4402 ? 0 : reg_parm_stack_space), Pmode));
4403 if (old_stack_level == 0)
4405 emit_stack_save (SAVE_BLOCK, &old_stack_level);
4406 old_stack_pointer_delta = stack_pointer_delta;
4407 old_pending_adj = pending_stack_adjust;
4408 pending_stack_adjust = 0;
4409 /* stack_arg_under_construction says whether a stack
4410 arg is being constructed at the old stack level.
4411 Pushing the stack gets a clean outgoing argument
4412 block. */
4413 old_stack_arg_under_construction
4414 = stack_arg_under_construction;
4415 stack_arg_under_construction = 0;
4416 /* Make a new map for the new argument list. */
4417 free (stack_usage_map_buf);
4418 stack_usage_map_buf = XCNEWVEC (char, highest_outgoing_arg_in_use);
4419 stack_usage_map = stack_usage_map_buf;
4420 highest_outgoing_arg_in_use = 0;
4421 stack_usage_watermark = HOST_WIDE_INT_M1U;
4423 /* We can pass TRUE as the last argument because we just
4424 saved the stack pointer and will restore it right after
4425 the call. */
4426 allocate_dynamic_stack_space (push_size, 0, BIGGEST_ALIGNMENT,
4427 -1, true);
4430 /* If argument evaluation might modify the stack pointer,
4431 copy the address of the argument list to a register. */
4432 for (i = 0; i < num_actuals; i++)
4433 if (args[i].pass_on_stack)
4435 argblock = copy_addr_to_reg (argblock);
4436 break;
4440 compute_argument_addresses (args, argblock, num_actuals);
4442 /* Stack is properly aligned, pops can't safely be deferred during
4443 the evaluation of the arguments. */
4444 NO_DEFER_POP;
4446 /* Precompute all register parameters. It isn't safe to compute
4447 anything once we have started filling any specific hard regs.
4448 TLS symbols sometimes need a call to resolve. Precompute
4449 register parameters before any stack pointer manipulation
4450 to avoid unaligned stack in the called function. */
4451 precompute_register_parameters (num_actuals, args, &reg_parm_seen);
4453 OK_DEFER_POP;
4455 /* Perform stack alignment before the first push (the last arg). */
4456 if (argblock == 0
4457 && maybe_gt (adjusted_args_size.constant, reg_parm_stack_space)
4458 && maybe_ne (adjusted_args_size.constant, unadjusted_args_size))
4460 /* When the stack adjustment is pending, we get better code
4461 by combining the adjustments. */
4462 if (maybe_ne (pending_stack_adjust, 0)
4463 && ! inhibit_defer_pop
4464 && (combine_pending_stack_adjustment_and_call
4465 (&pending_stack_adjust,
4466 unadjusted_args_size,
4467 &adjusted_args_size,
4468 preferred_unit_stack_boundary)))
4469 do_pending_stack_adjust ();
4470 else if (argblock == 0)
4471 anti_adjust_stack (gen_int_mode (adjusted_args_size.constant
4472 - unadjusted_args_size,
4473 Pmode));
4475 /* Now that the stack is properly aligned, pops can't safely
4476 be deferred during the evaluation of the arguments. */
4477 NO_DEFER_POP;
4479 /* Record the maximum pushed stack space size. We need to delay
4480 doing it this far to take into account the optimization done
4481 by combine_pending_stack_adjustment_and_call. */
4482 if (flag_stack_usage_info
4483 && !ACCUMULATE_OUTGOING_ARGS
4484 && pass
4485 && adjusted_args_size.var == 0)
4487 poly_int64 pushed = (adjusted_args_size.constant
4488 + pending_stack_adjust);
4489 current_function_pushed_stack_size
4490 = upper_bound (current_function_pushed_stack_size, pushed);
4493 funexp = rtx_for_function_call (fndecl, addr);
4495 if (CALL_EXPR_STATIC_CHAIN (exp))
4496 static_chain_value = expand_normal (CALL_EXPR_STATIC_CHAIN (exp));
4497 else
4498 static_chain_value = 0;
4500 #ifdef REG_PARM_STACK_SPACE
4501 /* Save the fixed argument area if it's part of the caller's frame and
4502 is clobbered by argument setup for this call. */
4503 if (ACCUMULATE_OUTGOING_ARGS && pass)
4504 save_area = save_fixed_argument_area (reg_parm_stack_space, argblock,
4505 &low_to_save, &high_to_save);
4506 #endif
4508 /* Now store (and compute if necessary) all non-register parms.
4509 These come before register parms, since they can require block-moves,
4510 which could clobber the registers used for register parms.
4511 Parms which have partial registers are not stored here,
4512 but we do preallocate space here if they want that. */
4514 for (i = 0; i < num_actuals; i++)
4516 if (args[i].reg == 0 || args[i].pass_on_stack)
4518 rtx_insn *before_arg = get_last_insn ();
4520 /* We don't allow passing huge (> 2^30 B) arguments
4521 by value. It would cause an overflow later on. */
4522 if (constant_lower_bound (adjusted_args_size.constant)
4523 >= (1 << (HOST_BITS_PER_INT - 2)))
4525 sorry ("passing too large argument on stack");
4526 continue;
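/* Editorial note (not part of the original source): the bound above,
   1 << (HOST_BITS_PER_INT - 2), is 2^30 bytes (1 GiB) on hosts with
   32-bit int; past that point, byte offsets into the argument block
   could overflow later int arithmetic, hence a sorry() instead of
   silently wrong code.  */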
4529 if (store_one_arg (&args[i], argblock, flags,
4530 adjusted_args_size.var != 0,
4531 reg_parm_stack_space)
4532 || (pass == 0
4533 && check_sibcall_argument_overlap (before_arg,
4534 &args[i], 1)))
4535 sibcall_failure = 1;
4538 if (args[i].stack)
4539 call_fusage
4540 = gen_rtx_EXPR_LIST (TYPE_MODE (TREE_TYPE (args[i].tree_value)),
4541 gen_rtx_USE (VOIDmode, args[i].stack),
4542 call_fusage);
4545 /* If we have a parm that is passed in registers but not in memory
4546 and whose alignment does not permit a direct copy into registers,
4547 make a group of pseudos that correspond to each register that we
4548 will later fill. */
4549 if (STRICT_ALIGNMENT)
4550 store_unaligned_arguments_into_pseudos (args, num_actuals);
4552 /* Now store any partially-in-registers parm.
4553 This is the last place a block-move can happen. */
4554 if (reg_parm_seen)
4555 for (i = 0; i < num_actuals; i++)
4556 if (args[i].partial != 0 && ! args[i].pass_on_stack)
4558 rtx_insn *before_arg = get_last_insn ();
4560 /* On targets with weird calling conventions (e.g. PA) it's
4561 hard to ensure that all cases of argument overlap between
4562 stack and registers work. Play it safe and bail out. */
4563 if (ARGS_GROW_DOWNWARD && !STACK_GROWS_DOWNWARD)
4565 sibcall_failure = 1;
4566 break;
4569 if (store_one_arg (&args[i], argblock, flags,
4570 adjusted_args_size.var != 0,
4571 reg_parm_stack_space)
4572 || (pass == 0
4573 && check_sibcall_argument_overlap (before_arg,
4574 &args[i], 1)))
4575 sibcall_failure = 1;
4578 bool any_regs = false;
4579 for (i = 0; i < num_actuals; i++)
4580 if (args[i].reg != NULL_RTX)
4582 any_regs = true;
4583 targetm.calls.call_args (args[i].reg, funtype);
4585 if (!any_regs)
4586 targetm.calls.call_args (pc_rtx, funtype);
4588 /* Figure out the register where the value, if any, will come back. */
4589 valreg = 0;
4590 if (TYPE_MODE (rettype) != VOIDmode
4591 && ! structure_value_addr)
4593 if (pcc_struct_value)
4594 valreg = hard_function_value (build_pointer_type (rettype),
4595 fndecl, NULL, (pass == 0));
4596 else
4597 valreg = hard_function_value (rettype, fndecl, fntype,
4598 (pass == 0));
4600 /* If VALREG is a PARALLEL whose first member has a zero
4601 offset, use that. This is for targets such as m68k that
4602 return the same value in multiple places. */
4603 if (GET_CODE (valreg) == PARALLEL)
4605 rtx elem = XVECEXP (valreg, 0, 0);
4606 rtx where = XEXP (elem, 0);
4607 rtx offset = XEXP (elem, 1);
4608 if (offset == const0_rtx
4609 && GET_MODE (where) == GET_MODE (valreg))
4610 valreg = where;
4614 /* If register arguments require space on the stack and stack space
4615 was not preallocated, allocate stack space here for arguments
4616 passed in registers. */
4617 if (OUTGOING_REG_PARM_STACK_SPACE ((!fndecl ? fntype : TREE_TYPE (fndecl)))
4618 && !ACCUMULATE_OUTGOING_ARGS
4619 && must_preallocate == 0 && reg_parm_stack_space > 0)
4620 anti_adjust_stack (GEN_INT (reg_parm_stack_space));
4622 /* Pass the function the address in which to return a
4623 structure value. */
4624 if (pass != 0 && structure_value_addr && ! structure_value_addr_parm)
4626 structure_value_addr
4627 = convert_memory_address (Pmode, structure_value_addr);
4628 emit_move_insn (struct_value,
4629 force_reg (Pmode,
4630 force_operand (structure_value_addr,
4631 NULL_RTX)));
4633 if (REG_P (struct_value))
4634 use_reg (&call_fusage, struct_value);
4637 after_args = get_last_insn ();
4638 funexp = prepare_call_address (fndecl ? fndecl : fntype, funexp,
4639 static_chain_value, &call_fusage,
4640 reg_parm_seen, flags);
4642 load_register_parameters (args, num_actuals, &call_fusage, flags,
4643 pass == 0, &sibcall_failure);
4645 /* Save a pointer to the last insn before the call, so that we can
4646 later safely search backwards to find the CALL_INSN. */
4647 before_call = get_last_insn ();
4649 /* Set up next argument register. For sibling calls on machines
4650 with register windows this should be the incoming register. */
4651 if (pass == 0)
4652 next_arg_reg = targetm.calls.function_incoming_arg
4653 (args_so_far, function_arg_info::end_marker ());
4654 else
4655 next_arg_reg = targetm.calls.function_arg
4656 (args_so_far, function_arg_info::end_marker ());
4658 if (pass == 1 && (return_flags & ERF_RETURNS_ARG))
4660 int arg_nr = return_flags & ERF_RETURN_ARG_MASK;
4661 arg_nr = num_actuals - arg_nr - 1;
4662 if (arg_nr >= 0
4663 && arg_nr < num_actuals
4664 && args[arg_nr].reg
4665 && valreg
4666 && REG_P (valreg)
4667 && GET_MODE (args[arg_nr].reg) == GET_MODE (valreg))
4668 call_fusage
4669 = gen_rtx_EXPR_LIST (TYPE_MODE (TREE_TYPE (args[arg_nr].tree_value)),
4670 gen_rtx_SET (valreg, args[arg_nr].reg),
4671 call_fusage);
4673 /* All arguments and registers used for the call must be set up by
4674 now! */
4676 /* Stack must be properly aligned now. */
4677 gcc_assert (!pass
4678 || multiple_p (stack_pointer_delta,
4679 preferred_unit_stack_boundary));
4681 /* Generate the actual call instruction. */
4682 emit_call_1 (funexp, exp, fndecl, funtype, unadjusted_args_size,
4683 adjusted_args_size.constant, struct_value_size,
4684 next_arg_reg, valreg, old_inhibit_defer_pop, call_fusage,
4685 flags, args_so_far);
4687 if (flag_ipa_ra)
4689 rtx_call_insn *last;
4690 rtx datum = NULL_RTX;
4691 if (fndecl != NULL_TREE)
4693 datum = XEXP (DECL_RTL (fndecl), 0);
4694 gcc_assert (datum != NULL_RTX
4695 && GET_CODE (datum) == SYMBOL_REF);
4697 last = last_call_insn ();
4698 add_reg_note (last, REG_CALL_DECL, datum);
4701 /* If the call setup or the call itself overlaps with anything
4702 of the argument setup we probably clobbered our call address.
4703 In that case we can't do sibcalls. */
4704 if (pass == 0
4705 && check_sibcall_argument_overlap (after_args, 0, 0))
4706 sibcall_failure = 1;
4708 /* If a non-BLKmode value is returned at the most significant end
4709 of a register, shift the register right by the appropriate amount
4710 and update VALREG accordingly. BLKmode values are handled by the
4711 group load/store machinery below. */
4712 if (!structure_value_addr
4713 && !pcc_struct_value
4714 && TYPE_MODE (rettype) != VOIDmode
4715 && TYPE_MODE (rettype) != BLKmode
4716 && REG_P (valreg)
4717 && targetm.calls.return_in_msb (rettype))
4719 if (shift_return_value (TYPE_MODE (rettype), false, valreg))
4720 sibcall_failure = 1;
4721 valreg = gen_rtx_REG (TYPE_MODE (rettype), REGNO (valreg));
4724 if (pass && (flags & ECF_MALLOC))
4726 rtx temp = gen_reg_rtx (GET_MODE (valreg));
4727 rtx_insn *last, *insns;
4729 /* The return value from a malloc-like function is a pointer. */
4730 if (TREE_CODE (rettype) == POINTER_TYPE)
4731 mark_reg_pointer (temp, MALLOC_ABI_ALIGNMENT);
4733 emit_move_insn (temp, valreg);
4735 /* The return value from a malloc-like function cannot alias
4736 anything else. */
4737 last = get_last_insn ();
4738 add_reg_note (last, REG_NOALIAS, temp);
4740 /* Write out the sequence. */
4741 insns = get_insns ();
4742 end_sequence ();
4743 emit_insn (insns);
4744 valreg = temp;
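/* Editorial sketch (not part of the original source): the source-level
   property behind the REG_NOALIAS note above.  my_alloc is
   hypothetical.  */

extern void *my_alloc (unsigned long n) __attribute__ ((malloc));

static void
fill_fresh (unsigned long n)
{
  char *p = my_alloc (n);	/* ECF_MALLOC call: the result is known
				   not to alias any other live object.  */
  unsigned long i;
  for (i = 0; i < n; i++)
    p[i] = 0;			/* So these stores cannot clobber values
				   the optimizer keeps in registers
				   across the loop.  */
}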
4747 /* For calls to `setjmp', etc., inform
4748 function.c:setjmp_warnings that it should complain if
4749 nonvolatile values are live. For functions that cannot
4750 return, inform flow that control does not fall through. */
4752 if ((flags & ECF_NORETURN) || pass == 0)
4754 /* The barrier must be emitted
4755 immediately after the CALL_INSN. Some ports emit more
4756 than just a CALL_INSN above, so we must search for it here. */
4758 rtx_insn *last = get_last_insn ();
4759 while (!CALL_P (last))
4761 last = PREV_INSN (last);
4762 /* There was no CALL_INSN? */
4763 gcc_assert (last != before_call);
4766 emit_barrier_after (last);
4768 /* Stack adjustments after a noreturn call are dead code.
4769 However when NO_DEFER_POP is in effect, we must preserve
4770 stack_pointer_delta. */
4771 if (inhibit_defer_pop == 0)
4773 stack_pointer_delta = old_stack_allocated;
4774 pending_stack_adjust = 0;
4778 /* If value type not void, return an rtx for the value. */
4780 if (TYPE_MODE (rettype) == VOIDmode
4781 || ignore)
4782 target = const0_rtx;
4783 else if (structure_value_addr)
4785 if (target == 0 || !MEM_P (target))
4787 target
4788 = gen_rtx_MEM (TYPE_MODE (rettype),
4789 memory_address (TYPE_MODE (rettype),
4790 structure_value_addr));
4791 set_mem_attributes (target, rettype, 1);
4794 else if (pcc_struct_value)
4796 /* This is the special C++ case where we need to
4797 know what the true target was. We take care to
4798 never use this value more than once in one expression. */
4799 target = gen_rtx_MEM (TYPE_MODE (rettype),
4800 copy_to_reg (valreg));
4801 set_mem_attributes (target, rettype, 1);
4803 /* Handle calls that return values in multiple non-contiguous locations.
4804 The Irix 6 ABI has examples of this. */
4805 else if (GET_CODE (valreg) == PARALLEL)
4807 if (target == 0)
4808 target = emit_group_move_into_temps (valreg);
4809 else if (rtx_equal_p (target, valreg))
4811 else if (GET_CODE (target) == PARALLEL)
4812 /* Handle the result of an emit_group_move_into_temps
4813 call in the previous pass. */
4814 emit_group_move (target, valreg);
4815 else
4816 emit_group_store (target, valreg, rettype,
4817 int_size_in_bytes (rettype));
4819 else if (target
4820 && GET_MODE (target) == TYPE_MODE (rettype)
4821 && GET_MODE (target) == GET_MODE (valreg))
4823 bool may_overlap = false;
4825 /* We have to copy a return value in a CLASS_LIKELY_SPILLED hard
4826 reg to a plain register. */
4827 if (!REG_P (target) || HARD_REGISTER_P (target))
4828 valreg = avoid_likely_spilled_reg (valreg);
4830 /* If TARGET is a MEM in the argument area, and we have
4831 saved part of the argument area, then we can't store
4832 directly into TARGET as it may get overwritten when we
4833 restore the argument save area below. Don't work too
4834 hard though and simply force TARGET to a register if it
4835 is a MEM; the optimizer is quite likely to sort it out. */
4836 if (ACCUMULATE_OUTGOING_ARGS && pass && MEM_P (target))
4837 for (i = 0; i < num_actuals; i++)
4838 if (args[i].save_area)
4840 may_overlap = true;
4841 break;
4844 if (may_overlap)
4845 target = copy_to_reg (valreg);
4846 else
4848 /* TARGET and VALREG cannot be equal at this point
4849 because the latter would not have
4850 REG_FUNCTION_VALUE_P true, while the former would if
4851 it were referring to the same register.
4853 If they refer to the same register, this move will be
4854 a no-op, except when function inlining is being
4855 done. */
4856 emit_move_insn (target, valreg);
4858 /* If we are setting a MEM, this code must be executed.
4859 Since it is emitted after the call insn, sibcall
4860 optimization cannot be performed in that case. */
4861 if (MEM_P (target))
4862 sibcall_failure = 1;
4865 else
4866 target = copy_to_reg (avoid_likely_spilled_reg (valreg));
4868 /* If we promoted this return value, make the proper SUBREG.
4869 TARGET might be const0_rtx here, so be careful. */
4870 if (REG_P (target)
4871 && TYPE_MODE (rettype) != BLKmode
4872 && GET_MODE (target) != TYPE_MODE (rettype))
4874 tree type = rettype;
4875 int unsignedp = TYPE_UNSIGNED (type);
4876 machine_mode pmode;
4878 /* Ensure we promote as expected, and get the new unsignedness. */
4879 pmode = promote_function_mode (type, TYPE_MODE (type), &unsignedp,
4880 funtype, 1);
4881 gcc_assert (GET_MODE (target) == pmode);
4883 poly_uint64 offset = subreg_lowpart_offset (TYPE_MODE (type),
4884 GET_MODE (target));
4885 target = gen_rtx_SUBREG (TYPE_MODE (type), target, offset);
4886 SUBREG_PROMOTED_VAR_P (target) = 1;
4887 SUBREG_PROMOTED_SET (target, unsignedp);
4890 /* If size of args is variable or this was a constructor call for a stack
4891 argument, restore saved stack-pointer value. */
4893 if (old_stack_level)
4895 rtx_insn *prev = get_last_insn ();
4897 emit_stack_restore (SAVE_BLOCK, old_stack_level);
4898 stack_pointer_delta = old_stack_pointer_delta;
4900 fixup_args_size_notes (prev, get_last_insn (), stack_pointer_delta);
4902 pending_stack_adjust = old_pending_adj;
4903 old_stack_allocated = stack_pointer_delta - pending_stack_adjust;
4904 stack_arg_under_construction = old_stack_arg_under_construction;
4905 highest_outgoing_arg_in_use = initial_highest_arg_in_use;
4906 stack_usage_map = initial_stack_usage_map;
4907 stack_usage_watermark = initial_stack_usage_watermark;
4908 sibcall_failure = 1;
4910 else if (ACCUMULATE_OUTGOING_ARGS && pass)
4912 #ifdef REG_PARM_STACK_SPACE
4913 if (save_area)
4914 restore_fixed_argument_area (save_area, argblock,
4915 high_to_save, low_to_save);
4916 #endif
4918 /* If we saved any argument areas, restore them. */
4919 for (i = 0; i < num_actuals; i++)
4920 if (args[i].save_area)
4922 machine_mode save_mode = GET_MODE (args[i].save_area);
4923 rtx stack_area
4924 = gen_rtx_MEM (save_mode,
4925 memory_address (save_mode,
4926 XEXP (args[i].stack_slot, 0)));
4928 if (save_mode != BLKmode)
4929 emit_move_insn (stack_area, args[i].save_area);
4930 else
4931 emit_block_move (stack_area, args[i].save_area,
4932 (gen_int_mode
4933 (args[i].locate.size.constant, Pmode)),
4934 BLOCK_OP_CALL_PARM);
4937 highest_outgoing_arg_in_use = initial_highest_arg_in_use;
4938 stack_usage_map = initial_stack_usage_map;
4939 stack_usage_watermark = initial_stack_usage_watermark;
4942 /* If this was alloca, record the new stack level. */
4943 if (flags & ECF_MAY_BE_ALLOCA)
4944 record_new_stack_level ();
4946 /* Free up storage we no longer need. */
4947 for (i = 0; i < num_actuals; ++i)
4948 free (args[i].aligned_regs);
4950 targetm.calls.end_call_args ();
4952 insns = get_insns ();
4953 end_sequence ();
4955 if (pass == 0)
4957 tail_call_insns = insns;
4959 /* Restore the pending stack adjustment now that we have
4960 finished generating the sibling call sequence. */
4962 restore_pending_stack_adjust (&save);
4964 /* Prepare arg structure for next iteration. */
4965 for (i = 0; i < num_actuals; i++)
4967 args[i].value = 0;
4968 args[i].aligned_regs = 0;
4969 args[i].stack = 0;
4972 sbitmap_free (stored_args_map);
4973 internal_arg_pointer_exp_state.scan_start = NULL;
4974 internal_arg_pointer_exp_state.cache.release ();
4976 else
4978 normal_call_insns = insns;
4980 /* Verify that we've deallocated all the stack we used. */
4981 gcc_assert ((flags & ECF_NORETURN)
4982 || known_eq (old_stack_allocated,
4983 stack_pointer_delta
4984 - pending_stack_adjust));
4987 /* If something prevents making this a sibling call,
4988 zero out the sequence. */
4989 if (sibcall_failure)
4990 tail_call_insns = NULL;
4991 else
4992 break;
4995 /* If tail call production succeeded, we need to remove REG_EQUIV notes on
4996 arguments too, as the argument area is now clobbered by the call. */
4997 if (tail_call_insns)
4999 emit_insn (tail_call_insns);
5000 crtl->tail_call_emit = true;
5002 else
5004 emit_insn (normal_call_insns);
5005 if (try_tail_call)
5006 /* Ideally we'd emit a message for all of the ways that it could
5007 have failed. */
5008 maybe_complain_about_tail_call (exp, "tail call production failed");
5011 currently_expanding_call--;
5013 free (stack_usage_map_buf);
5014 free (args);
5015 return target;
5018 /* A sibling call sequence invalidates any REG_EQUIV notes made for
5019 this function's incoming arguments.
5021 At the start of RTL generation we know the only REG_EQUIV notes
5022 in the rtl chain are those for incoming arguments, so we can look
5023 for REG_EQUIV notes between the start of the function and the
5024 NOTE_INSN_FUNCTION_BEG.
5026 This is (slight) overkill. We could keep track of the highest
5027 argument we clobber and be more selective in removing notes, but it
5028 does not seem to be worth the effort. */
5030 void
5031 fixup_tail_calls (void)
5033 rtx_insn *insn;
5035 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
5037 rtx note;
5039 /* There are never REG_EQUIV notes for the incoming arguments
5040 after the NOTE_INSN_FUNCTION_BEG note, so stop if we see it. */
5041 if (NOTE_P (insn)
5042 && NOTE_KIND (insn) == NOTE_INSN_FUNCTION_BEG)
5043 break;
5045 note = find_reg_note (insn, REG_EQUIV, 0);
5046 if (note)
5047 remove_note (insn, note);
5048 note = find_reg_note (insn, REG_EQUIV, 0);
5049 gcc_assert (!note);
5053 /* Traverse a list of TYPES and expand all complex types into their
5054 components. */
5055 static tree
5056 split_complex_types (tree types)
5058 tree p;
5060 /* Before allocating memory, check for the common case of no complex types. */
5061 for (p = types; p; p = TREE_CHAIN (p))
5063 tree type = TREE_VALUE (p);
5064 if (TREE_CODE (type) == COMPLEX_TYPE
5065 && targetm.calls.split_complex_arg (type))
5066 goto found;
5068 return types;
5070 found:
5071 types = copy_list (types);
5073 for (p = types; p; p = TREE_CHAIN (p))
5075 tree complex_type = TREE_VALUE (p);
5077 if (TREE_CODE (complex_type) == COMPLEX_TYPE
5078 && targetm.calls.split_complex_arg (complex_type))
5080 tree next, imag;
5082 /* Rewrite complex type with component type. */
5083 TREE_VALUE (p) = TREE_TYPE (complex_type);
5084 next = TREE_CHAIN (p);
5086 /* Add another component type for the imaginary part. */
5087 imag = build_tree_list (NULL_TREE, TREE_VALUE (p));
5088 TREE_CHAIN (p) = imag;
5089 TREE_CHAIN (imag) = next;
5091 /* Skip the newly created node. */
5092 p = TREE_CHAIN (p);
5096 return types;
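/* Editorial sketch (not part of the original source): the list rewrite
   done by split_complex_types, on a plain linked list.  Each node
   tagged complex is rewritten as its component type and gains a
   sibling node for the imaginary part, mirroring the TREE_CHAIN splice
   above.  All names are hypothetical.  */

#include <stdlib.h>

enum toy_kind { TOY_SCALAR, TOY_COMPLEX };

struct toy_type
{
  enum toy_kind kind;
  struct toy_type *next;
};

static void
split_toy (struct toy_type *p)
{
  for (; p; p = p->next)
    if (p->kind == TOY_COMPLEX)
      {
	struct toy_type *imag = (struct toy_type *) malloc (sizeof *imag);
	if (!imag)
	  return;
	p->kind = TOY_SCALAR;		/* Rewrite node as component type. */
	imag->kind = TOY_SCALAR;	/* New node for the imaginary part. */
	imag->next = p->next;		/* Splice it in after 'p' ...  */
	p->next = imag;
	p = imag;			/* ... and skip the new node.  */
      }
}

/* A list (COMPLEX, SCALAR) becomes (SCALAR, SCALAR, SCALAR): the real
   and imaginary components followed by the untouched scalar.  */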
5099 /* Output a library call to function ORGFUN (a SYMBOL_REF rtx)
5100 for a value of mode OUTMODE,
5101 with NARGS different arguments, passed as ARGS.
5102 Store the return value if RETVAL is nonzero: store it in VALUE if
5103 VALUE is nonnull, otherwise pick a convenient location. In either
5104 case return the location of the stored value.
5106 FN_TYPE should be LCT_NORMAL for `normal' calls, LCT_CONST for
5107 `const' calls, LCT_PURE for `pure' calls, or another LCT_ value for
5108 other types of library calls. */
5110 rtx
5111 emit_library_call_value_1 (int retval, rtx orgfun, rtx value,
5112 enum libcall_type fn_type,
5113 machine_mode outmode, int nargs, rtx_mode_t *args)
5115 /* Total size in bytes of all the stack-parms scanned so far. */
5116 struct args_size args_size;
5117 /* Size of arguments before any adjustments (such as rounding). */
5118 struct args_size original_args_size;
5119 int argnum;
5120 rtx fun;
5121 /* TODO: choose the correct decl type of orgfun. Sadly this information
5122 isn't present here, so we default to the native calling ABI. */
5123 tree fndecl ATTRIBUTE_UNUSED = NULL_TREE; /* library calls default to host calling abi ? */
5124 tree fntype ATTRIBUTE_UNUSED = NULL_TREE; /* library calls default to host calling abi ? */
5125 int count;
5126 rtx argblock = 0;
5127 CUMULATIVE_ARGS args_so_far_v;
5128 cumulative_args_t args_so_far;
5129 struct arg
5131 rtx value;
5132 machine_mode mode;
5133 rtx reg;
5134 int partial;
5135 struct locate_and_pad_arg_data locate;
5136 rtx save_area;
5138 struct arg *argvec;
5139 int old_inhibit_defer_pop = inhibit_defer_pop;
5140 rtx call_fusage = 0;
5141 rtx mem_value = 0;
5142 rtx valreg;
5143 int pcc_struct_value = 0;
5144 poly_int64 struct_value_size = 0;
5145 int flags;
5146 int reg_parm_stack_space = 0;
5147 poly_int64 needed;
5148 rtx_insn *before_call;
5149 bool have_push_fusage;
5150 tree tfom; /* type_for_mode (outmode, 0) */
5152 #ifdef REG_PARM_STACK_SPACE
5153 /* Define the boundary of the register parm stack space that needs to be
5154 saved, if any. */
5155 int low_to_save = 0, high_to_save = 0;
5156 rtx save_area = 0; /* Place that it is saved. */
5157 #endif
5159 /* Stack usage bookkeeping captured on entry, restored after the call. */
5160 unsigned int initial_highest_arg_in_use = highest_outgoing_arg_in_use;
5161 char *initial_stack_usage_map = stack_usage_map;
5162 unsigned HOST_WIDE_INT initial_stack_usage_watermark = stack_usage_watermark;
5163 char *stack_usage_map_buf = NULL;
5165 rtx struct_value = targetm.calls.struct_value_rtx (0, 0);
5167 #ifdef REG_PARM_STACK_SPACE
5168 reg_parm_stack_space = REG_PARM_STACK_SPACE ((tree) 0);
5169 #endif
5171 /* By default, library functions cannot throw. */
5172 flags = ECF_NOTHROW;
5174 switch (fn_type)
5176 case LCT_NORMAL:
5177 break;
5178 case LCT_CONST:
5179 flags |= ECF_CONST;
5180 break;
5181 case LCT_PURE:
5182 flags |= ECF_PURE;
5183 break;
5184 case LCT_NORETURN:
5185 flags |= ECF_NORETURN;
5186 break;
5187 case LCT_THROW:
5188 flags &= ~ECF_NOTHROW;
5189 break;
5190 case LCT_RETURNS_TWICE:
5191 flags = ECF_RETURNS_TWICE;
5192 break;
5194 fun = orgfun;
5196 /* Ensure current function's preferred stack boundary is at least
5197 what we need. */
5198 if (crtl->preferred_stack_boundary < PREFERRED_STACK_BOUNDARY)
5199 crtl->preferred_stack_boundary = PREFERRED_STACK_BOUNDARY;
5201 /* If this kind of value comes back in memory,
5202 decide where in memory it should come back. */
5203 if (outmode != VOIDmode)
5205 tfom = lang_hooks.types.type_for_mode (outmode, 0);
5206 if (aggregate_value_p (tfom, 0))
5208 #ifdef PCC_STATIC_STRUCT_RETURN
5209 rtx pointer_reg
5210 = hard_function_value (build_pointer_type (tfom), 0, 0, 0);
5211 mem_value = gen_rtx_MEM (outmode, pointer_reg);
5212 pcc_struct_value = 1;
5213 if (value == 0)
5214 value = gen_reg_rtx (outmode);
5215 #else /* not PCC_STATIC_STRUCT_RETURN */
5216 struct_value_size = GET_MODE_SIZE (outmode);
5217 if (value != 0 && MEM_P (value))
5218 mem_value = value;
5219 else
5220 mem_value = assign_temp (tfom, 1, 1);
5221 #endif
5222 /* This call returns a big structure. */
5223 flags &= ~(ECF_CONST | ECF_PURE | ECF_LOOPING_CONST_OR_PURE);
5226 else
5227 tfom = void_type_node;
5229 /* ??? Unfinished: must pass the memory address as an argument. */
5231 /* Copy all the libcall-arguments out of the varargs data
5232 and into a vector ARGVEC.
5234 Compute how to pass each argument. We only support a very small subset
5235 of the full argument passing conventions to limit complexity here since
5236 library functions shouldn't have many args. */
5238 argvec = XALLOCAVEC (struct arg, nargs + 1);
5239 memset (argvec, 0, (nargs + 1) * sizeof (struct arg));
5241 #ifdef INIT_CUMULATIVE_LIBCALL_ARGS
5242 INIT_CUMULATIVE_LIBCALL_ARGS (args_so_far_v, outmode, fun);
5243 #else
5244 INIT_CUMULATIVE_ARGS (args_so_far_v, NULL_TREE, fun, 0, nargs);
5245 #endif
5246 args_so_far = pack_cumulative_args (&args_so_far_v);
5248 args_size.constant = 0;
5249 args_size.var = 0;
5251 count = 0;
5253 push_temp_slots ();
5255 /* If there's a structure value address to be passed,
5256 either pass it in the special place, or pass it as an extra argument. */
5257 if (mem_value && struct_value == 0 && ! pcc_struct_value)
5259 rtx addr = XEXP (mem_value, 0);
5261 nargs++;
5263 /* Make sure it is a reasonable operand for a move or push insn. */
5264 if (!REG_P (addr) && !MEM_P (addr)
5265 && !(CONSTANT_P (addr)
5266 && targetm.legitimate_constant_p (Pmode, addr)))
5267 addr = force_operand (addr, NULL_RTX);
5269 argvec[count].value = addr;
5270 argvec[count].mode = Pmode;
5271 argvec[count].partial = 0;
5273 function_arg_info ptr_arg (Pmode, /*named=*/true);
5274 argvec[count].reg = targetm.calls.function_arg (args_so_far, ptr_arg);
5275 gcc_assert (targetm.calls.arg_partial_bytes (args_so_far, ptr_arg) == 0);
5277 locate_and_pad_parm (Pmode, NULL_TREE,
5278 #ifdef STACK_PARMS_IN_REG_PARM_AREA
5279 1,
5280 #else
5281 argvec[count].reg != 0,
5282 #endif
5283 reg_parm_stack_space, 0,
5284 NULL_TREE, &args_size, &argvec[count].locate);
5286 if (argvec[count].reg == 0 || argvec[count].partial != 0
5287 || reg_parm_stack_space > 0)
5288 args_size.constant += argvec[count].locate.size.constant;
5290 targetm.calls.function_arg_advance (args_so_far, ptr_arg);
5292 count++;
5295 for (unsigned int i = 0; count < nargs; i++, count++)
5297 rtx val = args[i].first;
5298 function_arg_info arg (args[i].second, /*named=*/true);
5299 int unsigned_p = 0;
5301 /* We cannot convert the arg value to the mode the library wants here;
5302 must do it earlier where we know the signedness of the arg. */
5303 gcc_assert (arg.mode != BLKmode
5304 && (GET_MODE (val) == arg.mode
5305 || GET_MODE (val) == VOIDmode));
5307 /* Make sure it is a reasonable operand for a move or push insn. */
5308 if (!REG_P (val) && !MEM_P (val)
5309 && !(CONSTANT_P (val)
5310 && targetm.legitimate_constant_p (arg.mode, val)))
5311 val = force_operand (val, NULL_RTX);
5313 if (pass_by_reference (&args_so_far_v, arg))
5315 rtx slot;
5316 int must_copy = !reference_callee_copied (&args_so_far_v, arg);
5318 /* If this was a CONST function, it is now PURE since it now
5319 reads memory. */
5320 if (flags & ECF_CONST)
5322 flags &= ~ECF_CONST;
5323 flags |= ECF_PURE;
5326 if (MEM_P (val) && !must_copy)
5328 tree val_expr = MEM_EXPR (val);
5329 if (val_expr)
5330 mark_addressable (val_expr);
5331 slot = val;
5333 else
5335 slot = assign_temp (lang_hooks.types.type_for_mode (arg.mode, 0),
5336 1, 1);
5337 emit_move_insn (slot, val);
5340 call_fusage = gen_rtx_EXPR_LIST (VOIDmode,
5341 gen_rtx_USE (VOIDmode, slot),
5342 call_fusage);
5343 if (must_copy)
5344 call_fusage = gen_rtx_EXPR_LIST (VOIDmode,
5345 gen_rtx_CLOBBER (VOIDmode,
5346 slot),
5347 call_fusage);
5349 arg.mode = Pmode;
5350 arg.pass_by_reference = true;
5351 val = force_operand (XEXP (slot, 0), NULL_RTX);
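/* From this point on the argument is passed by reference: VAL is
   the address of SLOT and ARG describes a Pmode pointer, so the
   code below lays it out like any other pointer argument. */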
5354 arg.mode = promote_function_mode (NULL_TREE, arg.mode, &unsigned_p,
5355 NULL_TREE, 0);
5356 argvec[count].mode = arg.mode;
5357 argvec[count].value = convert_modes (arg.mode, GET_MODE (val), val,
5358 unsigned_p);
5359 argvec[count].reg = targetm.calls.function_arg (args_so_far, arg);
5361 argvec[count].partial
5362 = targetm.calls.arg_partial_bytes (args_so_far, arg);
5364 if (argvec[count].reg == 0
5365 || argvec[count].partial != 0
5366 || reg_parm_stack_space > 0)
5368 locate_and_pad_parm (arg.mode, NULL_TREE,
5369 #ifdef STACK_PARMS_IN_REG_PARM_AREA
5370 1,
5371 #else
5372 argvec[count].reg != 0,
5373 #endif
5374 reg_parm_stack_space, argvec[count].partial,
5375 NULL_TREE, &args_size, &argvec[count].locate);
5376 args_size.constant += argvec[count].locate.size.constant;
5377 gcc_assert (!argvec[count].locate.size.var);
5379 #ifdef BLOCK_REG_PADDING
5380 else
5381 /* The argument is passed entirely in registers. See at which
5382 end it should be padded. */
5383 argvec[count].locate.where_pad =
5384 BLOCK_REG_PADDING (arg.mode, NULL_TREE,
5385 known_le (GET_MODE_SIZE (arg.mode),
5386 UNITS_PER_WORD));
5387 #endif
5389 targetm.calls.function_arg_advance (args_so_far, arg);
5392 for (int i = 0; i < nargs; i++)
5393 if (reg_parm_stack_space > 0
5394 || argvec[i].reg == 0
5395 || argvec[i].partial != 0)
5396 update_stack_alignment_for_call (&argvec[i].locate);
5398 /* If this machine requires an external definition for library
5399 functions, write one out. */
5400 assemble_external_libcall (fun);
5402 original_args_size = args_size;
5403 args_size.constant = (aligned_upper_bound (args_size.constant
5404 + stack_pointer_delta,
5405 STACK_BYTES)
5406 - stack_pointer_delta);
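/* For example, with STACK_BYTES == 16, stack_pointer_delta == 8 and
   20 bytes of arguments, 28 rounds up to 32 and args_size.constant
   becomes 24, so the stack pointer is 16-byte aligned at the call. */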
5408 args_size.constant = upper_bound (args_size.constant,
5409 reg_parm_stack_space);
5411 if (! OUTGOING_REG_PARM_STACK_SPACE ((!fndecl ? fntype : TREE_TYPE (fndecl))))
5412 args_size.constant -= reg_parm_stack_space;
5414 crtl->outgoing_args_size = upper_bound (crtl->outgoing_args_size,
5415 args_size.constant);
5417 if (flag_stack_usage_info && !ACCUMULATE_OUTGOING_ARGS)
5419 poly_int64 pushed = args_size.constant + pending_stack_adjust;
5420 current_function_pushed_stack_size
5421 = upper_bound (current_function_pushed_stack_size, pushed);
5424 if (ACCUMULATE_OUTGOING_ARGS)
5426 /* Since the stack pointer will never be pushed, it is possible for
5427 the evaluation of a parm to clobber something we have already
5428 written to the stack. Since most function calls on RISC machines
5429 do not use the stack, this is uncommon, but must work correctly.
5431 Therefore, we save any area of the stack that was already written
5432 and that we are using. Here we set up to do this by making a new
5433 stack usage map from the old one.
5435 Another approach might be to try to reorder the argument
5436 evaluations to avoid this conflicting stack usage. */
5438 needed = args_size.constant;
5440 /* Since we will be writing into the entire argument area, the
5441 map must be allocated for its entire size, not just the part that
5442 is the responsibility of the caller. */
5443 if (! OUTGOING_REG_PARM_STACK_SPACE ((!fndecl ? fntype : TREE_TYPE (fndecl))))
5444 needed += reg_parm_stack_space;
5446 poly_int64 limit = needed;
5447 if (ARGS_GROW_DOWNWARD)
5448 limit += 1;
5450 /* For polynomial sizes, this is the maximum possible size needed
5451 for arguments with a constant size and offset. */
5452 HOST_WIDE_INT const_limit = constant_lower_bound (limit);
5453 highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
5454 const_limit);
5456 stack_usage_map_buf = XNEWVEC (char, highest_outgoing_arg_in_use);
5457 stack_usage_map = stack_usage_map_buf;
5459 if (initial_highest_arg_in_use)
5460 memcpy (stack_usage_map, initial_stack_usage_map,
5461 initial_highest_arg_in_use);
5463 if (initial_highest_arg_in_use != highest_outgoing_arg_in_use)
5464 memset (&stack_usage_map[initial_highest_arg_in_use], 0,
5465 highest_outgoing_arg_in_use - initial_highest_arg_in_use);
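/* stack_usage_map has one char per byte of the outgoing argument
   area; a nonzero entry means that byte has already been stored
   into, so a later store that lands there must first save the old
   contents (see the save_area handling below). */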
5466 needed = 0;
5468 /* We must be careful to use virtual regs before they're instantiated,
5469 and real regs afterwards. Loop optimization, for example, can create
5470 new libcalls after we've instantiated the virtual regs, and if we
5471 use virtuals anyway, they won't match the rtl patterns. */
5473 if (virtuals_instantiated)
5474 argblock = plus_constant (Pmode, stack_pointer_rtx,
5475 STACK_POINTER_OFFSET);
5476 else
5477 argblock = virtual_outgoing_args_rtx;
5479 else
5481 if (!PUSH_ARGS)
5482 argblock = push_block (gen_int_mode (args_size.constant, Pmode), 0, 0);
5485 /* We push args individually in reverse order, performing stack alignment
5486 before the first push (the last arg). */
5487 if (argblock == 0)
5488 anti_adjust_stack (gen_int_mode (args_size.constant
5489 - original_args_size.constant,
5490 Pmode));
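/* Only the alignment padding is allocated here; the arguments
   themselves are pushed one at a time below. */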
5492 argnum = nargs - 1;
5494 #ifdef REG_PARM_STACK_SPACE
5495 if (ACCUMULATE_OUTGOING_ARGS)
5497 /* The argument list is the property of the called routine and it
5498 may clobber it. If the fixed area has been used for previous
5499 parameters, we must save and restore it. */
5500 save_area = save_fixed_argument_area (reg_parm_stack_space, argblock,
5501 &low_to_save, &high_to_save);
5503 #endif
5505 /* When expanding a normal call, args are stored in push order,
5506 which is the reverse of what we have here. */
5507 bool any_regs = false;
5508 for (int i = nargs; i-- > 0; )
5509 if (argvec[i].reg != NULL_RTX)
5511 targetm.calls.call_args (argvec[i].reg, NULL_TREE);
5512 any_regs = true;
5514 if (!any_regs)
5515 targetm.calls.call_args (pc_rtx, NULL_TREE);
5517 /* Push the args that need to be pushed. */
5519 have_push_fusage = false;
5521 /* ARGNUM indexes the ARGVEC array in the order in which the arguments
5522 are to be pushed. */
5523 for (count = 0; count < nargs; count++, argnum--)
5525 machine_mode mode = argvec[argnum].mode;
5526 rtx val = argvec[argnum].value;
5527 rtx reg = argvec[argnum].reg;
5528 int partial = argvec[argnum].partial;
5529 unsigned int parm_align = argvec[argnum].locate.boundary;
5530 poly_int64 lower_bound = 0, upper_bound = 0;
5532 if (! (reg != 0 && partial == 0))
5534 rtx use;
5536 if (ACCUMULATE_OUTGOING_ARGS)
5538 /* If this is being stored into a pre-allocated, fixed-size,
5539 stack area, save any previous data at that location. */
5541 if (ARGS_GROW_DOWNWARD)
5543 /* stack_slot is negative, but we want to index stack_usage_map
5544 with positive values. */
5545 upper_bound = -argvec[argnum].locate.slot_offset.constant + 1;
5546 lower_bound = upper_bound - argvec[argnum].locate.size.constant;
5548 else
5550 lower_bound = argvec[argnum].locate.slot_offset.constant;
5551 upper_bound = lower_bound + argvec[argnum].locate.size.constant;
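/* E.g. with downward-growing arguments, a slot at offset -16 of size
   8 maps to bytes [9, 17) of the map: negating and adding one turns
   the most negative offset into the largest index. */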
5554 if (stack_region_maybe_used_p (lower_bound, upper_bound,
5555 reg_parm_stack_space))
5557 /* We need to make a save area. */
5558 poly_uint64 size
5559 = argvec[argnum].locate.size.constant * BITS_PER_UNIT;
5560 machine_mode save_mode
5561 = int_mode_for_size (size, 1).else_blk ();
5562 rtx adr
5563 = plus_constant (Pmode, argblock,
5564 argvec[argnum].locate.offset.constant);
5565 rtx stack_area
5566 = gen_rtx_MEM (save_mode, memory_address (save_mode, adr));
5568 if (save_mode == BLKmode)
5570 argvec[argnum].save_area
5571 = assign_stack_temp (BLKmode,
5572 argvec[argnum].locate.size.constant);
5575 emit_block_move (validize_mem
5576 (copy_rtx (argvec[argnum].save_area)),
5577 stack_area,
5578 (gen_int_mode
5579 (argvec[argnum].locate.size.constant,
5580 Pmode)),
5581 BLOCK_OP_CALL_PARM);
5583 else
5585 argvec[argnum].save_area = gen_reg_rtx (save_mode);
5587 emit_move_insn (argvec[argnum].save_area, stack_area);
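/* A single register move suffices whenever the region's size in
   bits matches an integer mode (e.g. a 4-byte region is saved
   through one SImode register); otherwise int_mode_for_size yields
   BLKmode and the region is block-copied into a stack temporary. */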
5592 emit_push_insn (val, mode, NULL_TREE, NULL_RTX, parm_align,
5593 partial, reg, 0, argblock,
5594 (gen_int_mode
5595 (argvec[argnum].locate.offset.constant, Pmode)),
5596 reg_parm_stack_space,
5597 ARGS_SIZE_RTX (argvec[argnum].locate.alignment_pad), false);
5599 /* Now mark the segment we just used. */
5600 if (ACCUMULATE_OUTGOING_ARGS)
5601 mark_stack_region_used (lower_bound, upper_bound);
5603 NO_DEFER_POP;
5605 /* Indicate argument access so that alias.c knows that these
5606 values are live. */
5607 if (argblock)
5608 use = plus_constant (Pmode, argblock,
5609 argvec[argnum].locate.offset.constant);
5610 else if (have_push_fusage)
5611 continue;
5612 else
5614 /* When arguments are pushed, trying to tell alias.c where
5615 exactly this argument is won't work, because the
5616 auto-increment causes confusion. So we merely indicate
5617 that we access something with a known mode somewhere on
5618 the stack. */
5619 use = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
5620 gen_rtx_SCRATCH (Pmode));
5621 have_push_fusage = true;
5623 use = gen_rtx_MEM (argvec[argnum].mode, use);
5624 use = gen_rtx_USE (VOIDmode, use);
5625 call_fusage = gen_rtx_EXPR_LIST (VOIDmode, use, call_fusage);
5629 argnum = nargs - 1;
5631 fun = prepare_call_address (NULL, fun, NULL, &call_fusage, 0, 0);
5633 /* Now load any reg parms into their regs. */
5635 /* ARGNUM indexes the ARGVEC array in the order in which the arguments
5636 are to be pushed. */
5637 for (count = 0; count < nargs; count++, argnum--)
5639 machine_mode mode = argvec[argnum].mode;
5640 rtx val = argvec[argnum].value;
5641 rtx reg = argvec[argnum].reg;
5642 int partial = argvec[argnum].partial;
5644 /* Handle calls that pass values in multiple non-contiguous
5645 locations. The PA64 has examples of this for library calls. */
5646 if (reg != 0 && GET_CODE (reg) == PARALLEL)
5647 emit_group_load (reg, val, NULL_TREE, GET_MODE_SIZE (mode));
5648 else if (reg != 0 && partial == 0)
5650 emit_move_insn (reg, val);
5651 #ifdef BLOCK_REG_PADDING
5652 poly_int64 size = GET_MODE_SIZE (argvec[argnum].mode);
5654 /* Copied from load_register_parameters. */
5656 /* Handle the case where we have a value that needs shifting
5657 up to the msb, e.g. a QImode value that we're padding
5658 upward on a BYTES_BIG_ENDIAN machine. */
5659 if (known_lt (size, UNITS_PER_WORD)
5660 && (argvec[argnum].locate.where_pad
5661 == (BYTES_BIG_ENDIAN ? PAD_UPWARD : PAD_DOWNWARD)))
5663 rtx x;
5664 poly_int64 shift = (UNITS_PER_WORD - size) * BITS_PER_UNIT;
5666 /* Assigning REG here rather than a temp makes CALL_FUSAGE
5667 report the whole reg as used. Strictly speaking, the
5668 call only uses SIZE bytes at the msb end, but it doesn't
5669 seem worth generating rtl to say that. */
5670 reg = gen_rtx_REG (word_mode, REGNO (reg));
5671 x = expand_shift (LSHIFT_EXPR, word_mode, reg, shift, reg, 1);
5672 if (x != reg)
5673 emit_move_insn (reg, x);
5675 #endif
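/* For instance, a QImode value in an 8-byte word is shifted left by
   56 bits, placing it at the most-significant end of the register,
   which is where an upward-padded value lives on a big-endian
   target. */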
5678 NO_DEFER_POP;
5681 /* Any regs containing parms remain in use through the call. */
5682 for (count = 0; count < nargs; count++)
5684 rtx reg = argvec[count].reg;
5685 if (reg != 0 && GET_CODE (reg) == PARALLEL)
5686 use_group_regs (&call_fusage, reg);
5687 else if (reg != 0)
5689 int partial = argvec[count].partial;
5690 if (partial)
5692 int nregs;
5693 gcc_assert (partial % UNITS_PER_WORD == 0);
5694 nregs = partial / UNITS_PER_WORD;
5695 use_regs (&call_fusage, REGNO (reg), nregs);
5697 else
5698 use_reg (&call_fusage, reg);
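/* PARTIAL is a byte count, so e.g. PARTIAL == 8 with 4-byte words
   marks exactly the first two registers of the sequence starting at
   REGNO (reg) as used by the call. */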
5702 /* Pass the function the address in which to return a structure value. */
5703 if (mem_value != 0 && struct_value != 0 && ! pcc_struct_value)
5705 emit_move_insn (struct_value,
5706 force_reg (Pmode,
5707 force_operand (XEXP (mem_value, 0),
5708 NULL_RTX)));
5709 if (REG_P (struct_value))
5710 use_reg (&call_fusage, struct_value);
5713 /* Don't allow popping to be deferred, since then
5714 cse'ing of library calls could delete a call and leave the pop. */
5715 NO_DEFER_POP;
5716 valreg = (mem_value == 0 && outmode != VOIDmode
5717 ? hard_libcall_value (outmode, orgfun) : NULL_RTX);
5719 /* Stack must be properly aligned now. */
5720 gcc_assert (multiple_p (stack_pointer_delta,
5721 PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT));
5723 before_call = get_last_insn ();
5725 if (flag_callgraph_info)
5726 record_final_call (SYMBOL_REF_DECL (orgfun), UNKNOWN_LOCATION);
5728 /* We pass the old value of inhibit_defer_pop + 1 to emit_call_1, which
5729 will set inhibit_defer_pop to that value. */
5730 /* The return type is needed to decide how many bytes the function pops.
5731 Signedness plays no role in that, so for simplicity, we pretend it's
5732 always signed. We also assume that the list of arguments passed has
5733 no impact, so we pretend it is unknown. */
5735 emit_call_1 (fun, NULL,
5736 get_identifier (XSTR (orgfun, 0)),
5737 build_function_type (tfom, NULL_TREE),
5738 original_args_size.constant, args_size.constant,
5739 struct_value_size,
5740 targetm.calls.function_arg (args_so_far,
5741 function_arg_info::end_marker ()),
5742 valreg,
5743 old_inhibit_defer_pop + 1, call_fusage, flags, args_so_far);
5745 if (flag_ipa_ra)
5747 rtx datum = orgfun;
5748 gcc_assert (GET_CODE (datum) == SYMBOL_REF);
5749 rtx_call_insn *last = last_call_insn ();
5750 add_reg_note (last, REG_CALL_DECL, datum);
5753 /* Right-shift returned value if necessary. */
5754 if (!pcc_struct_value
5755 && TYPE_MODE (tfom) != BLKmode
5756 && targetm.calls.return_in_msb (tfom))
5758 shift_return_value (TYPE_MODE (tfom), false, valreg);
5759 valreg = gen_rtx_REG (TYPE_MODE (tfom), REGNO (valreg));
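/* On targets that return small values at the most-significant end of
   a register, this shifts the value down to the least-significant
   end and re-creates VALREG in TFOM's mode, so the copy-out code
   below can treat it like any other return value. */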
5762 targetm.calls.end_call_args ();
5764 /* For calls to `setjmp', etc., inform function.c:setjmp_warnings
5765 that it should complain if nonvolatile values are live. For
5766 functions that cannot return, inform flow that control does not
5767 fall through. */
5768 if (flags & ECF_NORETURN)
5770 /* The barrier note must be emitted
5771 immediately after the CALL_INSN. Some ports emit more than
5772 just a CALL_INSN above, so we must search for it here. */
5773 rtx_insn *last = get_last_insn ();
5774 while (!CALL_P (last))
5776 last = PREV_INSN (last);
5777 /* There was no CALL_INSN? */
5778 gcc_assert (last != before_call);
5781 emit_barrier_after (last);
5784 /* Consider that "regular" libcalls, i.e. all of them except for LCT_THROW
5785 and LCT_RETURNS_TWICE, cannot perform non-local gotos. */
5786 if (flags & ECF_NOTHROW)
5788 rtx_insn *last = get_last_insn ();
5789 while (!CALL_P (last))
5791 last = PREV_INSN (last);
5792 /* There was no CALL_INSN? */
5793 gcc_assert (last != before_call);
5796 make_reg_eh_region_note_nothrow_nononlocal (last);
5799 /* Now restore inhibit_defer_pop to its actual original value. */
5800 OK_DEFER_POP;
5802 pop_temp_slots ();
5804 /* Copy the value to the right place. */
5805 if (outmode != VOIDmode && retval)
5807 if (mem_value)
5809 if (value == 0)
5810 value = mem_value;
5811 if (value != mem_value)
5812 emit_move_insn (value, mem_value);
5814 else if (GET_CODE (valreg) == PARALLEL)
5816 if (value == 0)
5817 value = gen_reg_rtx (outmode);
5818 emit_group_store (value, valreg, NULL_TREE, GET_MODE_SIZE (outmode));
5820 else
5822 /* Convert to the proper mode if a promotion has been active. */
5823 if (GET_MODE (valreg) != outmode)
5825 int unsignedp = TYPE_UNSIGNED (tfom);
5827 gcc_assert (promote_function_mode (tfom, outmode, &unsignedp,
5828 fndecl ? TREE_TYPE (fndecl) : fntype, 1)
5829 == GET_MODE (valreg));
5830 valreg = convert_modes (outmode, GET_MODE (valreg), valreg, 0);
5833 if (value != 0)
5834 emit_move_insn (value, valreg);
5835 else
5836 value = valreg;
5840 if (ACCUMULATE_OUTGOING_ARGS)
5842 #ifdef REG_PARM_STACK_SPACE
5843 if (save_area)
5844 restore_fixed_argument_area (save_area, argblock,
5845 high_to_save, low_to_save);
5846 #endif
5848 /* If we saved any argument areas, restore them. */
5849 for (count = 0; count < nargs; count++)
5850 if (argvec[count].save_area)
5852 machine_mode save_mode = GET_MODE (argvec[count].save_area);
5853 rtx adr = plus_constant (Pmode, argblock,
5854 argvec[count].locate.offset.constant);
5855 rtx stack_area = gen_rtx_MEM (save_mode,
5856 memory_address (save_mode, adr));
5858 if (save_mode == BLKmode)
5859 emit_block_move (stack_area,
5860 validize_mem
5861 (copy_rtx (argvec[count].save_area)),
5862 (gen_int_mode
5863 (argvec[count].locate.size.constant, Pmode)),
5864 BLOCK_OP_CALL_PARM);
5865 else
5866 emit_move_insn (stack_area, argvec[count].save_area);
5869 highest_outgoing_arg_in_use = initial_highest_arg_in_use;
5870 stack_usage_map = initial_stack_usage_map;
5871 stack_usage_watermark = initial_stack_usage_watermark;
5874 free (stack_usage_map_buf);
5876 return value;
5881 /* Store a single argument for a function call
5882 into the register or memory area where it must be passed.
5883 *ARG describes the argument value and where to pass it.
5885 ARGBLOCK is the address of the stack-block for all the arguments,
5886 or 0 on a machine where arguments are pushed individually.
5888 FLAGS may include ECF_MAY_BE_ALLOCA, which says this could be a call
5889 to `alloca', so we must be careful about how the stack is used.
5891 VARIABLE_SIZE nonzero says that this was a variable-sized outgoing
5892 argument stack. This is used under ACCUMULATE_OUTGOING_ARGS to indicate
5893 that we need not worry about saving and restoring the stack.
5895 FNDECL is the declaration of the function we are calling.
5897 Return nonzero if this arg should cause sibcall failure,
5898 zero otherwise. */
5900 static int
5901 store_one_arg (struct arg_data *arg, rtx argblock, int flags,
5902 int variable_size ATTRIBUTE_UNUSED, int reg_parm_stack_space)
5904 tree pval = arg->tree_value;
5905 rtx reg = 0;
5906 int partial = 0;
5907 poly_int64 used = 0;
5908 poly_int64 lower_bound = 0, upper_bound = 0;
5909 int sibcall_failure = 0;
5911 if (TREE_CODE (pval) == ERROR_MARK)
5912 return 1;
5914 /* Push a new temporary level for any temporaries we make for
5915 this argument. */
5916 push_temp_slots ();
5918 if (ACCUMULATE_OUTGOING_ARGS && !(flags & ECF_SIBCALL))
5920 /* If this is being stored into a pre-allocated, fixed-size, stack area,
5921 save any previous data at that location. */
5922 if (argblock && ! variable_size && arg->stack)
5924 if (ARGS_GROW_DOWNWARD)
5926 /* stack_slot is negative, but we want to index stack_usage_map
5927 with positive values. */
5928 if (GET_CODE (XEXP (arg->stack_slot, 0)) == PLUS)
5930 rtx offset = XEXP (XEXP (arg->stack_slot, 0), 1);
5931 upper_bound = -rtx_to_poly_int64 (offset) + 1;
5933 else
5934 upper_bound = 0;
5936 lower_bound = upper_bound - arg->locate.size.constant;
5938 else
5940 if (GET_CODE (XEXP (arg->stack_slot, 0)) == PLUS)
5942 rtx offset = XEXP (XEXP (arg->stack_slot, 0), 1);
5943 lower_bound = rtx_to_poly_int64 (offset);
5945 else
5946 lower_bound = 0;
5948 upper_bound = lower_bound + arg->locate.size.constant;
5951 if (stack_region_maybe_used_p (lower_bound, upper_bound,
5952 reg_parm_stack_space))
5954 /* We need to make a save area. */
5955 poly_uint64 size = arg->locate.size.constant * BITS_PER_UNIT;
5956 machine_mode save_mode
5957 = int_mode_for_size (size, 1).else_blk ();
5958 rtx adr = memory_address (save_mode, XEXP (arg->stack_slot, 0));
5959 rtx stack_area = gen_rtx_MEM (save_mode, adr);
5961 if (save_mode == BLKmode)
5963 arg->save_area
5964 = assign_temp (TREE_TYPE (arg->tree_value), 1, 1);
5965 preserve_temp_slots (arg->save_area);
5966 emit_block_move (validize_mem (copy_rtx (arg->save_area)),
5967 stack_area,
5968 (gen_int_mode
5969 (arg->locate.size.constant, Pmode)),
5970 BLOCK_OP_CALL_PARM);
5972 else
5974 arg->save_area = gen_reg_rtx (save_mode);
5975 emit_move_insn (arg->save_area, stack_area);
5981 /* If this isn't going to be placed on both the stack and in registers,
5982 set up the register and number of words. */
5983 if (! arg->pass_on_stack)
5985 if (flags & ECF_SIBCALL)
5986 reg = arg->tail_call_reg;
5987 else
5988 reg = arg->reg;
5989 partial = arg->partial;
5992 /* An argument passed entirely in a register should never reach
5993 this function. */
5994 gcc_assert (reg == 0 || partial != 0);
5996 /* If this arg needs special alignment, don't load the registers
5997 here. */
5998 if (arg->n_aligned_regs != 0)
5999 reg = 0;
6001 /* If this is being passed partially in a register, we can't evaluate
6002 it directly into its stack slot. Otherwise, we can. */
6003 if (arg->value == 0)
6005 /* stack_arg_under_construction is nonzero if a function argument is
6006 being evaluated directly into the outgoing argument list and
6007 expand_call must take special action to preserve the argument list
6008 if it is called recursively.
6010 For scalar function arguments stack_usage_map is sufficient to
6011 determine which stack slots must be saved and restored. Scalar
6012 arguments in general have pass_on_stack == 0.
6014 If this argument is initialized by a function which takes the
6015 address of the argument (a C++ constructor or a C function
6016 returning a BLKmode structure), then stack_usage_map is
6017 insufficient and expand_call must push the stack around the
6018 function call. Such arguments have pass_on_stack == 1.
6020 Note that it is always safe to set stack_arg_under_construction,
6021 but this generates suboptimal code if set when not needed. */
6023 if (arg->pass_on_stack)
6024 stack_arg_under_construction++;
6026 arg->value = expand_expr (pval,
6027 (partial
6028 || TYPE_MODE (TREE_TYPE (pval)) != arg->mode)
6029 ? NULL_RTX : arg->stack,
6030 VOIDmode, EXPAND_STACK_PARM);
6032 /* If we are promoting the object, or if for any other reason the
6033 mode doesn't agree, convert the mode. */
6035 if (arg->mode != TYPE_MODE (TREE_TYPE (pval)))
6036 arg->value = convert_modes (arg->mode, TYPE_MODE (TREE_TYPE (pval)),
6037 arg->value, arg->unsignedp);
6039 if (arg->pass_on_stack)
6040 stack_arg_under_construction--;
6043 /* Check for overlap with already clobbered argument area. */
6044 if ((flags & ECF_SIBCALL)
6045 && MEM_P (arg->value)
6046 && mem_might_overlap_already_clobbered_arg_p (XEXP (arg->value, 0),
6047 arg->locate.size.constant))
6048 sibcall_failure = 1;
6050 /* Don't allow anything left on stack from computation
6051 of argument to alloca. */
6052 if (flags & ECF_MAY_BE_ALLOCA)
6053 do_pending_stack_adjust ();
6055 if (arg->value == arg->stack)
6056 /* If the value is already in the stack slot, we are done. */
6057 ;
6058 else if (arg->mode != BLKmode)
6060 unsigned int parm_align;
6062 /* Argument is a scalar, not entirely passed in registers.
6063 (If part is passed in registers, arg->partial says how much
6064 and emit_push_insn will take care of putting it there.)
6066 Push it, and if its size is less than the
6067 amount of space allocated to it,
6068 also bump stack pointer by the additional space.
6069 Note that in C the default argument promotions
6070 will prevent such mismatches. */
6072 poly_int64 size = (TYPE_EMPTY_P (TREE_TYPE (pval))
6073 ? 0 : GET_MODE_SIZE (arg->mode));
6075 /* Compute how much space the push instruction will push.
6076 On many machines, pushing a byte will advance the stack
6077 pointer by a halfword. */
6078 #ifdef PUSH_ROUNDING
6079 size = PUSH_ROUNDING (size);
6080 #endif
6081 used = size;
6083 /* Compute how much space the argument should get:
6084 round up to a multiple of the alignment for arguments. */
6085 if (targetm.calls.function_arg_padding (arg->mode, TREE_TYPE (pval))
6086 != PAD_NONE)
6087 /* At the moment we don't (need to) support ABIs for which the
6088 padding isn't known at compile time. In principle it should
6089 be easy to add though. */
6090 used = force_align_up (size, PARM_BOUNDARY / BITS_PER_UNIT);
6092 /* Compute the alignment of the pushed argument. */
6093 parm_align = arg->locate.boundary;
6094 if (targetm.calls.function_arg_padding (arg->mode, TREE_TYPE (pval))
6095 == PAD_DOWNWARD)
6097 poly_int64 pad = used - size;
6098 unsigned int pad_align = known_alignment (pad) * BITS_PER_UNIT;
6099 if (pad_align != 0)
6100 parm_align = MIN (parm_align, pad_align);
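/* E.g. a 5-byte argument rounded up to an 8-byte slot leaves a
   3-byte pad; known_alignment (3) is 1, so PARM_ALIGN drops to
   BITS_PER_UNIT: past the pad, the data itself has only byte
   alignment. */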
6103 /* This isn't already where we want it on the stack, so put it there.
6104 This can either be done with push or copy insns. */
6105 if (maybe_ne (used, 0)
6106 && !emit_push_insn (arg->value, arg->mode, TREE_TYPE (pval),
6107 NULL_RTX, parm_align, partial, reg, used - size,
6108 argblock, ARGS_SIZE_RTX (arg->locate.offset),
6109 reg_parm_stack_space,
6110 ARGS_SIZE_RTX (arg->locate.alignment_pad), true))
6111 sibcall_failure = 1;
6113 /* Unless this is a partially-in-register argument, the argument is now
6114 in the stack. */
6115 if (partial == 0)
6116 arg->value = arg->stack;
6118 else
6120 /* BLKmode, at least partly to be pushed. */
6122 unsigned int parm_align;
6123 poly_int64 excess;
6124 rtx size_rtx;
6126 /* Pushing a nonscalar.
6127 If part is passed in registers, PARTIAL says how much
6128 and emit_push_insn will take care of putting it there. */
6130 /* Round its size up to a multiple
6131 of the allocation unit for arguments. */
6133 if (arg->locate.size.var != 0)
6135 excess = 0;
6136 size_rtx = ARGS_SIZE_RTX (arg->locate.size);
6138 else
6140 /* PUSH_ROUNDING has no effect on us, because emit_push_insn
6141 for BLKmode is careful to avoid it. */
6142 excess = (arg->locate.size.constant
6143 - arg_int_size_in_bytes (TREE_TYPE (pval))
6144 + partial);
6145 size_rtx = expand_expr (arg_size_in_bytes (TREE_TYPE (pval)),
6146 NULL_RTX, TYPE_MODE (sizetype),
6147 EXPAND_NORMAL);
6150 parm_align = arg->locate.boundary;
6152 /* When an argument is padded down, the block is aligned to
6153 PARM_BOUNDARY, but the actual argument isn't. */
6154 if (targetm.calls.function_arg_padding (arg->mode, TREE_TYPE (pval))
6155 == PAD_DOWNWARD)
6157 if (arg->locate.size.var)
6158 parm_align = BITS_PER_UNIT;
6159 else
6161 unsigned int excess_align
6162 = known_alignment (excess) * BITS_PER_UNIT;
6163 if (excess_align != 0)
6164 parm_align = MIN (parm_align, excess_align);
6168 if ((flags & ECF_SIBCALL) && MEM_P (arg->value))
6170 /* emit_push_insn might not work properly if arg->value and
6171 argblock + arg->locate.offset areas overlap. */
6172 rtx x = arg->value;
6173 poly_int64 i = 0;
6175 if (strip_offset (XEXP (x, 0), &i)
6176 == crtl->args.internal_arg_pointer)
6178 /* arg.locate doesn't contain the pretend_args_size offset,
6179 it's part of argblock. Ensure we don't count it in I. */
6180 if (STACK_GROWS_DOWNWARD)
6181 i -= crtl->args.pretend_args_size;
6182 else
6183 i += crtl->args.pretend_args_size;
6185 /* expand_call should ensure this. */
6186 gcc_assert (!arg->locate.offset.var
6187 && arg->locate.size.var == 0);
6188 poly_int64 size_val = rtx_to_poly_int64 (size_rtx);
6190 if (known_eq (arg->locate.offset.constant, i))
6192 /* Even though they appear to be at the same location,
6193 if part of the outgoing argument is in registers,
6194 they aren't really at the same location. Check for
6195 this by making sure that the incoming size is the
6196 same as the outgoing size. */
6197 if (maybe_ne (arg->locate.size.constant, size_val))
6198 sibcall_failure = 1;
6200 else if (maybe_in_range_p (arg->locate.offset.constant,
6201 i, size_val))
6202 sibcall_failure = 1;
6203 /* Use arg->locate.size.constant instead of size_rtx
6204 because we only care about the part of the argument
6205 on the stack. */
6206 else if (maybe_in_range_p (i, arg->locate.offset.constant,
6207 arg->locate.size.constant))
6208 sibcall_failure = 1;
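/* In short, a sibcall argument may be built in place from the
   caller's incoming argument area only if the two regions either
   coincide exactly (with the whole argument on the stack) or do not
   overlap at all; any partial overlap would let emit_push_insn read
   bytes it has already overwritten. */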
6212 if (!CONST_INT_P (size_rtx) || INTVAL (size_rtx) != 0)
6213 emit_push_insn (arg->value, arg->mode, TREE_TYPE (pval), size_rtx,
6214 parm_align, partial, reg, excess, argblock,
6215 ARGS_SIZE_RTX (arg->locate.offset),
6216 reg_parm_stack_space,
6217 ARGS_SIZE_RTX (arg->locate.alignment_pad), false);
6219 /* Unless this is a partially-in-register argument, the argument is now
6220 in the stack.
6222 ??? Unlike the case above, in which we want the actual
6223 address of the data, so that we can load it directly into a
6224 register, here we want the address of the stack slot, so that
6225 it's properly aligned for word-by-word copying or something
6226 like that. It's not clear that this is always correct. */
6227 if (partial == 0)
6228 arg->value = arg->stack_slot;
6231 if (arg->reg && GET_CODE (arg->reg) == PARALLEL)
6233 tree type = TREE_TYPE (arg->tree_value);
6234 arg->parallel_value
6235 = emit_group_load_into_temps (arg->reg, arg->value, type,
6236 int_size_in_bytes (type));
6239 /* Mark all slots this store used. */
6240 if (ACCUMULATE_OUTGOING_ARGS && !(flags & ECF_SIBCALL)
6241 && argblock && ! variable_size && arg->stack)
6242 mark_stack_region_used (lower_bound, upper_bound);
6244 /* Once we have pushed something, pops can't safely
6245 be deferred during the rest of the arguments. */
6246 NO_DEFER_POP;
6248 /* Free any temporary slots made in processing this argument. */
6249 pop_temp_slots ();
6251 return sibcall_failure;
6254 /* Nonzero if we do not know how to pass ARG solely in registers. */
6256 bool
6257 must_pass_in_stack_var_size (const function_arg_info &arg)
6259 if (!arg.type)
6260 return false;
6262 /* If the type has variable size... */
6263 if (!poly_int_tree_p (TYPE_SIZE (arg.type)))
6264 return true;
6266 /* If the type is marked as addressable (it is required
6267 to be constructed on the stack)... */
6268 if (TREE_ADDRESSABLE (arg.type))
6269 return true;
6271 return false;
6274 /* Another version of the TARGET_MUST_PASS_IN_STACK hook. This one
6275 takes trailing padding of a structure into account. */
6276 /* ??? Should be able to merge these two by examining BLOCK_REG_PADDING. */
6278 bool
6279 must_pass_in_stack_var_size_or_pad (const function_arg_info &arg)
6281 if (!arg.type)
6282 return false;
6284 /* If the type has variable size... */
6285 if (TREE_CODE (TYPE_SIZE (arg.type)) != INTEGER_CST)
6286 return true;
6288 /* If the type is marked as addressable (it is required
6289 to be constructed on the stack)... */
6290 if (TREE_ADDRESSABLE (arg.type))
6291 return true;
6293 if (TYPE_EMPTY_P (arg.type))
6294 return false;
6296 /* If the padding and mode of the type is such that a copy into
6297 a register would put it into the wrong part of the register. */
6298 if (arg.mode == BLKmode
6299 && int_size_in_bytes (arg.type) % (PARM_BOUNDARY / BITS_PER_UNIT)
6300 && (targetm.calls.function_arg_padding (arg.mode, arg.type)
6301 == (BYTES_BIG_ENDIAN ? PAD_UPWARD : PAD_DOWNWARD)))
6302 return true;
6304 return false;
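/* For example, a 6-byte BLKmode structure with 32-bit PARM_BOUNDARY
   that the target pads upward on a big-endian machine would end up
   at the wrong end of a register word, so it must live on the
   stack. */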
6307 /* Return true if TYPE must be passed on the stack when passed to
6308 the "..." arguments of a function. */
6310 bool
6311 must_pass_va_arg_in_stack (tree type)
6313 function_arg_info arg (type, /*named=*/false);
6314 return targetm.calls.must_pass_in_stack (arg);
6317 /* Return true if FIELD is the C++17 empty base field that should
6318 be ignored for ABI calling convention decisions in order to
6319 maintain ABI compatibility between C++14 and earlier, which doesn't
6320 add this FIELD to classes with empty bases, and C++17 and later
6321 which does. */
6323 bool
6324 cxx17_empty_base_field_p (const_tree field)
6326 return (DECL_FIELD_ABI_IGNORED (field)
6327 && DECL_ARTIFICIAL (field)
6328 && RECORD_OR_UNION_TYPE_P (TREE_TYPE (field))
6329 && !lookup_attribute ("no_unique_address", DECL_ATTRIBUTES (field)));
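/* E.g. given struct B {} and struct D : B { int i; }, the C++17
   front end adds an artificial field for the B subobject of D;
   ignoring it here keeps D passing exactly as it did under C++14. */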
6332 /* Tell the garbage collector about GTY markers in this source file. */
6333 #include "gt-calls.h"