/* Convert function calls to rtl insns, for GNU C compiler.
   Copyright (C) 1989-2020 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#define INCLUDE_STRING
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "rtl.h"
#include "tree.h"
#include "gimple.h"
#include "predict.h"
#include "memmodel.h"
#include "tm_p.h"
#include "stringpool.h"
#include "expmed.h"
#include "optabs.h"
#include "emit-rtl.h"
#include "cgraph.h"
#include "diagnostic-core.h"
#include "fold-const.h"
#include "stor-layout.h"
#include "varasm.h"
#include "internal-fn.h"
#include "dojump.h"
#include "explow.h"
#include "calls.h"
#include "expr.h"
#include "output.h"
#include "langhooks.h"
#include "except.h"
#include "dbgcnt.h"
#include "rtl-iter.h"
#include "tree-vrp.h"
#include "tree-ssanames.h"
#include "tree-ssa-strlen.h"
#include "intl.h"
#include "stringpool.h"
#include "hash-map.h"
#include "hash-traits.h"
#include "attribs.h"
#include "builtins.h"
#include "gimple-fold.h"
#include "attr-fnspec.h"
#include "value-query.h"

#include "tree-pretty-print.h"

/* Like PREFERRED_STACK_BOUNDARY but in units of bytes, not bits.  */
#define STACK_BYTES (PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT)
/* Data structure and subroutines used within expand_call.  */

struct arg_data
{
  /* Tree node for this argument.  */
  tree tree_value;
  /* Mode for value; TYPE_MODE unless promoted.  */
  machine_mode mode;
  /* Current RTL value for argument, or 0 if it isn't precomputed.  */
  rtx value;
  /* Initially-computed RTL value for argument; only for const functions.  */
  rtx initial_value;
  /* Register to pass this argument in, 0 if passed on stack, or a
     PARALLEL if the arg is to be copied into multiple non-contiguous
     registers.  */
  rtx reg;
  /* Register to pass this argument in when generating tail call sequence.
     This is not the same register as for normal calls on machines with
     register windows.  */
  rtx tail_call_reg;
  /* If REG is a PARALLEL, this is a copy of VALUE pulled into the correct
     form for emit_group_move.  */
  rtx parallel_value;
  /* If REG was promoted from the actual mode of the argument expression,
     indicates whether the promotion is sign- or zero-extended.  */
  int unsignedp;
  /* Number of bytes to put in registers.  0 means put the whole arg
     in registers.  Also 0 if not passed in registers.  */
  int partial;
  /* Nonzero if argument must be passed on stack.
     Note that some arguments may be passed on the stack
     even though pass_on_stack is zero, just because FUNCTION_ARG says so.
     pass_on_stack identifies arguments that *cannot* go in registers.  */
  int pass_on_stack;
  /* Some fields packaged up for locate_and_pad_parm.  */
  struct locate_and_pad_arg_data locate;
  /* Location on the stack at which parameter should be stored.  The store
     has already been done if STACK == VALUE.  */
  rtx stack;
  /* Location on the stack of the start of this argument slot.  This can
     differ from STACK if this arg pads downward.  This location is known
     to be aligned to TARGET_FUNCTION_ARG_BOUNDARY.  */
  rtx stack_slot;
  /* Place that this stack area has been saved, if needed.  */
  rtx save_area;
  /* If an argument's alignment does not permit direct copying into registers,
     copy in smaller-sized pieces into pseudos.  These are stored in a
     block pointed to by this field.  The next field says how many
     word-sized pseudos we made.  */
  rtx *aligned_regs;
  int n_aligned_regs;
};
/* A vector of one char per byte of stack space.  A byte is nonzero if
   the corresponding stack location has been used.
   This vector is used to prevent a function call within an argument from
   clobbering any stack already set up.  */
static char *stack_usage_map;

/* Size of STACK_USAGE_MAP.  */
static unsigned int highest_outgoing_arg_in_use;

/* Assume that any stack location at this byte index is used,
   without checking the contents of stack_usage_map.  */
static unsigned HOST_WIDE_INT stack_usage_watermark = HOST_WIDE_INT_M1U;

/* A bitmap of virtual-incoming stack space.  A bit is set if the
   corresponding stack location's tail call argument has already been stored
   into the stack.  This bitmap is used to prevent sibling call optimization
   if the function tries to use its parent's incoming argument slots when
   they have already been overwritten with tail call arguments.  */
static sbitmap stored_args_map;

/* Assume that any virtual-incoming location at this byte index has been
   stored, without checking the contents of stored_args_map.  */
static unsigned HOST_WIDE_INT stored_args_watermark;
/* stack_arg_under_construction is nonzero when an argument may be
   initialized with a constructor call (including a C function that
   returns a BLKmode struct) and expand_call must take special action
   to make sure the object being constructed does not overlap the
   argument list for the constructor call.  */
static int stack_arg_under_construction;

static void precompute_register_parameters (int, struct arg_data *, int *);
static int store_one_arg (struct arg_data *, rtx, int, int, int);
static void store_unaligned_arguments_into_pseudos (struct arg_data *, int);
static int finalize_must_preallocate (int, int, struct arg_data *,
                                      struct args_size *);
static void precompute_arguments (int, struct arg_data *);
static void compute_argument_addresses (struct arg_data *, rtx, int);
static rtx rtx_for_function_call (tree, tree);
static void load_register_parameters (struct arg_data *, int, rtx *, int,
                                      int, int *);
static int special_function_p (const_tree, int);
static int check_sibcall_argument_overlap_1 (rtx);
static int check_sibcall_argument_overlap (rtx_insn *, struct arg_data *, int);

static tree split_complex_types (tree);

#ifdef REG_PARM_STACK_SPACE
static rtx save_fixed_argument_area (int, rtx, int *, int *);
static void restore_fixed_argument_area (rtx, rtx, int, int);
#endif
/* Return true if bytes [LOWER_BOUND, UPPER_BOUND) of the outgoing
   stack region might already be in use.  */

static bool
stack_region_maybe_used_p (poly_uint64 lower_bound, poly_uint64 upper_bound,
                           unsigned int reg_parm_stack_space)
{
  unsigned HOST_WIDE_INT const_lower, const_upper;
  const_lower = constant_lower_bound (lower_bound);
  if (!upper_bound.is_constant (&const_upper))
    const_upper = HOST_WIDE_INT_M1U;

  if (const_upper > stack_usage_watermark)
    return true;

  /* Don't worry about things in the fixed argument area;
     it has already been saved.  */
  const_lower = MAX (const_lower, reg_parm_stack_space);
  const_upper = MIN (const_upper, highest_outgoing_arg_in_use);
  for (unsigned HOST_WIDE_INT i = const_lower; i < const_upper; ++i)
    if (stack_usage_map[i])
      return true;
  return false;
}

/* Record that bytes [LOWER_BOUND, UPPER_BOUND) of the outgoing
   stack region are now in use.  */

static void
mark_stack_region_used (poly_uint64 lower_bound, poly_uint64 upper_bound)
{
  unsigned HOST_WIDE_INT const_lower, const_upper;
  const_lower = constant_lower_bound (lower_bound);
  if (upper_bound.is_constant (&const_upper))
    for (unsigned HOST_WIDE_INT i = const_lower; i < const_upper; ++i)
      stack_usage_map[i] = 1;
  else
    stack_usage_watermark = MIN (stack_usage_watermark, const_lower);
}
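/* A minimal standalone sketch (illustrative only, not GCC code) of the
   map-plus-watermark scheme used by the two functions above.  Bytes below
   the watermark are tracked exactly in a byte map; anything at or above
   the watermark is conservatively assumed used, which lets regions with a
   non-constant upper bound be recorded in O(1):

     #include <stdbool.h>
     #include <string.h>

     #define MAP_SIZE 64
     static unsigned char map[MAP_SIZE];        // 1 = byte in use
     static unsigned watermark = (unsigned) -1; // everything >= this is used

     static void mark_used (unsigned lo, unsigned hi, bool hi_known)
     {
       if (hi_known)
         memset (map + lo, 1, hi - lo);   // exact upper bound: mark bytes
       else if (lo < watermark)
         watermark = lo;                  // unknown bound: lower the mark
     }

     static bool maybe_used (unsigned lo, unsigned hi)
     {
       if (hi > watermark)
         return true;                     // overlaps conservative region
       for (unsigned i = lo; i < hi; ++i)
         if (map[i])
           return true;
       return false;
     }
*/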
/* Force FUNEXP into a form suitable for the address of a CALL,
   and return that as an rtx.  Also load the static chain register
   if FNDECL is a nested function.

   CALL_FUSAGE points to a variable holding the prospective
   CALL_INSN_FUNCTION_USAGE information.  */

rtx
prepare_call_address (tree fndecl_or_type, rtx funexp, rtx static_chain_value,
                      rtx *call_fusage, int reg_parm_seen, int flags)
{
  /* Make a valid memory address and copy constants through pseudo-regs,
     but not for a constant address if -fno-function-cse.  */
  if (GET_CODE (funexp) != SYMBOL_REF)
    {
      /* If it's an indirect call by descriptor, generate code to perform
         runtime identification of the pointer and load the descriptor.  */
      if ((flags & ECF_BY_DESCRIPTOR) && !flag_trampolines)
        {
          const int bit_val = targetm.calls.custom_function_descriptors;
          rtx call_lab = gen_label_rtx ();

          gcc_assert (fndecl_or_type && TYPE_P (fndecl_or_type));
          fndecl_or_type
            = build_decl (UNKNOWN_LOCATION, FUNCTION_DECL, NULL_TREE,
                          fndecl_or_type);
          DECL_STATIC_CHAIN (fndecl_or_type) = 1;
          rtx chain = targetm.calls.static_chain (fndecl_or_type, false);

          if (GET_MODE (funexp) != Pmode)
            funexp = convert_memory_address (Pmode, funexp);

          /* Avoid long live ranges around function calls.  */
          funexp = copy_to_mode_reg (Pmode, funexp);

          if (REG_P (chain))
            emit_insn (gen_rtx_CLOBBER (VOIDmode, chain));

          /* Emit the runtime identification pattern.  */
          rtx mask = gen_rtx_AND (Pmode, funexp, GEN_INT (bit_val));
          emit_cmp_and_jump_insns (mask, const0_rtx, EQ, NULL_RTX, Pmode, 1,
                                   call_lab);

          /* Statically predict the branch to very likely taken.  */
          rtx_insn *insn = get_last_insn ();
          if (JUMP_P (insn))
            predict_insn_def (insn, PRED_BUILTIN_EXPECT, TAKEN);

          /* Load the descriptor.  */
          rtx mem = gen_rtx_MEM (ptr_mode,
                                 plus_constant (Pmode, funexp, - bit_val));
          MEM_NOTRAP_P (mem) = 1;
          mem = convert_memory_address (Pmode, mem);
          emit_move_insn (chain, mem);

          mem = gen_rtx_MEM (ptr_mode,
                             plus_constant (Pmode, funexp,
                                            POINTER_SIZE / BITS_PER_UNIT
                                            - bit_val));
          MEM_NOTRAP_P (mem) = 1;
          mem = convert_memory_address (Pmode, mem);
          emit_move_insn (funexp, mem);

          emit_label (call_lab);

          if (REG_P (chain))
            {
              use_reg (call_fusage, chain);
              STATIC_CHAIN_REG_P (chain) = 1;
            }

          /* Make sure we're not going to be overwritten below.  */
          gcc_assert (!static_chain_value);
        }

      /* If we are using registers for parameters, force the
         function address into a register now.  */
      funexp = ((reg_parm_seen
                 && targetm.small_register_classes_for_mode_p (FUNCTION_MODE))
                ? force_not_mem (memory_address (FUNCTION_MODE, funexp))
                : memory_address (FUNCTION_MODE, funexp));
    }
  else
    {
      /* FUNEXP could be a SYMBOL_REF that represents a function pointer of
         ptr_mode.  In that case it must be converted into address mode to
         be a valid address for a memory rtx pattern.  See PR 64971.  */
      if (GET_MODE (funexp) != Pmode)
        funexp = convert_memory_address (Pmode, funexp);

      if (!(flags & ECF_SIBCALL))
        {
          if (!NO_FUNCTION_CSE && optimize && ! flag_no_function_cse)
            funexp = force_reg (Pmode, funexp);
        }
    }

  if (static_chain_value != 0
      && (TREE_CODE (fndecl_or_type) != FUNCTION_DECL
          || DECL_STATIC_CHAIN (fndecl_or_type)))
    {
      rtx chain;

      chain = targetm.calls.static_chain (fndecl_or_type, false);
      static_chain_value = convert_memory_address (Pmode, static_chain_value);

      emit_move_insn (chain, static_chain_value);
      if (REG_P (chain))
        {
          use_reg (call_fusage, chain);
          STATIC_CHAIN_REG_P (chain) = 1;
        }
    }

  return funexp;
}
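/* A standalone sketch (illustrative only, not GCC API) of the descriptor
   scheme handled above.  Pointers that really denote descriptors are
   tagged in a low bit (here the lowest); a call site tests the tag and,
   for a descriptor, loads the static chain and the real code address
   from the descriptor before calling:

     struct descriptor { void *chain; void (*code) (void); };
     static void *current_chain;  // stands in for the static chain register

     static void call_by_descriptor (void *fp)
     {
       unsigned long bits = (unsigned long) fp;
       if (bits & 1)      // tagged: points one byte past a descriptor
         {
           struct descriptor *d = (struct descriptor *) (bits - 1);
           current_chain = d->chain;
           d->code ();
         }
       else               // plain function pointer
         ((void (*) (void)) fp) ();
     }
*/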
/* Generate instructions to call function FUNEXP,
   and optionally pop the results.
   The CALL_INSN is the first insn generated.

   FNDECL is the declaration node of the function.  This is given to the
   hook TARGET_RETURN_POPS_ARGS to determine whether this function pops
   its own args.

   FUNTYPE is the data type of the function.  This is given to the hook
   TARGET_RETURN_POPS_ARGS to determine whether this function pops its
   own args.  We used to allow an identifier for library functions, but
   that doesn't work when the return type is an aggregate type and the
   calling convention says that the pointer to this aggregate is to be
   popped by the callee.

   STACK_SIZE is the number of bytes of arguments on the stack,
   ROUNDED_STACK_SIZE is that number rounded up to
   PREFERRED_STACK_BOUNDARY; zero if the size is variable.  This is
   both to put into the call insn and to generate explicit popping
   code if necessary.

   STRUCT_VALUE_SIZE is the number of bytes wanted in a structure value.
   It is zero if this call doesn't want a structure value.

   NEXT_ARG_REG is the rtx that results from executing
     targetm.calls.function_arg (&args_so_far,
                                 function_arg_info::end_marker ());
   just after all the args have had their registers assigned.
   This could be whatever you like, but normally it is the first
   arg-register beyond those used for args in this call,
   or 0 if all the arg-registers are used in this call.
   It is passed on to `gen_call' so you can put this info in the call insn.

   VALREG is a hard register in which a value is returned,
   or 0 if the call does not return a value.

   OLD_INHIBIT_DEFER_POP is the value that `inhibit_defer_pop' had before
   the args to this call were processed.
   We restore `inhibit_defer_pop' to that value.

   CALL_FUSAGE is either empty or an EXPR_LIST of USE expressions that
   denote registers used by the called function.  */
static void
emit_call_1 (rtx funexp, tree fntree ATTRIBUTE_UNUSED, tree fndecl ATTRIBUTE_UNUSED,
             tree funtype ATTRIBUTE_UNUSED,
             poly_int64 stack_size ATTRIBUTE_UNUSED,
             poly_int64 rounded_stack_size,
             poly_int64 struct_value_size ATTRIBUTE_UNUSED,
             rtx next_arg_reg ATTRIBUTE_UNUSED, rtx valreg,
             int old_inhibit_defer_pop, rtx call_fusage, int ecf_flags,
             cumulative_args_t args_so_far ATTRIBUTE_UNUSED)
{
  rtx rounded_stack_size_rtx = gen_int_mode (rounded_stack_size, Pmode);
  rtx call, funmem, pat;
  int already_popped = 0;
  poly_int64 n_popped = 0;

  /* Sibling call patterns never pop arguments (no sibcall(_value)_pop
     patterns exist).  Any popping that the callee does on return will
     be from our caller's frame rather than ours.  */
  if (!(ecf_flags & ECF_SIBCALL))
    {
      n_popped += targetm.calls.return_pops_args (fndecl, funtype, stack_size);

#ifdef CALL_POPS_ARGS
      n_popped += CALL_POPS_ARGS (*get_cumulative_args (args_so_far));
#endif
    }

  /* Ensure address is valid.  SYMBOL_REF is already valid, so no need,
     and we don't want to load it into a register as an optimization,
     because prepare_call_address already did it if it should be done.  */
  if (GET_CODE (funexp) != SYMBOL_REF)
    funexp = memory_address (FUNCTION_MODE, funexp);

  funmem = gen_rtx_MEM (FUNCTION_MODE, funexp);
  if (fndecl && TREE_CODE (fndecl) == FUNCTION_DECL)
    {
      tree t = fndecl;

      /* Although a built-in FUNCTION_DECL and its non-__builtin
         counterpart compare equal and get a shared mem_attrs, they
         produce different dump output in compare-debug compilations,
         if an entry gets garbage collected in one compilation, then
         adds a different (but equivalent) entry, while the other
         doesn't run the garbage collector at the same spot and then
         shares the mem_attr with the equivalent entry.  */
      if (DECL_BUILT_IN_CLASS (t) == BUILT_IN_NORMAL)
        {
          tree t2 = builtin_decl_explicit (DECL_FUNCTION_CODE (t));
          if (t2)
            t = t2;
        }

      set_mem_expr (funmem, t);
    }
  else if (fntree)
    set_mem_expr (funmem, build_simple_mem_ref (CALL_EXPR_FN (fntree)));

  if (ecf_flags & ECF_SIBCALL)
    {
      if (valreg)
        pat = targetm.gen_sibcall_value (valreg, funmem,
                                         rounded_stack_size_rtx,
                                         next_arg_reg, NULL_RTX);
      else
        pat = targetm.gen_sibcall (funmem, rounded_stack_size_rtx,
                                   next_arg_reg,
                                   gen_int_mode (struct_value_size, Pmode));
    }
  /* If the target has "call" or "call_value" insns, then prefer them
     if no arguments are actually popped.  If the target does not have
     "call" or "call_value" insns, then we must use the popping versions
     even if the call has no arguments to pop.  */
  else if (maybe_ne (n_popped, 0)
           || !(valreg
                ? targetm.have_call_value ()
                : targetm.have_call ()))
    {
      rtx n_pop = gen_int_mode (n_popped, Pmode);

      /* If this subroutine pops its own args, record that in the call insn
         if possible, for the sake of frame pointer elimination.  */

      if (valreg)
        pat = targetm.gen_call_value_pop (valreg, funmem,
                                          rounded_stack_size_rtx,
                                          next_arg_reg, n_pop);
      else
        pat = targetm.gen_call_pop (funmem, rounded_stack_size_rtx,
                                    next_arg_reg, n_pop);

      already_popped = 1;
    }
  else
    {
      if (valreg)
        pat = targetm.gen_call_value (valreg, funmem, rounded_stack_size_rtx,
                                      next_arg_reg, NULL_RTX);
      else
        pat = targetm.gen_call (funmem, rounded_stack_size_rtx, next_arg_reg,
                                gen_int_mode (struct_value_size, Pmode));
    }
  emit_insn (pat);

  /* Find the call we just emitted.  */
  rtx_call_insn *call_insn = last_call_insn ();

  /* Some targets create a fresh MEM instead of reusing the one provided
     above.  Set its MEM_EXPR.  */
  call = get_call_rtx_from (call_insn);
  if (call
      && MEM_EXPR (XEXP (call, 0)) == NULL_TREE
      && MEM_EXPR (funmem) != NULL_TREE)
    set_mem_expr (XEXP (call, 0), MEM_EXPR (funmem));

  /* Put the register usage information there.  */
  add_function_usage_to (call_insn, call_fusage);

  /* If this is a const call, then set the insn's unchanging bit.  */
  if (ecf_flags & ECF_CONST)
    RTL_CONST_CALL_P (call_insn) = 1;

  /* If this is a pure call, then set the insn's unchanging bit.  */
  if (ecf_flags & ECF_PURE)
    RTL_PURE_CALL_P (call_insn) = 1;

  /* If this is a looping const or pure call, then set the insn's bit.  */
  if (ecf_flags & ECF_LOOPING_CONST_OR_PURE)
    RTL_LOOPING_CONST_OR_PURE_CALL_P (call_insn) = 1;

  /* Create a nothrow REG_EH_REGION note, if needed.  */
  make_reg_eh_region_note (call_insn, ecf_flags, 0);

  if (ecf_flags & ECF_NORETURN)
    add_reg_note (call_insn, REG_NORETURN, const0_rtx);

  if (ecf_flags & ECF_RETURNS_TWICE)
    {
      add_reg_note (call_insn, REG_SETJMP, const0_rtx);
      cfun->calls_setjmp = 1;
    }

  SIBLING_CALL_P (call_insn) = ((ecf_flags & ECF_SIBCALL) != 0);

  /* Restore this now, so that we do defer pops for this call's args
     if the context of the call as a whole permits.  */
  inhibit_defer_pop = old_inhibit_defer_pop;

  if (maybe_ne (n_popped, 0))
    {
      if (!already_popped)
        CALL_INSN_FUNCTION_USAGE (call_insn)
          = gen_rtx_EXPR_LIST (VOIDmode,
                               gen_rtx_CLOBBER (VOIDmode, stack_pointer_rtx),
                               CALL_INSN_FUNCTION_USAGE (call_insn));
      rounded_stack_size -= n_popped;
      rounded_stack_size_rtx = gen_int_mode (rounded_stack_size, Pmode);
      stack_pointer_delta -= n_popped;

      add_args_size_note (call_insn, stack_pointer_delta);

      /* If popping is needed, stack realignment must use DRAP.  */
      if (SUPPORTS_STACK_ALIGNMENT)
        crtl->need_drap = true;
    }
  /* For noreturn calls when not accumulating outgoing args force
     REG_ARGS_SIZE note to prevent crossjumping of calls with different
     args sizes.  */
  else if (!ACCUMULATE_OUTGOING_ARGS && (ecf_flags & ECF_NORETURN) != 0)
    add_args_size_note (call_insn, stack_pointer_delta);

  if (!ACCUMULATE_OUTGOING_ARGS)
    {
      /* If returning from the subroutine does not automatically pop the args,
         we need an instruction to pop them sooner or later.
         Perhaps do it now; perhaps just record how much space to pop later.

         If returning from the subroutine does pop the args, indicate that the
         stack pointer will be changed.  */

      if (maybe_ne (rounded_stack_size, 0))
        {
          if (ecf_flags & ECF_NORETURN)
            /* Just pretend we did the pop.  */
            stack_pointer_delta -= rounded_stack_size;
          else if (flag_defer_pop && inhibit_defer_pop == 0
                   && ! (ecf_flags & (ECF_CONST | ECF_PURE)))
            pending_stack_adjust += rounded_stack_size;
          else
            adjust_stack (rounded_stack_size_rtx);
        }
    }
  /* When we accumulate outgoing args, we must avoid any stack manipulations.
     Restore the stack pointer to its original value now.  Usually
     ACCUMULATE_OUTGOING_ARGS targets don't get here, but there are exceptions.
     On i386 ACCUMULATE_OUTGOING_ARGS can be enabled on demand, and
     popping variants of functions exist as well.

     ??? We may optimize similar to defer_pop above, but it is
     probably not worthwhile.

     ??? It will be worthwhile to enable combine_stack_adjustments even for
     such machines.  */
  else if (maybe_ne (n_popped, 0))
    anti_adjust_stack (gen_int_mode (n_popped, Pmode));
}
/* Determine if the function identified by FNDECL is one with
   special properties we wish to know about.  Modify FLAGS accordingly.

   For example, if the function might return more than one time (setjmp), then
   set ECF_RETURNS_TWICE.

   Set ECF_MAY_BE_ALLOCA for any memory allocation function that might allocate
   space from the stack such as alloca.  */

static int
special_function_p (const_tree fndecl, int flags)
{
  tree name_decl = DECL_NAME (fndecl);

  if (maybe_special_function_p (fndecl)
      && IDENTIFIER_LENGTH (name_decl) <= 11)
    {
      const char *name = IDENTIFIER_POINTER (name_decl);
      const char *tname = name;

      /* We assume that alloca will always be called by name.  It
         makes no sense to pass it as a pointer-to-function to
         anything that does not understand its behavior.  */
      if (IDENTIFIER_LENGTH (name_decl) == 6
          && name[0] == 'a'
          && ! strcmp (name, "alloca"))
        flags |= ECF_MAY_BE_ALLOCA;

      /* Disregard prefix _ or __.  */
      if (name[0] == '_')
        {
          if (name[1] == '_')
            tname += 2;
          else
            tname += 1;
        }

      /* ECF_RETURNS_TWICE is safe even for -ffreestanding.  */
      if (! strcmp (tname, "setjmp")
          || ! strcmp (tname, "sigsetjmp")
          || ! strcmp (name, "savectx")
          || ! strcmp (name, "vfork")
          || ! strcmp (name, "getcontext"))
        flags |= ECF_RETURNS_TWICE;
    }

  if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
      && ALLOCA_FUNCTION_CODE_P (DECL_FUNCTION_CODE (fndecl)))
    flags |= ECF_MAY_BE_ALLOCA;

  return flags;
}
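/* A tiny standalone illustration (not GCC code) of the prefix stripping
   performed above, which lets both "setjmp" and "__setjmp" match the
   returns-twice list:

     #include <string.h>

     static int returns_twice_name_p (const char *name)
     {
       const char *tname = name;
       if (name[0] == '_')
         tname += (name[1] == '_') ? 2 : 1;   // skip "_" or "__" prefix
       return strcmp (tname, "setjmp") == 0
              || strcmp (tname, "sigsetjmp") == 0;
     }
     // returns_twice_name_p ("__sigsetjmp") and ("setjmp") both return 1.
*/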
/* Return the fnspec for DECL.  */

static attr_fnspec
decl_fnspec (tree fndecl)
{
  tree attr;
  tree type = TREE_TYPE (fndecl);
  if (type)
    {
      attr = lookup_attribute ("fn spec", TYPE_ATTRIBUTES (type));
      if (attr)
        return TREE_VALUE (TREE_VALUE (attr));
    }
  if (fndecl_built_in_p (fndecl, BUILT_IN_NORMAL))
    return builtin_fnspec (fndecl);
  return "";
}
/* Similar to special_function_p; return a set of ERF_ flags for the
   function FNDECL.  */

static int
decl_return_flags (tree fndecl)
{
  attr_fnspec fnspec = decl_fnspec (fndecl);

  unsigned int arg;
  if (fnspec.returns_arg (&arg))
    return ERF_RETURNS_ARG | arg;

  if (fnspec.returns_noalias_p ())
    return ERF_NOALIAS;
  return 0;
}
/* Return nonzero when FNDECL represents a call to setjmp.  */

int
setjmp_call_p (const_tree fndecl)
{
  if (DECL_IS_RETURNS_TWICE (fndecl))
    return ECF_RETURNS_TWICE;
  return special_function_p (fndecl, 0) & ECF_RETURNS_TWICE;
}
/* Return true if STMT may be an alloca call.  */

bool
gimple_maybe_alloca_call_p (const gimple *stmt)
{
  tree fndecl;

  if (!is_gimple_call (stmt))
    return false;

  fndecl = gimple_call_fndecl (stmt);
  if (fndecl && (special_function_p (fndecl, 0) & ECF_MAY_BE_ALLOCA))
    return true;

  return false;
}

/* Return true if STMT is a builtin alloca call.  */

bool
gimple_alloca_call_p (const gimple *stmt)
{
  tree fndecl;

  if (!is_gimple_call (stmt))
    return false;

  fndecl = gimple_call_fndecl (stmt);
  if (fndecl && fndecl_built_in_p (fndecl, BUILT_IN_NORMAL))
    switch (DECL_FUNCTION_CODE (fndecl))
      {
      CASE_BUILT_IN_ALLOCA:
        return gimple_call_num_args (stmt) > 0;
      default:
        break;
      }

  return false;
}

/* Return true when EXP contains a builtin alloca call.  */

bool
alloca_call_p (const_tree exp)
{
  tree fndecl;
  if (TREE_CODE (exp) == CALL_EXPR
      && (fndecl = get_callee_fndecl (exp))
      && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
    switch (DECL_FUNCTION_CODE (fndecl))
      {
      CASE_BUILT_IN_ALLOCA:
        return true;
      default:
        break;
      }
  return false;
}
/* Return TRUE if FNDECL is either a TM builtin or a TM cloned
   function.  Return FALSE otherwise.  */

static bool
is_tm_builtin (const_tree fndecl)
{
  if (fndecl == NULL)
    return false;

  if (decl_is_tm_clone (fndecl))
    return true;

  if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
    {
      switch (DECL_FUNCTION_CODE (fndecl))
        {
        case BUILT_IN_TM_COMMIT:
        case BUILT_IN_TM_COMMIT_EH:
        case BUILT_IN_TM_ABORT:
        case BUILT_IN_TM_IRREVOCABLE:
        case BUILT_IN_TM_GETTMCLONE_IRR:
        case BUILT_IN_TM_MEMCPY:
        case BUILT_IN_TM_MEMMOVE:
        case BUILT_IN_TM_MEMSET:
        CASE_BUILT_IN_TM_STORE (1):
        CASE_BUILT_IN_TM_STORE (2):
        CASE_BUILT_IN_TM_STORE (4):
        CASE_BUILT_IN_TM_STORE (8):
        CASE_BUILT_IN_TM_STORE (FLOAT):
        CASE_BUILT_IN_TM_STORE (DOUBLE):
        CASE_BUILT_IN_TM_STORE (LDOUBLE):
        CASE_BUILT_IN_TM_STORE (M64):
        CASE_BUILT_IN_TM_STORE (M128):
        CASE_BUILT_IN_TM_STORE (M256):
        CASE_BUILT_IN_TM_LOAD (1):
        CASE_BUILT_IN_TM_LOAD (2):
        CASE_BUILT_IN_TM_LOAD (4):
        CASE_BUILT_IN_TM_LOAD (8):
        CASE_BUILT_IN_TM_LOAD (FLOAT):
        CASE_BUILT_IN_TM_LOAD (DOUBLE):
        CASE_BUILT_IN_TM_LOAD (LDOUBLE):
        CASE_BUILT_IN_TM_LOAD (M64):
        CASE_BUILT_IN_TM_LOAD (M128):
        CASE_BUILT_IN_TM_LOAD (M256):
        case BUILT_IN_TM_LOG:
        case BUILT_IN_TM_LOG_1:
        case BUILT_IN_TM_LOG_2:
        case BUILT_IN_TM_LOG_4:
        case BUILT_IN_TM_LOG_8:
        case BUILT_IN_TM_LOG_FLOAT:
        case BUILT_IN_TM_LOG_DOUBLE:
        case BUILT_IN_TM_LOG_LDOUBLE:
        case BUILT_IN_TM_LOG_M64:
        case BUILT_IN_TM_LOG_M128:
        case BUILT_IN_TM_LOG_M256:
          return true;
        default:
          break;
        }
    }
  return false;
}
/* Detect flags (function attributes) from the function decl or type node.  */

int
flags_from_decl_or_type (const_tree exp)
{
  int flags = 0;

  if (DECL_P (exp))
    {
      /* The function exp may have the `malloc' attribute.  */
      if (DECL_IS_MALLOC (exp))
        flags |= ECF_MALLOC;

      /* The function exp may have the `returns_twice' attribute.  */
      if (DECL_IS_RETURNS_TWICE (exp))
        flags |= ECF_RETURNS_TWICE;

      /* Process the pure and const attributes.  */
      if (TREE_READONLY (exp))
        flags |= ECF_CONST;
      if (DECL_PURE_P (exp))
        flags |= ECF_PURE;
      if (DECL_LOOPING_CONST_OR_PURE_P (exp))
        flags |= ECF_LOOPING_CONST_OR_PURE;

      if (DECL_IS_NOVOPS (exp))
        flags |= ECF_NOVOPS;
      if (lookup_attribute ("leaf", DECL_ATTRIBUTES (exp)))
        flags |= ECF_LEAF;
      if (lookup_attribute ("cold", DECL_ATTRIBUTES (exp)))
        flags |= ECF_COLD;

      if (TREE_NOTHROW (exp))
        flags |= ECF_NOTHROW;

      if (flag_tm)
        {
          if (is_tm_builtin (exp))
            flags |= ECF_TM_BUILTIN;
          else if ((flags & (ECF_CONST|ECF_NOVOPS)) != 0
                   || lookup_attribute ("transaction_pure",
                                        TYPE_ATTRIBUTES (TREE_TYPE (exp))))
            flags |= ECF_TM_PURE;
        }

      flags = special_function_p (exp, flags);
    }
  else if (TYPE_P (exp))
    {
      if (TYPE_READONLY (exp))
        flags |= ECF_CONST;

      if (flag_tm
          && ((flags & ECF_CONST) != 0
              || lookup_attribute ("transaction_pure", TYPE_ATTRIBUTES (exp))))
        flags |= ECF_TM_PURE;
    }
  else
    gcc_unreachable ();

  if (TREE_THIS_VOLATILE (exp))
    {
      flags |= ECF_NORETURN;
      if (flags & (ECF_CONST|ECF_PURE))
        flags |= ECF_LOOPING_CONST_OR_PURE;
    }

  return flags;
}
/* Detect flags from a CALL_EXPR.  */

int
call_expr_flags (const_tree t)
{
  int flags;
  tree decl = get_callee_fndecl (t);

  if (decl)
    flags = flags_from_decl_or_type (decl);
  else if (CALL_EXPR_FN (t) == NULL_TREE)
    flags = internal_fn_flags (CALL_EXPR_IFN (t));
  else
    {
      tree type = TREE_TYPE (CALL_EXPR_FN (t));
      if (type && TREE_CODE (type) == POINTER_TYPE)
        flags = flags_from_decl_or_type (TREE_TYPE (type));
      else
        flags = 0;
      if (CALL_EXPR_BY_DESCRIPTOR (t))
        flags |= ECF_BY_DESCRIPTOR;
    }

  return flags;
}
/* Return true if ARG should be passed by invisible reference.  */

bool
pass_by_reference (CUMULATIVE_ARGS *ca, function_arg_info arg)
{
  if (tree type = arg.type)
    {
      /* If this type contains non-trivial constructors, then it is
         forbidden for the middle-end to create any new copies.  */
      if (TREE_ADDRESSABLE (type))
        return true;

      /* GCC post 3.4 passes *all* variable sized types by reference.  */
      if (!TYPE_SIZE (type) || !poly_int_tree_p (TYPE_SIZE (type)))
        return true;

      /* If a record type should be passed the same as its first (and only)
         member, use the type and mode of that member.  */
      if (TREE_CODE (type) == RECORD_TYPE && TYPE_TRANSPARENT_AGGR (type))
        {
          arg.type = TREE_TYPE (first_field (type));
          arg.mode = TYPE_MODE (arg.type);
        }
    }

  return targetm.calls.pass_by_reference (pack_cumulative_args (ca), arg);
}
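/* A minimal sketch (hypothetical types, not GCC API) of the
   target-independent part of the decision above: addressable types (those
   the middle-end may not copy) and variable-sized types always go by
   invisible reference; everything else is left to the target hook:

     struct toy_type
     {
       _Bool addressable;       // e.g. C++ type with nontrivial copy/dtor
       _Bool size_is_constant;
     };

     static _Bool toy_pass_by_reference (const struct toy_type *t)
     {
       if (t->addressable)
         return 1;              // middle-end may not introduce copies
       if (!t->size_is_constant)
         return 1;              // all variable-sized types go by reference
       return 0;                // otherwise ask the target
     }
*/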
/* Return true if TYPE should be passed by reference when passed to
   the "..." arguments of a function.  */

bool
pass_va_arg_by_reference (tree type)
{
  return pass_by_reference (NULL, function_arg_info (type, /*named=*/false));
}

/* Decide whether ARG, which occurs in the state described by CA,
   should be passed by reference.  Return true if so and update
   ARG accordingly.  */

bool
apply_pass_by_reference_rules (CUMULATIVE_ARGS *ca, function_arg_info &arg)
{
  if (pass_by_reference (ca, arg))
    {
      arg.type = build_pointer_type (arg.type);
      arg.mode = TYPE_MODE (arg.type);
      arg.pass_by_reference = true;
      return true;
    }
  return false;
}

/* Return true if ARG, which is passed by reference, should be callee
   copied instead of caller copied.  */

bool
reference_callee_copied (CUMULATIVE_ARGS *ca, const function_arg_info &arg)
{
  if (arg.type && TREE_ADDRESSABLE (arg.type))
    return false;
  return targetm.calls.callee_copies (pack_cumulative_args (ca), arg);
}
/* Precompute all register parameters as described by ARGS, storing values
   into fields within the ARGS array.

   NUM_ACTUALS indicates the total number of elements in the ARGS array.

   Set REG_PARM_SEEN if we encounter a register parameter.  */

static void
precompute_register_parameters (int num_actuals, struct arg_data *args,
                                int *reg_parm_seen)
{
  int i;

  *reg_parm_seen = 0;

  for (i = 0; i < num_actuals; i++)
    if (args[i].reg != 0 && ! args[i].pass_on_stack)
      {
        *reg_parm_seen = 1;

        if (args[i].value == 0)
          {
            push_temp_slots ();
            args[i].value = expand_normal (args[i].tree_value);
            preserve_temp_slots (args[i].value);
            pop_temp_slots ();
          }

        /* If we are to promote the function arg to a wider mode,
           do it now.  */

        if (args[i].mode != TYPE_MODE (TREE_TYPE (args[i].tree_value)))
          args[i].value
            = convert_modes (args[i].mode,
                             TYPE_MODE (TREE_TYPE (args[i].tree_value)),
                             args[i].value, args[i].unsignedp);

        /* If the value is a non-legitimate constant, force it into a
           pseudo now.  TLS symbols sometimes need a call to resolve.  */
        if (CONSTANT_P (args[i].value)
            && !targetm.legitimate_constant_p (args[i].mode, args[i].value))
          args[i].value = force_reg (args[i].mode, args[i].value);

        /* If we're going to have to load the value by parts, pull the
           parts into pseudos.  The part extraction process can involve
           non-trivial computation.  */
        if (GET_CODE (args[i].reg) == PARALLEL)
          {
            tree type = TREE_TYPE (args[i].tree_value);
            args[i].parallel_value
              = emit_group_load_into_temps (args[i].reg, args[i].value,
                                            type, int_size_in_bytes (type));
          }

        /* If the value is expensive, and we are inside an appropriately
           short loop, put the value into a pseudo and then put the pseudo
           into the hard reg.

           For small register classes, also do this if this call uses
           register parameters.  This is to avoid reload conflicts while
           loading the parameter registers.  */

        else if ((! (REG_P (args[i].value)
                     || (GET_CODE (args[i].value) == SUBREG
                         && REG_P (SUBREG_REG (args[i].value)))))
                 && args[i].mode != BLKmode
                 && (set_src_cost (args[i].value, args[i].mode,
                                   optimize_insn_for_speed_p ())
                     > COSTS_N_INSNS (1))
                 && ((*reg_parm_seen
                      && targetm.small_register_classes_for_mode_p (args[i].mode))
                     || optimize))
          args[i].value = copy_to_mode_reg (args[i].mode, args[i].value);
      }
}
#ifdef REG_PARM_STACK_SPACE

/* The argument list is the property of the called routine and it
   may clobber it.  If the fixed area has been used for previous
   parameters, we must save and restore it.  */

static rtx
save_fixed_argument_area (int reg_parm_stack_space, rtx argblock, int *low_to_save, int *high_to_save)
{
  unsigned int low;
  unsigned int high;

  /* Compute the boundary of the area that needs to be saved, if any.  */
  high = reg_parm_stack_space;
  if (ARGS_GROW_DOWNWARD)
    high += 1;

  if (high > highest_outgoing_arg_in_use)
    high = highest_outgoing_arg_in_use;

  for (low = 0; low < high; low++)
    if (stack_usage_map[low] != 0 || low >= stack_usage_watermark)
      {
        int num_to_save;
        machine_mode save_mode;
        int delta;
        rtx addr;
        rtx stack_area;
        rtx save_area;

        while (stack_usage_map[--high] == 0)
          ;

        *low_to_save = low;
        *high_to_save = high;

        num_to_save = high - low + 1;

        /* If we don't have the required alignment, must do this
           in BLKmode.  */
        scalar_int_mode imode;
        if (int_mode_for_size (num_to_save * BITS_PER_UNIT, 1).exists (&imode)
            && (low & (MIN (GET_MODE_SIZE (imode),
                            BIGGEST_ALIGNMENT / UNITS_PER_WORD) - 1)) == 0)
          save_mode = imode;
        else
          save_mode = BLKmode;

        if (ARGS_GROW_DOWNWARD)
          delta = -high;
        else
          delta = low;

        addr = plus_constant (Pmode, argblock, delta);
        stack_area = gen_rtx_MEM (save_mode, memory_address (save_mode, addr));

        set_mem_align (stack_area, PARM_BOUNDARY);
        if (save_mode == BLKmode)
          {
            save_area = assign_stack_temp (BLKmode, num_to_save);
            emit_block_move (validize_mem (save_area), stack_area,
                             GEN_INT (num_to_save), BLOCK_OP_CALL_PARM);
          }
        else
          {
            save_area = gen_reg_rtx (save_mode);
            emit_move_insn (save_area, stack_area);
          }

        return save_area;
      }

  return NULL_RTX;
}

static void
restore_fixed_argument_area (rtx save_area, rtx argblock, int high_to_save, int low_to_save)
{
  machine_mode save_mode = GET_MODE (save_area);
  int delta;
  rtx addr, stack_area;

  if (ARGS_GROW_DOWNWARD)
    delta = -high_to_save;
  else
    delta = low_to_save;

  addr = plus_constant (Pmode, argblock, delta);
  stack_area = gen_rtx_MEM (save_mode, memory_address (save_mode, addr));
  set_mem_align (stack_area, PARM_BOUNDARY);

  if (save_mode != BLKmode)
    emit_move_insn (stack_area, save_area);
  else
    emit_block_move (stack_area, validize_mem (save_area),
                     GEN_INT (high_to_save - low_to_save + 1),
                     BLOCK_OP_CALL_PARM);
}
#endif /* REG_PARM_STACK_SPACE */
/* If any elements in ARGS refer to parameters that are to be passed in
   registers, but not in memory, and whose alignment does not permit a
   direct copy into registers, copy the values into a group of pseudos
   which we will later copy into the appropriate hard registers.

   Pseudos for each unaligned argument will be stored into the array
   args[argnum].aligned_regs.  The caller is responsible for deallocating
   the aligned_regs array if it is nonzero.  */

static void
store_unaligned_arguments_into_pseudos (struct arg_data *args, int num_actuals)
{
  int i, j;

  for (i = 0; i < num_actuals; i++)
    if (args[i].reg != 0 && ! args[i].pass_on_stack
        && GET_CODE (args[i].reg) != PARALLEL
        && args[i].mode == BLKmode
        && MEM_P (args[i].value)
        && (MEM_ALIGN (args[i].value)
            < (unsigned int) MIN (BIGGEST_ALIGNMENT, BITS_PER_WORD)))
      {
        int bytes = int_size_in_bytes (TREE_TYPE (args[i].tree_value));
        int endian_correction = 0;

        if (args[i].partial)
          {
            gcc_assert (args[i].partial % UNITS_PER_WORD == 0);
            args[i].n_aligned_regs = args[i].partial / UNITS_PER_WORD;
          }
        else
          {
            args[i].n_aligned_regs
              = (bytes + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
          }

        args[i].aligned_regs = XNEWVEC (rtx, args[i].n_aligned_regs);

        /* Structures smaller than a word are normally aligned to the
           least significant byte.  On a BYTES_BIG_ENDIAN machine,
           this means we must skip the empty high order bytes when
           calculating the bit offset.  */
        if (bytes < UNITS_PER_WORD
#ifdef BLOCK_REG_PADDING
            && (BLOCK_REG_PADDING (args[i].mode,
                                   TREE_TYPE (args[i].tree_value), 1)
                == PAD_DOWNWARD)
#else
            && BYTES_BIG_ENDIAN
#endif
            )
          endian_correction = BITS_PER_WORD - bytes * BITS_PER_UNIT;

        for (j = 0; j < args[i].n_aligned_regs; j++)
          {
            rtx reg = gen_reg_rtx (word_mode);
            rtx word = operand_subword_force (args[i].value, j, BLKmode);
            int bitsize = MIN (bytes * BITS_PER_UNIT, BITS_PER_WORD);

            args[i].aligned_regs[j] = reg;
            word = extract_bit_field (word, bitsize, 0, 1, NULL_RTX,
                                      word_mode, word_mode, false, NULL);

            /* There is no need to restrict this code to loading items
               in TYPE_ALIGN sized hunks.  The bitfield instructions can
               load up entire word sized registers efficiently.

               ??? This may not be needed anymore.
               We used to emit a clobber here but that doesn't let later
               passes optimize the instructions we emit.  By storing 0 into
               the register later passes know the first AND to zero out the
               bitfield being set in the register is unnecessary.  The store
               of 0 will be deleted as will at least the first AND.  */

            emit_move_insn (reg, const0_rtx);

            bytes -= bitsize / BITS_PER_UNIT;
            store_bit_field (reg, bitsize, endian_correction, 0, 0,
                             word_mode, word, false);
          }
      }
}
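/* A worked example of the endian correction above (illustrative): for a
   3-byte struct on a big-endian target with BITS_PER_WORD == 32,
   endian_correction = 32 - 3 * 8 = 8, so store_bit_field places the 24
   value bits at bit offset 8, skipping the empty high-order byte and
   leaving the value aligned to the least significant byte of the word,
   matching its layout in memory.  */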
/* The limit set by -Walloc-larger-than=.  */
static GTY(()) tree alloc_object_size_limit;

/* Initialize ALLOC_OBJECT_SIZE_LIMIT based on the -Walloc-size-larger-than=
   setting if the option is specified, or to the maximum object size if it
   is not.  Return the initialized value.  */

static tree
alloc_max_size (void)
{
  if (alloc_object_size_limit)
    return alloc_object_size_limit;

  HOST_WIDE_INT limit = warn_alloc_size_limit;
  if (limit == HOST_WIDE_INT_MAX)
    limit = tree_to_shwi (TYPE_MAX_VALUE (ptrdiff_type_node));

  alloc_object_size_limit = build_int_cst (size_type_node, limit);

  return alloc_object_size_limit;
}
/* Return true when EXP's range can be determined and set RANGE[] to it
   after adjusting it if necessary to make EXP represent a valid size
   of an object, or a valid size argument to an allocation function declared
   with attribute alloc_size (whose argument may be signed), or to a string
   manipulation function like memset.

   When ALLOW_ZERO is set in FLAGS, allow returning a range of [0, 0] for
   a size in an anti-range [1, N] where N > PTRDIFF_MAX.  A zero range is
   a (nearly) invalid argument to allocation functions like malloc but it
   is a valid argument to functions like memset.

   When USE_LARGEST is set in FLAGS, set RANGE to the largest valid subrange
   in a multi-range, otherwise to the smallest valid subrange.  */

bool
get_size_range (range_query *query, tree exp, gimple *stmt, tree range[2],
                int flags /* = 0 */)
{
  if (!exp)
    return false;

  if (tree_fits_uhwi_p (exp))
    {
      /* EXP is a constant.  */
      range[0] = range[1] = exp;
      return true;
    }

  tree exptype = TREE_TYPE (exp);
  bool integral = INTEGRAL_TYPE_P (exptype);

  wide_int min, max;
  enum value_range_kind range_type;

  if (integral)
    {
      value_range vr;
      if (query && query->range_of_expr (vr, exp, stmt))
        {
          if (vr.undefined_p ())
            vr.set_varying (TREE_TYPE (exp));
          range_type = vr.kind ();
          min = wi::to_wide (vr.min ());
          max = wi::to_wide (vr.max ());
        }
      else
        range_type = determine_value_range (exp, &min, &max);
    }
  else
    range_type = VR_VARYING;

  if (range_type == VR_VARYING)
    {
      if (integral)
        {
          /* Use the full range of the type of the expression when
             no value range information is available.  */
          range[0] = TYPE_MIN_VALUE (exptype);
          range[1] = TYPE_MAX_VALUE (exptype);
          return true;
        }

      range[0] = NULL_TREE;
      range[1] = NULL_TREE;
      return false;
    }

  unsigned expprec = TYPE_PRECISION (exptype);

  bool signed_p = !TYPE_UNSIGNED (exptype);

  if (range_type == VR_ANTI_RANGE)
    {
      if (signed_p)
        {
          if (wi::les_p (max, 0))
            {
              /* EXP is not in a strictly negative range.  That means
                 it must be in some (not necessarily strictly) positive
                 range which includes zero.  Since in signed to unsigned
                 conversions negative values end up converted to large
                 positive values, and otherwise they are not valid sizes,
                 the resulting range is in both cases [0, TYPE_MAX].  */
              min = wi::zero (expprec);
              max = wi::to_wide (TYPE_MAX_VALUE (exptype));
            }
          else if (wi::les_p (min - 1, 0))
            {
              /* EXP is not in a negative-positive range.  That means EXP
                 is either negative, or greater than max.  Since negative
                 sizes are invalid make the range [MAX + 1, TYPE_MAX].  */
              min = max + 1;
              max = wi::to_wide (TYPE_MAX_VALUE (exptype));
            }
          else
            {
              max = min - 1;
              min = wi::zero (expprec);
            }
        }
      else
        {
          wide_int maxsize = wi::to_wide (max_object_size ());
          min = wide_int::from (min, maxsize.get_precision (), UNSIGNED);
          max = wide_int::from (max, maxsize.get_precision (), UNSIGNED);
          if (wi::eq_p (0, min - 1))
            {
              /* EXP is unsigned and not in the range [1, MAX].  That means
                 it's either zero or greater than MAX.  Even though 0 would
                 normally be detected by -Walloc-zero, unless ALLOW_ZERO
                 is set, set the range to [MAX, TYPE_MAX] so that when MAX
                 is greater than the limit the whole range is diagnosed.  */
              wide_int maxsize = wi::to_wide (max_object_size ());
              if (flags & SR_ALLOW_ZERO)
                {
                  if (wi::leu_p (maxsize, max + 1)
                      || !(flags & SR_USE_LARGEST))
                    min = max = wi::zero (expprec);
                  else
                    {
                      min = max + 1;
                      max = wi::to_wide (TYPE_MAX_VALUE (exptype));
                    }
                }
              else
                {
                  min = max + 1;
                  max = wi::to_wide (TYPE_MAX_VALUE (exptype));
                }
            }
          else if ((flags & SR_USE_LARGEST)
                   && wi::ltu_p (max + 1, maxsize))
            {
              /* When USE_LARGEST is set and the larger of the two subranges
                 is a valid size, use it...  */
              min = max + 1;
              max = maxsize;
            }
          else
            {
              /* ...otherwise use the smaller subrange.  */
              max = min - 1;
              min = wi::zero (expprec);
            }
        }
    }

  range[0] = wide_int_to_tree (exptype, min);
  range[1] = wide_int_to_tree (exptype, max);

  return true;
}

bool
get_size_range (tree exp, tree range[2], int flags /* = 0 */)
{
  return get_size_range (/*query=*/NULL, exp, /*stmt=*/NULL, range, flags);
}
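/* A worked example of the unsigned anti-range handling above
   (illustrative): for an unsigned EXP known to be in ~[1, N], i.e. either
   zero or greater than N, RANGE is set to [N + 1, TYPE_MAX] so that a bound
   beyond the object-size limit is diagnosed as a whole range.  When
   SR_ALLOW_ZERO is set (and SR_USE_LARGEST does not select a still-valid
   upper subrange), RANGE instead becomes [0, 0], since zero is a valid
   argument to functions like memset even though it is a (nearly) invalid
   one to allocation functions like malloc.  */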
/* Diagnose a call EXP to function FN decorated with attribute alloc_size
   whose argument numbers given by IDX with values given by ARGS exceed
   the maximum object size or cause an unsigned overflow (wrapping) when
   multiplied.  FN is null when EXP is a call via a function pointer.
   When ARGS[0] is null the function does nothing.  ARGS[1] may be null
   for functions like malloc, and non-null for those like calloc that
   are decorated with a two-argument attribute alloc_size.  */

void
maybe_warn_alloc_args_overflow (tree fn, tree exp, tree args[2], int idx[2])
{
  /* The range each of the (up to) two arguments is known to be in.  */
  tree argrange[2][2] = { { NULL_TREE, NULL_TREE }, { NULL_TREE, NULL_TREE } };

  /* Maximum object size set by -Walloc-size-larger-than= or SIZE_MAX / 2.  */
  tree maxobjsize = alloc_max_size ();

  location_t loc = EXPR_LOCATION (exp);

  tree fntype = fn ? TREE_TYPE (fn) : TREE_TYPE (TREE_TYPE (exp));
  bool warned = false;

  /* Validate each argument individually.  */
  for (unsigned i = 0; i != 2 && args[i]; ++i)
    {
      if (TREE_CODE (args[i]) == INTEGER_CST)
        {
          argrange[i][0] = args[i];
          argrange[i][1] = args[i];

          if (tree_int_cst_lt (args[i], integer_zero_node))
            {
              warned = warning_at (loc, OPT_Walloc_size_larger_than_,
                                   "%Kargument %i value %qE is negative",
                                   exp, idx[i] + 1, args[i]);
            }
          else if (integer_zerop (args[i]))
            {
              /* Avoid issuing -Walloc-zero for allocation functions other
                 than __builtin_alloca that are declared with attribute
                 returns_nonnull because there's no portability risk.  This
                 avoids warning for such calls to libiberty's xmalloc and
                 friends.
                 Also avoid issuing the warning for calls to function named
                 "alloca".  */
              if (fn && fndecl_built_in_p (fn, BUILT_IN_ALLOCA)
                  ? IDENTIFIER_LENGTH (DECL_NAME (fn)) != 6
                  : !lookup_attribute ("returns_nonnull",
                                       TYPE_ATTRIBUTES (fntype)))
                warned = warning_at (loc, OPT_Walloc_zero,
                                     "%Kargument %i value is zero",
                                     exp, idx[i] + 1);
            }
          else if (tree_int_cst_lt (maxobjsize, args[i]))
            {
              /* G++ emits calls to ::operator new[](SIZE_MAX) in C++98
                 mode and with -fno-exceptions as a way to indicate array
                 size overflow.  There's no good way to detect C++98 here
                 so avoid diagnosing these calls for all C++ modes.  */
              if (i == 0
                  && fn
                  && !args[1]
                  && lang_GNU_CXX ()
                  && DECL_IS_OPERATOR_NEW_P (fn)
                  && integer_all_onesp (args[i]))
                continue;

              warned = warning_at (loc, OPT_Walloc_size_larger_than_,
                                   "%Kargument %i value %qE exceeds "
                                   "maximum object size %E",
                                   exp, idx[i] + 1, args[i], maxobjsize);
            }
        }
      else if (TREE_CODE (args[i]) == SSA_NAME
               && get_size_range (args[i], argrange[i]))
        {
          /* Verify that the argument's range is not negative (including
             upper bound of zero).  */
          if (tree_int_cst_lt (argrange[i][0], integer_zero_node)
              && tree_int_cst_le (argrange[i][1], integer_zero_node))
            {
              warned = warning_at (loc, OPT_Walloc_size_larger_than_,
                                   "%Kargument %i range [%E, %E] is negative",
                                   exp, idx[i] + 1,
                                   argrange[i][0], argrange[i][1]);
            }
          else if (tree_int_cst_lt (maxobjsize, argrange[i][0]))
            {
              warned = warning_at (loc, OPT_Walloc_size_larger_than_,
                                   "%Kargument %i range [%E, %E] exceeds "
                                   "maximum object size %E",
                                   exp, idx[i] + 1,
                                   argrange[i][0], argrange[i][1],
                                   maxobjsize);
            }
        }
    }

  if (!argrange[0][0])
    return;

  /* For a two-argument alloc_size, validate the product of the two
     arguments if both of their values or ranges are known.  */
  if (!warned && tree_fits_uhwi_p (argrange[0][0])
      && argrange[1][0] && tree_fits_uhwi_p (argrange[1][0])
      && !integer_onep (argrange[0][0])
      && !integer_onep (argrange[1][0]))
    {
      /* Check for overflow in the product of a function decorated with
         attribute alloc_size (X, Y).  */
      unsigned szprec = TYPE_PRECISION (size_type_node);
      wide_int x = wi::to_wide (argrange[0][0], szprec);
      wide_int y = wi::to_wide (argrange[1][0], szprec);

      wi::overflow_type vflow;
      wide_int prod = wi::umul (x, y, &vflow);

      if (vflow)
        warned = warning_at (loc, OPT_Walloc_size_larger_than_,
                             "%Kproduct %<%E * %E%> of arguments %i and %i "
                             "exceeds %<SIZE_MAX%>",
                             exp, argrange[0][0], argrange[1][0],
                             idx[0] + 1, idx[1] + 1);
      else if (wi::ltu_p (wi::to_wide (maxobjsize, szprec), prod))
        warned = warning_at (loc, OPT_Walloc_size_larger_than_,
                             "%Kproduct %<%E * %E%> of arguments %i and %i "
                             "exceeds maximum object size %E",
                             exp, argrange[0][0], argrange[1][0],
                             idx[0] + 1, idx[1] + 1,
                             maxobjsize);

      if (warned)
        {
          /* Print the full range of each of the two arguments to make
             it clear when it is, in fact, in a range and not constant.  */
          if (argrange[0][0] != argrange[0][1])
            inform (loc, "argument %i in the range [%E, %E]",
                    idx[0] + 1, argrange[0][0], argrange[0][1]);
          if (argrange[1][0] != argrange[1][1])
            inform (loc, "argument %i in the range [%E, %E]",
                    idx[1] + 1, argrange[1][0], argrange[1][1]);
        }
    }

  if (warned && fn)
    {
      location_t fnloc = DECL_SOURCE_LOCATION (fn);

      if (DECL_IS_UNDECLARED_BUILTIN (fn))
        inform (loc,
                "in a call to built-in allocation function %qD", fn);
      else
        inform (fnloc,
                "in a call to allocation function %qD declared here", fn);
    }
}
/* If EXPR refers to a character array or pointer declared attribute
   nonstring, return a decl for that array or pointer and set *REF to
   the referenced enclosing object or pointer.  Otherwise return null.  */

tree
get_attr_nonstring_decl (tree expr, tree *ref)
{
  tree decl = expr;
  tree var = NULL_TREE;
  if (TREE_CODE (decl) == SSA_NAME)
    {
      gimple *def = SSA_NAME_DEF_STMT (decl);

      if (is_gimple_assign (def))
        {
          tree_code code = gimple_assign_rhs_code (def);
          if (code == ADDR_EXPR
              || code == COMPONENT_REF
              || code == VAR_DECL)
            decl = gimple_assign_rhs1 (def);
        }
      else
        var = SSA_NAME_VAR (decl);
    }

  if (TREE_CODE (decl) == ADDR_EXPR)
    decl = TREE_OPERAND (decl, 0);

  /* To simplify calling code, store the referenced DECL regardless of
     the attribute determined below, but avoid storing the SSA_NAME_VAR
     obtained above (it's not useful for dataflow purposes).  */
  if (ref)
    *ref = decl;

  /* Use the SSA_NAME_VAR that was determined above to see if it's
     declared nonstring.  Otherwise drill down into the referenced
     DECL.  */
  if (var)
    decl = var;
  else if (TREE_CODE (decl) == ARRAY_REF)
    decl = TREE_OPERAND (decl, 0);
  else if (TREE_CODE (decl) == COMPONENT_REF)
    decl = TREE_OPERAND (decl, 1);
  else if (TREE_CODE (decl) == MEM_REF)
    return get_attr_nonstring_decl (TREE_OPERAND (decl, 0), ref);

  if (DECL_P (decl)
      && lookup_attribute ("nonstring", DECL_ATTRIBUTES (decl)))
    return decl;

  return NULL_TREE;
}
1614 /* Warn about passing a non-string array/pointer to a built-in function
1615 that expects a nul-terminated string argument. Returns true if
1616 a warning has been issued.*/
1618 bool
1619 maybe_warn_nonstring_arg (tree fndecl, tree exp)
1621 if (!fndecl || !fndecl_built_in_p (fndecl, BUILT_IN_NORMAL))
1622 return false;
1624 if (TREE_NO_WARNING (exp) || !warn_stringop_overread)
1625 return false;
1627 /* Avoid clearly invalid calls (more checking done below). */
1628 unsigned nargs = call_expr_nargs (exp);
1629 if (!nargs)
1630 return false;
1632 /* The bound argument to a bounded string function like strncpy. */
1633 tree bound = NULL_TREE;
1635 /* The longest known or possible string argument to one of the comparison
1636 functions. If the length is less than the bound it is used instead.
1637 Since the length is only used for warning and not for code generation
1638 disable strict mode in the calls to get_range_strlen below. */
1639 tree maxlen = NULL_TREE;
1641 /* It's safe to call "bounded" string functions with a non-string
1642 argument since the functions provide an explicit bound for this
1643 purpose. The exception is strncat where the bound may refer to
1644 either the destination or the source. */
1645 int fncode = DECL_FUNCTION_CODE (fndecl);
1646 switch (fncode)
1648 case BUILT_IN_STRCMP:
1649 case BUILT_IN_STRNCMP:
1650 case BUILT_IN_STRNCASECMP:
1652 /* For these, if one argument refers to one or more of a set
1653 of string constants or arrays of known size, determine
1654 the range of their known or possible lengths and use it
1655 conservatively as the bound for the unbounded function,
1656 and to adjust the range of the bound of the bounded ones. */
1657 for (unsigned argno = 0;
1658 argno < MIN (nargs, 2)
1659 && !(maxlen && TREE_CODE (maxlen) == INTEGER_CST); argno++)
1661 tree arg = CALL_EXPR_ARG (exp, argno);
1662 if (!get_attr_nonstring_decl (arg))
1664 c_strlen_data lendata = { };
1665 /* Set MAXBOUND to an arbitrary non-null non-integer
1666 node as a request to have it set to the length of
1667 the longest string in a PHI. */
1668 lendata.maxbound = arg;
1669 get_range_strlen (arg, &lendata, /* eltsize = */ 1);
1670 maxlen = lendata.maxbound;
1674 /* Fall through. */
1676 case BUILT_IN_STRNCAT:
1677 case BUILT_IN_STPNCPY:
1678 case BUILT_IN_STRNCPY:
1679 if (nargs > 2)
1680 bound = CALL_EXPR_ARG (exp, 2);
1681 break;
1683 case BUILT_IN_STRNDUP:
1684 if (nargs > 1)
1685 bound = CALL_EXPR_ARG (exp, 1);
1686 break;
1688 case BUILT_IN_STRNLEN:
1690 tree arg = CALL_EXPR_ARG (exp, 0);
1691 if (!get_attr_nonstring_decl (arg))
1693 c_strlen_data lendata = { };
1694 /* Set MAXBOUND to an arbitrary non-null non-integer
1695 node as a request to have it set to the length of
1696 the longest string in a PHI. */
1697 lendata.maxbound = arg;
1698 get_range_strlen (arg, &lendata, /* eltsize = */ 1);
1699 maxlen = lendata.maxbound;
1701 if (nargs > 1)
1702 bound = CALL_EXPR_ARG (exp, 1);
1703 break;
1706 default:
1707 break;
1710 /* Determine the range of the bound argument (if specified). */
1711 tree bndrng[2] = { NULL_TREE, NULL_TREE };
1712 if (bound)
1714 STRIP_NOPS (bound);
1715 get_size_range (bound, bndrng);
1718 location_t loc = EXPR_LOCATION (exp);
1720 if (bndrng[0])
1722 /* Diagnose excessive bound prior to the adjustment below and
1723 regardless of attribute nonstring. */
1724 tree maxobjsize = max_object_size ();
1725 if (tree_int_cst_lt (maxobjsize, bndrng[0]))
1727 bool warned = false;
1728 if (tree_int_cst_equal (bndrng[0], bndrng[1]))
1729 warned = warning_at (loc, OPT_Wstringop_overread,
1730 "%K%qD specified bound %E "
1731 "exceeds maximum object size %E",
1732 exp, fndecl, bndrng[0], maxobjsize);
1733 else
1734 warned = warning_at (loc, OPT_Wstringop_overread,
1735 "%K%qD specified bound [%E, %E] "
1736 "exceeds maximum object size %E",
1737 exp, fndecl, bndrng[0], bndrng[1],
1738 maxobjsize);
1739 if (warned)
1740 TREE_NO_WARNING (exp) = true;
1742 return warned;
1746 if (maxlen && !integer_all_onesp (maxlen))
1748 /* Add one for the nul. */
1749 maxlen = const_binop (PLUS_EXPR, TREE_TYPE (maxlen), maxlen,
1750 size_one_node);
1752 if (!bndrng[0])
1754 /* Conservatively use the upper bound of the lengths for
1755 both the lower and the upper bound of the operation. */
1756 bndrng[0] = maxlen;
1757 bndrng[1] = maxlen;
1758 bound = void_type_node;
1760 else if (maxlen)
1762 /* Replace the bound on the operation with the upper bound
1763 of the length of the string if the latter is smaller. */
1764 if (tree_int_cst_lt (maxlen, bndrng[0]))
1765 bndrng[0] = maxlen;
1766 else if (tree_int_cst_lt (maxlen, bndrng[1]))
1767 bndrng[1] = maxlen;
1771 bool any_arg_warned = false;
1772 /* Iterate over the built-in function's formal arguments and check
1773 each const char* against the actual argument. If the actual
1774 argument is declared attribute non-string issue a warning unless
1775 the argument's maximum length is bounded. */
1776 function_args_iterator it;
1777 function_args_iter_init (&it, TREE_TYPE (fndecl));
1779 for (unsigned argno = 0; ; ++argno, function_args_iter_next (&it))
1781 /* Avoid iterating past the declared argument in a call
1782 to function declared without a prototype. */
1783 if (argno >= nargs)
1784 break;
1786 tree argtype = function_args_iter_cond (&it);
1787 if (!argtype)
1788 break;
1790 if (TREE_CODE (argtype) != POINTER_TYPE)
1791 continue;
1793 argtype = TREE_TYPE (argtype);
1795 if (TREE_CODE (argtype) != INTEGER_TYPE
1796 || !TYPE_READONLY (argtype))
1797 continue;
1799 argtype = TYPE_MAIN_VARIANT (argtype);
1800 if (argtype != char_type_node)
1801 continue;
1803 tree callarg = CALL_EXPR_ARG (exp, argno);
1804 if (TREE_CODE (callarg) == ADDR_EXPR)
1805 callarg = TREE_OPERAND (callarg, 0);
1807 /* See if the destination is declared with attribute "nonstring". */
1808 tree decl = get_attr_nonstring_decl (callarg);
1809 if (!decl)
1810 continue;
1812 /* The maximum number of array elements accessed. */
1813 offset_int wibnd = 0;
1815 if (argno && fncode == BUILT_IN_STRNCAT)
1817 /* See if the bound in strncat is derived from the length
1818 of the strlen of the destination (as it's expected to be).
1819 If so, reset BOUND and FNCODE to trigger a warning. */
1820 tree dstarg = CALL_EXPR_ARG (exp, 0);
1821 if (is_strlen_related_p (dstarg, bound))
1823 /* The bound applies to the destination, not to the source,
1824 so reset these to trigger a warning without mentioning
1825 the bound. */
1826 bound = NULL;
1827 fncode = 0;
1829 else if (bndrng[1])
1830 /* Use the upper bound of the range for strncat. */
1831 wibnd = wi::to_offset (bndrng[1]);
1833 else if (bndrng[0])
1834 /* Use the lower bound of the range for functions other than
1835 strncat. */
1836 wibnd = wi::to_offset (bndrng[0]);
1838 /* Determine the size of the argument array if it is one. */
1839 offset_int asize = wibnd;
1840 bool known_size = false;
1841 tree type = TREE_TYPE (decl);
1843 /* Determine the array size. For arrays of unknown bound and
1844 pointers reset BOUND to trigger the appropriate warning. */
1845 if (TREE_CODE (type) == ARRAY_TYPE)
1847 if (tree arrbnd = TYPE_DOMAIN (type))
1849 if ((arrbnd = TYPE_MAX_VALUE (arrbnd)))
1851 asize = wi::to_offset (arrbnd) + 1;
1852 known_size = true;
1855 else if (bound == void_type_node)
1856 bound = NULL_TREE;
1858 else if (bound == void_type_node)
1859 bound = NULL_TREE;
1861 /* In a call to strncat with a bound in a range whose lower but
1862 not upper bound is less than the array size, reset ASIZE to
1863 be the same as the bound and the other variable to trigger
1864 the appropriate warning below. */
1865 if (fncode == BUILT_IN_STRNCAT
1866 && bndrng[0] != bndrng[1]
1867 && wi::ltu_p (wi::to_offset (bndrng[0]), asize)
1868 && (!known_size
1869 || wi::ltu_p (asize, wibnd)))
1871 asize = wibnd;
1872 bound = NULL_TREE;
1873 fncode = 0;
1876 bool warned = false;
1878 auto_diagnostic_group d;
1879 if (wi::ltu_p (asize, wibnd))
1881 if (bndrng[0] == bndrng[1])
1882 warned = warning_at (loc, OPT_Wstringop_overread,
1883 "%qD argument %i declared attribute "
1884 "%<nonstring%> is smaller than the specified "
1885 "bound %wu",
1886 fndecl, argno + 1, wibnd.to_uhwi ());
1887 else if (wi::ltu_p (asize, wi::to_offset (bndrng[0])))
1888 warned = warning_at (loc, OPT_Wstringop_overread,
1889 "%qD argument %i declared attribute "
1890 "%<nonstring%> is smaller than "
1891 "the specified bound [%E, %E]",
1892 fndecl, argno + 1, bndrng[0], bndrng[1]);
1893 else
1894 warned = warning_at (loc, OPT_Wstringop_overread,
1895 "%qD argument %i declared attribute "
1896 "%<nonstring%> may be smaller than "
1897 "the specified bound [%E, %E]",
1898 fndecl, argno + 1, bndrng[0], bndrng[1]);
1900 else if (fncode == BUILT_IN_STRNCAT)
1901 ; /* Avoid warning for calls to strncat() when the bound
1902 is equal to the size of the non-string argument. */
1903 else if (!bound)
1904 warned = warning_at (loc, OPT_Wstringop_overread,
1905 "%qD argument %i declared attribute %<nonstring%>",
1906 fndecl, argno + 1);
1908 if (warned)
1910 inform (DECL_SOURCE_LOCATION (decl),
1911 "argument %qD declared here", decl);
1912 any_arg_warned = true;
1916 if (any_arg_warned)
1917 TREE_NO_WARNING (exp) = true;
1919 return any_arg_warned;
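/* Editor's note (illustrative, not part of the original source): a
   sketch of the user-level code the check above is meant to diagnose,
   assuming a GCC that supports attribute nonstring.  Compiled with
   -Wall, the unbounded strlen call is expected to draw
   -Wstringop-overread because A is not guaranteed to be
   nul-terminated, while the strnlen call is accepted because its
   bound does not exceed the size of the array:

	#include <string.h>

	extern char a[4] __attribute__ ((nonstring));

	size_t bad (void)
	{
	  return strlen (a);		// warning: unbounded read
	}

	size_t good (void)
	{
	  return strnlen (a, sizeof a);	// OK: bound covers the array
	}
*/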
1922 /* Issue an error if CALL_EXPR was flagged as requiring
1923 tail-call optimization. */
1925 void
1926 maybe_complain_about_tail_call (tree call_expr, const char *reason)
1928 gcc_assert (TREE_CODE (call_expr) == CALL_EXPR);
1929 if (!CALL_EXPR_MUST_TAIL_CALL (call_expr))
1930 return;
1932 error_at (EXPR_LOCATION (call_expr), "cannot tail-call: %s", reason);
1935 /* Returns the type of the argument ARGNO to function with type FNTYPE
1936 or null when the type cannot be determined or no such argument exists. */
1938 static tree
1939 fntype_argno_type (tree fntype, unsigned argno)
1941 if (!prototype_p (fntype))
1942 return NULL_TREE;
1944 tree argtype;
1945 function_args_iterator it;
1946 FOREACH_FUNCTION_ARGS (fntype, argtype, it)
1947 if (argno-- == 0)
1948 return argtype;
1950 return NULL_TREE;
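/* Editor's note: a usage sketch for the helper above.  Given a
   prototyped function type FNTYPE such as

	int f (char *, size_t);

   fntype_argno_type (fntype, 0) returns the char * argument type,
   fntype_argno_type (fntype, 1) returns the size_t argument type,
   and any larger ARGNO (or an unprototyped FNTYPE) yields
   NULL_TREE.  */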
1953 /* Helper to append the "human readable" attribute access specification
1954 described by ACCESS to the array ATTRSTR with size STRSIZE. Used in
1955 diagnostics. */
1957 static inline void
1958 append_attrname (const std::pair<int, attr_access> &access,
1959 char *attrstr, size_t strsize)
1961 if (access.second.internal_p)
1962 return;
1964 tree str = access.second.to_external_string ();
1965 gcc_assert (strsize >= (size_t) TREE_STRING_LENGTH (str));
1966 strcpy (attrstr, TREE_STRING_POINTER (str));
1969 /* Iterate over attribute access read-only, read-write, and write-only
1970 arguments and diagnose past-the-end accesses and related problems
1971 in the function call EXP. */
1973 static void
1974 maybe_warn_rdwr_sizes (rdwr_map *rwm, tree fndecl, tree fntype, tree exp)
1976 auto_diagnostic_group adg;
1978 /* Set if a warning has been issued for any argument (used to decide
1979 whether to emit an informational note at the end). */
1980 bool any_warned = false;
1982 /* A string describing the attributes that the warnings issued by this
1983 function apply to. Used to print one informational note per function
1984 call, rather than one per warning. That reduces clutter. */
1985 char attrstr[80];
1986 attrstr[0] = 0;
1988 for (rdwr_map::iterator it = rwm->begin (); it != rwm->end (); ++it)
1990 std::pair<int, attr_access> access = *it;
1992 /* Get the function call arguments corresponding to the attribute's
1993 positional arguments. When both arguments have been specified
1994 there will be two entries in *RWM, one for each. They are
1995 cross-referenced by their respective argument numbers in
1996 ACCESS.PTRARG and ACCESS.SIZARG. */
1997 const int ptridx = access.second.ptrarg;
1998 const int sizidx = access.second.sizarg;
2000 gcc_assert (ptridx != -1);
2001 gcc_assert (access.first == ptridx || access.first == sizidx);
2003 /* The pointer is set to null for the entry corresponding to
2004 the size argument. Skip it. It's handled when the entry
2005 corresponding to the pointer argument comes up. */
2006 if (!access.second.ptr)
2007 continue;
2009 tree ptrtype = fntype_argno_type (fntype, ptridx);
2010 tree argtype = TREE_TYPE (ptrtype);
2012 /* The size of the access by the call. */
2013 tree access_size;
2014 if (sizidx == -1)
2016 /* If only the pointer attribute operand was specified and
2017 not size, set ACCESS_SIZE to the greater of MINSIZE or the size
2018 of one element of the pointed-to type to detect smaller
2019 objects (null pointers are diagnosed in this case only
2020 if the pointer is also declared with attribute nonnull). */
2021 if (access.second.minsize
2022 && access.second.minsize != HOST_WIDE_INT_M1U)
2023 access_size = build_int_cstu (sizetype, access.second.minsize);
2024 else
2025 access_size = size_one_node;
2027 else
2028 access_size = rwm->get (sizidx)->size;
2030 /* Format the value or range to avoid an explosion of messages. */
2031 char sizstr[80];
2032 tree sizrng[2] = { size_zero_node, build_all_ones_cst (sizetype) };
2033 if (get_size_range (access_size, sizrng, true))
2035 const char *s0 = print_generic_expr_to_str (sizrng[0]);
2036 if (tree_int_cst_equal (sizrng[0], sizrng[1]))
2038 gcc_checking_assert (strlen (s0) < sizeof sizstr);
2039 strcpy (sizstr, s0);
2041 else
2043 const char *s1 = print_generic_expr_to_str (sizrng[1]);
2044 gcc_checking_assert (strlen (s0) + strlen (s1)
2045 < sizeof sizstr - 4);
2046 sprintf (sizstr, "[%s, %s]", s0, s1);
2049 else
2050 *sizstr = '\0';
2052 /* Set if a warning has been issued for the current argument. */
2053 bool arg_warned = false;
2054 location_t loc = EXPR_LOCATION (exp);
2055 tree ptr = access.second.ptr;
2056 if (*sizstr
2057 && tree_int_cst_sgn (sizrng[0]) < 0
2058 && tree_int_cst_sgn (sizrng[1]) < 0)
2060 /* Warn about negative sizes. */
2061 if (access.second.internal_p)
2063 const std::string argtypestr
2064 = access.second.array_as_string (ptrtype);
2066 arg_warned = warning_at (loc, OPT_Wstringop_overflow_,
2067 "%Kbound argument %i value %s is "
2068 "negative for a variable length array "
2069 "argument %i of type %s",
2070 exp, sizidx + 1, sizstr,
2071 ptridx + 1, argtypestr.c_str ());
2073 else
2074 arg_warned = warning_at (loc, OPT_Wstringop_overflow_,
2075 "%Kargument %i value %s is negative",
2076 exp, sizidx + 1, sizstr);
2078 if (arg_warned)
2080 append_attrname (access, attrstr, sizeof attrstr);
2081 /* Remember a warning has been issued and avoid warning
2082 again below for the same attribute. */
2083 any_warned = true;
2084 continue;
2088 if (tree_int_cst_sgn (sizrng[0]) >= 0)
2090 if (COMPLETE_TYPE_P (argtype))
2092 /* Multiply ACCESS_SIZE by the size of the type the pointer
2093 argument points to. If it's incomplete the size is used
2094 as is. */
2095 if (tree argsize = TYPE_SIZE_UNIT (argtype))
2096 if (TREE_CODE (argsize) == INTEGER_CST)
2098 const int prec = TYPE_PRECISION (sizetype);
2099 wide_int minsize = wi::to_wide (sizrng[0], prec);
2100 minsize *= wi::to_wide (argsize, prec);
2101 access_size = wide_int_to_tree (sizetype, minsize);
2105 else
2106 access_size = NULL_TREE;
2108 if (integer_zerop (ptr))
2110 if (sizidx >= 0 && tree_int_cst_sgn (sizrng[0]) > 0)
2112 /* Warn about null pointers with positive sizes. This is
2113 different from also declaring the pointer argument with
2114 attribute nonnull when the function accepts null pointers
2115 only when the corresponding size is zero. */
2116 if (access.second.internal_p)
2118 const std::string argtypestr
2119 = access.second.array_as_string (ptrtype);
2121 arg_warned = warning_at (loc, OPT_Wnonnull,
2122 "%Kargument %i of variable length "
2123 "array %s is null but "
2124 "the corresponding bound argument "
2125 "%i value is %s",
2126 exp, ptridx + 1, argtypestr.c_str (),
2127 sizidx + 1, sizstr);
2129 else
2130 arg_warned = warning_at (loc, OPT_Wnonnull,
2131 "%Kargument %i is null but "
2132 "the corresponding size argument "
2133 "%i value is %s",
2134 exp, ptridx + 1, sizidx + 1,
2135 sizstr);
2137 else if (access_size && access.second.static_p)
2139 /* Warn about null pointers for [static N] array arguments
2140 but do not warn for ordinary (i.e., nonstatic) arrays. */
2141 arg_warned = warning_at (loc, OPT_Wnonnull,
2142 "%Kargument %i to %<%T[static %E]%> "
2143 "is null where non-null expected",
2144 exp, ptridx + 1, argtype,
2145 access_size);
2148 if (arg_warned)
2150 append_attrname (access, attrstr, sizeof attrstr);
2151 /* Remember a warning has been issued and avoid warning
2152 again below for the same attribute. */
2153 any_warned = true;
2154 continue;
2158 access_data data (ptr, access.second.mode, NULL_TREE, false,
2159 NULL_TREE, false);
2160 access_ref* const pobj = (access.second.mode == access_write_only
2161 ? &data.dst : &data.src);
2162 tree objsize = compute_objsize (ptr, 1, pobj);
2164 /* The size of the destination or source object. */
2165 tree dstsize = NULL_TREE, srcsize = NULL_TREE;
2166 if (access.second.mode == access_read_only
2167 || access.second.mode == access_none)
2169 /* For a read-only argument there is no destination. For
2170 no access, set the source as well and differentiate via
2171 the access flag below. */
2172 srcsize = objsize;
2173 if (access.second.mode == access_read_only
2174 || access.second.mode == access_none)
2176 /* For a read-only attribute there is no destination so
2177 clear OBJSIZE. This emits "reading N bytes" kind of
2178 diagnostics instead of the "writing N bytes" kind,
2179 unless MODE is none. */
2180 objsize = NULL_TREE;
2183 else
2184 dstsize = objsize;
2186 /* Clear the no-warning bit in case it was set by check_access
2187 in a prior iteration so that accesses via different arguments
2188 are diagnosed. */
2189 TREE_NO_WARNING (exp) = false;
2190 access_mode mode = data.mode;
2191 if (mode == access_deferred)
2192 mode = TYPE_READONLY (argtype) ? access_read_only : access_read_write;
2193 check_access (exp, access_size, /*maxread=*/ NULL_TREE, srcsize,
2194 dstsize, mode, &data);
2196 if (TREE_NO_WARNING (exp))
2198 any_warned = true;
2200 if (access.second.internal_p)
2201 inform (loc, "referencing argument %u of type %qT",
2202 ptridx + 1, ptrtype);
2203 else
2204 /* If check_access issued a warning above, append the relevant
2205 attribute to the string. */
2206 append_attrname (access, attrstr, sizeof attrstr);
2210 if (*attrstr)
2212 if (fndecl)
2213 inform (DECL_SOURCE_LOCATION (fndecl),
2214 "in a call to function %qD declared with attribute %qs",
2215 fndecl, attrstr);
2216 else
2217 inform (EXPR_LOCATION (exp),
2218 "in a call with type %qT and attribute %qs",
2219 fntype, attrstr);
2221 else if (any_warned)
2223 if (fndecl)
2224 inform (DECL_SOURCE_LOCATION (fndecl),
2225 "in a call to function %qD", fndecl);
2226 else
2227 inform (EXPR_LOCATION (exp),
2228 "in a call with type %qT", fntype);
2231 /* Set the bit in case it was cleared and not set above. */
2232 TREE_NO_WARNING (exp) = true;
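/* Editor's note (illustrative): the kind of call the function above
   diagnoses, assuming GCC's attribute access.  With the declaration

	__attribute__ ((access (write_only, 1, 2)))
	void fill (char *, int);

   compiling

	void g (void)
	{
	  char buf[4];
	  fill (buf, 8);	// -Wstringop-overflow: writing 8 bytes
				// into a 4-byte object
	  fill (0, 1);		// -Wnonnull: null pointer with a
				// nonzero size argument
	}

   is expected to produce the warnings noted in the comments, each
   followed by a note pointing at the attribute on FILL.  */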
2235 /* Fill in ARGS_SIZE and ARGS array based on the parameters found in
2236 CALL_EXPR EXP.
2238 NUM_ACTUALS is the total number of parameters.
2240 N_NAMED_ARGS is the total number of named arguments.
2242 STRUCT_VALUE_ADDR_VALUE is the implicit argument for a struct return
2243 value, or null.
2245 FNDECL is the tree node for the target of this call (if known).
2247 ARGS_SO_FAR holds state needed by the target to know where to place
2248 the next argument.
2250 REG_PARM_STACK_SPACE is the number of bytes of stack space reserved
2251 for arguments which are passed in registers.
2253 OLD_STACK_LEVEL is a pointer to an rtx which holds the old stack level
2254 and may be modified by this routine.
2256 OLD_PENDING_ADJ, MUST_PREALLOCATE and FLAGS are pointers to integer
2257 flags which may be modified by this routine.
2259 MAY_TAILCALL is cleared if we encounter an invisible pass-by-reference
2260 that requires allocation of stack space.
2262 CALL_FROM_THUNK_P is true if this call is the jump from a thunk to
2263 the thunked-to function. */
2265 static void
2266 initialize_argument_information (int num_actuals ATTRIBUTE_UNUSED,
2267 struct arg_data *args,
2268 struct args_size *args_size,
2269 int n_named_args ATTRIBUTE_UNUSED,
2270 tree exp, tree struct_value_addr_value,
2271 tree fndecl, tree fntype,
2272 cumulative_args_t args_so_far,
2273 int reg_parm_stack_space,
2274 rtx *old_stack_level,
2275 poly_int64_pod *old_pending_adj,
2276 int *must_preallocate, int *ecf_flags,
2277 bool *may_tailcall, bool call_from_thunk_p)
2279 CUMULATIVE_ARGS *args_so_far_pnt = get_cumulative_args (args_so_far);
2280 location_t loc = EXPR_LOCATION (exp);
2282 /* Count arg position in order args appear. */
2283 int argpos;
2285 int i;
2287 args_size->constant = 0;
2288 args_size->var = 0;
2290 bitmap_obstack_initialize (NULL);
2292 /* In this loop, we consider args in the order they are written.
2293 We fill up ARGS from the back. */
2295 i = num_actuals - 1;
2297 int j = i;
2298 call_expr_arg_iterator iter;
2299 tree arg;
2300 bitmap slots = NULL;
2302 if (struct_value_addr_value)
2304 args[j].tree_value = struct_value_addr_value;
2305 j--;
2307 argpos = 0;
2308 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
2310 tree argtype = TREE_TYPE (arg);
2312 if (targetm.calls.split_complex_arg
2313 && argtype
2314 && TREE_CODE (argtype) == COMPLEX_TYPE
2315 && targetm.calls.split_complex_arg (argtype))
2317 tree subtype = TREE_TYPE (argtype);
2318 args[j].tree_value = build1 (REALPART_EXPR, subtype, arg);
2319 j--;
2320 args[j].tree_value = build1 (IMAGPART_EXPR, subtype, arg);
2322 else
2323 args[j].tree_value = arg;
2324 j--;
2325 argpos++;
2328 if (slots)
2329 BITMAP_FREE (slots);
2332 bitmap_obstack_release (NULL);
2334 tree fntypeattrs = TYPE_ATTRIBUTES (fntype);
2335 /* Extract attribute alloc_size from the type of the called expression
2336 (which could be a function or a function pointer) and if set, store
2337 the indices of the corresponding arguments in ALLOC_IDX, and then
2338 the actual argument(s) at those indices in ALLOC_ARGS. */
2339 int alloc_idx[2] = { -1, -1 };
2340 if (tree alloc_size = lookup_attribute ("alloc_size", fntypeattrs))
2342 tree args = TREE_VALUE (alloc_size);
2343 alloc_idx[0] = TREE_INT_CST_LOW (TREE_VALUE (args)) - 1;
2344 if (TREE_CHAIN (args))
2345 alloc_idx[1] = TREE_INT_CST_LOW (TREE_VALUE (TREE_CHAIN (args))) - 1;
2348 /* Array for up to the two attribute alloc_size arguments. */
2349 tree alloc_args[] = { NULL_TREE, NULL_TREE };
2351 /* Map of attribute access specifications for function arguments. */
2352 rdwr_map rdwr_idx;
2353 init_attr_rdwr_indices (&rdwr_idx, fntypeattrs);
2355 /* I counts args in order (to be) pushed; ARGPOS counts in order written. */
2356 for (argpos = 0; argpos < num_actuals; i--, argpos++)
2358 tree type = TREE_TYPE (args[i].tree_value);
2359 int unsignedp;
2361 /* Replace erroneous argument with constant zero. */
2362 if (type == error_mark_node || !COMPLETE_TYPE_P (type))
2363 args[i].tree_value = integer_zero_node, type = integer_type_node;
2365 /* If TYPE is a transparent union or record, pass things the way
2366 we would pass the first field of the union or record. We have
2367 already verified that the modes are the same. */
2368 if (RECORD_OR_UNION_TYPE_P (type) && TYPE_TRANSPARENT_AGGR (type))
2369 type = TREE_TYPE (first_field (type));
2371 /* Decide where to pass this arg.
2373 args[i].reg is nonzero if all or part is passed in registers.
2375 args[i].partial is nonzero if part but not all is passed in registers,
2376 and the exact value says how many bytes are passed in registers.
2378 args[i].pass_on_stack is nonzero if the argument must at least be
2379 computed on the stack. It may then be loaded back into registers
2380 if args[i].reg is nonzero.
2382 These decisions are driven by the FUNCTION_... macros and must agree
2383 with those made by function.c. */
2385 /* See if this argument should be passed by invisible reference. */
2386 function_arg_info arg (type, argpos < n_named_args);
2387 if (pass_by_reference (args_so_far_pnt, arg))
2389 bool callee_copies;
2390 tree base = NULL_TREE;
2392 callee_copies = reference_callee_copied (args_so_far_pnt, arg);
2394 /* If we're compiling a thunk, pass through invisible references
2395 instead of making a copy. */
2396 if (call_from_thunk_p
2397 || (callee_copies
2398 && !TREE_ADDRESSABLE (type)
2399 && (base = get_base_address (args[i].tree_value))
2400 && TREE_CODE (base) != SSA_NAME
2401 && (!DECL_P (base) || MEM_P (DECL_RTL (base)))))
2403 /* We may have turned the parameter value into an SSA name.
2404 Go back to the original parameter so we can take the
2405 address. */
2406 if (TREE_CODE (args[i].tree_value) == SSA_NAME)
2408 gcc_assert (SSA_NAME_IS_DEFAULT_DEF (args[i].tree_value));
2409 args[i].tree_value = SSA_NAME_VAR (args[i].tree_value);
2410 gcc_assert (TREE_CODE (args[i].tree_value) == PARM_DECL);
2412 /* Argument setup code may have copied the value to a register. We
2413 revert that optimization now because the tail call code must
2414 use the original location. */
2415 if (TREE_CODE (args[i].tree_value) == PARM_DECL
2416 && !MEM_P (DECL_RTL (args[i].tree_value))
2417 && DECL_INCOMING_RTL (args[i].tree_value)
2418 && MEM_P (DECL_INCOMING_RTL (args[i].tree_value)))
2419 set_decl_rtl (args[i].tree_value,
2420 DECL_INCOMING_RTL (args[i].tree_value));
2422 mark_addressable (args[i].tree_value);
2424 /* We can't use sibcalls if a callee-copied argument is
2425 stored in the current function's frame. */
2426 if (!call_from_thunk_p && DECL_P (base) && !TREE_STATIC (base))
2428 *may_tailcall = false;
2429 maybe_complain_about_tail_call (exp,
2430 "a callee-copied argument is"
2431 " stored in the current"
2432 " function's frame");
2435 args[i].tree_value = build_fold_addr_expr_loc (loc,
2436 args[i].tree_value);
2437 type = TREE_TYPE (args[i].tree_value);
2439 if (*ecf_flags & ECF_CONST)
2440 *ecf_flags &= ~(ECF_CONST | ECF_LOOPING_CONST_OR_PURE);
2442 else
2444 /* We make a copy of the object and pass the address to the
2445 function being called. */
2446 rtx copy;
2448 if (!COMPLETE_TYPE_P (type)
2449 || TREE_CODE (TYPE_SIZE_UNIT (type)) != INTEGER_CST
2450 || (flag_stack_check == GENERIC_STACK_CHECK
2451 && compare_tree_int (TYPE_SIZE_UNIT (type),
2452 STACK_CHECK_MAX_VAR_SIZE) > 0))
2454 /* This is a variable-sized object. Make space on the stack
2455 for it. */
2456 rtx size_rtx = expr_size (args[i].tree_value);
2458 if (*old_stack_level == 0)
2460 emit_stack_save (SAVE_BLOCK, old_stack_level);
2461 *old_pending_adj = pending_stack_adjust;
2462 pending_stack_adjust = 0;
2465 /* We can pass TRUE as the 4th argument because we just
2466 saved the stack pointer and will restore it right after
2467 the call. */
2468 copy = allocate_dynamic_stack_space (size_rtx,
2469 TYPE_ALIGN (type),
2470 TYPE_ALIGN (type),
2471 max_int_size_in_bytes
2472 (type),
2473 true);
2474 copy = gen_rtx_MEM (BLKmode, copy);
2475 set_mem_attributes (copy, type, 1);
2477 else
2478 copy = assign_temp (type, 1, 0);
2480 store_expr (args[i].tree_value, copy, 0, false, false);
2482 /* Just change the const function to pure and then let
2483 the next test clear the pure based on
2484 callee_copies. */
2485 if (*ecf_flags & ECF_CONST)
2487 *ecf_flags &= ~ECF_CONST;
2488 *ecf_flags |= ECF_PURE;
2491 if (!callee_copies && *ecf_flags & ECF_PURE)
2492 *ecf_flags &= ~(ECF_PURE | ECF_LOOPING_CONST_OR_PURE);
2494 args[i].tree_value
2495 = build_fold_addr_expr_loc (loc, make_tree (type, copy));
2496 type = TREE_TYPE (args[i].tree_value);
2497 *may_tailcall = false;
2498 maybe_complain_about_tail_call (exp,
2499 "argument must be passed"
2500 " by copying");
2502 arg.pass_by_reference = true;
2505 unsignedp = TYPE_UNSIGNED (type);
2506 arg.type = type;
2507 arg.mode
2508 = promote_function_mode (type, TYPE_MODE (type), &unsignedp,
2509 fndecl ? TREE_TYPE (fndecl) : fntype, 0);
2511 args[i].unsignedp = unsignedp;
2512 args[i].mode = arg.mode;
2514 targetm.calls.warn_parameter_passing_abi (args_so_far, type);
2516 args[i].reg = targetm.calls.function_arg (args_so_far, arg);
2518 if (args[i].reg && CONST_INT_P (args[i].reg))
2519 args[i].reg = NULL;
2521 /* If this is a sibling call and the machine has register windows, the
2522 register window has to be unwound before calling the routine, so
2523 arguments have to go into the incoming registers. */
2524 if (targetm.calls.function_incoming_arg != targetm.calls.function_arg)
2525 args[i].tail_call_reg
2526 = targetm.calls.function_incoming_arg (args_so_far, arg);
2527 else
2528 args[i].tail_call_reg = args[i].reg;
2530 if (args[i].reg)
2531 args[i].partial = targetm.calls.arg_partial_bytes (args_so_far, arg);
2533 args[i].pass_on_stack = targetm.calls.must_pass_in_stack (arg);
2535 /* If FUNCTION_ARG returned a (parallel [(expr_list (nil) ...) ...]),
2536 it means that we are to pass this arg in the register(s) designated
2537 by the PARALLEL, but also to pass it in the stack. */
2538 if (args[i].reg && GET_CODE (args[i].reg) == PARALLEL
2539 && XEXP (XVECEXP (args[i].reg, 0, 0), 0) == 0)
2540 args[i].pass_on_stack = 1;
2542 /* If this is an addressable type, we must preallocate the stack
2543 since we must evaluate the object into its final location.
2545 If this is to be passed in both registers and the stack, it is simpler
2546 to preallocate. */
2547 if (TREE_ADDRESSABLE (type)
2548 || (args[i].pass_on_stack && args[i].reg != 0))
2549 *must_preallocate = 1;
2551 /* Compute the stack-size of this argument. */
2552 if (args[i].reg == 0 || args[i].partial != 0
2553 || reg_parm_stack_space > 0
2554 || args[i].pass_on_stack)
2555 locate_and_pad_parm (arg.mode, type,
2556 #ifdef STACK_PARMS_IN_REG_PARM_AREA
2558 #else
2559 args[i].reg != 0,
2560 #endif
2561 reg_parm_stack_space,
2562 args[i].pass_on_stack ? 0 : args[i].partial,
2563 fndecl, args_size, &args[i].locate);
2564 #ifdef BLOCK_REG_PADDING
2565 else
2566 /* The argument is passed entirely in registers. See at which
2567 end it should be padded. */
2568 args[i].locate.where_pad =
2569 BLOCK_REG_PADDING (arg.mode, type,
2570 int_size_in_bytes (type) <= UNITS_PER_WORD);
2571 #endif
2573 /* Update ARGS_SIZE, the total stack space for args so far. */
2575 args_size->constant += args[i].locate.size.constant;
2576 if (args[i].locate.size.var)
2577 ADD_PARM_SIZE (*args_size, args[i].locate.size.var);
2579 /* Increment ARGS_SO_FAR, which has info about which arg-registers
2580 have been used, etc. */
2582 /* ??? Traditionally we've passed TYPE_MODE here, instead of the
2583 promoted_mode used for function_arg above. However, the
2584 corresponding handling of incoming arguments in function.c
2585 does pass the promoted mode. */
2586 arg.mode = TYPE_MODE (type);
2587 targetm.calls.function_arg_advance (args_so_far, arg);
2589 /* Store argument values for functions decorated with attribute
2590 alloc_size. */
2591 if (argpos == alloc_idx[0])
2592 alloc_args[0] = args[i].tree_value;
2593 else if (argpos == alloc_idx[1])
2594 alloc_args[1] = args[i].tree_value;
2596 /* Save the actual argument that corresponds to the access attribute
2597 operand for later processing. */
2598 if (attr_access *access = rdwr_idx.get (argpos))
2600 if (POINTER_TYPE_P (type))
2602 access->ptr = args[i].tree_value;
2603 /* A nonnull ACCESS->SIZE contains VLA bounds. */
2605 else
2607 access->size = args[i].tree_value;
2608 gcc_assert (access->ptr == NULL_TREE);
2613 if (alloc_args[0])
2615 /* Check the arguments of functions decorated with attribute
2616 alloc_size. */
2617 maybe_warn_alloc_args_overflow (fndecl, exp, alloc_args, alloc_idx);
2620 /* Detect passing non-string arguments to functions expecting
2621 nul-terminated strings. */
2622 maybe_warn_nonstring_arg (fndecl, exp);
2624 /* Check attribute access arguments. */
2625 maybe_warn_rdwr_sizes (&rdwr_idx, fndecl, fntype, exp);
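/* Editor's note: a sketch of the attribute consumed by the alloc_size
   handling above.  Assuming a declaration such as (MY_ALLOC is
   hypothetical)

	__attribute__ ((alloc_size (1, 2)))
	void *my_alloc (size_t nelts, size_t eltsize);

   the 1-based attribute operands give ALLOC_IDX = { 0, 1 }, the loop
   above captures the actual NELTS and ELTSIZE expressions in
   ALLOC_ARGS, and maybe_warn_alloc_args_overflow checks them (and
   their product) against the maximum object size.  */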
2628 /* Update ARGS_SIZE to contain the total size for the argument block.
2629 Return the original constant component of the argument block's size.
2631 REG_PARM_STACK_SPACE holds the number of bytes of stack space reserved
2632 for arguments passed in registers. */
2634 static poly_int64
2635 compute_argument_block_size (int reg_parm_stack_space,
2636 struct args_size *args_size,
2637 tree fndecl ATTRIBUTE_UNUSED,
2638 tree fntype ATTRIBUTE_UNUSED,
2639 int preferred_stack_boundary ATTRIBUTE_UNUSED)
2641 poly_int64 unadjusted_args_size = args_size->constant;
2643 /* For accumulate outgoing args mode we don't need to align, since the frame
2644 will be already aligned. Align to STACK_BOUNDARY in order to prevent
2645 backends from generating misaligned frame sizes. */
2646 if (ACCUMULATE_OUTGOING_ARGS && preferred_stack_boundary > STACK_BOUNDARY)
2647 preferred_stack_boundary = STACK_BOUNDARY;
2649 /* Compute the actual size of the argument block required. The variable
2650 and constant sizes must be combined, the size may have to be rounded,
2651 and there may be a minimum required size. */
2653 if (args_size->var)
2655 args_size->var = ARGS_SIZE_TREE (*args_size);
2656 args_size->constant = 0;
2658 preferred_stack_boundary /= BITS_PER_UNIT;
2659 if (preferred_stack_boundary > 1)
2661 /* We don't handle this case yet. To handle it correctly we have
2662 to add the delta, round and subtract the delta.
2663 Currently no machine description requires this support. */
2664 gcc_assert (multiple_p (stack_pointer_delta,
2665 preferred_stack_boundary));
2666 args_size->var = round_up (args_size->var, preferred_stack_boundary);
2669 if (reg_parm_stack_space > 0)
2671 args_size->var
2672 = size_binop (MAX_EXPR, args_size->var,
2673 ssize_int (reg_parm_stack_space));
2675 /* The area corresponding to register parameters is not to be counted
2676 in the size of the block we need, so make the adjustment. */
2677 if (! OUTGOING_REG_PARM_STACK_SPACE ((!fndecl ? fntype : TREE_TYPE (fndecl))))
2678 args_size->var
2679 = size_binop (MINUS_EXPR, args_size->var,
2680 ssize_int (reg_parm_stack_space));
2683 else
2685 preferred_stack_boundary /= BITS_PER_UNIT;
2686 if (preferred_stack_boundary < 1)
2687 preferred_stack_boundary = 1;
2688 args_size->constant = (aligned_upper_bound (args_size->constant
2689 + stack_pointer_delta,
2690 preferred_stack_boundary)
2691 - stack_pointer_delta);
2693 args_size->constant = upper_bound (args_size->constant,
2694 reg_parm_stack_space);
2696 if (! OUTGOING_REG_PARM_STACK_SPACE ((!fndecl ? fntype : TREE_TYPE (fndecl))))
2697 args_size->constant -= reg_parm_stack_space;
2699 return unadjusted_args_size;
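/* Editor's note: a worked example of the rounding above, assuming
   REG_PARM_STACK_SPACE is 0, stack_pointer_delta is 4,
   ARGS_SIZE->CONSTANT is 10 and the preferred boundary is 16 bytes:

	aligned_upper_bound (10 + 4, 16) - 4 = 16 - 4 = 12

   so two bytes of padding are added to the argument block and the
   stack pointer ends up at 4 + 12 = 16, a multiple of the
   boundary.  */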
2702 /* Precompute parameters as needed for a function call.
2704 FLAGS is mask of ECF_* constants.
2706 NUM_ACTUALS is the number of arguments.
2708 ARGS is an array containing information for each argument; this
2709 routine fills in the INITIAL_VALUE and VALUE fields for each
2710 precomputed argument. */
2712 static void
2713 precompute_arguments (int num_actuals, struct arg_data *args)
2715 int i;
2717 /* If this is a libcall, then precompute all arguments so that we do not
2718 get extraneous instructions emitted as part of the libcall sequence. */
2720 /* If we preallocated the stack space, and some arguments must be passed
2721 on the stack, then we must precompute any parameter which contains a
2722 function call which will store arguments on the stack.
2723 Otherwise, evaluating the parameter may clobber previous parameters
2724 which have already been stored into the stack. (We have code to avoid
2725 such a case by saving the outgoing stack arguments, but it results in
2726 worse code.)
2727 if (!ACCUMULATE_OUTGOING_ARGS)
2728 return;
2730 for (i = 0; i < num_actuals; i++)
2732 tree type;
2733 machine_mode mode;
2735 if (TREE_CODE (args[i].tree_value) != CALL_EXPR)
2736 continue;
2738 /* If this is an addressable type, we cannot pre-evaluate it. */
2739 type = TREE_TYPE (args[i].tree_value);
2740 gcc_assert (!TREE_ADDRESSABLE (type));
2742 args[i].initial_value = args[i].value
2743 = expand_normal (args[i].tree_value);
2745 mode = TYPE_MODE (type);
2746 if (mode != args[i].mode)
2748 int unsignedp = args[i].unsignedp;
2749 args[i].value
2750 = convert_modes (args[i].mode, mode,
2751 args[i].value, args[i].unsignedp);
2753 /* CSE will replace this only if it contains args[i].value
2754 pseudo, so convert it down to the declared mode using
2755 a SUBREG. */
2756 if (REG_P (args[i].value)
2757 && GET_MODE_CLASS (args[i].mode) == MODE_INT
2758 && promote_mode (type, mode, &unsignedp) != args[i].mode)
2760 args[i].initial_value
2761 = gen_lowpart_SUBREG (mode, args[i].value);
2762 SUBREG_PROMOTED_VAR_P (args[i].initial_value) = 1;
2763 SUBREG_PROMOTED_SET (args[i].initial_value, args[i].unsignedp);
2769 /* Given the current state of MUST_PREALLOCATE and information about
2770 arguments to a function call in NUM_ACTUALS, ARGS and ARGS_SIZE,
2771 compute and return the final value for MUST_PREALLOCATE. */
2773 static int
2774 finalize_must_preallocate (int must_preallocate, int num_actuals,
2775 struct arg_data *args, struct args_size *args_size)
2777 /* See if we have or want to preallocate stack space.
2779 If we would have to push a partially-in-regs parm
2780 before other stack parms, preallocate stack space instead.
2782 If the size of some parm is not a multiple of the required stack
2783 alignment, we must preallocate.
2785 If the total size of arguments that would otherwise create a copy in
2786 a temporary (such as a CALL) is more than half the total argument list
2787 size, preallocation is faster.
2789 Another reason to preallocate is if we have a machine (like the m88k)
2790 where stack alignment is required to be maintained between every
2791 pair of insns, not just when the call is made. However, we assume here
2792 that such machines either do not have push insns (and hence preallocation
2793 would occur anyway) or the problem is taken care of with
2794 PUSH_ROUNDING. */
2796 if (! must_preallocate)
2798 int partial_seen = 0;
2799 poly_int64 copy_to_evaluate_size = 0;
2800 int i;
2802 for (i = 0; i < num_actuals && ! must_preallocate; i++)
2804 if (args[i].partial > 0 && ! args[i].pass_on_stack)
2805 partial_seen = 1;
2806 else if (partial_seen && args[i].reg == 0)
2807 must_preallocate = 1;
2809 if (TYPE_MODE (TREE_TYPE (args[i].tree_value)) == BLKmode
2810 && (TREE_CODE (args[i].tree_value) == CALL_EXPR
2811 || TREE_CODE (args[i].tree_value) == TARGET_EXPR
2812 || TREE_CODE (args[i].tree_value) == COND_EXPR
2813 || TREE_ADDRESSABLE (TREE_TYPE (args[i].tree_value))))
2814 copy_to_evaluate_size
2815 += int_size_in_bytes (TREE_TYPE (args[i].tree_value));
2818 if (maybe_ne (args_size->constant, 0)
2819 && maybe_ge (copy_to_evaluate_size * 2, args_size->constant))
2820 must_preallocate = 1;
2822 return must_preallocate;
2825 /* If we preallocated stack space, compute the address of each argument
2826 and store it into the ARGS array.
2828 We need not ensure it is a valid memory address here; it will be
2829 validized when it is used.
2831 ARGBLOCK is an rtx for the address of the outgoing arguments. */
2833 static void
2834 compute_argument_addresses (struct arg_data *args, rtx argblock, int num_actuals)
2836 if (argblock)
2838 rtx arg_reg = argblock;
2839 int i;
2840 poly_int64 arg_offset = 0;
2842 if (GET_CODE (argblock) == PLUS)
2844 arg_reg = XEXP (argblock, 0);
2845 arg_offset = rtx_to_poly_int64 (XEXP (argblock, 1));
2848 for (i = 0; i < num_actuals; i++)
2850 rtx offset = ARGS_SIZE_RTX (args[i].locate.offset);
2851 rtx slot_offset = ARGS_SIZE_RTX (args[i].locate.slot_offset);
2852 rtx addr;
2853 unsigned int align, boundary;
2854 poly_uint64 units_on_stack = 0;
2855 machine_mode partial_mode = VOIDmode;
2857 /* Skip this parm if it will not be passed on the stack. */
2858 if (! args[i].pass_on_stack
2859 && args[i].reg != 0
2860 && args[i].partial == 0)
2861 continue;
2863 if (TYPE_EMPTY_P (TREE_TYPE (args[i].tree_value)))
2864 continue;
2866 addr = simplify_gen_binary (PLUS, Pmode, arg_reg, offset);
2867 addr = plus_constant (Pmode, addr, arg_offset);
2869 if (args[i].partial != 0)
2871 /* Only part of the parameter is being passed on the stack.
2872 Generate a simple memory reference of the correct size. */
2873 units_on_stack = args[i].locate.size.constant;
2874 poly_uint64 bits_on_stack = units_on_stack * BITS_PER_UNIT;
2875 partial_mode = int_mode_for_size (bits_on_stack, 1).else_blk ();
2876 args[i].stack = gen_rtx_MEM (partial_mode, addr);
2877 set_mem_size (args[i].stack, units_on_stack);
2879 else
2881 args[i].stack = gen_rtx_MEM (args[i].mode, addr);
2882 set_mem_attributes (args[i].stack,
2883 TREE_TYPE (args[i].tree_value), 1);
2885 align = BITS_PER_UNIT;
2886 boundary = args[i].locate.boundary;
2887 poly_int64 offset_val;
2888 if (args[i].locate.where_pad != PAD_DOWNWARD)
2889 align = boundary;
2890 else if (poly_int_rtx_p (offset, &offset_val))
2892 align = least_bit_hwi (boundary);
2893 unsigned int offset_align
2894 = known_alignment (offset_val) * BITS_PER_UNIT;
2895 if (offset_align != 0)
2896 align = MIN (align, offset_align);
2898 set_mem_align (args[i].stack, align);
2900 addr = simplify_gen_binary (PLUS, Pmode, arg_reg, slot_offset);
2901 addr = plus_constant (Pmode, addr, arg_offset);
2903 if (args[i].partial != 0)
2905 /* Only part of the parameter is being passed on the stack.
2906 Generate a simple memory reference of the correct size. */
2908 args[i].stack_slot = gen_rtx_MEM (partial_mode, addr);
2909 set_mem_size (args[i].stack_slot, units_on_stack);
2911 else
2913 args[i].stack_slot = gen_rtx_MEM (args[i].mode, addr);
2914 set_mem_attributes (args[i].stack_slot,
2915 TREE_TYPE (args[i].tree_value), 1);
2917 set_mem_align (args[i].stack_slot, args[i].locate.boundary);
2919 /* Function incoming arguments may overlap with sibling call
2920 outgoing arguments and we cannot allow reordering of reads
2921 from function arguments with stores to outgoing arguments
2922 of sibling calls. */
2923 set_mem_alias_set (args[i].stack, 0);
2924 set_mem_alias_set (args[i].stack_slot, 0);
2929 /* Given a FNDECL and EXP, return an rtx suitable for use as a target address
2930 in a call instruction.
2932 FNDECL is the tree node for the target function. For an indirect call
2933 FNDECL will be NULL_TREE.
2935 ADDR is the operand 0 of CALL_EXPR for this call. */
2937 static rtx
2938 rtx_for_function_call (tree fndecl, tree addr)
2940 rtx funexp;
2942 /* Get the function to call, in the form of RTL. */
2943 if (fndecl)
2945 if (!TREE_USED (fndecl) && fndecl != current_function_decl)
2946 TREE_USED (fndecl) = 1;
2948 /* Get a SYMBOL_REF rtx for the function address. */
2949 funexp = XEXP (DECL_RTL (fndecl), 0);
2951 else
2952 /* Generate an rtx (probably a pseudo-register) for the address. */
2954 push_temp_slots ();
2955 funexp = expand_normal (addr);
2956 pop_temp_slots (); /* FUNEXP can't be BLKmode. */
2958 return funexp;
2961 /* Return the static chain for this function, if any. */
2964 rtx_for_static_chain (const_tree fndecl_or_type, bool incoming_p)
2966 if (DECL_P (fndecl_or_type) && !DECL_STATIC_CHAIN (fndecl_or_type))
2967 return NULL;
2969 return targetm.calls.static_chain (fndecl_or_type, incoming_p);
2972 /* Internal state for internal_arg_pointer_based_exp and its helpers. */
2973 static struct
2975 /* Last insn that has been scanned by internal_arg_pointer_based_exp_scan,
2976 or NULL_RTX if none has been scanned yet. */
2977 rtx_insn *scan_start;
2978 /* Vector indexed by REGNO - FIRST_PSEUDO_REGISTER, recording if a pseudo is
2979 based on crtl->args.internal_arg_pointer. The element is NULL_RTX if the
2980 pseudo isn't based on it, a CONST_INT offset if the pseudo is based on it
2981 with fixed offset, or PC if this is with variable or unknown offset. */
2982 vec<rtx> cache;
2983 } internal_arg_pointer_exp_state;
2985 static rtx internal_arg_pointer_based_exp (const_rtx, bool);
2987 /* Helper function for internal_arg_pointer_based_exp. Scan insns in
2988 the tail call sequence, starting with first insn that hasn't been
2989 scanned yet, and note for each pseudo on the LHS whether it is based
2990 on crtl->args.internal_arg_pointer or not, and what offset from
2991 that pointer it has. */
2993 static void
2994 internal_arg_pointer_based_exp_scan (void)
2996 rtx_insn *insn, *scan_start = internal_arg_pointer_exp_state.scan_start;
2998 if (scan_start == NULL_RTX)
2999 insn = get_insns ();
3000 else
3001 insn = NEXT_INSN (scan_start);
3003 while (insn)
3005 rtx set = single_set (insn);
3006 if (set && REG_P (SET_DEST (set)) && !HARD_REGISTER_P (SET_DEST (set)))
3008 rtx val = NULL_RTX;
3009 unsigned int idx = REGNO (SET_DEST (set)) - FIRST_PSEUDO_REGISTER;
3010 /* Punt on pseudos set multiple times. */
3011 if (idx < internal_arg_pointer_exp_state.cache.length ()
3012 && (internal_arg_pointer_exp_state.cache[idx]
3013 != NULL_RTX))
3014 val = pc_rtx;
3015 else
3016 val = internal_arg_pointer_based_exp (SET_SRC (set), false);
3017 if (val != NULL_RTX)
3019 if (idx >= internal_arg_pointer_exp_state.cache.length ())
3020 internal_arg_pointer_exp_state.cache
3021 .safe_grow_cleared (idx + 1, true);
3022 internal_arg_pointer_exp_state.cache[idx] = val;
3025 if (NEXT_INSN (insn) == NULL_RTX)
3026 scan_start = insn;
3027 insn = NEXT_INSN (insn);
3030 internal_arg_pointer_exp_state.scan_start = scan_start;
3033 /* Compute whether RTL is based on crtl->args.internal_arg_pointer. Return
3034 NULL_RTX if RTL isn't based on it, a CONST_INT offset if RTL is based on
3035 it with fixed offset, or PC if this is with variable or unknown offset.
3036 TOPLEVEL is true if the function is invoked at the topmost level. */
3038 static rtx
3039 internal_arg_pointer_based_exp (const_rtx rtl, bool toplevel)
3041 if (CONSTANT_P (rtl))
3042 return NULL_RTX;
3044 if (rtl == crtl->args.internal_arg_pointer)
3045 return const0_rtx;
3047 if (REG_P (rtl) && HARD_REGISTER_P (rtl))
3048 return NULL_RTX;
3050 poly_int64 offset;
3051 if (GET_CODE (rtl) == PLUS && poly_int_rtx_p (XEXP (rtl, 1), &offset))
3053 rtx val = internal_arg_pointer_based_exp (XEXP (rtl, 0), toplevel);
3054 if (val == NULL_RTX || val == pc_rtx)
3055 return val;
3056 return plus_constant (Pmode, val, offset);
3059 /* When called at the topmost level, scan pseudo assignments in between the
3060 last scanned instruction in the tail call sequence and the latest insn
3061 in that sequence. */
3062 if (toplevel)
3063 internal_arg_pointer_based_exp_scan ();
3065 if (REG_P (rtl))
3067 unsigned int idx = REGNO (rtl) - FIRST_PSEUDO_REGISTER;
3068 if (idx < internal_arg_pointer_exp_state.cache.length ())
3069 return internal_arg_pointer_exp_state.cache[idx];
3071 return NULL_RTX;
3074 subrtx_iterator::array_type array;
3075 FOR_EACH_SUBRTX (iter, array, rtl, NONCONST)
3077 const_rtx x = *iter;
3078 if (REG_P (x) && internal_arg_pointer_based_exp (x, false) != NULL_RTX)
3079 return pc_rtx;
3080 if (MEM_P (x))
3081 iter.skip_subrtxes ();
3084 return NULL_RTX;
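/* Editor's note: an illustration of the cache encoding used above.
   For a tail call sequence containing

	(set (reg 100) (plus (reg argp) (const_int 16)))
	(set (reg 101) (mem (reg 100)))

   where ARGP is crtl->args.internal_arg_pointer, pseudo 100 is
   recorded as (const_int 16) (a fixed offset from the argument
   pointer) and pseudo 101 stays NULL_RTX (not based on it).  A
   pseudo assigned more than once, or one combined with a variable
   offset, is recorded as PC.  */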
3087 /* Return true if SIZE bytes starting from address ADDR might overlap an
3088 already-clobbered argument area. This function is used to determine
3089 if we should give up a sibcall. */
3091 static bool
3092 mem_might_overlap_already_clobbered_arg_p (rtx addr, poly_uint64 size)
3094 poly_int64 i;
3095 unsigned HOST_WIDE_INT start, end;
3096 rtx val;
3098 if (bitmap_empty_p (stored_args_map)
3099 && stored_args_watermark == HOST_WIDE_INT_M1U)
3100 return false;
3101 val = internal_arg_pointer_based_exp (addr, true);
3102 if (val == NULL_RTX)
3103 return false;
3104 else if (!poly_int_rtx_p (val, &i))
3105 return true;
3107 if (known_eq (size, 0U))
3108 return false;
3110 if (STACK_GROWS_DOWNWARD)
3111 i -= crtl->args.pretend_args_size;
3112 else
3113 i += crtl->args.pretend_args_size;
3115 if (ARGS_GROW_DOWNWARD)
3116 i = -i - size;
3118 /* We can ignore any references to the function's pretend args,
3119 which at this point would manifest as negative values of I. */
3120 if (known_le (i, 0) && known_le (size, poly_uint64 (-i)))
3121 return false;
3123 start = maybe_lt (i, 0) ? 0 : constant_lower_bound (i);
3124 if (!(i + size).is_constant (&end))
3125 end = HOST_WIDE_INT_M1U;
3127 if (end > stored_args_watermark)
3128 return true;
3130 end = MIN (end, SBITMAP_SIZE (stored_args_map));
3131 for (unsigned HOST_WIDE_INT k = start; k < end; ++k)
3132 if (bitmap_bit_p (stored_args_map, k))
3133 return true;
3135 return false;
3138 /* Do the register loads required for any wholly-register parms or any
3139 parms which are passed both on the stack and in a register. Their
3140 expressions were already evaluated.
3142 Mark all register-parms as living through the call, putting these USE
3143 insns in the CALL_INSN_FUNCTION_USAGE field.
3145 When IS_SIBCALL, perform the check_sibcall_argument_overlap
3146 checking, setting *SIBCALL_FAILURE if appropriate. */
3148 static void
3149 load_register_parameters (struct arg_data *args, int num_actuals,
3150 rtx *call_fusage, int flags, int is_sibcall,
3151 int *sibcall_failure)
3153 int i, j;
3155 for (i = 0; i < num_actuals; i++)
3157 rtx reg = ((flags & ECF_SIBCALL)
3158 ? args[i].tail_call_reg : args[i].reg);
3159 if (reg)
3161 int partial = args[i].partial;
3162 int nregs;
3163 poly_int64 size = 0;
3164 HOST_WIDE_INT const_size = 0;
3165 rtx_insn *before_arg = get_last_insn ();
3166 tree type = TREE_TYPE (args[i].tree_value);
3167 if (RECORD_OR_UNION_TYPE_P (type) && TYPE_TRANSPARENT_AGGR (type))
3168 type = TREE_TYPE (first_field (type));
3169 /* Set non-negative if we must move a word at a time, even if
3170 just one word (e.g., partial == 4 && mode == DFmode). Set
3171 to -1 if we just use a normal move insn. This value can be
3172 zero if the argument is a zero size structure. */
3173 nregs = -1;
3174 if (GET_CODE (reg) == PARALLEL)
3176 else if (partial)
3178 gcc_assert (partial % UNITS_PER_WORD == 0);
3179 nregs = partial / UNITS_PER_WORD;
3181 else if (TYPE_MODE (type) == BLKmode)
3183 /* Variable-sized parameters should be described by a
3184 PARALLEL instead. */
3185 const_size = int_size_in_bytes (type);
3186 gcc_assert (const_size >= 0);
3187 nregs = (const_size + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
3188 size = const_size;
3190 else
3191 size = GET_MODE_SIZE (args[i].mode);
3193 /* Handle calls that pass values in multiple non-contiguous
3194 locations. The Irix 6 ABI has examples of this. */
3196 if (GET_CODE (reg) == PARALLEL)
3197 emit_group_move (reg, args[i].parallel_value);
3199 /* If simple case, just do move. If normal partial, store_one_arg
3200 has already loaded the register for us. In all other cases,
3201 load the register(s) from memory. */
3203 else if (nregs == -1)
3205 emit_move_insn (reg, args[i].value);
3206 #ifdef BLOCK_REG_PADDING
3207 /* Handle the case where we have a value that needs shifting
3208 up to the msb, e.g. a QImode value and we're padding
3209 upward on a BYTES_BIG_ENDIAN machine. */
3210 if (args[i].locate.where_pad
3211 == (BYTES_BIG_ENDIAN ? PAD_UPWARD : PAD_DOWNWARD))
3213 gcc_checking_assert (ordered_p (size, UNITS_PER_WORD));
3214 if (maybe_lt (size, UNITS_PER_WORD))
3216 rtx x;
3217 poly_int64 shift
3218 = (UNITS_PER_WORD - size) * BITS_PER_UNIT;
3220 /* Assigning REG here rather than a temp makes
3221 CALL_FUSAGE report the whole reg as used.
3222 Strictly speaking, the call only uses SIZE
3223 bytes at the msb end, but it doesn't seem worth
3224 generating rtl to say that. */
3225 reg = gen_rtx_REG (word_mode, REGNO (reg));
3226 x = expand_shift (LSHIFT_EXPR, word_mode,
3227 reg, shift, reg, 1);
3228 if (x != reg)
3229 emit_move_insn (reg, x);
3232 #endif
3235 /* If we have pre-computed the values to put in the registers in
3236 the case of non-aligned structures, copy them in now. */
3238 else if (args[i].n_aligned_regs != 0)
3239 for (j = 0; j < args[i].n_aligned_regs; j++)
3240 emit_move_insn (gen_rtx_REG (word_mode, REGNO (reg) + j),
3241 args[i].aligned_regs[j]);
3243 else if (partial == 0 || args[i].pass_on_stack)
3245 /* SIZE and CONST_SIZE are 0 for partial arguments and
3246 the size of a BLKmode type otherwise. */
3247 gcc_checking_assert (known_eq (size, const_size));
3248 rtx mem = validize_mem (copy_rtx (args[i].value));
3250 /* Check for overlap with already clobbered argument area,
3251 providing that this has non-zero size. */
3252 if (is_sibcall
3253 && const_size != 0
3254 && (mem_might_overlap_already_clobbered_arg_p
3255 (XEXP (args[i].value, 0), const_size)))
3256 *sibcall_failure = 1;
3258 if (const_size % UNITS_PER_WORD == 0
3259 || MEM_ALIGN (mem) % BITS_PER_WORD == 0)
3260 move_block_to_reg (REGNO (reg), mem, nregs, args[i].mode);
3261 else
3263 if (nregs > 1)
3264 move_block_to_reg (REGNO (reg), mem, nregs - 1,
3265 args[i].mode);
3266 rtx dest = gen_rtx_REG (word_mode, REGNO (reg) + nregs - 1);
3267 unsigned int bitoff = (nregs - 1) * BITS_PER_WORD;
3268 unsigned int bitsize = const_size * BITS_PER_UNIT - bitoff;
3269 rtx x = extract_bit_field (mem, bitsize, bitoff, 1, dest,
3270 word_mode, word_mode, false,
3271 NULL);
3272 if (BYTES_BIG_ENDIAN)
3273 x = expand_shift (LSHIFT_EXPR, word_mode, x,
3274 BITS_PER_WORD - bitsize, dest, 1);
3275 if (x != dest)
3276 emit_move_insn (dest, x);
3279 /* Handle a BLKmode that needs shifting. */
3280 if (nregs == 1 && const_size < UNITS_PER_WORD
3281 #ifdef BLOCK_REG_PADDING
3282 && args[i].locate.where_pad == PAD_DOWNWARD
3283 #else
3284 && BYTES_BIG_ENDIAN
3285 #endif
3288 rtx dest = gen_rtx_REG (word_mode, REGNO (reg));
3289 int shift = (UNITS_PER_WORD - const_size) * BITS_PER_UNIT;
3290 enum tree_code dir = (BYTES_BIG_ENDIAN
3291 ? RSHIFT_EXPR : LSHIFT_EXPR);
3292 rtx x;
3294 x = expand_shift (dir, word_mode, dest, shift, dest, 1);
3295 if (x != dest)
3296 emit_move_insn (dest, x);
3300 /* When a parameter is a block, and perhaps in other cases, it is
3301 possible that it did a load from an argument slot that was
3302 already clobbered. */
3303 if (is_sibcall
3304 && check_sibcall_argument_overlap (before_arg, &args[i], 0))
3305 *sibcall_failure = 1;
3307 /* Handle calls that pass values in multiple non-contiguous
3308 locations. The Irix 6 ABI has examples of this. */
3309 if (GET_CODE (reg) == PARALLEL)
3310 use_group_regs (call_fusage, reg);
3311 else if (nregs == -1)
3312 use_reg_mode (call_fusage, reg, TYPE_MODE (type));
3313 else if (nregs > 0)
3314 use_regs (call_fusage, REGNO (reg), nregs);
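/* Editor's note: a worked example of the BLKmode path above.
   Assuming a 10-byte structure passed in registers on a target with
   8-byte words, NREGS is (10 + 7) / 8 = 2.  If the memory is not
   known to be word-aligned, the first full word is copied with
   move_block_to_reg and the trailing 2 bytes are fetched with
   extract_bit_field as a 16-bit field at bit offset 64, shifted up
   by 64 - 16 = 48 bits when BYTES_BIG_ENDIAN, and moved into the
   second register.  */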
3319 /* We need to pop PENDING_STACK_ADJUST bytes. But, if the arguments
3320 wouldn't fill up an even multiple of PREFERRED_UNIT_STACK_BOUNDARY
3321 bytes, then we would need to push some additional bytes to pad the
3322 arguments. So, we try to compute an adjustment to the stack pointer for an
3323 amount that will leave the stack under-aligned by UNADJUSTED_ARGS_SIZE
3324 bytes. Then, when the arguments are pushed the stack will be perfectly
3325 aligned.
3327 Return true if this optimization is possible, storing the adjustment
3328 in ADJUSTMENT_OUT and setting ARGS_SIZE->CONSTANT to the number of
3329 bytes that should be popped after the call. */
3331 static bool
3332 combine_pending_stack_adjustment_and_call (poly_int64_pod *adjustment_out,
3333 poly_int64 unadjusted_args_size,
3334 struct args_size *args_size,
3335 unsigned int preferred_unit_stack_boundary)
3337 /* The number of bytes to pop so that the stack will be
3338 under-aligned by UNADJUSTED_ARGS_SIZE bytes. */
3339 poly_int64 adjustment;
3340 /* The alignment of the stack after the arguments are pushed, if we
3341 just pushed the arguments without adjusting the stack here. */
3342 unsigned HOST_WIDE_INT unadjusted_alignment;
3344 if (!known_misalignment (stack_pointer_delta + unadjusted_args_size,
3345 preferred_unit_stack_boundary,
3346 &unadjusted_alignment))
3347 return false;
3349 /* We want to get rid of as many of the PENDING_STACK_ADJUST bytes
3350 as possible -- leaving just enough left to cancel out the
3351 UNADJUSTED_ALIGNMENT. In other words, we want to ensure that the
3352 PENDING_STACK_ADJUST is non-negative, and congruent to
3353 -UNADJUSTED_ALIGNMENT modulo the PREFERRED_UNIT_STACK_BOUNDARY. */
3355 /* Begin by trying to pop all the bytes. */
3356 unsigned HOST_WIDE_INT tmp_misalignment;
3357 if (!known_misalignment (pending_stack_adjust,
3358 preferred_unit_stack_boundary,
3359 &tmp_misalignment))
3360 return false;
3361 unadjusted_alignment -= tmp_misalignment;
3362 adjustment = pending_stack_adjust;
3363 /* Push enough additional bytes that the stack will be aligned
3364 after the arguments are pushed. */
3365 if (preferred_unit_stack_boundary > 1 && unadjusted_alignment)
3366 adjustment -= preferred_unit_stack_boundary - unadjusted_alignment;
3368 /* We need to know whether the adjusted argument size
3369 (UNADJUSTED_ARGS_SIZE - ADJUSTMENT) constitutes an allocation
3370 or a deallocation. */
3371 if (!ordered_p (adjustment, unadjusted_args_size))
3372 return false;
3374 /* Now, set ARGS_SIZE->CONSTANT so that we pop the right number of
3375 bytes after the call. The right number is the entire
3376 PENDING_STACK_ADJUST less our ADJUSTMENT plus the amount required
3377 by the arguments in the first place. */
3378 args_size->constant
3379 = pending_stack_adjust - adjustment + unadjusted_args_size;
3381 *adjustment_out = adjustment;
3382 return true;
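/* Editor's note: a worked example of the computation above.  Assuming
   stack_pointer_delta is 0, UNADJUSTED_ARGS_SIZE is 12,
   PENDING_STACK_ADJUST is 20 and a 16-byte boundary:
   UNADJUSTED_ALIGNMENT starts as 12; popping all 20 pending bytes
   would contribute misalignment 4, leaving 12 - 4 = 8, so ADJUSTMENT
   becomes 20 - (16 - 8) = 12.  Popping 12 bytes now and then pushing
   the 12 bytes of arguments leaves the stack pointer 16-byte
   aligned, and ARGS_SIZE->CONSTANT becomes 20 - 12 + 12 = 20 bytes
   to pop after the call.  */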
3385 /* Scan expression X to check whether it dereferences any argument slots
3386 already clobbered by tail call arguments (as noted in the stored_args_map
3387 bitmap).
3388 Return nonzero if X dereferences such an argument slot,
3389 zero otherwise. */
3391 static int
3392 check_sibcall_argument_overlap_1 (rtx x)
3394 RTX_CODE code;
3395 int i, j;
3396 const char *fmt;
3398 if (x == NULL_RTX)
3399 return 0;
3401 code = GET_CODE (x);
3403 /* We need not check the operands of the CALL expression itself. */
3404 if (code == CALL)
3405 return 0;
3407 if (code == MEM)
3408 return (mem_might_overlap_already_clobbered_arg_p
3409 (XEXP (x, 0), GET_MODE_SIZE (GET_MODE (x))));
3411 /* Scan all subexpressions. */
3412 fmt = GET_RTX_FORMAT (code);
3413 for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
3415 if (*fmt == 'e')
3417 if (check_sibcall_argument_overlap_1 (XEXP (x, i)))
3418 return 1;
3420 else if (*fmt == 'E')
3422 for (j = 0; j < XVECLEN (x, i); j++)
3423 if (check_sibcall_argument_overlap_1 (XVECEXP (x, i, j)))
3424 return 1;
3427 return 0;
3430 /* Scan the sequence after INSN to check whether it dereferences any
3431 argument slots already clobbered by tail call arguments (as noted in the
3432 stored_args_map bitmap). If MARK_STORED_ARGS_MAP, add the stack slots
3433 for ARG to the stored_args_map bitmap afterwards (when ARG is a register,
3434 MARK_STORED_ARGS_MAP should be 0). Return nonzero if the sequence after
3435 INSN dereferences such argument slots, zero otherwise. */
3437 static int
3438 check_sibcall_argument_overlap (rtx_insn *insn, struct arg_data *arg,
3439 int mark_stored_args_map)
3441 poly_uint64 low, high;
3442 unsigned HOST_WIDE_INT const_low, const_high;
3444 if (insn == NULL_RTX)
3445 insn = get_insns ();
3446 else
3447 insn = NEXT_INSN (insn);
3449 for (; insn; insn = NEXT_INSN (insn))
3450 if (INSN_P (insn)
3451 && check_sibcall_argument_overlap_1 (PATTERN (insn)))
3452 break;
3454 if (mark_stored_args_map)
3456 if (ARGS_GROW_DOWNWARD)
3457 low = -arg->locate.slot_offset.constant - arg->locate.size.constant;
3458 else
3459 low = arg->locate.slot_offset.constant;
3460 high = low + arg->locate.size.constant;
3462 const_low = constant_lower_bound (low);
3463 if (high.is_constant (&const_high))
3464 for (unsigned HOST_WIDE_INT i = const_low; i < const_high; ++i)
3465 bitmap_set_bit (stored_args_map, i);
3466 else
3467 stored_args_watermark = MIN (stored_args_watermark, const_low);
3469 return insn != NULL_RTX;
3472 /* Given that a function returns a value of mode MODE at the most
3473 significant end of hard register VALUE, shift VALUE left or right
3474 as specified by LEFT_P. Return true if some action was needed. */
3476 bool
3477 shift_return_value (machine_mode mode, bool left_p, rtx value)
3479 gcc_assert (REG_P (value) && HARD_REGISTER_P (value));
3480 machine_mode value_mode = GET_MODE (value);
3481 poly_int64 shift = GET_MODE_BITSIZE (value_mode) - GET_MODE_BITSIZE (mode);
3483 if (known_eq (shift, 0))
3484 return false;
3486 /* Use ashr rather than lshr for right shifts. This is for the benefit
3487 of the MIPS port, which requires SImode values to be sign-extended
3488 when stored in 64-bit registers. */
3489 if (!force_expand_binop (value_mode, left_p ? ashl_optab : ashr_optab,
3490 value, gen_int_shift_amount (value_mode, shift),
3491 value, 1, OPTAB_WIDEN))
3492 gcc_unreachable ();
3493 return true;
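/* Editor's note: for example, assuming an SImode (32-bit) value
   returned at the most significant end of a 64-bit DImode hard
   register, the shift amount is 64 - 32 = 32; with LEFT_P false the
   value is shifted down to the least significant end, using an
   arithmetic rather than logical right shift for the benefit of
   targets like the MIPS port noted above.  */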
3496 /* If X is a likely-spilled register value, copy it to a pseudo
3497 register and return that register. Return X otherwise. */
3499 static rtx
3500 avoid_likely_spilled_reg (rtx x)
3502 rtx new_rtx;
3504 if (REG_P (x)
3505 && HARD_REGISTER_P (x)
3506 && targetm.class_likely_spilled_p (REGNO_REG_CLASS (REGNO (x))))
3508 /* Make sure that we generate a REG rather than a CONCAT.
3509 Moves into CONCATs can need nontrivial instructions,
3510 and the whole point of this function is to avoid
3511 using the hard register directly in such a situation. */
3512 generating_concat_p = 0;
3513 new_rtx = gen_reg_rtx (GET_MODE (x));
3514 generating_concat_p = 1;
3515 emit_move_insn (new_rtx, x);
3516 return new_rtx;
3518 return x;
3521 /* Helper function for expand_call.
3522 Return false if EXP is not implementable as a sibling call. */
3524 static bool
3525 can_implement_as_sibling_call_p (tree exp,
3526 rtx structure_value_addr,
3527 tree funtype,
3528 tree fndecl,
3529 int flags,
3530 tree addr,
3531 const args_size &args_size)
3533 if (!targetm.have_sibcall_epilogue ())
3535 maybe_complain_about_tail_call
3536 (exp,
3537 "machine description does not have"
3538 " a sibcall_epilogue instruction pattern");
3539 return false;
3542 /* Doing sibling call optimization needs some work, since
3543 structure_value_addr can be allocated on the stack.
3544 It does not seem worth the effort since few optimizable
3545 sibling calls will return a structure. */
3546 if (structure_value_addr != NULL_RTX)
3548 maybe_complain_about_tail_call (exp, "callee returns a structure");
3549 return false;
3552 /* Check whether the target is able to optimize the call
3553 into a sibcall. */
3554 if (!targetm.function_ok_for_sibcall (fndecl, exp))
3556 maybe_complain_about_tail_call (exp,
3557 "target is not able to optimize the"
3558 " call into a sibling call");
3559 return false;
3562 /* Functions that do not return exactly once may not be sibcall
3563 optimized. */
3564 if (flags & ECF_RETURNS_TWICE)
3566 maybe_complain_about_tail_call (exp, "callee returns twice");
3567 return false;
3569 if (flags & ECF_NORETURN)
3571 maybe_complain_about_tail_call (exp, "callee does not return");
3572 return false;
3575 if (TYPE_VOLATILE (TREE_TYPE (TREE_TYPE (addr))))
3577 maybe_complain_about_tail_call (exp, "volatile function type");
3578 return false;
3581 /* If the called function is nested in the current one, it might access
3582 some of the caller's arguments, but could clobber them beforehand if
3583 the argument areas are shared. */
3584 if (fndecl && decl_function_context (fndecl) == current_function_decl)
3586 maybe_complain_about_tail_call (exp, "nested function");
3587 return false;
3590 /* If this function requires more stack slots than the current
3591 function, we cannot change it into a sibling call.
3592 crtl->args.pretend_args_size is not part of the
3593 stack allocated by our caller. */
3594 if (maybe_gt (args_size.constant,
3595 crtl->args.size - crtl->args.pretend_args_size))
3597 maybe_complain_about_tail_call (exp,
3598 "callee required more stack slots"
3599 " than the caller");
3600 return false;
3603 /* If the callee pops its own arguments, then it must pop exactly
3604 the same number of arguments as the current function. */
3605 if (maybe_ne (targetm.calls.return_pops_args (fndecl, funtype,
3606 args_size.constant),
3607 targetm.calls.return_pops_args (current_function_decl,
3608 TREE_TYPE
3609 (current_function_decl),
3610 crtl->args.size)))
3612 maybe_complain_about_tail_call (exp,
3613 "inconsistent number of"
3614 " popped arguments");
3615 return false;
3618 if (!lang_hooks.decls.ok_for_sibcall (fndecl))
3620 maybe_complain_about_tail_call (exp, "frontend does not support"
3621 " sibling call");
3622 return false;
3625 /* All checks passed. */
3626 return true;
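/* Editor's example (hypothetical functions, for illustration): the
   kind of source-level calls the checks above accept or reject, given
   -foptimize-sibling-calls and a target with a sibcall_epilogue.  */
struct big_result { int words[8]; };
extern int leaf_fn (int);
extern struct big_result make_big (int);

int
ok_sibcall (int x)
{
  return leaf_fn (x + 1);	/* Passes the checks; may become a jump.  */
}

struct big_result
no_sibcall (int x)
{
  return make_big (x);		/* Rejected: callee returns a structure.  */
}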
3629 /* Update stack alignment when the parameter is passed in the stack
3630 since the outgoing parameter requires extra alignment on the calling
3631 function side. */
3633 static void
3634 update_stack_alignment_for_call (struct locate_and_pad_arg_data *locate)
3636 if (crtl->stack_alignment_needed < locate->boundary)
3637 crtl->stack_alignment_needed = locate->boundary;
3638 if (crtl->preferred_stack_boundary < locate->boundary)
3639 crtl->preferred_stack_boundary = locate->boundary;
3642 /* Generate all the code for a CALL_EXPR exp
3643 and return an rtx for its value.
3644 Store the value in TARGET (specified as an rtx) if convenient.
3645 If the value is stored in TARGET then TARGET is returned.
3646 If IGNORE is nonzero, then we ignore the value of the function call. */
3648 rtx
3649 expand_call (tree exp, rtx target, int ignore)
3651 /* Nonzero if we are currently expanding a call. */
3652 static int currently_expanding_call = 0;
3654 /* RTX for the function to be called. */
3655 rtx funexp;
3656 /* Sequence of insns to perform a normal "call". */
3657 rtx_insn *normal_call_insns = NULL;
3658 /* Sequence of insns to perform a tail "call". */
3659 rtx_insn *tail_call_insns = NULL;
3660 /* Data type of the function. */
3661 tree funtype;
3662 tree type_arg_types;
3663 tree rettype;
3664 /* Declaration of the function being called,
3665 or 0 if the function is computed (not known by name). */
3666 tree fndecl = 0;
3667 /* The type of the function being called. */
3668 tree fntype;
3669 bool try_tail_call = CALL_EXPR_TAILCALL (exp);
3670 bool must_tail_call = CALL_EXPR_MUST_TAIL_CALL (exp);
3671 int pass;
3673 /* Register in which non-BLKmode value will be returned,
3674 or 0 if no value or if value is BLKmode. */
3675 rtx valreg;
3676 /* Address where we should return a BLKmode value;
3677 0 if value not BLKmode. */
3678 rtx structure_value_addr = 0;
3679 /* Nonzero if that address is being passed by treating it as
3680 an extra, implicit first parameter. Otherwise,
3681 it is passed by being copied directly into struct_value_rtx. */
3682 int structure_value_addr_parm = 0;
3683 /* Holds the value of implicit argument for the struct value. */
3684 tree structure_value_addr_value = NULL_TREE;
3685 /* Size of aggregate value wanted, or zero if none wanted
3686 or if we are using the non-reentrant PCC calling convention
3687 or expecting the value in registers. */
3688 poly_int64 struct_value_size = 0;
3689 /* Nonzero if called function returns an aggregate in memory PCC style,
3690 by returning the address of where to find it. */
3691 int pcc_struct_value = 0;
3692 rtx struct_value = 0;
3694 /* Number of actual parameters in this call, including struct value addr. */
3695 int num_actuals;
3696 /* Number of named args. Args after this are anonymous ones
3697 and they must all go on the stack. */
3698 int n_named_args;
3699 /* Number of complex actual arguments that need to be split. */
3700 int num_complex_actuals = 0;
3702 /* Vector of information about each argument.
3703 Arguments are numbered in the order they will be pushed,
3704 not the order they are written. */
3705 struct arg_data *args;
3707 /* Total size in bytes of all the stack-parms scanned so far. */
3708 struct args_size args_size;
3709 struct args_size adjusted_args_size;
3710 /* Size of arguments before any adjustments (such as rounding). */
3711 poly_int64 unadjusted_args_size;
3712 /* Data on reg parms scanned so far. */
3713 CUMULATIVE_ARGS args_so_far_v;
3714 cumulative_args_t args_so_far;
3715 /* Nonzero if a reg parm has been scanned. */
3716 int reg_parm_seen;
3717 /* Nonzero if this is an indirect function call. */
3719 /* Nonzero if we must avoid push-insns in the args for this call.
3720 If stack space is allocated for register parameters, but not by the
3721 caller, then it is preallocated in the fixed part of the stack frame.
3722 So the entire argument block must then be preallocated (i.e., we
3723 ignore PUSH_ROUNDING in that case). */
3725 int must_preallocate = !PUSH_ARGS;
3727 /* Size of the stack reserved for parameter registers. */
3728 int reg_parm_stack_space = 0;
3730 /* Address of space preallocated for stack parms
3731 (on machines that lack push insns), or 0 if space not preallocated. */
3732 rtx argblock = 0;
3734 /* Mask of ECF_ and ERF_ flags. */
3735 int flags = 0;
3736 int return_flags = 0;
3737 #ifdef REG_PARM_STACK_SPACE
3738 /* Define the boundary of the register parm stack space that needs to be
3739 saved, if any. */
3740 int low_to_save, high_to_save;
3741 rtx save_area = 0; /* Place that it is saved */
3742 #endif
3744 unsigned int initial_highest_arg_in_use = highest_outgoing_arg_in_use;
3745 char *initial_stack_usage_map = stack_usage_map;
3746 unsigned HOST_WIDE_INT initial_stack_usage_watermark = stack_usage_watermark;
3747 char *stack_usage_map_buf = NULL;
3749 poly_int64 old_stack_allocated;
3751 /* State variables to track stack modifications. */
3752 rtx old_stack_level = 0;
3753 int old_stack_arg_under_construction = 0;
3754 poly_int64 old_pending_adj = 0;
3755 int old_inhibit_defer_pop = inhibit_defer_pop;
3757 /* Some stack pointer alterations we make are performed via
3758 allocate_dynamic_stack_space. This modifies the stack_pointer_delta,
3759 which we then also need to save/restore along the way. */
3760 poly_int64 old_stack_pointer_delta = 0;
3762 rtx call_fusage;
3763 tree addr = CALL_EXPR_FN (exp);
3764 int i;
3765 /* The alignment of the stack, in bits. */
3766 unsigned HOST_WIDE_INT preferred_stack_boundary;
3767 /* The alignment of the stack, in bytes. */
3768 unsigned HOST_WIDE_INT preferred_unit_stack_boundary;
3769 /* The static chain value to use for this call. */
3770 rtx static_chain_value;
3771 /* See if this is a "nothrow" function call. */
3772 if (TREE_NOTHROW (exp))
3773 flags |= ECF_NOTHROW;
3775 /* See if we can find a DECL-node for the actual function, and get the
3776 function attributes (flags) from the function decl or type node. */
3777 fndecl = get_callee_fndecl (exp);
3778 if (fndecl)
3780 fntype = TREE_TYPE (fndecl);
3781 flags |= flags_from_decl_or_type (fndecl);
3782 return_flags |= decl_return_flags (fndecl);
3784 else
3786 fntype = TREE_TYPE (TREE_TYPE (addr));
3787 flags |= flags_from_decl_or_type (fntype);
3788 if (CALL_EXPR_BY_DESCRIPTOR (exp))
3789 flags |= ECF_BY_DESCRIPTOR;
3791 rettype = TREE_TYPE (exp);
3793 struct_value = targetm.calls.struct_value_rtx (fntype, 0);
3795 /* Warn if this value is an aggregate type,
3796 regardless of which calling convention we are using for it. */
3797 if (AGGREGATE_TYPE_P (rettype))
3798 warning (OPT_Waggregate_return, "function call has aggregate value");
3800 /* If the result of a non-looping pure or const function call is
3801 ignored (or void), and none of its arguments are volatile, we can
3802 avoid expanding the call and just evaluate the arguments for
3803 side-effects. */
3804 if ((flags & (ECF_CONST | ECF_PURE))
3805 && (!(flags & ECF_LOOPING_CONST_OR_PURE))
3806 && (ignore || target == const0_rtx
3807 || TYPE_MODE (rettype) == VOIDmode))
3809 bool volatilep = false;
3810 tree arg;
3811 call_expr_arg_iterator iter;
3813 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
3814 if (TREE_THIS_VOLATILE (arg))
3816 volatilep = true;
3817 break;
3820 if (! volatilep)
3822 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
3823 expand_expr (arg, const0_rtx, VOIDmode, EXPAND_NORMAL);
3824 return const0_rtx;
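/* Editor's example (illustrative): a call whose value is ignored and
   that reaches the early return just above.  */
extern int get_input (void);
extern int sq (int) __attribute__ ((const));

void
drop_const_call (void)
{
  sq (get_input ());	/* sq is never expanded; get_input () still runs.  */
}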
3828 #ifdef REG_PARM_STACK_SPACE
3829 reg_parm_stack_space = REG_PARM_STACK_SPACE (!fndecl ? fntype : fndecl);
3830 #endif
3832 if (! OUTGOING_REG_PARM_STACK_SPACE ((!fndecl ? fntype : TREE_TYPE (fndecl)))
3833 && reg_parm_stack_space > 0 && PUSH_ARGS)
3834 must_preallocate = 1;
3836 /* Set up a place to return a structure. */
3838 /* Cater to broken compilers. */
3839 if (aggregate_value_p (exp, fntype))
3841 /* This call returns a big structure. */
3842 flags &= ~(ECF_CONST | ECF_PURE | ECF_LOOPING_CONST_OR_PURE);
3844 #ifdef PCC_STATIC_STRUCT_RETURN
3846 pcc_struct_value = 1;
3848 #else /* not PCC_STATIC_STRUCT_RETURN */
3850 if (!poly_int_tree_p (TYPE_SIZE_UNIT (rettype), &struct_value_size))
3851 struct_value_size = -1;
3853 /* Even if it is semantically safe to use the target as the return
3854 slot, it may not be sufficiently aligned for the return type. */
3855 if (CALL_EXPR_RETURN_SLOT_OPT (exp)
3856 && target
3857 && MEM_P (target)
3858 /* If rettype is addressable, we may not create a temporary.
3859 If target is properly aligned at runtime and the compiler
3860 just doesn't know about it, it will work fine, otherwise it
3861 will be UB. */
3862 && (TREE_ADDRESSABLE (rettype)
3863 || !(MEM_ALIGN (target) < TYPE_ALIGN (rettype)
3864 && targetm.slow_unaligned_access (TYPE_MODE (rettype),
3865 MEM_ALIGN (target)))))
3866 structure_value_addr = XEXP (target, 0);
3867 else
3869 /* For variable-sized objects, we must be called with a target
3870 specified. If we were to allocate space on the stack here,
3871 we would have no way of knowing when to free it. */
3872 rtx d = assign_temp (rettype, 1, 1);
3873 structure_value_addr = XEXP (d, 0);
3874 target = 0;
3877 #endif /* not PCC_STATIC_STRUCT_RETURN */
3880 /* Figure out the amount to which the stack should be aligned. */
3881 preferred_stack_boundary = PREFERRED_STACK_BOUNDARY;
3882 if (fndecl)
3884 struct cgraph_rtl_info *i = cgraph_node::rtl_info (fndecl);
3885 /* Without automatic stack alignment, we can't increase preferred
3886 stack boundary. With automatic stack alignment, it is
3887 unnecessary: unless we can guarantee that all callers will
3888 align the outgoing stack properly, the callee has to align its
3889 stack anyway. */
3890 if (i
3891 && i->preferred_incoming_stack_boundary
3892 && i->preferred_incoming_stack_boundary < preferred_stack_boundary)
3893 preferred_stack_boundary = i->preferred_incoming_stack_boundary;
3896 /* Operand 0 is a pointer-to-function; get the type of the function. */
3897 funtype = TREE_TYPE (addr);
3898 gcc_assert (POINTER_TYPE_P (funtype));
3899 funtype = TREE_TYPE (funtype);
3901 /* Count whether there are actual complex arguments that need to be split
3902 into their real and imaginary parts. Munge the type_arg_types
3903 appropriately here as well. */
3904 if (targetm.calls.split_complex_arg)
3906 call_expr_arg_iterator iter;
3907 tree arg;
3908 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
3910 tree type = TREE_TYPE (arg);
3911 if (type && TREE_CODE (type) == COMPLEX_TYPE
3912 && targetm.calls.split_complex_arg (type))
3913 num_complex_actuals++;
3915 type_arg_types = split_complex_types (TYPE_ARG_TYPES (funtype));
3917 else
3918 type_arg_types = TYPE_ARG_TYPES (funtype);
3920 if (flags & ECF_MAY_BE_ALLOCA)
3921 cfun->calls_alloca = 1;
3923 /* If struct_value_rtx is 0, it means pass the address
3924 as if it were an extra parameter. Put the argument expression
3925 in structure_value_addr_value. */
3926 if (structure_value_addr && struct_value == 0)
3928 /* If structure_value_addr is a REG other than
3929 virtual_outgoing_args_rtx, we can always use it. If it
3930 is not a REG, we must always copy it into a register.
3931 If it is virtual_outgoing_args_rtx, we must copy it to another
3932 register in some cases. */
3933 rtx temp = (!REG_P (structure_value_addr)
3934 || (ACCUMULATE_OUTGOING_ARGS
3935 && stack_arg_under_construction
3936 && structure_value_addr == virtual_outgoing_args_rtx)
3937 ? copy_addr_to_reg (convert_memory_address
3938 (Pmode, structure_value_addr))
3939 : structure_value_addr);
3941 structure_value_addr_value =
3942 make_tree (build_pointer_type (TREE_TYPE (funtype)), temp);
3943 structure_value_addr_parm = 1;
3946 /* Count the arguments and set NUM_ACTUALS. */
3947 num_actuals =
3948 call_expr_nargs (exp) + num_complex_actuals + structure_value_addr_parm;
3950 /* Compute number of named args.
3951 First, do a raw count of the args for INIT_CUMULATIVE_ARGS. */
3953 if (type_arg_types != 0)
3954 n_named_args
3955 = (list_length (type_arg_types)
3956 /* Count the struct value address, if it is passed as a parm. */
3957 + structure_value_addr_parm);
3958 else
3959 /* If we know nothing, treat all args as named. */
3960 n_named_args = num_actuals;
3962 /* Start updating where the next arg would go.
3964 On some machines (such as the PA) indirect calls have a different
3965 calling convention than normal calls. The fourth argument in
3966 INIT_CUMULATIVE_ARGS tells the backend if this is an indirect call
3967 or not. */
3968 INIT_CUMULATIVE_ARGS (args_so_far_v, funtype, NULL_RTX, fndecl, n_named_args);
3969 args_so_far = pack_cumulative_args (&args_so_far_v);
3971 /* Now possibly adjust the number of named args.
3972 Normally, don't include the last named arg if anonymous args follow.
3973 We do include the last named arg if
3974 targetm.calls.strict_argument_naming() returns nonzero.
3975 (If no anonymous args follow, the result of list_length is actually
3976 one too large. This is harmless.)
3978 If targetm.calls.pretend_outgoing_varargs_named() returns
3979 nonzero, and targetm.calls.strict_argument_naming() returns zero,
3980 this machine will be able to place unnamed args that were passed
3981 in registers into the stack. So treat all args as named. This
3982 allows the insns emitted for a specific argument list to be
3983 independent of the function declaration.
3985 If targetm.calls.pretend_outgoing_varargs_named() returns zero,
3986 we do not have any reliable way to pass unnamed args in
3987 registers, so we must force them into memory. */
3989 if (type_arg_types != 0
3990 && targetm.calls.strict_argument_naming (args_so_far))
3991 ;
3992 else if (type_arg_types != 0
3993 && ! targetm.calls.pretend_outgoing_varargs_named (args_so_far))
3994 /* Don't include the last named arg. */
3995 --n_named_args;
3996 else
3997 /* Treat all args as named. */
3998 n_named_args = num_actuals;
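/* Editor's example: for `int printf (const char *, ...);' called as
   printf ("%d", i), type_arg_types lists only the format pointer, so
   the raw count gives n_named_args == 1; whether that last named arg
   is then dropped again depends on the two target hooks tested above.  */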
4000 /* Make a vector to hold all the information about each arg. */
4001 args = XCNEWVEC (struct arg_data, num_actuals);
4003 /* Build up entries in the ARGS array, compute the size of the
4004 arguments into ARGS_SIZE, etc. */
4005 initialize_argument_information (num_actuals, args, &args_size,
4006 n_named_args, exp,
4007 structure_value_addr_value, fndecl, fntype,
4008 args_so_far, reg_parm_stack_space,
4009 &old_stack_level, &old_pending_adj,
4010 &must_preallocate, &flags,
4011 &try_tail_call, CALL_FROM_THUNK_P (exp));
4013 if (args_size.var)
4014 must_preallocate = 1;
4016 /* Now make final decision about preallocating stack space. */
4017 must_preallocate = finalize_must_preallocate (must_preallocate,
4018 num_actuals, args,
4019 &args_size);
4021 /* If the structure value address will reference the stack pointer, we
4022 must stabilize it. We don't need to do this if we know that we are
4023 not going to adjust the stack pointer in processing this call. */
4025 if (structure_value_addr
4026 && (reg_mentioned_p (virtual_stack_dynamic_rtx, structure_value_addr)
4027 || reg_mentioned_p (virtual_outgoing_args_rtx,
4028 structure_value_addr))
4029 && (args_size.var
4030 || (!ACCUMULATE_OUTGOING_ARGS
4031 && maybe_ne (args_size.constant, 0))))
4032 structure_value_addr = copy_to_reg (structure_value_addr);
4034 /* Tail calls can make things harder to debug, and we've traditionally
4035 pushed these optimizations into -O2. Don't try if we're already
4036 expanding a call, as that means we're an argument. Don't try if
4037 there are cleanups, as we know there's code to follow the call. */
4038 if (currently_expanding_call++ != 0
4039 || (!flag_optimize_sibling_calls && !CALL_FROM_THUNK_P (exp))
4040 || args_size.var
4041 || dbg_cnt (tail_call) == false)
4042 try_tail_call = 0;
4044 /* Workaround buggy C/C++ wrappers around Fortran routines with
4045 character(len=constant) arguments if the hidden string length arguments
4046 are passed on the stack; if the callers forget to pass those arguments,
4047 attempting to tail call in such routines leads to stack corruption.
4048 Avoid tail calls in functions where at least one such hidden string
4049 length argument is passed (partially or fully) on the stack in the
4050 caller and the callee needs to pass any arguments on the stack.
4051 See PR90329. */
4052 if (try_tail_call && maybe_ne (args_size.constant, 0))
4053 for (tree arg = DECL_ARGUMENTS (current_function_decl);
4054 arg; arg = DECL_CHAIN (arg))
4055 if (DECL_HIDDEN_STRING_LENGTH (arg) && DECL_INCOMING_RTL (arg))
4057 subrtx_iterator::array_type array;
4058 FOR_EACH_SUBRTX (iter, array, DECL_INCOMING_RTL (arg), NONCONST)
4059 if (MEM_P (*iter))
4061 try_tail_call = 0;
4062 break;
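/* Editor's illustration of the PR90329 scenario guarded against above:
   a Fortran routine taking character(len=10) arguments receives hidden
   integer length arguments appended to the call; a C caller written
   against a prototype that omits them underfills the argument area, so
   tail calling out of such a routine could corrupt the stack.  */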
4066 /* If the user has marked the function as requiring tail-call
4067 optimization, attempt it. */
4068 if (must_tail_call)
4069 try_tail_call = 1;
4071 /* Check the remaining reasons tail call optimization might fail. */
4072 if (try_tail_call)
4073 try_tail_call = can_implement_as_sibling_call_p (exp,
4074 structure_value_addr,
4075 funtype,
4076 fndecl,
4077 flags, addr, args_size);
4079 /* Check if caller and callee disagree in promotion of function
4080 return value. */
4081 if (try_tail_call)
4083 machine_mode caller_mode, caller_promoted_mode;
4084 machine_mode callee_mode, callee_promoted_mode;
4085 int caller_unsignedp, callee_unsignedp;
4086 tree caller_res = DECL_RESULT (current_function_decl);
4088 caller_unsignedp = TYPE_UNSIGNED (TREE_TYPE (caller_res));
4089 caller_mode = DECL_MODE (caller_res);
4090 callee_unsignedp = TYPE_UNSIGNED (TREE_TYPE (funtype));
4091 callee_mode = TYPE_MODE (TREE_TYPE (funtype));
4092 caller_promoted_mode
4093 = promote_function_mode (TREE_TYPE (caller_res), caller_mode,
4094 &caller_unsignedp,
4095 TREE_TYPE (current_function_decl), 1);
4096 callee_promoted_mode
4097 = promote_function_mode (TREE_TYPE (funtype), callee_mode,
4098 &callee_unsignedp,
4099 funtype, 1);
4100 if (caller_mode != VOIDmode
4101 && (caller_promoted_mode != callee_promoted_mode
4102 || ((caller_mode != caller_promoted_mode
4103 || callee_mode != callee_promoted_mode)
4104 && (caller_unsignedp != callee_unsignedp
4105 || partial_subreg_p (caller_mode, callee_mode)))))
4107 try_tail_call = 0;
4108 maybe_complain_about_tail_call (exp,
4109 "caller and callee disagree in"
4110 " promotion of function"
4111 " return value");
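/* Editor's example (hypothetical target): if the caller's short return
   value promotes to unsigned SImode while the callee's promotes to
   signed SImode, caller_unsignedp != callee_unsignedp above and the
   tail call is abandoned with the diagnostic just emitted.  */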
4115 /* Ensure current function's preferred stack boundary is at least
4116 what we need. Stack alignment may also increase preferred stack
4117 boundary. */
4118 for (i = 0; i < num_actuals; i++)
4119 if (reg_parm_stack_space > 0
4120 || args[i].reg == 0
4121 || args[i].partial != 0
4122 || args[i].pass_on_stack)
4123 update_stack_alignment_for_call (&args[i].locate);
4124 if (crtl->preferred_stack_boundary < preferred_stack_boundary)
4125 crtl->preferred_stack_boundary = preferred_stack_boundary;
4126 else
4127 preferred_stack_boundary = crtl->preferred_stack_boundary;
4129 preferred_unit_stack_boundary = preferred_stack_boundary / BITS_PER_UNIT;
4131 if (flag_callgraph_info)
4132 record_final_call (fndecl, EXPR_LOCATION (exp));
4134 /* We want to make two insn chains; one for a sibling call, the other
4135 for a normal call. We will select one of the two chains after
4136 initial RTL generation is complete. */
4137 for (pass = try_tail_call ? 0 : 1; pass < 2; pass++)
4139 int sibcall_failure = 0;
4140 /* We want to emit any pending stack adjustments before the tail
4141 recursion "call". That way we know any adjustment after the tail
4142 recursion call can be ignored if we indeed use the tail
4143 call expansion. */
4144 saved_pending_stack_adjust save;
4145 rtx_insn *insns, *before_call, *after_args;
4146 rtx next_arg_reg;
4148 if (pass == 0)
4150 /* State variables we need to save and restore between
4151 iterations. */
4152 save_pending_stack_adjust (&save);
4154 if (pass)
4155 flags &= ~ECF_SIBCALL;
4156 else
4157 flags |= ECF_SIBCALL;
4159 /* Other state variables that we must reinitialize each time
4160 through the loop (that are not initialized by the loop itself). */
4161 argblock = 0;
4162 call_fusage = 0;
4164 /* Start a new sequence for the normal call case.
4166 From this point on, if the sibling call fails, we want to set
4167 sibcall_failure instead of continuing the loop. */
4168 start_sequence ();
4170 /* Don't let pending stack adjusts add up to too much.
4171 Also, do all pending adjustments now if there is any chance
4172 this might be a call to alloca or if we are expanding a sibling
4173 call sequence.
4174 Also do the adjustments before a throwing call, otherwise
4175 exception handling can fail; PR 19225. */
4176 if (maybe_ge (pending_stack_adjust, 32)
4177 || (maybe_ne (pending_stack_adjust, 0)
4178 && (flags & ECF_MAY_BE_ALLOCA))
4179 || (maybe_ne (pending_stack_adjust, 0)
4180 && flag_exceptions && !(flags & ECF_NOTHROW))
4181 || pass == 0)
4182 do_pending_stack_adjust ();
4184 /* Precompute any arguments as needed. */
4185 if (pass)
4186 precompute_arguments (num_actuals, args);
4188 /* Now we are about to start emitting insns that can be deleted
4189 if a libcall is deleted. */
4190 if (pass && (flags & ECF_MALLOC))
4191 start_sequence ();
4193 if (pass == 0
4194 && crtl->stack_protect_guard
4195 && targetm.stack_protect_runtime_enabled_p ())
4196 stack_protect_epilogue ();
4198 adjusted_args_size = args_size;
4199 /* Compute the actual size of the argument block required. The variable
4200 and constant sizes must be combined, the size may have to be rounded,
4201 and there may be a minimum required size. When generating a sibcall
4202 pattern, do not round up, since we'll be re-using whatever space our
4203 caller provided. */
4204 unadjusted_args_size
4205 = compute_argument_block_size (reg_parm_stack_space,
4206 &adjusted_args_size,
4207 fndecl, fntype,
4208 (pass == 0 ? 0
4209 : preferred_stack_boundary));
4211 old_stack_allocated = stack_pointer_delta - pending_stack_adjust;
4213 /* The argument block when performing a sibling call is the
4214 incoming argument block. */
4215 if (pass == 0)
4217 argblock = crtl->args.internal_arg_pointer;
4218 if (STACK_GROWS_DOWNWARD)
4219 argblock
4220 = plus_constant (Pmode, argblock, crtl->args.pretend_args_size);
4221 else
4222 argblock
4223 = plus_constant (Pmode, argblock, -crtl->args.pretend_args_size);
4225 HOST_WIDE_INT map_size = constant_lower_bound (args_size.constant);
4226 stored_args_map = sbitmap_alloc (map_size);
4227 bitmap_clear (stored_args_map);
4228 stored_args_watermark = HOST_WIDE_INT_M1U;
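/* Editor's note: stored_args_map records which bytes of the incoming
   argument area are overwritten while the sibcall's outgoing arguments
   are stored; check_sibcall_argument_overlap consults it so that a
   store clobbering a still-needed incoming argument sets
   sibcall_failure instead of producing a wrong-code tail call.  */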
4231 /* If we have no actual push instructions, or shouldn't use them,
4232 make space for all args right now. */
4233 else if (adjusted_args_size.var != 0)
4235 if (old_stack_level == 0)
4237 emit_stack_save (SAVE_BLOCK, &old_stack_level);
4238 old_stack_pointer_delta = stack_pointer_delta;
4239 old_pending_adj = pending_stack_adjust;
4240 pending_stack_adjust = 0;
4241 /* stack_arg_under_construction says whether a stack arg is
4242 being constructed at the old stack level. Pushing the stack
4243 gets a clean outgoing argument block. */
4244 old_stack_arg_under_construction = stack_arg_under_construction;
4245 stack_arg_under_construction = 0;
4247 argblock = push_block (ARGS_SIZE_RTX (adjusted_args_size), 0, 0);
4248 if (flag_stack_usage_info)
4249 current_function_has_unbounded_dynamic_stack_size = 1;
4251 else
4253 /* Note that we must go through the motions of allocating an argument
4254 block even if the size is zero because we may be storing args
4255 in the area reserved for register arguments, which may be part of
4256 the stack frame. */
4258 poly_int64 needed = adjusted_args_size.constant;
4260 /* Store the maximum argument space used. It will be pushed by
4261 the prologue (if ACCUMULATE_OUTGOING_ARGS, or stack overflow
4262 checking). */
4264 crtl->outgoing_args_size = upper_bound (crtl->outgoing_args_size,
4265 needed);
4267 if (must_preallocate)
4269 if (ACCUMULATE_OUTGOING_ARGS)
4271 /* Since the stack pointer will never be pushed, it is
4272 possible for the evaluation of a parm to clobber
4273 something we have already written to the stack.
4274 Since most function calls on RISC machines do not use
4275 the stack, this is uncommon, but must work correctly.
4277 Therefore, we save any area of the stack that was already
4278 written and that we are using. Here we set up to do this
4279 by making a new stack usage map from the old one. The
4280 actual save will be done by store_one_arg.
4282 Another approach might be to try to reorder the argument
4283 evaluations to avoid this conflicting stack usage. */
4285 /* Since we will be writing into the entire argument area,
4286 the map must be allocated for its entire size, not just
4287 the part that is the responsibility of the caller. */
4288 if (! OUTGOING_REG_PARM_STACK_SPACE ((!fndecl ? fntype : TREE_TYPE (fndecl))))
4289 needed += reg_parm_stack_space;
4291 poly_int64 limit = needed;
4292 if (ARGS_GROW_DOWNWARD)
4293 limit += 1;
4295 /* For polynomial sizes, this is the maximum possible
4296 size needed for arguments with a constant size
4297 and offset. */
4298 HOST_WIDE_INT const_limit = constant_lower_bound (limit);
4299 highest_outgoing_arg_in_use
4300 = MAX (initial_highest_arg_in_use, const_limit);
4302 free (stack_usage_map_buf);
4303 stack_usage_map_buf = XNEWVEC (char, highest_outgoing_arg_in_use);
4304 stack_usage_map = stack_usage_map_buf;
4306 if (initial_highest_arg_in_use)
4307 memcpy (stack_usage_map, initial_stack_usage_map,
4308 initial_highest_arg_in_use);
4310 if (initial_highest_arg_in_use != highest_outgoing_arg_in_use)
4311 memset (&stack_usage_map[initial_highest_arg_in_use], 0,
4312 (highest_outgoing_arg_in_use
4313 - initial_highest_arg_in_use));
4314 needed = 0;
4316 /* The address of the outgoing argument list must not be
4317 copied to a register here, because argblock would be left
4318 pointing to the wrong place after the call to
4319 allocate_dynamic_stack_space below. */
4321 argblock = virtual_outgoing_args_rtx;
4323 else
4325 /* Try to reuse some or all of the pending_stack_adjust
4326 to get this space. */
4327 if (inhibit_defer_pop == 0
4328 && (combine_pending_stack_adjustment_and_call
4329 (&needed,
4330 unadjusted_args_size,
4331 &adjusted_args_size,
4332 preferred_unit_stack_boundary)))
4334 /* combine_pending_stack_adjustment_and_call computes
4335 an adjustment before the arguments are allocated.
4336 Account for them and see whether or not the stack
4337 needs to go up or down. */
4338 needed = unadjusted_args_size - needed;
4340 /* Checked by
4341 combine_pending_stack_adjustment_and_call. */
4342 gcc_checking_assert (ordered_p (needed, 0));
4343 if (maybe_lt (needed, 0))
4345 /* We're releasing stack space. */
4346 /* ??? We can avoid any adjustment at all if we're
4347 already aligned. FIXME. */
4348 pending_stack_adjust = -needed;
4349 do_pending_stack_adjust ();
4350 needed = 0;
4352 else
4353 /* We need to allocate space. We'll do that in
4354 push_block below. */
4355 pending_stack_adjust = 0;
4358 /* Special case this because overhead of `push_block' in
4359 this case is non-trivial. */
4360 if (known_eq (needed, 0))
4361 argblock = virtual_outgoing_args_rtx;
4362 else
4364 rtx needed_rtx = gen_int_mode (needed, Pmode);
4365 argblock = push_block (needed_rtx, 0, 0);
4366 if (ARGS_GROW_DOWNWARD)
4367 argblock = plus_constant (Pmode, argblock, needed);
4370 /* We only really need to call `copy_to_reg' in the case
4371 where push insns are going to be used to pass ARGBLOCK
4372 to a function call in ARGS. In that case, the stack
4373 pointer changes value from the allocation point to the
4374 call point, and hence the value of
4375 VIRTUAL_OUTGOING_ARGS_RTX changes as well. But might
4376 as well always do it. */
4377 argblock = copy_to_reg (argblock);
4382 if (ACCUMULATE_OUTGOING_ARGS)
4384 /* The save/restore code in store_one_arg handles all
4385 cases except one: a constructor call (including a C
4386 function returning a BLKmode struct) to initialize
4387 an argument. */
4388 if (stack_arg_under_construction)
4390 rtx push_size
4391 = (gen_int_mode
4392 (adjusted_args_size.constant
4393 + (OUTGOING_REG_PARM_STACK_SPACE (!fndecl ? fntype
4394 : TREE_TYPE (fndecl))
4395 ? 0 : reg_parm_stack_space), Pmode));
4396 if (old_stack_level == 0)
4398 emit_stack_save (SAVE_BLOCK, &old_stack_level);
4399 old_stack_pointer_delta = stack_pointer_delta;
4400 old_pending_adj = pending_stack_adjust;
4401 pending_stack_adjust = 0;
4402 /* stack_arg_under_construction says whether a stack
4403 arg is being constructed at the old stack level.
4404 Pushing the stack gets a clean outgoing argument
4405 block. */
4406 old_stack_arg_under_construction
4407 = stack_arg_under_construction;
4408 stack_arg_under_construction = 0;
4409 /* Make a new map for the new argument list. */
4410 free (stack_usage_map_buf);
4411 stack_usage_map_buf = XCNEWVEC (char, highest_outgoing_arg_in_use);
4412 stack_usage_map = stack_usage_map_buf;
4413 highest_outgoing_arg_in_use = 0;
4414 stack_usage_watermark = HOST_WIDE_INT_M1U;
4416 /* We can pass TRUE as the last argument because we just
4417 saved the stack pointer and will restore it right after
4418 the call. */
4419 allocate_dynamic_stack_space (push_size, 0, BIGGEST_ALIGNMENT,
4420 -1, true);
4423 /* If argument evaluation might modify the stack pointer,
4424 copy the address of the argument list to a register. */
4425 for (i = 0; i < num_actuals; i++)
4426 if (args[i].pass_on_stack)
4428 argblock = copy_addr_to_reg (argblock);
4429 break;
4433 compute_argument_addresses (args, argblock, num_actuals);
4435 /* Stack is properly aligned, pops can't safely be deferred during
4436 the evaluation of the arguments. */
4437 NO_DEFER_POP;
4439 /* Precompute all register parameters. It isn't safe to compute
4440 anything once we have started filling any specific hard regs.
4441 TLS symbols sometimes need a call to resolve. Precompute
4442 register parameters before any stack pointer manipulation
4443 to avoid unaligned stack in the called function. */
4444 precompute_register_parameters (num_actuals, args, &reg_parm_seen);
4446 OK_DEFER_POP;
4448 /* Perform stack alignment before the first push (the last arg). */
4449 if (argblock == 0
4450 && maybe_gt (adjusted_args_size.constant, reg_parm_stack_space)
4451 && maybe_ne (adjusted_args_size.constant, unadjusted_args_size))
4453 /* When the stack adjustment is pending, we get better code
4454 by combining the adjustments. */
4455 if (maybe_ne (pending_stack_adjust, 0)
4456 && ! inhibit_defer_pop
4457 && (combine_pending_stack_adjustment_and_call
4458 (&pending_stack_adjust,
4459 unadjusted_args_size,
4460 &adjusted_args_size,
4461 preferred_unit_stack_boundary)))
4462 do_pending_stack_adjust ();
4463 else if (argblock == 0)
4464 anti_adjust_stack (gen_int_mode (adjusted_args_size.constant
4465 - unadjusted_args_size,
4466 Pmode));
4468 /* Now that the stack is properly aligned, pops can't safely
4469 be deferred during the evaluation of the arguments. */
4470 NO_DEFER_POP;
4472 /* Record the maximum pushed stack space size. We need to delay
4473 doing so until this point to take into account the optimization
4474 done by combine_pending_stack_adjustment_and_call. */
4475 if (flag_stack_usage_info
4476 && !ACCUMULATE_OUTGOING_ARGS
4477 && pass
4478 && adjusted_args_size.var == 0)
4480 poly_int64 pushed = (adjusted_args_size.constant
4481 + pending_stack_adjust);
4482 current_function_pushed_stack_size
4483 = upper_bound (current_function_pushed_stack_size, pushed);
4486 funexp = rtx_for_function_call (fndecl, addr);
4488 if (CALL_EXPR_STATIC_CHAIN (exp))
4489 static_chain_value = expand_normal (CALL_EXPR_STATIC_CHAIN (exp));
4490 else
4491 static_chain_value = 0;
4493 #ifdef REG_PARM_STACK_SPACE
4494 /* Save the fixed argument area if it's part of the caller's frame and
4495 is clobbered by argument setup for this call. */
4496 if (ACCUMULATE_OUTGOING_ARGS && pass)
4497 save_area = save_fixed_argument_area (reg_parm_stack_space, argblock,
4498 &low_to_save, &high_to_save);
4499 #endif
4501 /* Now store (and compute if necessary) all non-register parms.
4502 These come before register parms, since they can require block-moves,
4503 which could clobber the registers used for register parms.
4504 Parms which have partial registers are not stored here,
4505 but we do preallocate space here if they want that. */
4507 for (i = 0; i < num_actuals; i++)
4509 if (args[i].reg == 0 || args[i].pass_on_stack)
4511 rtx_insn *before_arg = get_last_insn ();
4513 /* We don't allow passing huge (> 2^30 B) arguments
4514 by value. It would cause an overflow later on. */
4515 if (constant_lower_bound (adjusted_args_size.constant)
4516 >= (1 << (HOST_BITS_PER_INT - 2)))
4518 sorry ("passing too large argument on stack");
4519 continue;
4522 if (store_one_arg (&args[i], argblock, flags,
4523 adjusted_args_size.var != 0,
4524 reg_parm_stack_space)
4525 || (pass == 0
4526 && check_sibcall_argument_overlap (before_arg,
4527 &args[i], 1)))
4528 sibcall_failure = 1;
4531 if (args[i].stack)
4532 call_fusage
4533 = gen_rtx_EXPR_LIST (TYPE_MODE (TREE_TYPE (args[i].tree_value)),
4534 gen_rtx_USE (VOIDmode, args[i].stack),
4535 call_fusage);
4538 /* If we have a parm that is passed in registers but not in memory
4539 and whose alignment does not permit a direct copy into registers,
4540 make a group of pseudos that correspond to each register that we
4541 will later fill. */
4542 if (STRICT_ALIGNMENT)
4543 store_unaligned_arguments_into_pseudos (args, num_actuals);
4545 /* Now store any partially-in-registers parm.
4546 This is the last place a block-move can happen. */
4547 if (reg_parm_seen)
4548 for (i = 0; i < num_actuals; i++)
4549 if (args[i].partial != 0 && ! args[i].pass_on_stack)
4551 rtx_insn *before_arg = get_last_insn ();
4553 /* On targets with weird calling conventions (e.g. PA) it's
4554 hard to ensure that all cases of argument overlap between
4555 stack and registers work. Play it safe and bail out. */
4556 if (ARGS_GROW_DOWNWARD && !STACK_GROWS_DOWNWARD)
4558 sibcall_failure = 1;
4559 break;
4562 if (store_one_arg (&args[i], argblock, flags,
4563 adjusted_args_size.var != 0,
4564 reg_parm_stack_space)
4565 || (pass == 0
4566 && check_sibcall_argument_overlap (before_arg,
4567 &args[i], 1)))
4568 sibcall_failure = 1;
4571 bool any_regs = false;
4572 for (i = 0; i < num_actuals; i++)
4573 if (args[i].reg != NULL_RTX)
4575 any_regs = true;
4576 targetm.calls.call_args (args[i].reg, funtype);
4578 if (!any_regs)
4579 targetm.calls.call_args (pc_rtx, funtype);
4581 /* Figure out the register where the value, if any, will come back. */
4582 valreg = 0;
4583 if (TYPE_MODE (rettype) != VOIDmode
4584 && ! structure_value_addr)
4586 if (pcc_struct_value)
4587 valreg = hard_function_value (build_pointer_type (rettype),
4588 fndecl, NULL, (pass == 0));
4589 else
4590 valreg = hard_function_value (rettype, fndecl, fntype,
4591 (pass == 0));
4593 /* If VALREG is a PARALLEL whose first member has a zero
4594 offset, use that. This is for targets such as m68k that
4595 return the same value in multiple places. */
4596 if (GET_CODE (valreg) == PARALLEL)
4598 rtx elem = XVECEXP (valreg, 0, 0);
4599 rtx where = XEXP (elem, 0);
4600 rtx offset = XEXP (elem, 1);
4601 if (offset == const0_rtx
4602 && GET_MODE (where) == GET_MODE (valreg))
4603 valreg = where;
4607 /* If register arguments require space on the stack and stack space
4608 was not preallocated, allocate stack space here for arguments
4609 passed in registers. */
4610 if (OUTGOING_REG_PARM_STACK_SPACE ((!fndecl ? fntype : TREE_TYPE (fndecl)))
4611 && !ACCUMULATE_OUTGOING_ARGS
4612 && must_preallocate == 0 && reg_parm_stack_space > 0)
4613 anti_adjust_stack (GEN_INT (reg_parm_stack_space));
4615 /* Pass the function the address in which to return a
4616 structure value. */
4617 if (pass != 0 && structure_value_addr && ! structure_value_addr_parm)
4619 structure_value_addr
4620 = convert_memory_address (Pmode, structure_value_addr);
4621 emit_move_insn (struct_value,
4622 force_reg (Pmode,
4623 force_operand (structure_value_addr,
4624 NULL_RTX)));
4626 if (REG_P (struct_value))
4627 use_reg (&call_fusage, struct_value);
4630 after_args = get_last_insn ();
4631 funexp = prepare_call_address (fndecl ? fndecl : fntype, funexp,
4632 static_chain_value, &call_fusage,
4633 reg_parm_seen, flags);
4635 load_register_parameters (args, num_actuals, &call_fusage, flags,
4636 pass == 0, &sibcall_failure);
4638 /* Save a pointer to the last insn before the call, so that we can
4639 later safely search backwards to find the CALL_INSN. */
4640 before_call = get_last_insn ();
4642 /* Set up next argument register. For sibling calls on machines
4643 with register windows this should be the incoming register. */
4644 if (pass == 0)
4645 next_arg_reg = targetm.calls.function_incoming_arg
4646 (args_so_far, function_arg_info::end_marker ());
4647 else
4648 next_arg_reg = targetm.calls.function_arg
4649 (args_so_far, function_arg_info::end_marker ());
4651 if (pass == 1 && (return_flags & ERF_RETURNS_ARG))
4653 int arg_nr = return_flags & ERF_RETURN_ARG_MASK;
4654 arg_nr = num_actuals - arg_nr - 1;
4655 if (arg_nr >= 0
4656 && arg_nr < num_actuals
4657 && args[arg_nr].reg
4658 && valreg
4659 && REG_P (valreg)
4660 && GET_MODE (args[arg_nr].reg) == GET_MODE (valreg))
4661 call_fusage
4662 = gen_rtx_EXPR_LIST (TYPE_MODE (TREE_TYPE (args[arg_nr].tree_value)),
4663 gen_rtx_SET (valreg, args[arg_nr].reg),
4664 call_fusage);
4666 /* All arguments and registers used for the call must be set up by
4667 now! */
4669 /* Stack must be properly aligned now. */
4670 gcc_assert (!pass
4671 || multiple_p (stack_pointer_delta,
4672 preferred_unit_stack_boundary));
4674 /* Generate the actual call instruction. */
4675 emit_call_1 (funexp, exp, fndecl, funtype, unadjusted_args_size,
4676 adjusted_args_size.constant, struct_value_size,
4677 next_arg_reg, valreg, old_inhibit_defer_pop, call_fusage,
4678 flags, args_so_far);
4680 if (flag_ipa_ra)
4682 rtx_call_insn *last;
4683 rtx datum = NULL_RTX;
4684 if (fndecl != NULL_TREE)
4686 datum = XEXP (DECL_RTL (fndecl), 0);
4687 gcc_assert (datum != NULL_RTX
4688 && GET_CODE (datum) == SYMBOL_REF);
4690 last = last_call_insn ();
4691 add_reg_note (last, REG_CALL_DECL, datum);
4694 /* If the call setup or the call itself overlaps with anything
4695 of the argument setup we probably clobbered our call address.
4696 In that case we can't do sibcalls. */
4697 if (pass == 0
4698 && check_sibcall_argument_overlap (after_args, 0, 0))
4699 sibcall_failure = 1;
4701 /* If a non-BLKmode value is returned at the most significant end
4702 of a register, shift the register right by the appropriate amount
4703 and update VALREG accordingly. BLKmode values are handled by the
4704 group load/store machinery below. */
4705 if (!structure_value_addr
4706 && !pcc_struct_value
4707 && TYPE_MODE (rettype) != VOIDmode
4708 && TYPE_MODE (rettype) != BLKmode
4709 && REG_P (valreg)
4710 && targetm.calls.return_in_msb (rettype))
4712 if (shift_return_value (TYPE_MODE (rettype), false, valreg))
4713 sibcall_failure = 1;
4714 valreg = gen_rtx_REG (TYPE_MODE (rettype), REGNO (valreg));
4717 if (pass && (flags & ECF_MALLOC))
4719 rtx temp = gen_reg_rtx (GET_MODE (valreg));
4720 rtx_insn *last, *insns;
4722 /* The return value from a malloc-like function is a pointer. */
4723 if (TREE_CODE (rettype) == POINTER_TYPE)
4724 mark_reg_pointer (temp, MALLOC_ABI_ALIGNMENT);
4726 emit_move_insn (temp, valreg);
4728 /* The return value from a malloc-like function cannot alias
4729 anything else. */
4730 last = get_last_insn ();
4731 add_reg_note (last, REG_NOALIAS, temp);
4733 /* Write out the sequence. */
4734 insns = get_insns ();
4735 end_sequence ();
4736 emit_insn (insns);
4737 valreg = temp;
4740 /* For calls to `setjmp', etc., inform
4741 function.c:setjmp_warnings that it should complain if
4742 nonvolatile values are live. For functions that cannot
4743 return, inform flow that control does not fall through. */
4745 if ((flags & ECF_NORETURN) || pass == 0)
4747 /* The barrier must be emitted
4748 immediately after the CALL_INSN. Some ports emit more
4749 than just a CALL_INSN above, so we must search for it here. */
4751 rtx_insn *last = get_last_insn ();
4752 while (!CALL_P (last))
4754 last = PREV_INSN (last);
4755 /* There was no CALL_INSN? */
4756 gcc_assert (last != before_call);
4759 emit_barrier_after (last);
4761 /* Stack adjustments after a noreturn call are dead code.
4762 However when NO_DEFER_POP is in effect, we must preserve
4763 stack_pointer_delta. */
4764 if (inhibit_defer_pop == 0)
4766 stack_pointer_delta = old_stack_allocated;
4767 pending_stack_adjust = 0;
4771 /* If value type not void, return an rtx for the value. */
4773 if (TYPE_MODE (rettype) == VOIDmode
4774 || ignore)
4775 target = const0_rtx;
4776 else if (structure_value_addr)
4778 if (target == 0 || !MEM_P (target))
4780 target
4781 = gen_rtx_MEM (TYPE_MODE (rettype),
4782 memory_address (TYPE_MODE (rettype),
4783 structure_value_addr));
4784 set_mem_attributes (target, rettype, 1);
4787 else if (pcc_struct_value)
4789 /* This is the special C++ case where we need to
4790 know what the true target was. We take care to
4791 never use this value more than once in one expression. */
4792 target = gen_rtx_MEM (TYPE_MODE (rettype),
4793 copy_to_reg (valreg));
4794 set_mem_attributes (target, rettype, 1);
4796 /* Handle calls that return values in multiple non-contiguous locations.
4797 The Irix 6 ABI has examples of this. */
4798 else if (GET_CODE (valreg) == PARALLEL)
4800 if (target == 0)
4801 target = emit_group_move_into_temps (valreg);
4802 else if (rtx_equal_p (target, valreg))
4803 ;
4804 else if (GET_CODE (target) == PARALLEL)
4805 /* Handle the result of an emit_group_move_into_temps
4806 call in the previous pass. */
4807 emit_group_move (target, valreg);
4808 else
4809 emit_group_store (target, valreg, rettype,
4810 int_size_in_bytes (rettype));
4812 else if (target
4813 && GET_MODE (target) == TYPE_MODE (rettype)
4814 && GET_MODE (target) == GET_MODE (valreg))
4816 bool may_overlap = false;
4818 /* We have to copy a return value in a CLASS_LIKELY_SPILLED hard
4819 reg to a plain register. */
4820 if (!REG_P (target) || HARD_REGISTER_P (target))
4821 valreg = avoid_likely_spilled_reg (valreg);
4823 /* If TARGET is a MEM in the argument area, and we have
4824 saved part of the argument area, then we can't store
4825 directly into TARGET as it may get overwritten when we
4826 restore the argument save area below. Don't work too
4827 hard though and simply force TARGET to a register if it
4828 is a MEM; the optimizer is quite likely to sort it out. */
4829 if (ACCUMULATE_OUTGOING_ARGS && pass && MEM_P (target))
4830 for (i = 0; i < num_actuals; i++)
4831 if (args[i].save_area)
4833 may_overlap = true;
4834 break;
4837 if (may_overlap)
4838 target = copy_to_reg (valreg);
4839 else
4841 /* TARGET and VALREG cannot be equal at this point
4842 because the latter would not have
4843 REG_FUNCTION_VALUE_P true, while the former would if
4844 it were referring to the same register.
4846 If they refer to the same register, this move will be
4847 a no-op, except when function inlining is being
4848 done. */
4849 emit_move_insn (target, valreg);
4851 /* If we are setting a MEM, this code must be executed.
4852 Since it is emitted after the call insn, sibcall
4853 optimization cannot be performed in that case. */
4854 if (MEM_P (target))
4855 sibcall_failure = 1;
4858 else
4859 target = copy_to_reg (avoid_likely_spilled_reg (valreg));
4861 /* If we promoted this return value, make the proper SUBREG.
4862 TARGET might be const0_rtx here, so be careful. */
4863 if (REG_P (target)
4864 && TYPE_MODE (rettype) != BLKmode
4865 && GET_MODE (target) != TYPE_MODE (rettype))
4867 tree type = rettype;
4868 int unsignedp = TYPE_UNSIGNED (type);
4869 machine_mode pmode;
4871 /* Ensure we promote as expected, and get the new unsignedness. */
4872 pmode = promote_function_mode (type, TYPE_MODE (type), &unsignedp,
4873 funtype, 1);
4874 gcc_assert (GET_MODE (target) == pmode);
4876 poly_uint64 offset = subreg_lowpart_offset (TYPE_MODE (type),
4877 GET_MODE (target));
4878 target = gen_rtx_SUBREG (TYPE_MODE (type), target, offset);
4879 SUBREG_PROMOTED_VAR_P (target) = 1;
4880 SUBREG_PROMOTED_SET (target, unsignedp);
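/* Editor's example: on a target whose promote_function_mode widens
   HImode return values to SImode, TARGET arrives here as an SImode
   register and leaves as (subreg:HI (reg:SI ...) 0) with
   SUBREG_PROMOTED_VAR_P set, letting later passes elide redundant
   re-extensions.  */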
4883 /* If size of args is variable or this was a constructor call for a stack
4884 argument, restore saved stack-pointer value. */
4886 if (old_stack_level)
4888 rtx_insn *prev = get_last_insn ();
4890 emit_stack_restore (SAVE_BLOCK, old_stack_level);
4891 stack_pointer_delta = old_stack_pointer_delta;
4893 fixup_args_size_notes (prev, get_last_insn (), stack_pointer_delta);
4895 pending_stack_adjust = old_pending_adj;
4896 old_stack_allocated = stack_pointer_delta - pending_stack_adjust;
4897 stack_arg_under_construction = old_stack_arg_under_construction;
4898 highest_outgoing_arg_in_use = initial_highest_arg_in_use;
4899 stack_usage_map = initial_stack_usage_map;
4900 stack_usage_watermark = initial_stack_usage_watermark;
4901 sibcall_failure = 1;
4903 else if (ACCUMULATE_OUTGOING_ARGS && pass)
4905 #ifdef REG_PARM_STACK_SPACE
4906 if (save_area)
4907 restore_fixed_argument_area (save_area, argblock,
4908 high_to_save, low_to_save);
4909 #endif
4911 /* If we saved any argument areas, restore them. */
4912 for (i = 0; i < num_actuals; i++)
4913 if (args[i].save_area)
4915 machine_mode save_mode = GET_MODE (args[i].save_area);
4916 rtx stack_area
4917 = gen_rtx_MEM (save_mode,
4918 memory_address (save_mode,
4919 XEXP (args[i].stack_slot, 0)));
4921 if (save_mode != BLKmode)
4922 emit_move_insn (stack_area, args[i].save_area);
4923 else
4924 emit_block_move (stack_area, args[i].save_area,
4925 (gen_int_mode
4926 (args[i].locate.size.constant, Pmode)),
4927 BLOCK_OP_CALL_PARM);
4930 highest_outgoing_arg_in_use = initial_highest_arg_in_use;
4931 stack_usage_map = initial_stack_usage_map;
4932 stack_usage_watermark = initial_stack_usage_watermark;
4935 /* If this was alloca, record the new stack level. */
4936 if (flags & ECF_MAY_BE_ALLOCA)
4937 record_new_stack_level ();
4939 /* Free up storage we no longer need. */
4940 for (i = 0; i < num_actuals; ++i)
4941 free (args[i].aligned_regs);
4943 targetm.calls.end_call_args ();
4945 insns = get_insns ();
4946 end_sequence ();
4948 if (pass == 0)
4950 tail_call_insns = insns;
4952 /* Restore the pending stack adjustment now that we have
4953 finished generating the sibling call sequence. */
4955 restore_pending_stack_adjust (&save);
4957 /* Prepare arg structure for next iteration. */
4958 for (i = 0; i < num_actuals; i++)
4960 args[i].value = 0;
4961 args[i].aligned_regs = 0;
4962 args[i].stack = 0;
4965 sbitmap_free (stored_args_map);
4966 internal_arg_pointer_exp_state.scan_start = NULL;
4967 internal_arg_pointer_exp_state.cache.release ();
4969 else
4971 normal_call_insns = insns;
4973 /* Verify that we've deallocated all the stack we used. */
4974 gcc_assert ((flags & ECF_NORETURN)
4975 || known_eq (old_stack_allocated,
4976 stack_pointer_delta
4977 - pending_stack_adjust));
4980 /* If something prevents making this a sibling call,
4981 zero out the sequence. */
4982 if (sibcall_failure)
4983 tail_call_insns = NULL;
4984 else
4985 break;
4988 /* If tail call production succeeded, we need to remove REG_EQUIV notes on
4989 arguments too, as the argument area is now clobbered by the call. */
4990 if (tail_call_insns)
4992 emit_insn (tail_call_insns);
4993 crtl->tail_call_emit = true;
4995 else
4997 emit_insn (normal_call_insns);
4998 if (try_tail_call)
4999 /* Ideally we'd emit a message for all of the ways that it could
5000 have failed. */
5001 maybe_complain_about_tail_call (exp, "tail call production failed");
5004 currently_expanding_call--;
5006 free (stack_usage_map_buf);
5007 free (args);
5008 return target;
5011 /* A sibling call sequence invalidates any REG_EQUIV notes made for
5012 this function's incoming arguments.
5014 At the start of RTL generation we know the only REG_EQUIV notes
5015 in the rtl chain are those for incoming arguments, so we can look
5016 for REG_EQUIV notes between the start of the function and the
5017 NOTE_INSN_FUNCTION_BEG.
5019 This is (slight) overkill. We could keep track of the highest
5020 argument we clobber and be more selective in removing notes, but it
5021 does not seem to be worth the effort. */
5023 void
5024 fixup_tail_calls (void)
5026 rtx_insn *insn;
5028 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
5030 rtx note;
5032 /* There are never REG_EQUIV notes for the incoming arguments
5033 after the NOTE_INSN_FUNCTION_BEG note, so stop if we see it. */
5034 if (NOTE_P (insn)
5035 && NOTE_KIND (insn) == NOTE_INSN_FUNCTION_BEG)
5036 break;
5038 note = find_reg_note (insn, REG_EQUIV, 0);
5039 if (note)
5040 remove_note (insn, note);
5041 note = find_reg_note (insn, REG_EQUIV, 0);
5042 gcc_assert (!note);
5046 /* Traverse a list of TYPES and expand all complex types into their
5047 components. */
5048 static tree
5049 split_complex_types (tree types)
5051 tree p;
5053 /* Before allocating memory, check for the common case of no complex types. */
5054 for (p = types; p; p = TREE_CHAIN (p))
5056 tree type = TREE_VALUE (p);
5057 if (TREE_CODE (type) == COMPLEX_TYPE
5058 && targetm.calls.split_complex_arg (type))
5059 goto found;
5061 return types;
5063 found:
5064 types = copy_list (types);
5066 for (p = types; p; p = TREE_CHAIN (p))
5068 tree complex_type = TREE_VALUE (p);
5070 if (TREE_CODE (complex_type) == COMPLEX_TYPE
5071 && targetm.calls.split_complex_arg (complex_type))
5073 tree next, imag;
5075 /* Rewrite complex type with component type. */
5076 TREE_VALUE (p) = TREE_TYPE (complex_type);
5077 next = TREE_CHAIN (p);
5079 /* Add another component type for the imaginary part. */
5080 imag = build_tree_list (NULL_TREE, TREE_VALUE (p));
5081 TREE_CHAIN (p) = imag;
5082 TREE_CHAIN (imag) = next;
5084 /* Skip the newly created node. */
5085 p = TREE_CHAIN (p);
5089 return types;
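/* Editor's example: on a target whose split_complex_arg hook accepts
   double _Complex, the loop above rewrites a TYPE_ARG_TYPES list for
   (int, double _Complex) into (int, double, double): the real
   component replaces the complex entry and the imaginary component is
   spliced in right after it.  */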
5092 /* Output a library call to function ORGFUN (a SYMBOL_REF rtx)
5093 for a value of mode OUTMODE,
5094 with NARGS different arguments, passed as ARGS.
5095 Store the return value if RETVAL is nonzero: store it in VALUE if
5096 VALUE is nonnull, otherwise pick a convenient location. In either
5097 case return the location of the stored value.
5099 FN_TYPE should be LCT_NORMAL for `normal' calls, LCT_CONST for
5100 `const' calls, LCT_PURE for `pure' calls, or another LCT_ value for
5101 other types of library calls. */
5103 rtx
5104 emit_library_call_value_1 (int retval, rtx orgfun, rtx value,
5105 enum libcall_type fn_type,
5106 machine_mode outmode, int nargs, rtx_mode_t *args)
5108 /* Total size in bytes of all the stack-parms scanned so far. */
5109 struct args_size args_size;
5110 /* Size of arguments before any adjustments (such as rounding). */
5111 struct args_size original_args_size;
5112 int argnum;
5113 rtx fun;
5114 /* TODO: choose the correct decl type of ORGFUN. Sadly this information
5115 isn't present here, so we default to the native calling ABI. */
5116 tree fndecl ATTRIBUTE_UNUSED = NULL_TREE; /* Library calls default to the host calling ABI? */
5117 tree fntype ATTRIBUTE_UNUSED = NULL_TREE; /* Library calls default to the host calling ABI? */
5118 int count;
5119 rtx argblock = 0;
5120 CUMULATIVE_ARGS args_so_far_v;
5121 cumulative_args_t args_so_far;
5122 struct arg
5124 rtx value;
5125 machine_mode mode;
5126 rtx reg;
5127 int partial;
5128 struct locate_and_pad_arg_data locate;
5129 rtx save_area;
5131 struct arg *argvec;
5132 int old_inhibit_defer_pop = inhibit_defer_pop;
5133 rtx call_fusage = 0;
5134 rtx mem_value = 0;
5135 rtx valreg;
5136 int pcc_struct_value = 0;
5137 poly_int64 struct_value_size = 0;
5138 int flags;
5139 int reg_parm_stack_space = 0;
5140 poly_int64 needed;
5141 rtx_insn *before_call;
5142 bool have_push_fusage;
5143 tree tfom; /* type_for_mode (outmode, 0) */
5145 #ifdef REG_PARM_STACK_SPACE
5146 /* Define the boundary of the register parm stack space that needs to be
5147 saved, if any. */
5148 int low_to_save = 0, high_to_save = 0;
5149 rtx save_area = 0; /* Place that it is saved. */
5150 #endif
5152 /* Size of the stack reserved for parameter registers. */
5153 unsigned int initial_highest_arg_in_use = highest_outgoing_arg_in_use;
5154 char *initial_stack_usage_map = stack_usage_map;
5155 unsigned HOST_WIDE_INT initial_stack_usage_watermark = stack_usage_watermark;
5156 char *stack_usage_map_buf = NULL;
5158 rtx struct_value = targetm.calls.struct_value_rtx (0, 0);
5160 #ifdef REG_PARM_STACK_SPACE
5161 reg_parm_stack_space = REG_PARM_STACK_SPACE ((tree) 0);
5162 #endif
5164 /* By default, library functions cannot throw. */
5165 flags = ECF_NOTHROW;
5167 switch (fn_type)
5169 case LCT_NORMAL:
5170 break;
5171 case LCT_CONST:
5172 flags |= ECF_CONST;
5173 break;
5174 case LCT_PURE:
5175 flags |= ECF_PURE;
5176 break;
5177 case LCT_NORETURN:
5178 flags |= ECF_NORETURN;
5179 break;
5180 case LCT_THROW:
5181 flags &= ~ECF_NOTHROW;
5182 break;
5183 case LCT_RETURNS_TWICE:
5184 flags = ECF_RETURNS_TWICE;
5185 break;
5187 fun = orgfun;
5189 /* Ensure current function's preferred stack boundary is at least
5190 what we need. */
5191 if (crtl->preferred_stack_boundary < PREFERRED_STACK_BOUNDARY)
5192 crtl->preferred_stack_boundary = PREFERRED_STACK_BOUNDARY;
5194 /* If this kind of value comes back in memory,
5195 decide where in memory it should come back. */
5196 if (outmode != VOIDmode)
5198 tfom = lang_hooks.types.type_for_mode (outmode, 0);
5199 if (aggregate_value_p (tfom, 0))
5201 #ifdef PCC_STATIC_STRUCT_RETURN
5202 rtx pointer_reg
5203 = hard_function_value (build_pointer_type (tfom), 0, 0, 0);
5204 mem_value = gen_rtx_MEM (outmode, pointer_reg);
5205 pcc_struct_value = 1;
5206 if (value == 0)
5207 value = gen_reg_rtx (outmode);
5208 #else /* not PCC_STATIC_STRUCT_RETURN */
5209 struct_value_size = GET_MODE_SIZE (outmode);
5210 if (value != 0 && MEM_P (value))
5211 mem_value = value;
5212 else
5213 mem_value = assign_temp (tfom, 1, 1);
5214 #endif
5215 /* This call returns a big structure. */
5216 flags &= ~(ECF_CONST | ECF_PURE | ECF_LOOPING_CONST_OR_PURE);
5219 else
5220 tfom = void_type_node;
5222 /* ??? Unfinished: must pass the memory address as an argument. */
5224 /* Copy all the libcall-arguments out of the varargs data
5225 and into a vector ARGVEC.
5227 Compute how to pass each argument. We only support a very small subset
5228 of the full argument passing conventions to limit complexity here since
5229 library functions shouldn't have many args. */
5231 argvec = XALLOCAVEC (struct arg, nargs + 1);
5232 memset (argvec, 0, (nargs + 1) * sizeof (struct arg));
5234 #ifdef INIT_CUMULATIVE_LIBCALL_ARGS
5235 INIT_CUMULATIVE_LIBCALL_ARGS (args_so_far_v, outmode, fun);
5236 #else
5237 INIT_CUMULATIVE_ARGS (args_so_far_v, NULL_TREE, fun, 0, nargs);
5238 #endif
5239 args_so_far = pack_cumulative_args (&args_so_far_v);
5241 args_size.constant = 0;
5242 args_size.var = 0;
5244 count = 0;
5246 push_temp_slots ();
5248 /* If there's a structure value address to be passed,
5249 either pass it in the special place, or pass it as an extra argument. */
5250 if (mem_value && struct_value == 0 && ! pcc_struct_value)
5252 rtx addr = XEXP (mem_value, 0);
5254 nargs++;
5256 /* Make sure it is a reasonable operand for a move or push insn. */
5257 if (!REG_P (addr) && !MEM_P (addr)
5258 && !(CONSTANT_P (addr)
5259 && targetm.legitimate_constant_p (Pmode, addr)))
5260 addr = force_operand (addr, NULL_RTX);
5262 argvec[count].value = addr;
5263 argvec[count].mode = Pmode;
5264 argvec[count].partial = 0;
5266 function_arg_info ptr_arg (Pmode, /*named=*/true);
5267 argvec[count].reg = targetm.calls.function_arg (args_so_far, ptr_arg);
5268 gcc_assert (targetm.calls.arg_partial_bytes (args_so_far, ptr_arg) == 0);
5270 locate_and_pad_parm (Pmode, NULL_TREE,
5271 #ifdef STACK_PARMS_IN_REG_PARM_AREA
5272 1,
5273 #else
5274 argvec[count].reg != 0,
5275 #endif
5276 reg_parm_stack_space, 0,
5277 NULL_TREE, &args_size, &argvec[count].locate);
5279 if (argvec[count].reg == 0 || argvec[count].partial != 0
5280 || reg_parm_stack_space > 0)
5281 args_size.constant += argvec[count].locate.size.constant;
5283 targetm.calls.function_arg_advance (args_so_far, ptr_arg);
5285 count++;
5286 }
5288 for (unsigned int i = 0; count < nargs; i++, count++)
5289 {
5290 rtx val = args[i].first;
5291 function_arg_info arg (args[i].second, /*named=*/true);
5292 int unsigned_p = 0;
5294 /* We cannot convert the arg value to the mode the library wants here;
5295 must do it earlier where we know the signedness of the arg. */
5296 gcc_assert (arg.mode != BLKmode
5297 && (GET_MODE (val) == arg.mode
5298 || GET_MODE (val) == VOIDmode));
5300 /* Make sure it is a reasonable operand for a move or push insn. */
5301 if (!REG_P (val) && !MEM_P (val)
5302 && !(CONSTANT_P (val)
5303 && targetm.legitimate_constant_p (arg.mode, val)))
5304 val = force_operand (val, NULL_RTX);
5306 if (pass_by_reference (&args_so_far_v, arg))
5307 {
5308 rtx slot;
5309 int must_copy = !reference_callee_copied (&args_so_far_v, arg);
5311 /* If this was a CONST function, it is now PURE since it now
5312 reads memory. */
5313 if (flags & ECF_CONST)
5314 {
5315 flags &= ~ECF_CONST;
5316 flags |= ECF_PURE;
5317 }
5319 if (MEM_P (val) && !must_copy)
5320 {
5321 tree val_expr = MEM_EXPR (val);
5322 if (val_expr)
5323 mark_addressable (val_expr);
5324 slot = val;
5325 }
5326 else
5327 {
5328 slot = assign_temp (lang_hooks.types.type_for_mode (arg.mode, 0),
5329 1, 1);
5330 emit_move_insn (slot, val);
5331 }
5333 call_fusage = gen_rtx_EXPR_LIST (VOIDmode,
5334 gen_rtx_USE (VOIDmode, slot),
5335 call_fusage);
5336 if (must_copy)
5337 call_fusage = gen_rtx_EXPR_LIST (VOIDmode,
5338 gen_rtx_CLOBBER (VOIDmode,
5339 slot),
5340 call_fusage);
5342 arg.mode = Pmode;
5343 arg.pass_by_reference = true;
5344 val = force_operand (XEXP (slot, 0), NULL_RTX);
5345 }
5347 arg.mode = promote_function_mode (NULL_TREE, arg.mode, &unsigned_p,
5348 NULL_TREE, 0);
5349 argvec[count].mode = arg.mode;
5350 argvec[count].value = convert_modes (arg.mode, GET_MODE (val), val,
5351 unsigned_p);
5352 argvec[count].reg = targetm.calls.function_arg (args_so_far, arg);
5354 argvec[count].partial
5355 = targetm.calls.arg_partial_bytes (args_so_far, arg);
5357 if (argvec[count].reg == 0
5358 || argvec[count].partial != 0
5359 || reg_parm_stack_space > 0)
5360 {
5361 locate_and_pad_parm (arg.mode, NULL_TREE,
5362 #ifdef STACK_PARMS_IN_REG_PARM_AREA
5363 1,
5364 #else
5365 argvec[count].reg != 0,
5366 #endif
5367 reg_parm_stack_space, argvec[count].partial,
5368 NULL_TREE, &args_size, &argvec[count].locate);
5369 args_size.constant += argvec[count].locate.size.constant;
5370 gcc_assert (!argvec[count].locate.size.var);
5371 }
5372 #ifdef BLOCK_REG_PADDING
5373 else
5374 /* The argument is passed entirely in registers. See at which
5375 end it should be padded. */
5376 argvec[count].locate.where_pad =
5377 BLOCK_REG_PADDING (arg.mode, NULL_TREE,
5378 known_le (GET_MODE_SIZE (arg.mode),
5379 UNITS_PER_WORD));
5380 #endif
5382 targetm.calls.function_arg_advance (args_so_far, arg);
5383 }
5385 for (int i = 0; i < nargs; i++)
5386 if (reg_parm_stack_space > 0
5387 || argvec[i].reg == 0
5388 || argvec[i].partial != 0)
5389 update_stack_alignment_for_call (&argvec[i].locate);
5391 /* If this machine requires an external definition for library
5392 functions, write one out. */
5393 assemble_external_libcall (fun);
5395 original_args_size = args_size;
5396 args_size.constant = (aligned_upper_bound (args_size.constant
5397 + stack_pointer_delta,
5398 STACK_BYTES)
5399 - stack_pointer_delta);
5401 args_size.constant = upper_bound (args_size.constant,
5402 reg_parm_stack_space);
5404 if (! OUTGOING_REG_PARM_STACK_SPACE ((!fndecl ? fntype : TREE_TYPE (fndecl))))
5405 args_size.constant -= reg_parm_stack_space;
5407 crtl->outgoing_args_size = upper_bound (crtl->outgoing_args_size,
5408 args_size.constant);
5410 if (flag_stack_usage_info && !ACCUMULATE_OUTGOING_ARGS)
5411 {
5412 poly_int64 pushed = args_size.constant + pending_stack_adjust;
5413 current_function_pushed_stack_size
5414 = upper_bound (current_function_pushed_stack_size, pushed);
5415 }
5417 if (ACCUMULATE_OUTGOING_ARGS)
5418 {
5419 /* Since the stack pointer will never be pushed, it is possible for
5420 the evaluation of a parm to clobber something we have already
5421 written to the stack. Since most function calls on RISC machines
5422 do not use the stack, this is uncommon, but must work correctly.
5424 Therefore, we save any area of the stack that was already written
5425 and that we are using. Here we set up to do this by making a new
5426 stack usage map from the old one.
5428 Another approach might be to try to reorder the argument
5429 evaluations to avoid this conflicting stack usage. */
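/* A sketch of the map (illustrative, assuming 8 bytes of outgoing
   argument space of which the first four are already occupied):

     byte index:   0 1 2 3 4 5 6 7
     map contents: 1 1 1 1 0 0 0 0

   A nonzero entry means that stack byte may already hold a stored
   argument and must be saved before being overwritten.  */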
5431 needed = args_size.constant;
5433 /* Since we will be writing into the entire argument area, the
5434 map must be allocated for its entire size, not just the part that
5435 is the responsibility of the caller. */
5436 if (! OUTGOING_REG_PARM_STACK_SPACE ((!fndecl ? fntype : TREE_TYPE (fndecl))))
5437 needed += reg_parm_stack_space;
5439 poly_int64 limit = needed;
5440 if (ARGS_GROW_DOWNWARD)
5441 limit += 1;
5443 /* For polynomial sizes, this is the maximum possible size needed
5444 for arguments with a constant size and offset. */
5445 HOST_WIDE_INT const_limit = constant_lower_bound (limit);
5446 highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
5447 const_limit);
5449 stack_usage_map_buf = XNEWVEC (char, highest_outgoing_arg_in_use);
5450 stack_usage_map = stack_usage_map_buf;
5452 if (initial_highest_arg_in_use)
5453 memcpy (stack_usage_map, initial_stack_usage_map,
5454 initial_highest_arg_in_use);
5456 if (initial_highest_arg_in_use != highest_outgoing_arg_in_use)
5457 memset (&stack_usage_map[initial_highest_arg_in_use], 0,
5458 highest_outgoing_arg_in_use - initial_highest_arg_in_use);
5459 needed = 0;
5461 /* We must be careful to use virtual regs before they're instantiated,
5462 and real regs afterwards. Loop optimization, for example, can create
5463 new libcalls after we've instantiated the virtual regs, and if we
5464 use virtuals anyway, they won't match the rtl patterns. */
5466 if (virtuals_instantiated)
5467 argblock = plus_constant (Pmode, stack_pointer_rtx,
5468 STACK_POINTER_OFFSET);
5469 else
5470 argblock = virtual_outgoing_args_rtx;
5471 }
5472 else
5473 {
5474 if (!PUSH_ARGS)
5475 argblock = push_block (gen_int_mode (args_size.constant, Pmode), 0, 0);
5476 }
5478 /* We push args individually in reverse order, perform stack alignment
5479 before the first push (the last arg). */
5480 if (argblock == 0)
5481 anti_adjust_stack (gen_int_mode (args_size.constant
5482 - original_args_size.constant,
5483 Pmode));
5485 argnum = nargs - 1;
5487 #ifdef REG_PARM_STACK_SPACE
5488 if (ACCUMULATE_OUTGOING_ARGS)
5489 {
5490 /* The argument list is the property of the called routine and it
5491 may clobber it. If the fixed area has been used for previous
5492 parameters, we must save and restore it. */
5493 save_area = save_fixed_argument_area (reg_parm_stack_space, argblock,
5494 &low_to_save, &high_to_save);
5495 }
5496 #endif
5498 /* When expanding a normal call, args are stored in push order,
5499 which is the reverse of what we have here. */
5500 bool any_regs = false;
5501 for (int i = nargs; i-- > 0; )
5502 if (argvec[i].reg != NULL_RTX)
5503 {
5504 targetm.calls.call_args (argvec[i].reg, NULL_TREE);
5505 any_regs = true;
5506 }
5507 if (!any_regs)
5508 targetm.calls.call_args (pc_rtx, NULL_TREE);
5510 /* Push the args that need to be pushed. */
5512 have_push_fusage = false;
5514 /* ARGNUM indexes the ARGVEC array in the order in which the arguments
5515 are to be pushed. */
5516 for (count = 0; count < nargs; count++, argnum--)
5517 {
5518 machine_mode mode = argvec[argnum].mode;
5519 rtx val = argvec[argnum].value;
5520 rtx reg = argvec[argnum].reg;
5521 int partial = argvec[argnum].partial;
5522 unsigned int parm_align = argvec[argnum].locate.boundary;
5523 poly_int64 lower_bound = 0, upper_bound = 0;
5525 if (! (reg != 0 && partial == 0))
5526 {
5527 rtx use;
5529 if (ACCUMULATE_OUTGOING_ARGS)
5530 {
5531 /* If this is being stored into a pre-allocated, fixed-size,
5532 stack area, save any previous data at that location. */
5534 if (ARGS_GROW_DOWNWARD)
5535 {
5536 /* stack_slot is negative, but we want to index stack_usage_map
5537 with positive values. */
5538 upper_bound = -argvec[argnum].locate.slot_offset.constant + 1;
5539 lower_bound = upper_bound - argvec[argnum].locate.size.constant;
5540 }
5541 else
5542 {
5543 lower_bound = argvec[argnum].locate.slot_offset.constant;
5544 upper_bound = lower_bound + argvec[argnum].locate.size.constant;
5545 }
5547 if (stack_region_maybe_used_p (lower_bound, upper_bound,
5548 reg_parm_stack_space))
5549 {
5550 /* We need to make a save area. */
5551 poly_uint64 size
5552 = argvec[argnum].locate.size.constant * BITS_PER_UNIT;
5553 machine_mode save_mode
5554 = int_mode_for_size (size, 1).else_blk ();
5555 rtx adr
5556 = plus_constant (Pmode, argblock,
5557 argvec[argnum].locate.offset.constant);
5558 rtx stack_area
5559 = gen_rtx_MEM (save_mode, memory_address (save_mode, adr));
5561 if (save_mode == BLKmode)
5562 {
5563 argvec[argnum].save_area
5564 = assign_stack_temp (BLKmode,
5565 argvec[argnum].locate.size.constant
5566 );
5568 emit_block_move (validize_mem
5569 (copy_rtx (argvec[argnum].save_area)),
5570 stack_area,
5571 (gen_int_mode
5572 (argvec[argnum].locate.size.constant,
5573 Pmode)),
5574 BLOCK_OP_CALL_PARM);
5575 }
5576 else
5577 {
5578 argvec[argnum].save_area = gen_reg_rtx (save_mode);
5580 emit_move_insn (argvec[argnum].save_area, stack_area);
5581 }
5582 }
5583 }
5585 emit_push_insn (val, mode, NULL_TREE, NULL_RTX, parm_align,
5586 partial, reg, 0, argblock,
5587 (gen_int_mode
5588 (argvec[argnum].locate.offset.constant, Pmode)),
5589 reg_parm_stack_space,
5590 ARGS_SIZE_RTX (argvec[argnum].locate.alignment_pad), false);
5592 /* Now mark the segment we just used. */
5593 if (ACCUMULATE_OUTGOING_ARGS)
5594 mark_stack_region_used (lower_bound, upper_bound);
5596 NO_DEFER_POP;
5598 /* Indicate argument access so that alias.c knows that these
5599 values are live. */
5600 if (argblock)
5601 use = plus_constant (Pmode, argblock,
5602 argvec[argnum].locate.offset.constant);
5603 else if (have_push_fusage)
5604 continue;
5605 else
5606 {
5607 /* When arguments are pushed, trying to tell alias.c where
5608 exactly this argument is won't work, because the
5609 auto-increment causes confusion. So we merely indicate
5610 that we access something with a known mode somewhere on
5611 the stack. */
5612 use = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
5613 gen_rtx_SCRATCH (Pmode));
5614 have_push_fusage = true;
5615 }
5616 use = gen_rtx_MEM (argvec[argnum].mode, use);
5617 use = gen_rtx_USE (VOIDmode, use);
5618 call_fusage = gen_rtx_EXPR_LIST (VOIDmode, use, call_fusage);
5619 }
5620 }
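/* For illustration, each entry added above has roughly the shape
   (expr_list (use (mem:M (plus (reg sp) (scratch)))) ...) in the pushed
   case; the exact RTL depends on the target and argument mode.  */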
5622 argnum = nargs - 1;
5624 fun = prepare_call_address (NULL, fun, NULL, &call_fusage, 0, 0);
5626 /* Now load any reg parms into their regs. */
5628 /* ARGNUM indexes the ARGVEC array in the order in which the arguments
5629 are to be pushed. */
5630 for (count = 0; count < nargs; count++, argnum--)
5631 {
5632 machine_mode mode = argvec[argnum].mode;
5633 rtx val = argvec[argnum].value;
5634 rtx reg = argvec[argnum].reg;
5635 int partial = argvec[argnum].partial;
5637 /* Handle calls that pass values in multiple non-contiguous
5638 locations. The PA64 has examples of this for library calls. */
5639 if (reg != 0 && GET_CODE (reg) == PARALLEL)
5640 emit_group_load (reg, val, NULL_TREE, GET_MODE_SIZE (mode));
5641 else if (reg != 0 && partial == 0)
5642 {
5643 emit_move_insn (reg, val);
5644 #ifdef BLOCK_REG_PADDING
5645 poly_int64 size = GET_MODE_SIZE (argvec[argnum].mode);
5647 /* Copied from load_register_parameters. */
5649 /* Handle the case where we have a value that needs shifting
5650 up to the MSB, e.g. a QImode value that we are padding
5651 upward on a BYTES_BIG_ENDIAN machine. */
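/* Worked example (illustrative): on a 64-bit big-endian target,
   UNITS_PER_WORD is 8, so a QImode value is shifted left by
   (8 - 1) * BITS_PER_UNIT = 56 bits to land in the register's most
   significant byte.  */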
5652 if (known_lt (size, UNITS_PER_WORD)
5653 && (argvec[argnum].locate.where_pad
5654 == (BYTES_BIG_ENDIAN ? PAD_UPWARD : PAD_DOWNWARD)))
5655 {
5656 rtx x;
5657 poly_int64 shift = (UNITS_PER_WORD - size) * BITS_PER_UNIT;
5659 /* Assigning REG here rather than a temp makes CALL_FUSAGE
5660 report the whole reg as used. Strictly speaking, the
5661 call only uses SIZE bytes at the msb end, but it doesn't
5662 seem worth generating rtl to say that. */
5663 reg = gen_rtx_REG (word_mode, REGNO (reg));
5664 x = expand_shift (LSHIFT_EXPR, word_mode, reg, shift, reg, 1);
5665 if (x != reg)
5666 emit_move_insn (reg, x);
5667 }
5668 #endif
5669 }
5671 NO_DEFER_POP;
5672 }
5674 /* Any regs containing parms remain in use through the call. */
5675 for (count = 0; count < nargs; count++)
5676 {
5677 rtx reg = argvec[count].reg;
5678 if (reg != 0 && GET_CODE (reg) == PARALLEL)
5679 use_group_regs (&call_fusage, reg);
5680 else if (reg != 0)
5681 {
5682 int partial = argvec[count].partial;
5683 if (partial)
5684 {
5685 int nregs;
5686 gcc_assert (partial % UNITS_PER_WORD == 0);
5687 nregs = partial / UNITS_PER_WORD;
5688 use_regs (&call_fusage, REGNO (reg), nregs);
5689 }
5690 else
5691 use_reg (&call_fusage, reg);
5692 }
5693 }
5695 /* Pass the function the address in which to return a structure value. */
5696 if (mem_value != 0 && struct_value != 0 && ! pcc_struct_value)
5697 {
5698 emit_move_insn (struct_value,
5699 force_reg (Pmode,
5700 force_operand (XEXP (mem_value, 0),
5701 NULL_RTX)));
5702 if (REG_P (struct_value))
5703 use_reg (&call_fusage, struct_value);
5704 }
5706 /* Don't allow popping to be deferred, since then
5707 cse'ing of library calls could delete a call and leave the pop. */
5708 NO_DEFER_POP;
5709 valreg = (mem_value == 0 && outmode != VOIDmode
5710 ? hard_libcall_value (outmode, orgfun) : NULL_RTX);
5712 /* Stack must be properly aligned now. */
5713 gcc_assert (multiple_p (stack_pointer_delta,
5714 PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT));
5716 before_call = get_last_insn ();
5718 if (flag_callgraph_info)
5719 record_final_call (SYMBOL_REF_DECL (orgfun), UNKNOWN_LOCATION);
5721 /* We pass the old value of inhibit_defer_pop + 1 to emit_call_1, which
5722 will set inhibit_defer_pop to that value. */
5723 /* The return type is needed to decide how many bytes the function pops.
5724 Signedness plays no role in that, so for simplicity, we pretend it's
5725 always signed. We also assume that the list of arguments passed has
5726 no impact, so we pretend it is unknown. */
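/* Concretely, that is why the call below passes
   build_function_type (tfom, NULL_TREE): the correct return type with
   an unspecified argument list.  */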
5728 emit_call_1 (fun, NULL,
5729 get_identifier (XSTR (orgfun, 0)),
5730 build_function_type (tfom, NULL_TREE),
5731 original_args_size.constant, args_size.constant,
5732 struct_value_size,
5733 targetm.calls.function_arg (args_so_far,
5734 function_arg_info::end_marker ()),
5735 valreg,
5736 old_inhibit_defer_pop + 1, call_fusage, flags, args_so_far);
5738 if (flag_ipa_ra)
5739 {
5740 rtx datum = orgfun;
5741 gcc_assert (GET_CODE (datum) == SYMBOL_REF);
5742 rtx_call_insn *last = last_call_insn ();
5743 add_reg_note (last, REG_CALL_DECL, datum);
5744 }
5746 /* Right-shift returned value if necessary. */
5747 if (!pcc_struct_value
5748 && TYPE_MODE (tfom) != BLKmode
5749 && targetm.calls.return_in_msb (tfom))
5750 {
5751 shift_return_value (TYPE_MODE (tfom), false, valreg);
5752 valreg = gen_rtx_REG (TYPE_MODE (tfom), REGNO (valreg));
5753 }
5755 targetm.calls.end_call_args ();
5757 /* For calls to `setjmp', etc., inform function.c:setjmp_warnings
5758 that it should complain if nonvolatile values are live. For
5759 functions that cannot return, inform flow that control does not
5760 fall through. */
5761 if (flags & ECF_NORETURN)
5762 {
5763 /* The barrier note must be emitted
5764 immediately after the CALL_INSN. Some ports emit more than
5765 just a CALL_INSN above, so we must search for it here. */
5766 rtx_insn *last = get_last_insn ();
5767 while (!CALL_P (last))
5768 {
5769 last = PREV_INSN (last);
5770 /* There was no CALL_INSN? */
5771 gcc_assert (last != before_call);
5772 }
5774 emit_barrier_after (last);
5775 }
5777 /* Consider that "regular" libcalls, i.e. all of them except for LCT_THROW
5778 and LCT_RETURNS_TWICE, cannot perform non-local gotos. */
5779 if (flags & ECF_NOTHROW)
5780 {
5781 rtx_insn *last = get_last_insn ();
5782 while (!CALL_P (last))
5783 {
5784 last = PREV_INSN (last);
5785 /* There was no CALL_INSN? */
5786 gcc_assert (last != before_call);
5787 }
5789 make_reg_eh_region_note_nothrow_nononlocal (last);
5790 }
5792 /* Now restore inhibit_defer_pop to its actual original value. */
5793 OK_DEFER_POP;
5795 pop_temp_slots ();
5797 /* Copy the value to the right place. */
5798 if (outmode != VOIDmode && retval)
5799 {
5800 if (mem_value)
5801 {
5802 if (value == 0)
5803 value = mem_value;
5804 if (value != mem_value)
5805 emit_move_insn (value, mem_value);
5806 }
5807 else if (GET_CODE (valreg) == PARALLEL)
5808 {
5809 if (value == 0)
5810 value = gen_reg_rtx (outmode);
5811 emit_group_store (value, valreg, NULL_TREE, GET_MODE_SIZE (outmode));
5812 }
5813 else
5814 {
5815 /* Convert to the proper mode if a promotion has been active. */
5816 if (GET_MODE (valreg) != outmode)
5817 {
5818 int unsignedp = TYPE_UNSIGNED (tfom);
5820 gcc_assert (promote_function_mode (tfom, outmode, &unsignedp,
5821 fndecl ? TREE_TYPE (fndecl) : fntype, 1)
5822 == GET_MODE (valreg));
5823 valreg = convert_modes (outmode, GET_MODE (valreg), valreg, 0);
5824 }
5826 if (value != 0)
5827 emit_move_insn (value, valreg);
5828 else
5829 value = valreg;
5830 }
5831 }
5833 if (ACCUMULATE_OUTGOING_ARGS)
5834 {
5835 #ifdef REG_PARM_STACK_SPACE
5836 if (save_area)
5837 restore_fixed_argument_area (save_area, argblock,
5838 high_to_save, low_to_save);
5839 #endif
5841 /* If we saved any argument areas, restore them. */
5842 for (count = 0; count < nargs; count++)
5843 if (argvec[count].save_area)
5844 {
5845 machine_mode save_mode = GET_MODE (argvec[count].save_area);
5846 rtx adr = plus_constant (Pmode, argblock,
5847 argvec[count].locate.offset.constant);
5848 rtx stack_area = gen_rtx_MEM (save_mode,
5849 memory_address (save_mode, adr));
5851 if (save_mode == BLKmode)
5852 emit_block_move (stack_area,
5853 validize_mem
5854 (copy_rtx (argvec[count].save_area)),
5855 (gen_int_mode
5856 (argvec[count].locate.size.constant, Pmode)),
5857 BLOCK_OP_CALL_PARM);
5858 else
5859 emit_move_insn (stack_area, argvec[count].save_area);
5860 }
5862 highest_outgoing_arg_in_use = initial_highest_arg_in_use;
5863 stack_usage_map = initial_stack_usage_map;
5864 stack_usage_watermark = initial_stack_usage_watermark;
5865 }
5867 free (stack_usage_map_buf);
5869 return value;
5870 }
5874 /* Store a single argument for a function call
5875 into the register or memory area where it must be passed.
5876 *ARG describes the argument value and where to pass it.
5878 ARGBLOCK is the address of the stack-block for all the arguments,
5879 or 0 on a machine where arguments are pushed individually.
5881 MAY_BE_ALLOCA nonzero says this could be a call to `alloca',
5882 so we must be careful about how the stack is used.
5884 VARIABLE_SIZE nonzero says that this was a variable-sized outgoing
5885 argument stack. This is used when ACCUMULATE_OUTGOING_ARGS is set, to
5886 indicate that we need not worry about saving and restoring the stack.
5888 FNDECL is the declaration of the function we are calling.
5890 Return nonzero if this arg should cause sibcall failure,
5891 zero otherwise. */
5893 static int
5894 store_one_arg (struct arg_data *arg, rtx argblock, int flags,
5895 int variable_size ATTRIBUTE_UNUSED, int reg_parm_stack_space)
5896 {
5897 tree pval = arg->tree_value;
5898 rtx reg = 0;
5899 int partial = 0;
5900 poly_int64 used = 0;
5901 poly_int64 lower_bound = 0, upper_bound = 0;
5902 int sibcall_failure = 0;
5904 if (TREE_CODE (pval) == ERROR_MARK)
5905 return 1;
5907 /* Push a new temporary level for any temporaries we make for
5908 this argument. */
5909 push_temp_slots ();
5911 if (ACCUMULATE_OUTGOING_ARGS && !(flags & ECF_SIBCALL))
5912 {
5913 /* If this is being stored into a pre-allocated, fixed-size, stack area,
5914 save any previous data at that location. */
5915 if (argblock && ! variable_size && arg->stack)
5916 {
5917 if (ARGS_GROW_DOWNWARD)
5918 {
5919 /* stack_slot is negative, but we want to index stack_usage_map
5920 with positive values. */
5921 if (GET_CODE (XEXP (arg->stack_slot, 0)) == PLUS)
5922 {
5923 rtx offset = XEXP (XEXP (arg->stack_slot, 0), 1);
5924 upper_bound = -rtx_to_poly_int64 (offset) + 1;
5925 }
5926 else
5927 upper_bound = 0;
5929 lower_bound = upper_bound - arg->locate.size.constant;
5930 }
5931 else
5932 {
5933 if (GET_CODE (XEXP (arg->stack_slot, 0)) == PLUS)
5934 {
5935 rtx offset = XEXP (XEXP (arg->stack_slot, 0), 1);
5936 lower_bound = rtx_to_poly_int64 (offset);
5937 }
5938 else
5939 lower_bound = 0;
5941 upper_bound = lower_bound + arg->locate.size.constant;
5942 }
5944 if (stack_region_maybe_used_p (lower_bound, upper_bound,
5945 reg_parm_stack_space))
5946 {
5947 /* We need to make a save area. */
5948 poly_uint64 size = arg->locate.size.constant * BITS_PER_UNIT;
5949 machine_mode save_mode
5950 = int_mode_for_size (size, 1).else_blk ();
5951 rtx adr = memory_address (save_mode, XEXP (arg->stack_slot, 0));
5952 rtx stack_area = gen_rtx_MEM (save_mode, adr);
5954 if (save_mode == BLKmode)
5955 {
5956 arg->save_area
5957 = assign_temp (TREE_TYPE (arg->tree_value), 1, 1);
5958 preserve_temp_slots (arg->save_area);
5959 emit_block_move (validize_mem (copy_rtx (arg->save_area)),
5960 stack_area,
5961 (gen_int_mode
5962 (arg->locate.size.constant, Pmode)),
5963 BLOCK_OP_CALL_PARM);
5964 }
5965 else
5966 {
5967 arg->save_area = gen_reg_rtx (save_mode);
5968 emit_move_insn (arg->save_area, stack_area);
5969 }
5970 }
5971 }
5972 }
5974 /* If this isn't going to be placed on both the stack and in registers,
5975 set up the register and number of words. */
5976 if (! arg->pass_on_stack)
5977 {
5978 if (flags & ECF_SIBCALL)
5979 reg = arg->tail_call_reg;
5980 else
5981 reg = arg->reg;
5982 partial = arg->partial;
5983 }
5985 /* Being passed entirely in a register. We shouldn't be called in
5986 this case. */
5987 gcc_assert (reg == 0 || partial != 0);
5989 /* If this arg needs special alignment, don't load the registers
5990 here. */
5991 if (arg->n_aligned_regs != 0)
5992 reg = 0;
5994 /* If this is being passed partially in a register, we can't evaluate
5995 it directly into its stack slot. Otherwise, we can. */
5996 if (arg->value == 0)
5997 {
5998 /* stack_arg_under_construction is nonzero if a function argument is
5999 being evaluated directly into the outgoing argument list and
6000 expand_call must take special action to preserve the argument list
6001 if it is called recursively.
6003 For scalar function arguments stack_usage_map is sufficient to
6004 determine which stack slots must be saved and restored. Scalar
6005 arguments in general have pass_on_stack == 0.
6007 If this argument is initialized by a function which takes the
6008 address of the argument (a C++ constructor or a C function
6009 returning a BLKmode structure), then stack_usage_map is
6010 insufficient and expand_call must push the stack around the
6011 function call. Such arguments have pass_on_stack == 1.
6013 Note that it is always safe to set stack_arg_under_construction,
6014 but this generates suboptimal code if set when not needed. */
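/* An illustrative case: for
     struct S f (void);  ... g (f ());
   the BLKmode return value of f may be constructed directly in g's
   outgoing argument area, so a nested expand_call must save and
   restore that area (such arguments have pass_on_stack == 1).  */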
6016 if (arg->pass_on_stack)
6017 stack_arg_under_construction++;
6019 arg->value = expand_expr (pval,
6020 (partial
6021 || TYPE_MODE (TREE_TYPE (pval)) != arg->mode)
6022 ? NULL_RTX : arg->stack,
6023 VOIDmode, EXPAND_STACK_PARM);
6025 /* If the mode doesn't agree (because we are promoting the object,
6026 or for any other reason), convert it now. */
6028 if (arg->mode != TYPE_MODE (TREE_TYPE (pval)))
6029 arg->value = convert_modes (arg->mode, TYPE_MODE (TREE_TYPE (pval)),
6030 arg->value, arg->unsignedp);
6032 if (arg->pass_on_stack)
6033 stack_arg_under_construction--;
6034 }
6036 /* Check for overlap with already clobbered argument area. */
6037 if ((flags & ECF_SIBCALL)
6038 && MEM_P (arg->value)
6039 && mem_might_overlap_already_clobbered_arg_p (XEXP (arg->value, 0),
6040 arg->locate.size.constant))
6041 sibcall_failure = 1;
6043 /* Don't allow anything left on stack from computation
6044 of argument to alloca. */
6045 if (flags & ECF_MAY_BE_ALLOCA)
6046 do_pending_stack_adjust ();
6048 if (arg->value == arg->stack)
6049 /* If the value is already in the stack slot, we are done. */
6050 ;
6051 else if (arg->mode != BLKmode)
6052 {
6053 unsigned int parm_align;
6055 /* Argument is a scalar, not entirely passed in registers.
6056 (If part is passed in registers, arg->partial says how much
6057 and emit_push_insn will take care of putting it there.)
6059 Push it, and if its size is less than the
6060 amount of space allocated to it,
6061 also bump stack pointer by the additional space.
6062 Note that in C the default argument promotions
6063 will prevent such mismatches. */
6065 poly_int64 size = (TYPE_EMPTY_P (TREE_TYPE (pval))
6066 ? 0 : GET_MODE_SIZE (arg->mode));
6068 /* Compute how much space the push instruction will push.
6069 On many machines, pushing a byte will advance the stack
6070 pointer by a halfword. */
6071 #ifdef PUSH_ROUNDING
6072 size = PUSH_ROUNDING (size);
6073 #endif
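/* E.g. (illustrative): if PUSH_ROUNDING rounds to 2-byte units, a
   1-byte QImode push gives SIZE (and hence USED, just below) == 2,
   before the PARM_BOUNDARY alignment further down.  */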
6074 used = size;
6076 /* Compute how much space the argument should get:
6077 round up to a multiple of the alignment for arguments. */
6078 if (targetm.calls.function_arg_padding (arg->mode, TREE_TYPE (pval))
6079 != PAD_NONE)
6080 /* At the moment we don't (need to) support ABIs for which the
6081 padding isn't known at compile time. In principle it should
6082 be easy to add though. */
6083 used = force_align_up (size, PARM_BOUNDARY / BITS_PER_UNIT);
6085 /* Compute the alignment of the pushed argument. */
6086 parm_align = arg->locate.boundary;
6087 if (targetm.calls.function_arg_padding (arg->mode, TREE_TYPE (pval))
6088 == PAD_DOWNWARD)
6089 {
6090 poly_int64 pad = used - size;
6091 unsigned int pad_align = known_alignment (pad) * BITS_PER_UNIT;
6092 if (pad_align != 0)
6093 parm_align = MIN (parm_align, pad_align);
6094 }
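/* Worked example (illustrative): if USED is 4 bytes and SIZE is 1,
   PAD is 3; the largest power of two dividing 3 is 1 byte, so
   PAD_ALIGN is BITS_PER_UNIT and PARM_ALIGN drops accordingly.  */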
6096 /* This isn't already where we want it on the stack, so put it there.
6097 This can either be done with push or copy insns. */
6098 if (maybe_ne (used, 0)
6099 && !emit_push_insn (arg->value, arg->mode, TREE_TYPE (pval),
6100 NULL_RTX, parm_align, partial, reg, used - size,
6101 argblock, ARGS_SIZE_RTX (arg->locate.offset),
6102 reg_parm_stack_space,
6103 ARGS_SIZE_RTX (arg->locate.alignment_pad), true))
6104 sibcall_failure = 1;
6106 /* Unless this is a partially-in-register argument, the argument is now
6107 in the stack. */
6108 if (partial == 0)
6109 arg->value = arg->stack;
6110 }
6111 else
6112 {
6113 /* BLKmode, at least partly to be pushed. */
6115 unsigned int parm_align;
6116 poly_int64 excess;
6117 rtx size_rtx;
6119 /* Pushing a nonscalar.
6120 If part is passed in registers, PARTIAL says how much
6121 and emit_push_insn will take care of putting it there. */
6123 /* Round its size up to a multiple
6124 of the allocation unit for arguments. */
6126 if (arg->locate.size.var != 0)
6127 {
6128 excess = 0;
6129 size_rtx = ARGS_SIZE_RTX (arg->locate.size);
6130 }
6131 else
6132 {
6133 /* PUSH_ROUNDING has no effect on us, because emit_push_insn
6134 for BLKmode is careful to avoid it. */
6135 excess = (arg->locate.size.constant
6136 - arg_int_size_in_bytes (TREE_TYPE (pval))
6137 + partial);
6138 size_rtx = expand_expr (arg_size_in_bytes (TREE_TYPE (pval)),
6139 NULL_RTX, TYPE_MODE (sizetype),
6140 EXPAND_NORMAL);
6141 }
6143 parm_align = arg->locate.boundary;
6145 /* When an argument is padded down, the block is aligned to
6146 PARM_BOUNDARY, but the actual argument isn't. */
6147 if (targetm.calls.function_arg_padding (arg->mode, TREE_TYPE (pval))
6148 == PAD_DOWNWARD)
6149 {
6150 if (arg->locate.size.var)
6151 parm_align = BITS_PER_UNIT;
6152 else
6153 {
6154 unsigned int excess_align
6155 = known_alignment (excess) * BITS_PER_UNIT;
6156 if (excess_align != 0)
6157 parm_align = MIN (parm_align, excess_align);
6158 }
6159 }
6161 if ((flags & ECF_SIBCALL) && MEM_P (arg->value))
6162 {
6163 /* emit_push_insn might not work properly if arg->value and
6164 argblock + arg->locate.offset areas overlap. */
6165 rtx x = arg->value;
6166 poly_int64 i = 0;
6168 if (strip_offset (XEXP (x, 0), &i)
6169 == crtl->args.internal_arg_pointer)
6170 {
6171 /* arg.locate doesn't contain the pretend_args_size offset,
6172 it's part of argblock. Ensure we don't count it in I. */
6173 if (STACK_GROWS_DOWNWARD)
6174 i -= crtl->args.pretend_args_size;
6175 else
6176 i += crtl->args.pretend_args_size;
6178 /* expand_call should ensure this. */
6179 gcc_assert (!arg->locate.offset.var
6180 && arg->locate.size.var == 0);
6181 poly_int64 size_val = rtx_to_poly_int64 (size_rtx);
6183 if (known_eq (arg->locate.offset.constant, i))
6184 {
6185 /* Even though they appear to be at the same location,
6186 if part of the outgoing argument is in registers,
6187 they aren't really at the same location. Check for
6188 this by making sure that the incoming size is the
6189 same as the outgoing size. */
6190 if (maybe_ne (arg->locate.size.constant, size_val))
6191 sibcall_failure = 1;
6192 }
6193 else if (maybe_in_range_p (arg->locate.offset.constant,
6194 i, size_val))
6195 sibcall_failure = 1;
6196 /* Use arg->locate.size.constant instead of size_rtx
6197 because we only care about the part of the argument
6198 on the stack. */
6199 else if (maybe_in_range_p (i, arg->locate.offset.constant,
6200 arg->locate.size.constant))
6201 sibcall_failure = 1;
6202 }
6203 }
6205 if (!CONST_INT_P (size_rtx) || INTVAL (size_rtx) != 0)
6206 emit_push_insn (arg->value, arg->mode, TREE_TYPE (pval), size_rtx,
6207 parm_align, partial, reg, excess, argblock,
6208 ARGS_SIZE_RTX (arg->locate.offset),
6209 reg_parm_stack_space,
6210 ARGS_SIZE_RTX (arg->locate.alignment_pad), false);
6212 /* Unless this is a partially-in-register argument, the argument is now
6213 in the stack.
6215 ??? Unlike the case above, in which we want the actual
6216 address of the data, so that we can load it directly into a
6217 register, here we want the address of the stack slot, so that
6218 it's properly aligned for word-by-word copying or something
6219 like that. It's not clear that this is always correct. */
6220 if (partial == 0)
6221 arg->value = arg->stack_slot;
6222 }
6224 if (arg->reg && GET_CODE (arg->reg) == PARALLEL)
6225 {
6226 tree type = TREE_TYPE (arg->tree_value);
6227 arg->parallel_value
6228 = emit_group_load_into_temps (arg->reg, arg->value, type,
6229 int_size_in_bytes (type));
6230 }
6232 /* Mark all slots this store used. */
6233 if (ACCUMULATE_OUTGOING_ARGS && !(flags & ECF_SIBCALL)
6234 && argblock && ! variable_size && arg->stack)
6235 mark_stack_region_used (lower_bound, upper_bound);
6237 /* Once we have pushed something, pops can't safely
6238 be deferred during the rest of the arguments. */
6239 NO_DEFER_POP;
6241 /* Free any temporary slots made in processing this argument. */
6242 pop_temp_slots ();
6244 return sibcall_failure;
6245 }
6247 /* Nonzero if we do not know how to pass ARG solely in registers. */
6249 bool
6250 must_pass_in_stack_var_size (const function_arg_info &arg)
6251 {
6252 if (!arg.type)
6253 return false;
6255 /* If the type has variable size... */
6256 if (!poly_int_tree_p (TYPE_SIZE (arg.type)))
6257 return true;
6259 /* If the type is marked as addressable (it is required
6260 to be constructed into the stack)... */
6261 if (TREE_ADDRESSABLE (arg.type))
6262 return true;
6264 return false;
6265 }
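/* A target would typically select one of these functions as its hook,
   e.g. (sketch):

     #define TARGET_MUST_PASS_IN_STACK must_pass_in_stack_var_size

   in its target definition file; variable-sized and TREE_ADDRESSABLE
   types are then forced onto the stack.  */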
6267 /* Another version of the TARGET_MUST_PASS_IN_STACK hook. This one
6268 takes trailing padding of a structure into account. */
6269 /* ??? Should be able to merge these two by examining BLOCK_REG_PADDING. */
6271 bool
6272 must_pass_in_stack_var_size_or_pad (const function_arg_info &arg)
6273 {
6274 if (!arg.type)
6275 return false;
6277 /* If the type has variable size... */
6278 if (TREE_CODE (TYPE_SIZE (arg.type)) != INTEGER_CST)
6279 return true;
6281 /* If the type is marked as addressable (it is required
6282 to be constructed into the stack)... */
6283 if (TREE_ADDRESSABLE (arg.type))
6284 return true;
6286 if (TYPE_EMPTY_P (arg.type))
6287 return false;
6289 /* If the padding and mode of the type is such that a copy into
6290 a register would put it into the wrong part of the register. */
6291 if (arg.mode == BLKmode
6292 && int_size_in_bytes (arg.type) % (PARM_BOUNDARY / BITS_PER_UNIT)
6293 && (targetm.calls.function_arg_padding (arg.mode, arg.type)
6294 == (BYTES_BIG_ENDIAN ? PAD_UPWARD : PAD_DOWNWARD)))
6295 return true;
6297 return false;
6298 }
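/* Worked example (illustrative): with PARM_BOUNDARY == 32 on a
   big-endian target, a 3-byte BLKmode struct padded upward satisfies
   3 % 4 != 0, so it must be passed on the stack.  */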
6300 /* Return true if TYPE must be passed on the stack when passed to
6301 the "..." arguments of a function. */
6303 bool
6304 must_pass_va_arg_in_stack (tree type)
6305 {
6306 function_arg_info arg (type, /*named=*/false);
6307 return targetm.calls.must_pass_in_stack (arg);
6308 }
6310 /* Return true if FIELD is the C++17 empty base field that should
6311 be ignored for ABI calling convention decisions in order to
6312 maintain ABI compatibility between C++14 and earlier, which doesn't
6313 add this FIELD to classes with empty bases, and C++17 and later
6314 which does. */
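/* For instance (illustrative):

     struct empty {};
     struct derived : empty { float f; };

   C++14 lays out DERIVED with no FIELD_DECL for the empty base, while
   C++17 adds an artificial one; that artificial field is what this
   predicate detects, so both standards get the same calling
   convention.  */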
6316 bool
6317 cxx17_empty_base_field_p (const_tree field)
6318 {
6319 return (DECL_FIELD_ABI_IGNORED (field)
6320 && DECL_ARTIFICIAL (field)
6321 && RECORD_OR_UNION_TYPE_P (TREE_TYPE (field))
6322 && !lookup_attribute ("no_unique_address", DECL_ATTRIBUTES (field)));
6323 }
6325 /* Tell the garbage collector about GTY markers in this source file. */
6326 #include "gt-calls.h"