gcc/tree-inline.cc
1 /* Tree inlining.
2 Copyright (C) 2001-2024 Free Software Foundation, Inc.
3 Contributed by Alexandre Oliva <aoliva@redhat.com>
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
10 any later version.
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
21 #define INCLUDE_MEMORY
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "backend.h"
26 #include "target.h"
27 #include "rtl.h"
28 #include "tree.h"
29 #include "gimple.h"
30 #include "cfghooks.h"
31 #include "tree-pass.h"
32 #include "ssa.h"
33 #include "cgraph.h"
34 #include "tree-pretty-print.h"
35 #include "diagnostic-core.h"
36 #include "gimple-predict.h"
37 #include "fold-const.h"
38 #include "stor-layout.h"
39 #include "calls.h"
40 #include "tree-inline.h"
41 #include "langhooks.h"
42 #include "cfganal.h"
43 #include "tree-iterator.h"
44 #include "intl.h"
45 #include "gimple-iterator.h"
46 #include "gimple-fold.h"
47 #include "tree-eh.h"
48 #include "gimplify.h"
49 #include "gimplify-me.h"
50 #include "gimple-walk.h"
51 #include "tree-cfg.h"
52 #include "tree-into-ssa.h"
53 #include "tree-dfa.h"
54 #include "tree-ssa.h"
55 #include "except.h"
56 #include "debug.h"
57 #include "value-prof.h"
58 #include "cfgloop.h"
59 #include "builtins.h"
60 #include "stringpool.h"
61 #include "attribs.h"
62 #include "sreal.h"
63 #include "tree-cfgcleanup.h"
64 #include "tree-ssa-live.h"
65 #include "alloc-pool.h"
66 #include "symbol-summary.h"
67 #include "symtab-thunks.h"
68 #include "symtab-clones.h"
69 #include "asan.h"
71 /* I'm not real happy about this, but we need to handle gimple and
72 non-gimple trees. */
74 /* Inlining, Cloning, Versioning, Parallelization
76 Inlining: a function body is duplicated, but the PARM_DECLs are
77 remapped into VAR_DECLs, and non-void RETURN_EXPRs become
78 MODIFY_EXPRs that store to a dedicated returned-value variable.
79 The duplicated eh_region info of the copy will later be appended
80 to the info for the caller; the eh_region info in copied throwing
81 statements and RESX statements is adjusted accordingly.
83 Cloning: (only in C++) We have one body for a con/de/structor, and
84 multiple function decls, each with a unique parameter list.
85 Duplicate the body, using the given splay tree; some parameters
86 will become constants (like 0 or 1).
88 Versioning: a function body is duplicated and the result is a new
89 function, rather than being inserted into blocks of an existing
90 function as with inlining.  Some parameters will become constants.
92 Parallelization: a region of a function is duplicated resulting in
93 a new function. Variables may be replaced with complex expressions
94 to enable shared variable semantics.
96 All of these will simultaneously look up any callgraph edges.  If
97 we're going to inline the duplicated function body, and the given
98 function has some cloned callgraph nodes (one for each place this
99 function will be inlined) those callgraph edges will be duplicated.
100 If we're cloning the body, those callgraph edges will be
101 updated to point into the new body. (Note that the original
102 callgraph node and edge list will not be altered.)
104 See the CALL_EXPR handling case in copy_tree_body_r (). */
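/* As a rough, schematic illustration of the inlining transform described
   above (not taken from any particular test case): inlining

     int inc (int x) { return x + 1; }

   into a call "y = inc (a);" duplicates the body with the PARM_DECL x
   remapped to a fresh local initialized from a, and the RETURN_EXPR
   becomes an assignment to the declared return-value variable which is
   then used for y; the control-flow effect of the return is handled by
   manipulating the CFG rather than by the statement itself.  */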
106 /* To Do:
108 o In order to make inlining-on-trees work, we pessimized
109 function-local static constants. In particular, they are now
110 always output, even when not addressed. Fix this by treating
111 function-local static constants just like global static
112 constants; the back-end already knows not to output them if they
113 are not needed.
115 o Provide heuristics to clamp inlining of recursive template
116 calls? */
119 /* Weights that estimate_num_insns uses to estimate the size of the
120 produced code. */
122 eni_weights eni_size_weights;
124 /* Weights that estimate_num_insns uses to estimate the time necessary
125 to execute the produced code. */
127 eni_weights eni_time_weights;
129 /* Prototypes. */
131 static tree declare_return_variable (copy_body_data *, tree, tree,
132 basic_block);
133 static void remap_block (tree *, copy_body_data *);
134 static void copy_bind_expr (tree *, int *, copy_body_data *);
135 static void declare_inline_vars (tree, tree);
136 static void remap_save_expr (tree *, hash_map<tree, tree> *, int *);
137 static void prepend_lexical_block (tree current_block, tree new_block);
138 static tree copy_result_decl_to_var (tree, copy_body_data *);
139 static tree copy_decl_maybe_to_var (tree, copy_body_data *);
140 static gimple_seq remap_gimple_stmt (gimple *, copy_body_data *);
141 static void insert_init_stmt (copy_body_data *, basic_block, gimple *);
143 /* Insert a tree->tree mapping for ID.  Although the name suggests
144 that the trees should be variables, it is used for more than that. */
146 void
147 insert_decl_map (copy_body_data *id, tree key, tree value)
149 id->decl_map->put (key, value);
151 /* Always insert an identity map as well. If we see this same new
152 node again, we won't want to duplicate it a second time. */
153 if (key != value && value)
154 id->decl_map->put (value, value);
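/* Illustrative usage sketch (old_parm and new_var are hypothetical names):
   after insert_decl_map (id, old_parm, new_var), a later
   remap_decl (old_parm, id) finds new_var in the map, while remapping
   new_var itself is a no-op thanks to the identity entry added above.  */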
157 /* If nonzero, we're remapping the contents of inlined debug
158 statements. If negative, an error has occurred, such as a
159 reference to a variable that isn't available in the inlined
160 context. */
161 static int processing_debug_stmt = 0;
163 /* Construct new SSA name for old NAME. ID is the inline context. */
165 static tree
166 remap_ssa_name (tree name, copy_body_data *id)
168 tree new_tree, var;
169 tree *n;
171 gcc_assert (TREE_CODE (name) == SSA_NAME);
173 n = id->decl_map->get (name);
174 if (n)
176 /* When we perform edge redirection as part of CFG copy, IPA-SRA can
177 remove an unused LHS from a call statement.  Such an LHS can, however,
178 still appear in debug statements, but its value is lost in this
179 function and we do not want to map it. */
180 if (id->killed_new_ssa_names
181 && id->killed_new_ssa_names->contains (*n))
183 gcc_assert (processing_debug_stmt);
184 processing_debug_stmt = -1;
185 return name;
188 return unshare_expr (*n);
191 if (processing_debug_stmt)
193 if (SSA_NAME_IS_DEFAULT_DEF (name)
194 && TREE_CODE (SSA_NAME_VAR (name)) == PARM_DECL
195 && id->entry_bb == NULL
196 && single_succ_p (ENTRY_BLOCK_PTR_FOR_FN (cfun)))
198 gimple *def_temp;
199 gimple_stmt_iterator gsi;
200 tree val = SSA_NAME_VAR (name);
202 n = id->decl_map->get (val);
203 if (n != NULL)
204 val = *n;
205 if (TREE_CODE (val) != PARM_DECL
206 && !(VAR_P (val) && DECL_ABSTRACT_ORIGIN (val)))
208 processing_debug_stmt = -1;
209 return name;
211 n = id->decl_map->get (val);
212 if (n && TREE_CODE (*n) == DEBUG_EXPR_DECL)
213 return *n;
214 tree vexpr = build_debug_expr_decl (TREE_TYPE (name));
215 /* FIXME: Is setting the mode really necessary? */
216 SET_DECL_MODE (vexpr, DECL_MODE (SSA_NAME_VAR (name)));
217 def_temp = gimple_build_debug_source_bind (vexpr, val, NULL);
218 gsi = gsi_after_labels (single_succ (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
219 gsi_insert_before (&gsi, def_temp, GSI_SAME_STMT);
220 insert_decl_map (id, val, vexpr);
221 return vexpr;
224 processing_debug_stmt = -1;
225 return name;
228 /* Remap anonymous SSA names or SSA names of anonymous decls. */
229 var = SSA_NAME_VAR (name);
230 if (!var
231 || (!SSA_NAME_IS_DEFAULT_DEF (name)
232 && VAR_P (var)
233 && !VAR_DECL_IS_VIRTUAL_OPERAND (var)
234 && DECL_ARTIFICIAL (var)
235 && DECL_IGNORED_P (var)
236 && !DECL_NAME (var)))
238 struct ptr_info_def *pi;
239 new_tree = make_ssa_name (remap_type (TREE_TYPE (name), id));
240 if (!var && SSA_NAME_IDENTIFIER (name))
241 SET_SSA_NAME_VAR_OR_IDENTIFIER (new_tree, SSA_NAME_IDENTIFIER (name));
242 insert_decl_map (id, name, new_tree);
243 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (new_tree)
244 = SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name);
245 /* At least IPA points-to info can be directly transferred. */
246 if (id->src_cfun->gimple_df
247 && id->src_cfun->gimple_df->ipa_pta
248 && POINTER_TYPE_P (TREE_TYPE (name))
249 && (pi = SSA_NAME_PTR_INFO (name))
250 && !pi->pt.anything)
252 struct ptr_info_def *new_pi = get_ptr_info (new_tree);
253 new_pi->pt = pi->pt;
255 /* So can range-info. */
256 if (!POINTER_TYPE_P (TREE_TYPE (name))
257 && SSA_NAME_RANGE_INFO (name))
258 duplicate_ssa_name_range_info (new_tree, name);
259 return new_tree;
262 /* Do not set DEF_STMT yet as statement is not copied yet. We do that
263 in copy_bb. */
264 new_tree = remap_decl (var, id);
266 /* We might've substituted a constant or another SSA_NAME for
267 the variable.
269 Replace the SSA name representing the RESULT_DECL with the variable
270 during inlining: this saves us from needing to introduce a PHI node
271 when the return value is only partly initialized. */
272 if ((VAR_P (new_tree) || TREE_CODE (new_tree) == PARM_DECL)
273 && (!SSA_NAME_VAR (name)
274 || TREE_CODE (SSA_NAME_VAR (name)) != RESULT_DECL
275 || !id->transform_return_to_modify))
277 struct ptr_info_def *pi;
278 new_tree = make_ssa_name (new_tree);
279 insert_decl_map (id, name, new_tree);
280 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (new_tree)
281 = SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name);
282 /* At least IPA points-to info can be directly transferred. */
283 if (id->src_cfun->gimple_df
284 && id->src_cfun->gimple_df->ipa_pta
285 && POINTER_TYPE_P (TREE_TYPE (name))
286 && (pi = SSA_NAME_PTR_INFO (name))
287 && !pi->pt.anything)
289 struct ptr_info_def *new_pi = get_ptr_info (new_tree);
290 new_pi->pt = pi->pt;
292 /* So can range-info. */
293 if (!POINTER_TYPE_P (TREE_TYPE (name))
294 && SSA_NAME_RANGE_INFO (name))
295 duplicate_ssa_name_range_info (new_tree, name);
296 if (SSA_NAME_IS_DEFAULT_DEF (name))
298 /* By inlining a function having an uninitialized variable, we might
299 extend its lifetime (the variable might get reused).  This causes an
300 ICE in the case we end up extending the lifetime of an SSA name
301 across an abnormal edge, and it also increases register pressure.
303 We simply initialize all uninitialized vars to 0, except for the
304 case where we are inlining into the very first BB.  We can avoid
305 this for all BBs that are not inside strongly connected
306 regions of the CFG, but this is expensive to test. */
307 if (id->entry_bb
308 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name)
309 && (!SSA_NAME_VAR (name)
310 || TREE_CODE (SSA_NAME_VAR (name)) != PARM_DECL)
311 && (id->entry_bb != EDGE_SUCC (ENTRY_BLOCK_PTR_FOR_FN (cfun),
312 0)->dest
313 || EDGE_COUNT (id->entry_bb->preds) != 1))
315 gimple_stmt_iterator gsi = gsi_last_bb (id->entry_bb);
316 gimple *init_stmt;
317 tree zero = build_zero_cst (TREE_TYPE (new_tree));
319 init_stmt = gimple_build_assign (new_tree, zero);
320 gsi_insert_after (&gsi, init_stmt, GSI_NEW_STMT);
321 SSA_NAME_IS_DEFAULT_DEF (new_tree) = 0;
323 else
325 SSA_NAME_DEF_STMT (new_tree) = gimple_build_nop ();
326 set_ssa_default_def (cfun, SSA_NAME_VAR (new_tree), new_tree);
330 else
331 insert_decl_map (id, name, new_tree);
332 return new_tree;
335 /* Remap DECL during the copying of the BLOCK tree for the function. */
337 tree
338 remap_decl (tree decl, copy_body_data *id)
340 tree *n;
342 /* We only remap local variables in the current function. */
344 /* See if we have remapped this declaration. */
346 n = id->decl_map->get (decl);
348 if (!n && processing_debug_stmt)
350 processing_debug_stmt = -1;
351 return decl;
354 /* When remapping a type within copy_gimple_seq_and_replace_locals, all
355 necessary DECLs have already been remapped and we do not want to duplicate
356 a decl coming from outside of the sequence we are copying. */
357 if (!n
358 && id->prevent_decl_creation_for_types
359 && id->remapping_type_depth > 0
360 && (VAR_P (decl) || TREE_CODE (decl) == PARM_DECL))
361 return decl;
363 /* If we didn't already have an equivalent for this declaration, create one
364 now. */
365 if (!n)
367 /* Make a copy of the variable or label. */
368 tree t = id->copy_decl (decl, id);
370 /* Remember it, so that if we encounter this local entity again
371 we can reuse this copy. Do this early because remap_type may
372 need this decl for TYPE_STUB_DECL. */
373 insert_decl_map (id, decl, t);
375 if (!DECL_P (t) || t == decl)
376 return t;
378 /* Remap types, if necessary. */
379 TREE_TYPE (t) = remap_type (TREE_TYPE (t), id);
380 if (TREE_CODE (t) == TYPE_DECL)
382 DECL_ORIGINAL_TYPE (t) = remap_type (DECL_ORIGINAL_TYPE (t), id);
384 /* Preserve the invariant that DECL_ORIGINAL_TYPE != TREE_TYPE,
385 which is enforced in gen_typedef_die when DECL_ABSTRACT_ORIGIN
386 is not set on the TYPE_DECL, for example in LTO mode. */
387 if (DECL_ORIGINAL_TYPE (t) == TREE_TYPE (t))
389 tree x = build_variant_type_copy (TREE_TYPE (t));
390 TYPE_STUB_DECL (x) = TYPE_STUB_DECL (TREE_TYPE (t));
391 TYPE_NAME (x) = TYPE_NAME (TREE_TYPE (t));
392 DECL_ORIGINAL_TYPE (t) = x;
396 /* Remap sizes as necessary. */
397 walk_tree (&DECL_SIZE (t), copy_tree_body_r, id, NULL);
398 walk_tree (&DECL_SIZE_UNIT (t), copy_tree_body_r, id, NULL);
400 /* If fields, do likewise for offset and qualifier. */
401 if (TREE_CODE (t) == FIELD_DECL)
403 walk_tree (&DECL_FIELD_OFFSET (t), copy_tree_body_r, id, NULL);
404 if (TREE_CODE (DECL_CONTEXT (t)) == QUAL_UNION_TYPE)
405 walk_tree (&DECL_QUALIFIER (t), copy_tree_body_r, id, NULL);
408 return t;
411 if (id->do_not_unshare)
412 return *n;
413 else
414 return unshare_expr (*n);
417 static tree
418 remap_type_1 (tree type, copy_body_data *id)
420 tree new_tree, t;
422 /* We do need a copy.  Build and register it now.  If this is a pointer or
423 reference type, remap the designated type and make a new pointer or
424 reference type. */
425 if (TREE_CODE (type) == POINTER_TYPE)
427 new_tree = build_pointer_type_for_mode (remap_type (TREE_TYPE (type), id),
428 TYPE_MODE (type),
429 TYPE_REF_CAN_ALIAS_ALL (type));
430 if (TYPE_ATTRIBUTES (type) || TYPE_QUALS (type))
431 new_tree = build_type_attribute_qual_variant (new_tree,
432 TYPE_ATTRIBUTES (type),
433 TYPE_QUALS (type));
434 insert_decl_map (id, type, new_tree);
435 return new_tree;
437 else if (TREE_CODE (type) == REFERENCE_TYPE)
439 new_tree = build_reference_type_for_mode (remap_type (TREE_TYPE (type), id),
440 TYPE_MODE (type),
441 TYPE_REF_CAN_ALIAS_ALL (type));
442 if (TYPE_ATTRIBUTES (type) || TYPE_QUALS (type))
443 new_tree = build_type_attribute_qual_variant (new_tree,
444 TYPE_ATTRIBUTES (type),
445 TYPE_QUALS (type));
446 insert_decl_map (id, type, new_tree);
447 return new_tree;
449 else
450 new_tree = copy_node (type);
452 insert_decl_map (id, type, new_tree);
454 /* This is a new type, not a copy of an old type. Need to reassociate
455 variants. We can handle everything except the main variant lazily. */
456 t = TYPE_MAIN_VARIANT (type);
457 if (type != t)
459 t = remap_type (t, id);
460 TYPE_MAIN_VARIANT (new_tree) = t;
461 TYPE_NEXT_VARIANT (new_tree) = TYPE_NEXT_VARIANT (t);
462 TYPE_NEXT_VARIANT (t) = new_tree;
464 else
466 TYPE_MAIN_VARIANT (new_tree) = new_tree;
467 TYPE_NEXT_VARIANT (new_tree) = NULL;
470 if (TYPE_STUB_DECL (type))
471 TYPE_STUB_DECL (new_tree) = remap_decl (TYPE_STUB_DECL (type), id);
473 /* Lazily create pointer and reference types. */
474 TYPE_POINTER_TO (new_tree) = NULL;
475 TYPE_REFERENCE_TO (new_tree) = NULL;
477 /* Copy all types that may contain references to local variables; be sure to
478 preserve sharing in between type and its main variant when possible. */
479 switch (TREE_CODE (new_tree))
481 case INTEGER_TYPE:
482 case REAL_TYPE:
483 case FIXED_POINT_TYPE:
484 case ENUMERAL_TYPE:
485 case BOOLEAN_TYPE:
486 if (TYPE_MAIN_VARIANT (new_tree) != new_tree)
488 gcc_checking_assert (TYPE_MIN_VALUE (type) == TYPE_MIN_VALUE (TYPE_MAIN_VARIANT (type)));
489 gcc_checking_assert (TYPE_MAX_VALUE (type) == TYPE_MAX_VALUE (TYPE_MAIN_VARIANT (type)));
491 TYPE_MIN_VALUE (new_tree) = TYPE_MIN_VALUE (TYPE_MAIN_VARIANT (new_tree));
492 TYPE_MAX_VALUE (new_tree) = TYPE_MAX_VALUE (TYPE_MAIN_VARIANT (new_tree));
494 else
496 t = TYPE_MIN_VALUE (new_tree);
497 if (t && TREE_CODE (t) != INTEGER_CST)
498 walk_tree (&TYPE_MIN_VALUE (new_tree), copy_tree_body_r, id, NULL);
500 t = TYPE_MAX_VALUE (new_tree);
501 if (t && TREE_CODE (t) != INTEGER_CST)
502 walk_tree (&TYPE_MAX_VALUE (new_tree), copy_tree_body_r, id, NULL);
504 return new_tree;
506 case FUNCTION_TYPE:
507 if (TYPE_MAIN_VARIANT (new_tree) != new_tree
508 && TREE_TYPE (type) == TREE_TYPE (TYPE_MAIN_VARIANT (type)))
509 TREE_TYPE (new_tree) = TREE_TYPE (TYPE_MAIN_VARIANT (new_tree));
510 else
511 TREE_TYPE (new_tree) = remap_type (TREE_TYPE (new_tree), id);
512 if (TYPE_MAIN_VARIANT (new_tree) != new_tree
513 && TYPE_ARG_TYPES (type) == TYPE_ARG_TYPES (TYPE_MAIN_VARIANT (type)))
514 TYPE_ARG_TYPES (new_tree) = TYPE_ARG_TYPES (TYPE_MAIN_VARIANT (new_tree));
515 else
516 walk_tree (&TYPE_ARG_TYPES (new_tree), copy_tree_body_r, id, NULL);
517 return new_tree;
519 case ARRAY_TYPE:
520 if (TYPE_MAIN_VARIANT (new_tree) != new_tree
521 && TREE_TYPE (type) == TREE_TYPE (TYPE_MAIN_VARIANT (type)))
522 TREE_TYPE (new_tree) = TREE_TYPE (TYPE_MAIN_VARIANT (new_tree));
523 else
524 TREE_TYPE (new_tree) = remap_type (TREE_TYPE (new_tree), id);
526 if (TYPE_MAIN_VARIANT (new_tree) != new_tree)
528 gcc_checking_assert (TYPE_DOMAIN (type)
529 == TYPE_DOMAIN (TYPE_MAIN_VARIANT (type)));
530 TYPE_DOMAIN (new_tree) = TYPE_DOMAIN (TYPE_MAIN_VARIANT (new_tree));
532 else
534 TYPE_DOMAIN (new_tree) = remap_type (TYPE_DOMAIN (new_tree), id);
535 /* For array bounds where we have decided not to copy over the bounds
536 variable (which isn't used in the OpenMP/OpenACC region), change them to
537 an uninitialized VAR_DECL temporary. */
538 if (id->adjust_array_error_bounds
539 && TYPE_DOMAIN (new_tree)
540 && TYPE_MAX_VALUE (TYPE_DOMAIN (new_tree)) == error_mark_node
541 && TYPE_MAX_VALUE (TYPE_DOMAIN (type)) != error_mark_node)
543 tree v = create_tmp_var (TREE_TYPE (TYPE_DOMAIN (new_tree)));
544 DECL_ATTRIBUTES (v)
545 = tree_cons (get_identifier ("omp dummy var"), NULL_TREE,
546 DECL_ATTRIBUTES (v));
547 TYPE_MAX_VALUE (TYPE_DOMAIN (new_tree)) = v;
550 break;
552 case RECORD_TYPE:
553 case UNION_TYPE:
554 case QUAL_UNION_TYPE:
555 if (TYPE_MAIN_VARIANT (type) != type
556 && TYPE_FIELDS (type) == TYPE_FIELDS (TYPE_MAIN_VARIANT (type)))
557 TYPE_FIELDS (new_tree) = TYPE_FIELDS (TYPE_MAIN_VARIANT (new_tree));
558 else
560 tree f, nf = NULL;
562 for (f = TYPE_FIELDS (new_tree); f ; f = DECL_CHAIN (f))
564 t = remap_decl (f, id);
565 DECL_CONTEXT (t) = new_tree;
566 DECL_CHAIN (t) = nf;
567 nf = t;
569 TYPE_FIELDS (new_tree) = nreverse (nf);
571 break;
573 case OFFSET_TYPE:
574 default:
575 /* Shouldn't have been thought variable sized. */
576 gcc_unreachable ();
579 /* All variants of the type share the same size, so use the already remapped data. */
580 if (TYPE_MAIN_VARIANT (new_tree) != new_tree)
582 tree s = TYPE_SIZE (type);
583 tree mvs = TYPE_SIZE (TYPE_MAIN_VARIANT (type));
584 tree su = TYPE_SIZE_UNIT (type);
585 tree mvsu = TYPE_SIZE_UNIT (TYPE_MAIN_VARIANT (type));
586 gcc_checking_assert ((TREE_CODE (s) == PLACEHOLDER_EXPR
587 && (TREE_CODE (mvs) == PLACEHOLDER_EXPR))
588 || s == mvs);
589 gcc_checking_assert ((TREE_CODE (su) == PLACEHOLDER_EXPR
590 && (TREE_CODE (mvsu) == PLACEHOLDER_EXPR))
591 || su == mvsu);
592 TYPE_SIZE (new_tree) = TYPE_SIZE (TYPE_MAIN_VARIANT (new_tree));
593 TYPE_SIZE_UNIT (new_tree) = TYPE_SIZE_UNIT (TYPE_MAIN_VARIANT (new_tree));
595 else
597 walk_tree (&TYPE_SIZE (new_tree), copy_tree_body_r, id, NULL);
598 walk_tree (&TYPE_SIZE_UNIT (new_tree), copy_tree_body_r, id, NULL);
601 return new_tree;
604 /* Helper function for remap_type_2, called through walk_tree. */
606 static tree
607 remap_type_3 (tree *tp, int *walk_subtrees, void *data)
609 copy_body_data *id = (copy_body_data *) data;
611 if (TYPE_P (*tp))
612 *walk_subtrees = 0;
614 else if (DECL_P (*tp) && remap_decl (*tp, id) != *tp)
615 return *tp;
617 return NULL_TREE;
620 /* Return true if TYPE needs to be remapped because remap_decl on any
621 needed embedded decl returns something other than that decl. */
623 static bool
624 remap_type_2 (tree type, copy_body_data *id)
626 tree t;
628 #define RETURN_TRUE_IF_VAR(T) \
629 do \
631 tree _t = (T); \
632 if (_t) \
634 if (DECL_P (_t) && remap_decl (_t, id) != _t) \
635 return true; \
636 if (!TYPE_SIZES_GIMPLIFIED (type) \
637 && walk_tree (&_t, remap_type_3, id, NULL)) \
638 return true; \
641 while (0)
643 switch (TREE_CODE (type))
645 case POINTER_TYPE:
646 case REFERENCE_TYPE:
647 case FUNCTION_TYPE:
648 case METHOD_TYPE:
649 return remap_type_2 (TREE_TYPE (type), id);
651 case INTEGER_TYPE:
652 case REAL_TYPE:
653 case FIXED_POINT_TYPE:
654 case ENUMERAL_TYPE:
655 case BOOLEAN_TYPE:
656 RETURN_TRUE_IF_VAR (TYPE_MIN_VALUE (type));
657 RETURN_TRUE_IF_VAR (TYPE_MAX_VALUE (type));
658 return false;
660 case ARRAY_TYPE:
661 if (remap_type_2 (TREE_TYPE (type), id)
662 || (TYPE_DOMAIN (type) && remap_type_2 (TYPE_DOMAIN (type), id)))
663 return true;
664 break;
666 case RECORD_TYPE:
667 case UNION_TYPE:
668 case QUAL_UNION_TYPE:
669 for (t = TYPE_FIELDS (type); t; t = DECL_CHAIN (t))
670 if (TREE_CODE (t) == FIELD_DECL)
672 RETURN_TRUE_IF_VAR (DECL_FIELD_OFFSET (t));
673 RETURN_TRUE_IF_VAR (DECL_SIZE (t));
674 RETURN_TRUE_IF_VAR (DECL_SIZE_UNIT (t));
675 if (TREE_CODE (type) == QUAL_UNION_TYPE)
676 RETURN_TRUE_IF_VAR (DECL_QUALIFIER (t));
678 break;
680 default:
681 return false;
684 RETURN_TRUE_IF_VAR (TYPE_SIZE (type));
685 RETURN_TRUE_IF_VAR (TYPE_SIZE_UNIT (type));
686 return false;
687 #undef RETURN_TRUE_IF_VAR
690 tree
691 remap_type (tree type, copy_body_data *id)
693 tree *node;
694 tree tmp;
696 if (type == NULL)
697 return type;
699 /* See if we have remapped this type. */
700 node = id->decl_map->get (type);
701 if (node)
702 return *node;
704 /* The type only needs remapping if it's variably modified. */
705 if (! variably_modified_type_p (type, id->src_fn)
706 /* Don't remap if copy_decl method doesn't always return a new
707 decl and for all embedded decls returns the passed in decl. */
708 || (id->dont_remap_vla_if_no_change && !remap_type_2 (type, id)))
710 insert_decl_map (id, type, type);
711 return type;
714 id->remapping_type_depth++;
715 tmp = remap_type_1 (type, id);
716 id->remapping_type_depth--;
718 return tmp;
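/* A typical case that needs remapping (sketch only): a VLA type such as
   "int a[n]", where n is a local of the source function, is
   variably_modified_type_p, so the trees hanging off its TYPE_DOMAIN and
   TYPE_SIZE must be rewritten in terms of the remapped copy of n.  Types
   that do not refer to any local entity are simply mapped to themselves.  */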
721 /* Decide if DECL can be put into BLOCK_NONLOCAL_VARs. */
723 static bool
724 can_be_nonlocal (tree decl, copy_body_data *id)
726 /* We cannot duplicate function decls. */
727 if (TREE_CODE (decl) == FUNCTION_DECL)
728 return true;
730 /* Local static vars must be non-local or we get multiple declaration
731 problems. */
732 if (VAR_P (decl) && !auto_var_in_fn_p (decl, id->src_fn))
733 return true;
735 return false;
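/* For example (illustrative): a function-local "static int counter;" must
   not be duplicated for every inlined copy of its function, so it is kept
   as a single non-local declaration rather than being remapped.  */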
738 static tree
739 remap_decls (tree decls, vec<tree, va_gc> **nonlocalized_list,
740 copy_body_data *id)
742 tree old_var;
743 tree new_decls = NULL_TREE;
745 /* Remap its variables. */
746 for (old_var = decls; old_var; old_var = DECL_CHAIN (old_var))
748 tree new_var;
750 if (can_be_nonlocal (old_var, id))
752 /* We need to add this variable to the local decls as otherwise
753 nothing else will do so. */
754 if (VAR_P (old_var) && ! DECL_EXTERNAL (old_var) && cfun)
755 add_local_decl (cfun, old_var);
756 if ((!optimize || debug_info_level > DINFO_LEVEL_TERSE)
757 && !DECL_IGNORED_P (old_var)
758 && nonlocalized_list)
759 vec_safe_push (*nonlocalized_list, old_var);
760 continue;
763 /* Remap the variable. */
764 new_var = remap_decl (old_var, id);
766 /* If we didn't remap this variable, we can't mess with its
767 TREE_CHAIN. If we remapped this variable to the return slot, it's
768 already declared somewhere else, so don't declare it here. */
770 if (new_var == old_var || new_var == id->retvar)
772 else if (!new_var)
774 if ((!optimize || debug_info_level > DINFO_LEVEL_TERSE)
775 && !DECL_IGNORED_P (old_var)
776 && nonlocalized_list)
777 vec_safe_push (*nonlocalized_list, old_var);
779 else
781 gcc_assert (DECL_P (new_var));
782 DECL_CHAIN (new_var) = new_decls;
783 new_decls = new_var;
785 /* Also copy value-expressions. */
786 if (VAR_P (new_var) && DECL_HAS_VALUE_EXPR_P (new_var))
788 tree tem = DECL_VALUE_EXPR (new_var);
789 bool old_regimplify = id->regimplify;
790 id->remapping_type_depth++;
791 walk_tree (&tem, copy_tree_body_r, id, NULL);
792 id->remapping_type_depth--;
793 id->regimplify = old_regimplify;
794 SET_DECL_VALUE_EXPR (new_var, tem);
799 return nreverse (new_decls);
802 /* Copy the BLOCK to contain remapped versions of the variables
803 therein. And hook the new block into the block-tree. */
805 static void
806 remap_block (tree *block, copy_body_data *id)
808 tree old_block;
809 tree new_block;
811 /* Make the new block. */
812 old_block = *block;
813 new_block = make_node (BLOCK);
814 TREE_USED (new_block) = TREE_USED (old_block);
815 BLOCK_ABSTRACT_ORIGIN (new_block) = BLOCK_ORIGIN (old_block);
816 BLOCK_SOURCE_LOCATION (new_block) = BLOCK_SOURCE_LOCATION (old_block);
817 BLOCK_NONLOCALIZED_VARS (new_block)
818 = vec_safe_copy (BLOCK_NONLOCALIZED_VARS (old_block));
819 *block = new_block;
821 /* Remap its variables. */
822 BLOCK_VARS (new_block) = remap_decls (BLOCK_VARS (old_block),
823 &BLOCK_NONLOCALIZED_VARS (new_block),
824 id);
826 /* Remember the remapped block. */
827 insert_decl_map (id, old_block, new_block);
830 /* Copy the whole block tree and root it in id->block. */
832 static tree
833 remap_blocks (tree block, copy_body_data *id)
835 tree t;
836 tree new_tree = block;
838 if (!block)
839 return NULL;
841 remap_block (&new_tree, id);
842 gcc_assert (new_tree != block);
843 for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
844 prepend_lexical_block (new_tree, remap_blocks (t, id));
845 /* Blocks are in arbitrary order, but make things slightly prettier by not
846 swapping their order when producing a copy. */
847 BLOCK_SUBBLOCKS (new_tree) = blocks_nreverse (BLOCK_SUBBLOCKS (new_tree));
848 return new_tree;
851 /* Remap the block tree rooted at BLOCK to nothing. */
853 static void
854 remap_blocks_to_null (tree block, copy_body_data *id)
856 tree t;
857 insert_decl_map (id, block, NULL_TREE);
858 for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
859 remap_blocks_to_null (t, id);
862 /* Remap the location info pointed to by LOCUS. */
864 static location_t
865 remap_location (location_t locus, copy_body_data *id)
867 if (LOCATION_BLOCK (locus))
869 tree *n = id->decl_map->get (LOCATION_BLOCK (locus));
870 gcc_assert (n);
871 if (*n)
872 return set_block (locus, *n);
875 locus = LOCATION_LOCUS (locus);
877 if (locus != UNKNOWN_LOCATION && id->block)
878 return set_block (locus, id->block);
880 return locus;
883 static void
884 copy_statement_list (tree *tp)
886 tree_stmt_iterator oi, ni;
887 tree new_tree;
889 new_tree = alloc_stmt_list ();
890 ni = tsi_start (new_tree);
891 oi = tsi_start (*tp);
892 TREE_TYPE (new_tree) = TREE_TYPE (*tp);
893 *tp = new_tree;
895 for (; !tsi_end_p (oi); tsi_next (&oi))
897 tree stmt = tsi_stmt (oi);
898 if (TREE_CODE (stmt) == STATEMENT_LIST)
899 /* This copy is not redundant; tsi_link_after will smash this
900 STATEMENT_LIST into the end of the one we're building, and we
901 don't want to do that with the original. */
902 copy_statement_list (&stmt);
903 tsi_link_after (&ni, stmt, TSI_CONTINUE_LINKING);
907 static void
908 copy_bind_expr (tree *tp, int *walk_subtrees, copy_body_data *id)
910 tree block = BIND_EXPR_BLOCK (*tp);
911 /* Copy (and replace) the statement. */
912 copy_tree_r (tp, walk_subtrees, NULL);
913 if (block)
915 remap_block (&block, id);
916 BIND_EXPR_BLOCK (*tp) = block;
919 if (BIND_EXPR_VARS (*tp))
920 /* This will remap a lot of the same decls again, but this should be
921 harmless. */
922 BIND_EXPR_VARS (*tp) = remap_decls (BIND_EXPR_VARS (*tp), NULL, id);
926 /* Create a new gimple_seq by remapping all the statements in BODY
927 using the inlining information in ID. */
929 static gimple_seq
930 remap_gimple_seq (gimple_seq body, copy_body_data *id)
932 gimple_stmt_iterator si;
933 gimple_seq new_body = NULL;
935 for (si = gsi_start (body); !gsi_end_p (si); gsi_next (&si))
937 gimple_seq new_stmts = remap_gimple_stmt (gsi_stmt (si), id);
938 gimple_seq_add_seq (&new_body, new_stmts);
941 return new_body;
945 /* Copy a GIMPLE_BIND statement STMT, remapping all the symbols in its
946 block using the mapping information in ID. */
948 static gimple *
949 copy_gimple_bind (gbind *stmt, copy_body_data *id)
951 gimple *new_bind;
952 tree new_block, new_vars;
953 gimple_seq body, new_body;
955 /* Copy the statement. Note that we purposely don't use copy_stmt
956 here because we need to remap statements as we copy. */
957 body = gimple_bind_body (stmt);
958 new_body = remap_gimple_seq (body, id);
960 new_block = gimple_bind_block (stmt);
961 if (new_block)
962 remap_block (&new_block, id);
964 /* This will remap a lot of the same decls again, but this should be
965 harmless. */
966 new_vars = gimple_bind_vars (stmt);
967 if (new_vars)
968 new_vars = remap_decls (new_vars, NULL, id);
970 new_bind = gimple_build_bind (new_vars, new_body, new_block);
972 return new_bind;
975 /* Return true if DECL is a parameter or a SSA_NAME for a parameter. */
977 static bool
978 is_parm (tree decl)
980 if (TREE_CODE (decl) == SSA_NAME)
982 decl = SSA_NAME_VAR (decl);
983 if (!decl)
984 return false;
987 return (TREE_CODE (decl) == PARM_DECL);
990 /* Remap the dependence CLIQUE from the source to the destination function
991 as specified in ID. */
993 static unsigned short
994 remap_dependence_clique (copy_body_data *id, unsigned short clique)
996 if (clique == 0 || processing_debug_stmt)
997 return 0;
998 if (!id->dependence_map)
999 id->dependence_map = new hash_map<dependence_hash, unsigned short>;
1000 bool existed;
1001 unsigned short &newc = id->dependence_map->get_or_insert (clique, &existed);
1002 if (!existed)
1004 /* Clique 1 is reserved for local ones set by PTA. */
1005 if (cfun->last_clique == 0)
1006 cfun->last_clique = 1;
1007 newc = get_new_clique (cfun);
1009 return newc;
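/* Illustrative note: dependence cliques (e.g. the ones created for
   restrict-qualified pointers) are numbered per function, so a clique
   number from the source body cannot be reused as-is; the map above hands
   out a fresh clique in the destination function the first time a source
   clique is seen and reuses that value for later references.  */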
1012 /* Remap the GIMPLE operand pointed to by *TP. DATA is really a
1013 'struct walk_stmt_info *'. DATA->INFO is a 'copy_body_data *'.
1014 WALK_SUBTREES is used to indicate walk_gimple_op whether to keep
1015 recursing into the children nodes of *TP. */
1017 static tree
1018 remap_gimple_op_r (tree *tp, int *walk_subtrees, void *data)
1020 struct walk_stmt_info *wi_p = (struct walk_stmt_info *) data;
1021 copy_body_data *id = (copy_body_data *) wi_p->info;
1022 tree fn = id->src_fn;
1024 /* For recursive invocations this is no longer the LHS itself. */
1025 bool is_lhs = wi_p->is_lhs;
1026 wi_p->is_lhs = false;
1028 if (TREE_CODE (*tp) == SSA_NAME)
1030 *tp = remap_ssa_name (*tp, id);
1031 *walk_subtrees = 0;
1032 if (is_lhs)
1033 SSA_NAME_DEF_STMT (*tp) = wi_p->stmt;
1034 return NULL;
1036 else if (auto_var_in_fn_p (*tp, fn))
1038 /* Local variables and labels need to be replaced by equivalent
1039 variables. We don't want to copy static variables; there's
1040 only one of those, no matter how many times we inline the
1041 containing function. Similarly for globals from an outer
1042 function. */
1043 tree new_decl;
1045 /* Remap the declaration. */
1046 new_decl = remap_decl (*tp, id);
1047 gcc_assert (new_decl);
1048 /* Replace this variable with the copy. */
1049 STRIP_TYPE_NOPS (new_decl);
1050 /* ??? The C++ frontend uses void * pointer zero to initialize
1051 any other type. This confuses the middle-end type verification.
1052 As cloned bodies do not go through gimplification again the fixup
1053 there doesn't trigger. */
1054 if (TREE_CODE (new_decl) == INTEGER_CST
1055 && !useless_type_conversion_p (TREE_TYPE (*tp), TREE_TYPE (new_decl)))
1056 new_decl = fold_convert (TREE_TYPE (*tp), new_decl);
1057 *tp = new_decl;
1058 *walk_subtrees = 0;
1060 else if (TREE_CODE (*tp) == STATEMENT_LIST)
1061 gcc_unreachable ();
1062 else if (TREE_CODE (*tp) == SAVE_EXPR)
1063 gcc_unreachable ();
1064 else if (TREE_CODE (*tp) == LABEL_DECL
1065 && (!DECL_CONTEXT (*tp)
1066 || decl_function_context (*tp) == id->src_fn))
1067 /* These may need to be remapped for EH handling. */
1068 *tp = remap_decl (*tp, id);
1069 else if (TREE_CODE (*tp) == FIELD_DECL)
1071 /* If the enclosing record type is variably_modified_type_p, the field
1072 has already been remapped. Otherwise, it need not be. */
1073 tree *n = id->decl_map->get (*tp);
1074 if (n)
1075 *tp = *n;
1076 *walk_subtrees = 0;
1078 else if (TYPE_P (*tp))
1079 /* Types may need remapping as well. */
1080 *tp = remap_type (*tp, id);
1081 else if (CONSTANT_CLASS_P (*tp))
1083 /* If this is a constant, we have to copy the node iff the type
1084 will be remapped. copy_tree_r will not copy a constant. */
1085 tree new_type = remap_type (TREE_TYPE (*tp), id);
1087 if (new_type == TREE_TYPE (*tp))
1088 *walk_subtrees = 0;
1090 else if (TREE_CODE (*tp) == INTEGER_CST)
1091 *tp = wide_int_to_tree (new_type, wi::to_wide (*tp));
1092 else
1094 *tp = copy_node (*tp);
1095 TREE_TYPE (*tp) = new_type;
1098 else
1100 /* Otherwise, just copy the node. Note that copy_tree_r already
1101 knows not to copy VAR_DECLs, etc., so this is safe. */
1103 if (TREE_CODE (*tp) == MEM_REF && !id->do_not_fold)
1105 /* We need to re-canonicalize MEM_REFs from inline substitutions
1106 that can happen when a pointer argument is an ADDR_EXPR.
1107 Recurse here manually to allow that. */
1108 tree ptr = TREE_OPERAND (*tp, 0);
1109 tree type = remap_type (TREE_TYPE (*tp), id);
1110 tree old = *tp;
1111 walk_tree (&ptr, remap_gimple_op_r, data, NULL);
1112 *tp = fold_build2 (MEM_REF, type, ptr, TREE_OPERAND (*tp, 1));
1113 TREE_THIS_VOLATILE (*tp) = TREE_THIS_VOLATILE (old);
1114 TREE_SIDE_EFFECTS (*tp) = TREE_SIDE_EFFECTS (old);
1115 copy_warning (*tp, old);
1116 if (MR_DEPENDENCE_CLIQUE (old) != 0)
1118 MR_DEPENDENCE_CLIQUE (*tp)
1119 = remap_dependence_clique (id, MR_DEPENDENCE_CLIQUE (old));
1120 MR_DEPENDENCE_BASE (*tp) = MR_DEPENDENCE_BASE (old);
1122 /* We cannot propagate the TREE_THIS_NOTRAP flag if we have
1123 remapped a parameter as the property might be valid only
1124 for the parameter itself. */
1125 if (TREE_THIS_NOTRAP (old)
1126 && (!is_parm (TREE_OPERAND (old, 0))
1127 || (!id->transform_parameter && is_parm (ptr))))
1128 TREE_THIS_NOTRAP (*tp) = 1;
1129 REF_REVERSE_STORAGE_ORDER (*tp) = REF_REVERSE_STORAGE_ORDER (old);
1130 *walk_subtrees = 0;
1131 return NULL;
1134 /* Here is the "usual case". Copy this tree node, and then
1135 tweak some special cases. */
1136 copy_tree_r (tp, walk_subtrees, NULL);
1138 if (TREE_CODE (*tp) != OMP_CLAUSE)
1139 TREE_TYPE (*tp) = remap_type (TREE_TYPE (*tp), id);
1141 if (TREE_CODE (*tp) == TARGET_EXPR && TREE_OPERAND (*tp, 3))
1143 /* The copied TARGET_EXPR has never been expanded, even if the
1144 original node was expanded already. */
1145 TREE_OPERAND (*tp, 1) = TREE_OPERAND (*tp, 3);
1146 TREE_OPERAND (*tp, 3) = NULL_TREE;
1148 else if (TREE_CODE (*tp) == ADDR_EXPR)
1150 /* Variable substitution need not be simple. In particular,
1151 the MEM_REF substitution above. Make sure that
1152 TREE_CONSTANT and friends are up-to-date. */
1153 int invariant = is_gimple_min_invariant (*tp);
1154 walk_tree (&TREE_OPERAND (*tp, 0), remap_gimple_op_r, data, NULL);
1155 recompute_tree_invariant_for_addr_expr (*tp);
1157 /* If this used to be invariant, but is not any longer,
1158 then regimplification is probably needed. */
1159 if (invariant && !is_gimple_min_invariant (*tp))
1160 id->regimplify = true;
1162 *walk_subtrees = 0;
1166 /* Update the TREE_BLOCK for the cloned expr. */
1167 if (EXPR_P (*tp))
1169 tree new_block = id->remapping_type_depth == 0 ? id->block : NULL;
1170 tree old_block = TREE_BLOCK (*tp);
1171 if (old_block)
1173 tree *n;
1174 n = id->decl_map->get (TREE_BLOCK (*tp));
1175 if (n)
1176 new_block = *n;
1178 TREE_SET_BLOCK (*tp, new_block);
1181 /* Keep iterating. */
1182 return NULL_TREE;
1186 /* Called from copy_body_id via walk_tree. DATA is really a
1187 `copy_body_data *'. */
1189 tree
1190 copy_tree_body_r (tree *tp, int *walk_subtrees, void *data)
1192 copy_body_data *id = (copy_body_data *) data;
1193 tree fn = id->src_fn;
1194 tree new_block;
1196 /* Begin by recognizing trees that we'll completely rewrite for the
1197 inlining context. Our output for these trees is completely
1198 different from our input (e.g. RETURN_EXPR is deleted and morphs
1199 into an edge). Further down, we'll handle trees that get
1200 duplicated and/or tweaked. */
1202 /* When requested, RETURN_EXPRs should be transformed to just the
1203 contained MODIFY_EXPR. The branch semantics of the return will
1204 be handled elsewhere by manipulating the CFG rather than a statement. */
1205 if (TREE_CODE (*tp) == RETURN_EXPR && id->transform_return_to_modify)
1207 tree assignment = TREE_OPERAND (*tp, 0);
1209 /* If we're returning something, just turn that into an
1210 assignment into the equivalent of the original RESULT_DECL.
1211 If the "assignment" is just the result decl, the result
1212 decl has already been set (e.g. a recent "foo (&result_decl,
1213 ...)"); just toss the entire RETURN_EXPR. */
1214 if (assignment && TREE_CODE (assignment) == MODIFY_EXPR)
1216 /* Replace the RETURN_EXPR with (a copy of) the
1217 MODIFY_EXPR hanging underneath. */
1218 *tp = copy_node (assignment);
1220 else /* Else the RETURN_EXPR returns no value. */
1222 *tp = NULL;
1223 return (tree) (void *)1;
1226 else if (TREE_CODE (*tp) == SSA_NAME)
1228 *tp = remap_ssa_name (*tp, id);
1229 *walk_subtrees = 0;
1230 return NULL;
1233 /* Local variables and labels need to be replaced by equivalent
1234 variables. We don't want to copy static variables; there's only
1235 one of those, no matter how many times we inline the containing
1236 function. Similarly for globals from an outer function. */
1237 else if (auto_var_in_fn_p (*tp, fn))
1239 tree new_decl;
1241 /* Remap the declaration. */
1242 new_decl = remap_decl (*tp, id);
1243 gcc_assert (new_decl);
1244 /* Replace this variable with the copy. */
1245 STRIP_TYPE_NOPS (new_decl);
1246 *tp = new_decl;
1247 *walk_subtrees = 0;
1249 else if (TREE_CODE (*tp) == STATEMENT_LIST)
1250 copy_statement_list (tp);
1251 else if (TREE_CODE (*tp) == SAVE_EXPR
1252 || TREE_CODE (*tp) == TARGET_EXPR)
1253 remap_save_expr (tp, id->decl_map, walk_subtrees);
1254 else if (TREE_CODE (*tp) == LABEL_DECL
1255 && (! DECL_CONTEXT (*tp)
1256 || decl_function_context (*tp) == id->src_fn))
1257 /* These may need to be remapped for EH handling. */
1258 *tp = remap_decl (*tp, id);
1259 else if (TREE_CODE (*tp) == BIND_EXPR)
1260 copy_bind_expr (tp, walk_subtrees, id);
1261 /* Types may need remapping as well. */
1262 else if (TYPE_P (*tp))
1263 *tp = remap_type (*tp, id);
1265 /* If this is a constant, we have to copy the node iff the type will be
1266 remapped. copy_tree_r will not copy a constant. */
1267 else if (CONSTANT_CLASS_P (*tp))
1269 tree new_type = remap_type (TREE_TYPE (*tp), id);
1271 if (new_type == TREE_TYPE (*tp))
1272 *walk_subtrees = 0;
1274 else if (TREE_CODE (*tp) == INTEGER_CST)
1275 *tp = wide_int_to_tree (new_type, wi::to_wide (*tp));
1276 else
1278 *tp = copy_node (*tp);
1279 TREE_TYPE (*tp) = new_type;
1283 /* Otherwise, just copy the node. Note that copy_tree_r already
1284 knows not to copy VAR_DECLs, etc., so this is safe. */
1285 else
1287 /* Here we handle trees that are not completely rewritten.
1288 First we detect some inlining-induced bogosities for
1289 discarding. */
1290 if (TREE_CODE (*tp) == MODIFY_EXPR
1291 && TREE_OPERAND (*tp, 0) == TREE_OPERAND (*tp, 1)
1292 && (auto_var_in_fn_p (TREE_OPERAND (*tp, 0), fn)))
1294 /* Some assignments VAR = VAR; don't generate any rtl code
1295 and thus don't count as variable modification. Avoid
1296 keeping bogosities like 0 = 0. */
1297 tree decl = TREE_OPERAND (*tp, 0), value;
1298 tree *n;
1300 n = id->decl_map->get (decl);
1301 if (n)
1303 value = *n;
1304 STRIP_TYPE_NOPS (value);
1305 if (TREE_CONSTANT (value) || TREE_READONLY (value))
1307 *tp = build_empty_stmt (EXPR_LOCATION (*tp));
1308 return copy_tree_body_r (tp, walk_subtrees, data);
1312 else if (INDIRECT_REF_P (*tp))
1314 /* Get rid of *& from inline substitutions that can happen when a
1315 pointer argument is an ADDR_EXPR. */
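/* For example (schematic only): inlining "int f (int *p) { return *p; }"
   at a call "f (&a)" substitutes &a for p, leaving "*&a"; the code below
   folds this back into a plain reference to a.  */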
1316 tree decl = TREE_OPERAND (*tp, 0);
1317 tree *n = id->decl_map->get (decl);
1318 if (n)
1320 /* If we happen to get an ADDR_EXPR in n->value, strip
1321 it manually here as we'll eventually get ADDR_EXPRs
1322 which lie about their types pointed to. In this case
1323 build_fold_indirect_ref wouldn't strip the INDIRECT_REF,
1324 but we absolutely rely on that. As fold_indirect_ref
1325 does other useful transformations, try that first, though. */
1326 tree type = TREE_TYPE (*tp);
1327 tree ptr = id->do_not_unshare ? *n : unshare_expr (*n);
1328 tree old = *tp;
1329 *tp = id->do_not_fold ? NULL : gimple_fold_indirect_ref (ptr);
1330 if (! *tp)
1332 type = remap_type (type, id);
1333 if (TREE_CODE (ptr) == ADDR_EXPR && !id->do_not_fold)
1336 = fold_indirect_ref_1 (EXPR_LOCATION (ptr), type, ptr);
1337 /* ??? We should either assert here or build
1338 a VIEW_CONVERT_EXPR instead of blindly leaking
1339 incompatible types to our IL. */
1340 if (! *tp)
1341 *tp = TREE_OPERAND (ptr, 0);
1343 else
1345 *tp = build1 (INDIRECT_REF, type, ptr);
1346 TREE_THIS_VOLATILE (*tp) = TREE_THIS_VOLATILE (old);
1347 TREE_SIDE_EFFECTS (*tp) = TREE_SIDE_EFFECTS (old);
1348 TREE_READONLY (*tp) = TREE_READONLY (old);
1349 /* We cannot propagate the TREE_THIS_NOTRAP flag if we
1350 have remapped a parameter as the property might be
1351 valid only for the parameter itself. */
1352 if (TREE_THIS_NOTRAP (old)
1353 && (!is_parm (TREE_OPERAND (old, 0))
1354 || (!id->transform_parameter && is_parm (ptr))))
1355 TREE_THIS_NOTRAP (*tp) = 1;
1358 *walk_subtrees = 0;
1359 return NULL;
1362 else if (TREE_CODE (*tp) == MEM_REF && !id->do_not_fold)
1364 /* We need to re-canonicalize MEM_REFs from inline substitutions
1365 that can happen when a pointer argument is an ADDR_EXPR.
1366 Recurse here manually to allow that. */
1367 tree ptr = TREE_OPERAND (*tp, 0);
1368 tree type = remap_type (TREE_TYPE (*tp), id);
1369 tree old = *tp;
1370 walk_tree (&ptr, copy_tree_body_r, data, NULL);
1371 *tp = fold_build2 (MEM_REF, type, ptr, TREE_OPERAND (*tp, 1));
1372 TREE_THIS_VOLATILE (*tp) = TREE_THIS_VOLATILE (old);
1373 TREE_SIDE_EFFECTS (*tp) = TREE_SIDE_EFFECTS (old);
1374 copy_warning (*tp, old);
1375 if (MR_DEPENDENCE_CLIQUE (old) != 0)
1377 MR_DEPENDENCE_CLIQUE (*tp)
1378 = remap_dependence_clique (id, MR_DEPENDENCE_CLIQUE (old));
1379 MR_DEPENDENCE_BASE (*tp) = MR_DEPENDENCE_BASE (old);
1381 /* We cannot propagate the TREE_THIS_NOTRAP flag if we have
1382 remapped a parameter as the property might be valid only
1383 for the parameter itself. */
1384 if (TREE_THIS_NOTRAP (old)
1385 && (!is_parm (TREE_OPERAND (old, 0))
1386 || (!id->transform_parameter && is_parm (ptr))))
1387 TREE_THIS_NOTRAP (*tp) = 1;
1388 REF_REVERSE_STORAGE_ORDER (*tp) = REF_REVERSE_STORAGE_ORDER (old);
1389 *walk_subtrees = 0;
1390 return NULL;
1393 /* Here is the "usual case". Copy this tree node, and then
1394 tweak some special cases. */
1395 copy_tree_r (tp, walk_subtrees, NULL);
1397 /* If EXPR has a block defined, map it to the newly constructed block.
1398 When inlining we want EXPRs without a block to appear in the block
1399 of the function call if we are not remapping a type. */
1400 if (EXPR_P (*tp))
1402 new_block = id->remapping_type_depth == 0 ? id->block : NULL;
1403 if (TREE_BLOCK (*tp))
1405 tree *n;
1406 n = id->decl_map->get (TREE_BLOCK (*tp));
1407 if (n)
1408 new_block = *n;
1410 TREE_SET_BLOCK (*tp, new_block);
1413 if (TREE_CODE (*tp) != OMP_CLAUSE)
1414 TREE_TYPE (*tp) = remap_type (TREE_TYPE (*tp), id);
1416 /* The copied TARGET_EXPR has never been expanded, even if the
1417 original node was expanded already. */
1418 if (TREE_CODE (*tp) == TARGET_EXPR && TREE_OPERAND (*tp, 3))
1420 TREE_OPERAND (*tp, 1) = TREE_OPERAND (*tp, 3);
1421 TREE_OPERAND (*tp, 3) = NULL_TREE;
1424 /* Variable substitution need not be simple. In particular, the
1425 INDIRECT_REF substitution above. Make sure that TREE_CONSTANT
1426 and friends are up-to-date. */
1427 else if (TREE_CODE (*tp) == ADDR_EXPR)
1429 int invariant = is_gimple_min_invariant (*tp);
1430 walk_tree (&TREE_OPERAND (*tp, 0), copy_tree_body_r, id, NULL);
1432 /* Handle the case where we substituted an INDIRECT_REF
1433 into the operand of the ADDR_EXPR. */
1434 if (INDIRECT_REF_P (TREE_OPERAND (*tp, 0))
1435 && !id->do_not_fold)
1437 tree t = TREE_OPERAND (TREE_OPERAND (*tp, 0), 0);
1438 if (TREE_TYPE (t) != TREE_TYPE (*tp))
1439 t = fold_convert (remap_type (TREE_TYPE (*tp), id), t);
1440 *tp = t;
1442 else
1443 recompute_tree_invariant_for_addr_expr (*tp);
1445 /* If this used to be invariant, but is not any longer,
1446 then regimplification is probably needed. */
1447 if (invariant && !is_gimple_min_invariant (*tp))
1448 id->regimplify = true;
1450 *walk_subtrees = 0;
1452 else if (TREE_CODE (*tp) == OMP_CLAUSE
1453 && (OMP_CLAUSE_CODE (*tp) == OMP_CLAUSE_AFFINITY
1454 || OMP_CLAUSE_CODE (*tp) == OMP_CLAUSE_DEPEND))
1456 tree t = OMP_CLAUSE_DECL (*tp);
1457 if (t
1458 && TREE_CODE (t) == TREE_LIST
1459 && TREE_PURPOSE (t)
1460 && TREE_CODE (TREE_PURPOSE (t)) == TREE_VEC)
1462 *walk_subtrees = 0;
1463 OMP_CLAUSE_DECL (*tp) = copy_node (t);
1464 t = OMP_CLAUSE_DECL (*tp);
1465 TREE_PURPOSE (t) = copy_node (TREE_PURPOSE (t));
1466 for (int i = 0; i <= 4; i++)
1467 walk_tree (&TREE_VEC_ELT (TREE_PURPOSE (t), i),
1468 copy_tree_body_r, id, NULL);
1469 if (TREE_VEC_ELT (TREE_PURPOSE (t), 5))
1470 remap_block (&TREE_VEC_ELT (TREE_PURPOSE (t), 5), id);
1471 walk_tree (&TREE_VALUE (t), copy_tree_body_r, id, NULL);
1476 /* Keep iterating. */
1477 return NULL_TREE;
1480 /* Helper for remap_gimple_stmt. Given an EH region number for the
1481 source function, map that to the duplicate EH region number in
1482 the destination function. */
1484 static int
1485 remap_eh_region_nr (int old_nr, copy_body_data *id)
1487 eh_region old_r, new_r;
1489 old_r = get_eh_region_from_number_fn (id->src_cfun, old_nr);
1490 new_r = static_cast<eh_region> (*id->eh_map->get (old_r));
1492 return new_r->index;
1495 /* Similar, but operate on INTEGER_CSTs. */
1497 static tree
1498 remap_eh_region_tree_nr (tree old_t_nr, copy_body_data *id)
1500 int old_nr, new_nr;
1502 old_nr = tree_to_shwi (old_t_nr);
1503 new_nr = remap_eh_region_nr (old_nr, id);
1505 return build_int_cst (integer_type_node, new_nr);
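/* Illustrative sketch: a source-function call such as
   __builtin_eh_pointer (2) has its region number 2 looked up via
   id->eh_map and replaced by the index of the duplicated EH region in the
   destination function (see the GIMPLE_CALL handling further below).  */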
1508 /* Helper for copy_bb. Remap statement STMT using the inlining
1509 information in ID. Return the new statement copy. */
1511 static gimple_seq
1512 remap_gimple_stmt (gimple *stmt, copy_body_data *id)
1514 gimple *copy = NULL;
1515 struct walk_stmt_info wi;
1516 bool skip_first = false;
1517 gimple_seq stmts = NULL;
1519 if (is_gimple_debug (stmt)
1520 && (gimple_debug_nonbind_marker_p (stmt)
1521 ? !DECL_STRUCT_FUNCTION (id->dst_fn)->debug_nonbind_markers
1522 : !opt_for_fn (id->dst_fn, flag_var_tracking_assignments)))
1523 return NULL;
1525 if (!is_gimple_debug (stmt)
1526 && id->param_body_adjs
1527 && id->param_body_adjs->m_dead_stmts.contains (stmt))
1529 tree *dval = id->param_body_adjs->m_dead_stmt_debug_equiv.get (stmt);
1530 if (!dval)
1531 return NULL;
1533 gcc_assert (is_gimple_assign (stmt));
1534 tree lhs = gimple_assign_lhs (stmt);
1535 tree *dvar = id->param_body_adjs->m_dead_ssa_debug_equiv.get (lhs);
1536 gdebug *bind = gimple_build_debug_bind (*dvar, *dval, stmt);
1537 if (id->reset_location)
1538 gimple_set_location (bind, input_location);
1539 id->debug_stmts.safe_push (bind);
1540 gimple_seq_add_stmt_without_update (&stmts, bind);
1541 return stmts;
1544 /* Begin by recognizing trees that we'll completely rewrite for the
1545 inlining context. Our output for these trees is completely
1546 different from our input (e.g. RETURN_EXPR is deleted and morphs
1547 into an edge). Further down, we'll handle trees that get
1548 duplicated and/or tweaked. */
1550 /* When requested, GIMPLE_RETURN should be transformed to just the
1551 contained GIMPLE_ASSIGN. The branch semantics of the return will
1552 be handled elsewhere by manipulating the CFG rather than the
1553 statement. */
1554 if (gimple_code (stmt) == GIMPLE_RETURN && id->transform_return_to_modify)
1556 tree retval = gimple_return_retval (as_a <greturn *> (stmt));
1558 /* If we're returning something, just turn that into an
1559 assignment to the equivalent of the original RESULT_DECL.
1560 If RETVAL is just the result decl, the result decl has
1561 already been set (e.g. a recent "foo (&result_decl, ...)");
1562 just toss the entire GIMPLE_RETURN. Likewise for when the
1563 call doesn't want the return value. */
1564 if (retval
1565 && (TREE_CODE (retval) != RESULT_DECL
1566 && (!id->call_stmt
1567 || gimple_call_lhs (id->call_stmt) != NULL_TREE)
1568 && (TREE_CODE (retval) != SSA_NAME
1569 || ! SSA_NAME_VAR (retval)
1570 || TREE_CODE (SSA_NAME_VAR (retval)) != RESULT_DECL)))
1572 copy = gimple_build_assign (id->do_not_unshare
1573 ? id->retvar : unshare_expr (id->retvar),
1574 retval);
1575 /* id->retvar is already substituted. Skip it on later remapping. */
1576 skip_first = true;
1578 else
1579 return NULL;
1581 else if (gimple_has_substatements (stmt))
1583 gimple_seq s1, s2;
1585 /* When cloning bodies from the C++ front end, we will be handed bodies
1586 in High GIMPLE form. Handle here all the High GIMPLE statements that
1587 have embedded statements. */
1588 switch (gimple_code (stmt))
1590 case GIMPLE_BIND:
1591 copy = copy_gimple_bind (as_a <gbind *> (stmt), id);
1592 break;
1594 case GIMPLE_CATCH:
1596 gcatch *catch_stmt = as_a <gcatch *> (stmt);
1597 s1 = remap_gimple_seq (gimple_catch_handler (catch_stmt), id);
1598 copy = gimple_build_catch (gimple_catch_types (catch_stmt), s1);
1600 break;
1602 case GIMPLE_EH_FILTER:
1603 s1 = remap_gimple_seq (gimple_eh_filter_failure (stmt), id);
1604 copy = gimple_build_eh_filter (gimple_eh_filter_types (stmt), s1);
1605 break;
1607 case GIMPLE_TRY:
1608 s1 = remap_gimple_seq (gimple_try_eval (stmt), id);
1609 s2 = remap_gimple_seq (gimple_try_cleanup (stmt), id);
1610 copy = gimple_build_try (s1, s2, gimple_try_kind (stmt));
1611 break;
1613 case GIMPLE_WITH_CLEANUP_EXPR:
1614 s1 = remap_gimple_seq (gimple_wce_cleanup (stmt), id);
1615 copy = gimple_build_wce (s1);
1616 break;
1618 case GIMPLE_OMP_PARALLEL:
1620 gomp_parallel *omp_par_stmt = as_a <gomp_parallel *> (stmt);
1621 s1 = remap_gimple_seq (gimple_omp_body (omp_par_stmt), id);
1622 copy = gimple_build_omp_parallel
1623 (s1,
1624 gimple_omp_parallel_clauses (omp_par_stmt),
1625 gimple_omp_parallel_child_fn (omp_par_stmt),
1626 gimple_omp_parallel_data_arg (omp_par_stmt));
1628 break;
1630 case GIMPLE_OMP_TASK:
1631 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1632 copy = gimple_build_omp_task
1633 (s1,
1634 gimple_omp_task_clauses (stmt),
1635 gimple_omp_task_child_fn (stmt),
1636 gimple_omp_task_data_arg (stmt),
1637 gimple_omp_task_copy_fn (stmt),
1638 gimple_omp_task_arg_size (stmt),
1639 gimple_omp_task_arg_align (stmt));
1640 break;
1642 case GIMPLE_OMP_FOR:
1643 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1644 s2 = remap_gimple_seq (gimple_omp_for_pre_body (stmt), id);
1645 copy = gimple_build_omp_for (s1, gimple_omp_for_kind (stmt),
1646 gimple_omp_for_clauses (stmt),
1647 gimple_omp_for_collapse (stmt), s2);
1649 size_t i;
1650 for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
1652 gimple_omp_for_set_index (copy, i,
1653 gimple_omp_for_index (stmt, i));
1654 gimple_omp_for_set_initial (copy, i,
1655 gimple_omp_for_initial (stmt, i));
1656 gimple_omp_for_set_final (copy, i,
1657 gimple_omp_for_final (stmt, i));
1658 gimple_omp_for_set_incr (copy, i,
1659 gimple_omp_for_incr (stmt, i));
1660 gimple_omp_for_set_cond (copy, i,
1661 gimple_omp_for_cond (stmt, i));
1664 break;
1666 case GIMPLE_OMP_MASTER:
1667 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1668 copy = gimple_build_omp_master (s1);
1669 break;
1671 case GIMPLE_OMP_MASKED:
1672 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1673 copy = gimple_build_omp_masked
1674 (s1, gimple_omp_masked_clauses (stmt));
1675 break;
1677 case GIMPLE_OMP_SCOPE:
1678 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1679 copy = gimple_build_omp_scope
1680 (s1, gimple_omp_scope_clauses (stmt));
1681 break;
1683 case GIMPLE_OMP_TASKGROUP:
1684 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1685 copy = gimple_build_omp_taskgroup
1686 (s1, gimple_omp_taskgroup_clauses (stmt));
1687 break;
1689 case GIMPLE_OMP_ORDERED:
1690 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1691 copy = gimple_build_omp_ordered
1692 (s1,
1693 gimple_omp_ordered_clauses (as_a <gomp_ordered *> (stmt)));
1694 break;
1696 case GIMPLE_OMP_SCAN:
1697 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1698 copy = gimple_build_omp_scan
1699 (s1, gimple_omp_scan_clauses (as_a <gomp_scan *> (stmt)));
1700 break;
1702 case GIMPLE_OMP_SECTION:
1703 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1704 copy = gimple_build_omp_section (s1);
1705 break;
1707 case GIMPLE_OMP_SECTIONS:
1708 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1709 copy = gimple_build_omp_sections
1710 (s1, gimple_omp_sections_clauses (stmt));
1711 break;
1713 case GIMPLE_OMP_STRUCTURED_BLOCK:
1714 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1715 copy = gimple_build_omp_structured_block (s1);
1716 break;
1718 case GIMPLE_OMP_SINGLE:
1719 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1720 copy = gimple_build_omp_single
1721 (s1, gimple_omp_single_clauses (stmt));
1722 break;
1724 case GIMPLE_OMP_TARGET:
1725 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1726 copy = gimple_build_omp_target
1727 (s1, gimple_omp_target_kind (stmt),
1728 gimple_omp_target_clauses (stmt));
1729 break;
1731 case GIMPLE_OMP_TEAMS:
1732 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1733 copy = gimple_build_omp_teams
1734 (s1, gimple_omp_teams_clauses (stmt));
1735 break;
1737 case GIMPLE_OMP_CRITICAL:
1738 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1739 copy = gimple_build_omp_critical (s1,
1740 gimple_omp_critical_name
1741 (as_a <gomp_critical *> (stmt)),
1742 gimple_omp_critical_clauses
1743 (as_a <gomp_critical *> (stmt)));
1744 break;
1746 case GIMPLE_ASSUME:
1747 s1 = remap_gimple_seq (gimple_assume_body (stmt), id);
1748 copy = gimple_build_assume (gimple_assume_guard (stmt), s1);
1749 break;
1751 case GIMPLE_TRANSACTION:
1753 gtransaction *old_trans_stmt = as_a <gtransaction *> (stmt);
1754 gtransaction *new_trans_stmt;
1755 s1 = remap_gimple_seq (gimple_transaction_body (old_trans_stmt),
1756 id);
1757 copy = new_trans_stmt = gimple_build_transaction (s1);
1758 gimple_transaction_set_subcode (new_trans_stmt,
1759 gimple_transaction_subcode (old_trans_stmt));
1760 gimple_transaction_set_label_norm (new_trans_stmt,
1761 gimple_transaction_label_norm (old_trans_stmt));
1762 gimple_transaction_set_label_uninst (new_trans_stmt,
1763 gimple_transaction_label_uninst (old_trans_stmt));
1764 gimple_transaction_set_label_over (new_trans_stmt,
1765 gimple_transaction_label_over (old_trans_stmt));
1767 break;
1769 default:
1770 gcc_unreachable ();
1773 else
1775 if (gimple_assign_single_p (stmt)
1776 && gimple_assign_lhs (stmt) == gimple_assign_rhs1 (stmt)
1777 && auto_var_in_fn_p (gimple_assign_lhs (stmt), id->src_fn))
1779 /* Here we handle statements that are not completely rewritten.
1780 First we detect some inlining-induced bogosities for
1781 discarding. */
1783 /* Some assignments VAR = VAR; don't generate any rtl code
1784 and thus don't count as variable modification. Avoid
1785 keeping bogosities like 0 = 0. */
1786 tree decl = gimple_assign_lhs (stmt), value;
1787 tree *n;
1789 n = id->decl_map->get (decl);
1790 if (n)
1792 value = *n;
1793 STRIP_TYPE_NOPS (value);
1794 if (TREE_CONSTANT (value) || TREE_READONLY (value))
1795 return NULL;
1799 /* For *ptr_N ={v} {CLOBBER}, if ptr_N is SSA_NAME defined
1800 in a block that we aren't copying during tree_function_versioning,
1801 just drop the clobber stmt. */
1802 if (id->blocks_to_copy && gimple_clobber_p (stmt))
1804 tree lhs = gimple_assign_lhs (stmt);
1805 if (TREE_CODE (lhs) == MEM_REF
1806 && TREE_CODE (TREE_OPERAND (lhs, 0)) == SSA_NAME)
1808 gimple *def_stmt = SSA_NAME_DEF_STMT (TREE_OPERAND (lhs, 0));
1809 if (gimple_bb (def_stmt)
1810 && !bitmap_bit_p (id->blocks_to_copy,
1811 gimple_bb (def_stmt)->index))
1812 return NULL;
1816 /* We do not allow CLOBBERs of handled components. In case
1817 the returned value is stored via such a handled component, remove
1818 the clobber so the stmt verifier is happy. */
1819 if (gimple_clobber_p (stmt)
1820 && TREE_CODE (gimple_assign_lhs (stmt)) == RESULT_DECL)
1822 tree remapped = remap_decl (gimple_assign_lhs (stmt), id);
1823 if (!DECL_P (remapped)
1824 && TREE_CODE (remapped) != MEM_REF)
1825 return NULL;
1828 if (gimple_debug_bind_p (stmt))
1830 tree var = gimple_debug_bind_get_var (stmt);
1831 tree value = gimple_debug_bind_get_value (stmt);
1832 if (id->param_body_adjs
1833 && id->param_body_adjs->m_dead_stmts.contains (stmt))
1835 value = unshare_expr_without_location (value);
1836 id->param_body_adjs->remap_with_debug_expressions (&value);
1839 gdebug *copy = gimple_build_debug_bind (var, value, stmt);
1840 if (id->reset_location)
1841 gimple_set_location (copy, input_location);
1842 id->debug_stmts.safe_push (copy);
1843 gimple_seq_add_stmt_without_update (&stmts, copy);
1844 return stmts;
1846 if (gimple_debug_source_bind_p (stmt))
1848 gdebug *copy = gimple_build_debug_source_bind
1849 (gimple_debug_source_bind_get_var (stmt),
1850 gimple_debug_source_bind_get_value (stmt),
1851 stmt);
1852 if (id->reset_location)
1853 gimple_set_location (copy, input_location);
1854 id->debug_stmts.safe_push (copy);
1855 gimple_seq_add_stmt_without_update (&stmts, copy);
1856 return stmts;
1858 if (gimple_debug_nonbind_marker_p (stmt))
1860 /* If the inlined function has too many debug markers,
1861 don't copy them. */
1862 if (id->src_cfun->debug_marker_count
1863 > param_max_debug_marker_count
1864 || id->reset_location)
1865 return stmts;
1867 gdebug *copy = as_a <gdebug *> (gimple_copy (stmt));
1868 id->debug_stmts.safe_push (copy);
1869 gimple_seq_add_stmt_without_update (&stmts, copy);
1870 return stmts;
1873 /* Create a new deep copy of the statement. */
1874 copy = gimple_copy (stmt);
1876 /* Clear flags that need revisiting. */
1877 if (gcall *call_stmt = dyn_cast <gcall *> (copy))
1879 if (gimple_call_tail_p (call_stmt))
1880 gimple_call_set_tail (call_stmt, false);
1881 if (gimple_call_from_thunk_p (call_stmt))
1882 gimple_call_set_from_thunk (call_stmt, false);
1883 if (gimple_call_internal_p (call_stmt))
1884 switch (gimple_call_internal_fn (call_stmt))
1886 case IFN_GOMP_SIMD_LANE:
1887 case IFN_GOMP_SIMD_VF:
1888 case IFN_GOMP_SIMD_LAST_LANE:
1889 case IFN_GOMP_SIMD_ORDERED_START:
1890 case IFN_GOMP_SIMD_ORDERED_END:
1891 DECL_STRUCT_FUNCTION (id->dst_fn)->has_simduid_loops = true;
1892 break;
1893 default:
1894 break;
1898 /* Remap the region numbers for __builtin_eh_{pointer,filter},
1899 RESX and EH_DISPATCH. */
1900 if (id->eh_map)
1901 switch (gimple_code (copy))
1903 case GIMPLE_CALL:
1905 tree r, fndecl = gimple_call_fndecl (copy);
1906 if (fndecl && fndecl_built_in_p (fndecl, BUILT_IN_NORMAL))
1907 switch (DECL_FUNCTION_CODE (fndecl))
1909 case BUILT_IN_EH_COPY_VALUES:
1910 r = gimple_call_arg (copy, 1);
1911 r = remap_eh_region_tree_nr (r, id);
1912 gimple_call_set_arg (copy, 1, r);
1913 /* FALLTHRU */
1915 case BUILT_IN_EH_POINTER:
1916 case BUILT_IN_EH_FILTER:
1917 r = gimple_call_arg (copy, 0);
1918 r = remap_eh_region_tree_nr (r, id);
1919 gimple_call_set_arg (copy, 0, r);
1920 break;
1922 default:
1923 break;
1926 /* Reset alias info if we didn't apply measures to
1927 keep it valid over inlining by setting DECL_PT_UID. */
1928 if (!id->src_cfun->gimple_df
1929 || !id->src_cfun->gimple_df->ipa_pta)
1930 gimple_call_reset_alias_info (as_a <gcall *> (copy));
1932 break;
1934 case GIMPLE_RESX:
1936 gresx *resx_stmt = as_a <gresx *> (copy);
1937 int r = gimple_resx_region (resx_stmt);
1938 r = remap_eh_region_nr (r, id);
1939 gimple_resx_set_region (resx_stmt, r);
1941 break;
1943 case GIMPLE_EH_DISPATCH:
1945 geh_dispatch *eh_dispatch = as_a <geh_dispatch *> (copy);
1946 int r = gimple_eh_dispatch_region (eh_dispatch);
1947 r = remap_eh_region_nr (r, id);
1948 gimple_eh_dispatch_set_region (eh_dispatch, r);
1950 break;
1952 default:
1953 break;
1957 /* If STMT has a block defined, map it to the newly constructed block. */
1958 if (tree block = gimple_block (copy))
1960 tree *n;
1961 n = id->decl_map->get (block);
1962 gcc_assert (n);
1963 gimple_set_block (copy, *n);
1965 if (id->param_body_adjs)
1967 gimple_seq extra_stmts = NULL;
1968 id->param_body_adjs->modify_gimple_stmt (&copy, &extra_stmts, stmt);
1969 if (!gimple_seq_empty_p (extra_stmts))
1971 memset (&wi, 0, sizeof (wi));
1972 wi.info = id;
1973 for (gimple_stmt_iterator egsi = gsi_start (extra_stmts);
1974 !gsi_end_p (egsi);
1975 gsi_next (&egsi))
1976 walk_gimple_op (gsi_stmt (egsi), remap_gimple_op_r, &wi);
1977 gimple_seq_add_seq_without_update (&stmts, extra_stmts);
1981 if (id->reset_location)
1982 gimple_set_location (copy, input_location);
1984 /* Debug statements ought to be rebuilt and not copied. */
1985 gcc_checking_assert (!is_gimple_debug (copy));
1987 /* Remap all the operands in COPY. */
1988 memset (&wi, 0, sizeof (wi));
1989 wi.info = id;
1990 if (skip_first)
1991 walk_tree (gimple_op_ptr (copy, 1), remap_gimple_op_r, &wi, NULL);
1992 else
1993 walk_gimple_op (copy, remap_gimple_op_r, &wi);
1995 /* Clear the copied virtual operands. We are not remapping them here
1996 but are going to recreate them from scratch. */
1997 if (gimple_has_mem_ops (copy))
1999 gimple_set_vdef (copy, NULL_TREE);
2000 gimple_set_vuse (copy, NULL_TREE);
2003 if (cfun->can_throw_non_call_exceptions)
2005 /* When inlining a function which does not have non-call exceptions
2006 enabled into a function that has (which only happens with
2007 always-inline) we have to fixup stmts that cannot throw. */
2008 if (gcond *cond = dyn_cast <gcond *> (copy))
2009 if (gimple_could_trap_p (cond))
2011 gassign *cmp
2012 = gimple_build_assign (make_ssa_name (boolean_type_node),
2013 gimple_cond_code (cond),
2014 gimple_cond_lhs (cond),
2015 gimple_cond_rhs (cond));
2016 gimple_seq_add_stmt_without_update (&stmts, cmp);
2017 gimple_cond_set_code (cond, NE_EXPR);
2018 gimple_cond_set_lhs (cond, gimple_assign_lhs (cmp));
2019 gimple_cond_set_rhs (cond, boolean_false_node);
2023 gimple_seq_add_stmt_without_update (&stmts, copy);
2024 return stmts;
2028 /* Copy basic block, scale profile accordingly. Edges will be taken care of
2029 later. */
2031 static basic_block
2032 copy_bb (copy_body_data *id, basic_block bb,
2033 profile_count num, profile_count den)
2035 gimple_stmt_iterator gsi, copy_gsi, seq_gsi;
2036 basic_block copy_basic_block;
2037 tree decl;
2038 basic_block prev;
2040 profile_count::adjust_for_ipa_scaling (&num, &den);
2042 /* Search for previous copied basic block. */
2043 prev = bb->prev_bb;
2044 while (!prev->aux)
2045 prev = prev->prev_bb;
2047 /* create_basic_block() will append every new block to
2048 basic_block_info automatically. */
2049 copy_basic_block = create_basic_block (NULL, (basic_block) prev->aux);
2050 copy_basic_block->count = bb->count.apply_scale (num, den);
2052 copy_gsi = gsi_start_bb (copy_basic_block);
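/* For -fcondition-coverage, compute an offset one past the largest
   condition uid recorded for the inlined body; it is added to the
   remapped condition uids below so the tags stay distinct.  */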
2054 unsigned min_cond_uid = 0;
2055 if (id->src_cfun->cond_uids)
2057 if (!cfun->cond_uids)
2058 cfun->cond_uids = new hash_map <gcond*, unsigned> ();
2060 for (auto itr : *id->src_cfun->cond_uids)
2061 if (itr.second >= min_cond_uid)
2062 min_cond_uid = itr.second + 1;
2065 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
2067 gimple_seq stmts;
2068 gimple *stmt = gsi_stmt (gsi);
2069 gimple *orig_stmt = stmt;
2070 gimple_stmt_iterator stmts_gsi;
2071 bool stmt_added = false;
2073 id->regimplify = false;
2074 stmts = remap_gimple_stmt (stmt, id);
2076 if (gimple_seq_empty_p (stmts))
2077 continue;
2079 seq_gsi = copy_gsi;
2081 for (stmts_gsi = gsi_start (stmts);
2082 !gsi_end_p (stmts_gsi); )
2084 stmt = gsi_stmt (stmts_gsi);
2086 /* Advance iterator now before stmt is moved to seq_gsi. */
2087 gsi_next (&stmts_gsi);
2089 if (gimple_nop_p (stmt))
2090 continue;
2092 /* If -fcondition-coverage is used, register the inlined conditions
2093 in the cond->expression mapping of the caller. The expression tag
2094 is shifted so that conditions from the two bodies are not mixed. */
2095 if (id->src_cfun->cond_uids && is_a <gcond*> (stmt))
2097 gcond *orig_cond = as_a <gcond*> (orig_stmt);
2098 gcond *cond = as_a <gcond*> (stmt);
2099 unsigned *v = id->src_cfun->cond_uids->get (orig_cond);
2100 if (v)
2101 cfun->cond_uids->put (cond, *v + min_cond_uid);
2104 gimple_duplicate_stmt_histograms (cfun, stmt, id->src_cfun,
2105 orig_stmt);
2107 gsi_insert_after (&seq_gsi, stmt, GSI_NEW_STMT);
2109 if (id->regimplify)
2110 gimple_regimplify_operands (stmt, &seq_gsi);
2112 stmt_added = true;
2115 if (!stmt_added)
2116 continue;
2118 /* If copy_basic_block has been empty at the start of this iteration,
2119 call gsi_start_bb again to get at the newly added statements. */
2120 if (gsi_end_p (copy_gsi))
2121 copy_gsi = gsi_start_bb (copy_basic_block);
2122 else
2123 gsi_next (&copy_gsi);
2125 /* Process the new statement. The call to gimple_regimplify_operands
2126 possibly turned the statement into multiple statements, we
2127 need to process all of them. */
2130 tree fn;
2131 gcall *call_stmt;
2133 stmt = gsi_stmt (copy_gsi);
2134 call_stmt = dyn_cast <gcall *> (stmt);
2135 if (call_stmt
2136 && gimple_call_va_arg_pack_p (call_stmt)
2137 && id->call_stmt
2138 && ! gimple_call_va_arg_pack_p (id->call_stmt))
2140 /* __builtin_va_arg_pack () should be replaced by
2141 all arguments corresponding to ... in the caller. */
2142 tree p;
2143 gcall *new_call;
2144 vec<tree> argarray;
2145 size_t nargs_caller = gimple_call_num_args (id->call_stmt);
2146 size_t nargs = nargs_caller;
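/* Each named parameter of the inlined callee consumes one caller
   argument; whatever remains in NARGS after the loop below is the
   number of arguments that were passed through '...'.  */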
2148 for (p = DECL_ARGUMENTS (id->src_fn); p; p = DECL_CHAIN (p))
2150 /* Avoid crashing on invalid IL that doesn't have a
2151 varargs function or that doesn't pass enough arguments. */
2152 if (nargs == 0)
2153 break;
2154 nargs--;
2157 /* Create the new array of arguments. */
2158 size_t nargs_callee = gimple_call_num_args (call_stmt);
2159 size_t n = nargs + nargs_callee;
2160 argarray.create (n);
2161 argarray.safe_grow_cleared (n, true);
2163 /* Copy all the arguments before '...' */
2164 if (nargs_callee)
2165 memcpy (argarray.address (),
2166 gimple_call_arg_ptr (call_stmt, 0),
2167 nargs_callee * sizeof (tree));
2169 /* Append the arguments passed in '...' */
2170 if (nargs)
2171 memcpy (argarray.address () + nargs_callee,
2172 gimple_call_arg_ptr (id->call_stmt, 0)
2173 + (nargs_caller - nargs), nargs * sizeof (tree));
2175 new_call = gimple_build_call_vec (gimple_call_fn (call_stmt),
2176 argarray);
2178 argarray.release ();
2180 /* Copy all GIMPLE_CALL flags, location and block, except
2181 GF_CALL_VA_ARG_PACK. */
2182 gimple_call_copy_flags (new_call, call_stmt);
2183 gimple_call_set_va_arg_pack (new_call, false);
2184 gimple_call_set_fntype (new_call, gimple_call_fntype (call_stmt));
2185 /* location includes block. */
2186 gimple_set_location (new_call, gimple_location (stmt));
2187 gimple_call_set_lhs (new_call, gimple_call_lhs (call_stmt));
2189 gsi_replace (&copy_gsi, new_call, false);
2190 stmt = new_call;
2192 else if (call_stmt
2193 && id->call_stmt
2194 && (decl = gimple_call_fndecl (stmt))
2195 && fndecl_built_in_p (decl, BUILT_IN_VA_ARG_PACK_LEN))
2197 /* __builtin_va_arg_pack_len () should be replaced by
2198 the number of anonymous arguments. */
2199 size_t nargs = gimple_call_num_args (id->call_stmt);
2200 tree count, p;
2201 gimple *new_stmt;
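/* Subtract the callee's named parameters; NARGS then counts only the
   arguments that correspond to '...' in the caller.  */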
2203 for (p = DECL_ARGUMENTS (id->src_fn); p; p = DECL_CHAIN (p))
2204 nargs--;
2206 if (!gimple_call_lhs (stmt))
2208 /* Drop unused calls. */
2209 gsi_remove (&copy_gsi, false);
2210 continue;
2212 else if (!gimple_call_va_arg_pack_p (id->call_stmt))
2214 count = build_int_cst (integer_type_node, nargs);
2215 new_stmt = gimple_build_assign (gimple_call_lhs (stmt), count);
2216 gsi_replace (&copy_gsi, new_stmt, false);
2217 stmt = new_stmt;
2219 else if (nargs != 0)
2221 tree newlhs = create_tmp_reg_or_ssa_name (integer_type_node);
2222 count = build_int_cst (integer_type_node, nargs);
2223 new_stmt = gimple_build_assign (gimple_call_lhs (stmt),
2224 PLUS_EXPR, newlhs, count);
2225 gimple_call_set_lhs (stmt, newlhs);
2226 gsi_insert_after (&copy_gsi, new_stmt, GSI_NEW_STMT);
2229 else if (call_stmt
2230 && id->call_stmt
2231 && gimple_call_internal_p (stmt))
2232 switch (gimple_call_internal_fn (stmt))
2234 case IFN_TSAN_FUNC_EXIT:
2235 /* Drop .TSAN_FUNC_EXIT () internal calls during inlining. */
2236 gsi_remove (&copy_gsi, false);
2237 continue;
2238 case IFN_ASAN_MARK:
2239 /* Drop .ASAN_MARK internal calls during inlining into
2240 no_sanitize functions. */
2241 if (!sanitize_flags_p (SANITIZE_ADDRESS, id->dst_fn)
2242 && !sanitize_flags_p (SANITIZE_HWADDRESS, id->dst_fn))
2244 gsi_remove (&copy_gsi, false);
2245 continue;
2247 break;
2248 default:
2249 break;
2252 /* Statements produced by inlining can be unfolded, especially
2253 when we constant propagated some operands. We can't fold
2254 them right now for two reasons:
2255 1) folding requires SSA_NAME_DEF_STMTs to be correct
2256 2) we can't change function calls to builtins.
2257 So we just mark the statement for later folding. We mark
2258 all new statements, instead of just the statements that have changed
2259 by some nontrivial substitution so even statements made
2260 foldable indirectly are updated. If this turns out to be
2261 expensive, copy_body can be told to watch for nontrivial
2262 changes. */
2263 if (id->statements_to_fold)
2264 id->statements_to_fold->add (stmt);
2266 /* We're duplicating a CALL_EXPR. Find any corresponding
2267 callgraph edges and update or duplicate them. */
2268 if (gcall *call_stmt = dyn_cast <gcall *> (stmt))
2270 struct cgraph_edge *edge;
2272 switch (id->transform_call_graph_edges)
2274 case CB_CGE_DUPLICATE:
2275 edge = id->src_node->get_edge (orig_stmt);
2276 if (edge)
2278 struct cgraph_edge *old_edge = edge;
2280 /* A speculative call consists of multiple
2281 edges - an indirect edge and one or more direct edges.
2282 Duplicate the whole thing and distribute frequencies
2283 accordingly. */
2284 if (edge->speculative)
2286 int n = 0;
2287 profile_count direct_cnt
2288 = profile_count::zero ();
2290 /* First figure out the distribution of counts
2291 so we can re-scale BB profile accordingly. */
2292 for (cgraph_edge *e = old_edge; e;
2293 e = e->next_speculative_call_target ())
2294 direct_cnt = direct_cnt + e->count;
2296 cgraph_edge *indirect
2297 = old_edge->speculative_call_indirect_edge ();
2298 profile_count indir_cnt = indirect->count;
2300 /* Next iterate over all direct edges, clone each one and its
2301 corresponding reference and update the profile. */
2302 for (cgraph_edge *e = old_edge;
2304 e = e->next_speculative_call_target ())
2306 profile_count cnt = e->count;
2308 id->dst_node->clone_reference
2309 (e->speculative_call_target_ref (), stmt);
2310 edge = e->clone (id->dst_node, call_stmt,
2311 gimple_uid (stmt), num, den,
2312 true);
2313 profile_probability prob
2314 = cnt.probability_in (direct_cnt
2315 + indir_cnt);
2316 edge->count
2317 = copy_basic_block->count.apply_probability
2318 (prob);
2319 n++;
2321 gcc_checking_assert
2322 (indirect->num_speculative_call_targets_p ()
2323 == n);
2325 /* Duplicate the indirect edge after all direct edges have been
2326 cloned. */
2327 indirect = indirect->clone (id->dst_node, call_stmt,
2328 gimple_uid (stmt),
2329 num, den,
2330 true);
2332 profile_probability prob
2333 = indir_cnt.probability_in (direct_cnt
2334 + indir_cnt);
2335 indirect->count
2336 = copy_basic_block->count.apply_probability (prob);
2338 else
2340 edge = edge->clone (id->dst_node, call_stmt,
2341 gimple_uid (stmt),
2342 num, den,
2343 true);
2344 edge->count = copy_basic_block->count;
2347 break;
2349 case CB_CGE_MOVE_CLONES:
2350 id->dst_node->set_call_stmt_including_clones (orig_stmt,
2351 call_stmt);
2352 edge = id->dst_node->get_edge (stmt);
2353 break;
2355 case CB_CGE_MOVE:
2356 edge = id->dst_node->get_edge (orig_stmt);
2357 if (edge)
2358 edge = cgraph_edge::set_call_stmt (edge, call_stmt);
2359 break;
2361 default:
2362 gcc_unreachable ();
2365 /* Constant propagation on arguments done during inlining
2366 may create a new direct call. Produce an edge for it. */
2367 if ((!edge
2368 || (edge->indirect_inlining_edge
2369 && id->transform_call_graph_edges == CB_CGE_MOVE_CLONES))
2370 && id->dst_node->definition
2371 && (fn = gimple_call_fndecl (stmt)) != NULL)
2373 struct cgraph_node *dest = cgraph_node::get_create (fn);
2375 /* We have a missing edge in the callgraph. This can happen
2376 when previous inlining turned an indirect call into a
2377 direct call by constant propagating arguments or we are
2378 producing a dead clone (for further cloning). In all
2379 other cases we hit a bug (incorrect node sharing is the
2380 most common reason for missing edges). */
2381 gcc_assert (!dest->definition
2382 || dest->address_taken
2383 || !id->src_node->definition
2384 || !id->dst_node->definition);
2385 if (id->transform_call_graph_edges == CB_CGE_MOVE_CLONES)
2386 id->dst_node->create_edge_including_clones
2387 (dest, orig_stmt, call_stmt, bb->count,
2388 CIF_ORIGINALLY_INDIRECT_CALL);
2389 else
2390 id->dst_node->create_edge (dest, call_stmt,
2391 bb->count)->inline_failed
2392 = CIF_ORIGINALLY_INDIRECT_CALL;
2393 if (dump_file)
2395 fprintf (dump_file, "Created new direct edge to %s\n",
2396 dest->dump_name ());
2400 notice_special_calls (as_a <gcall *> (stmt));
2403 maybe_duplicate_eh_stmt_fn (cfun, stmt, id->src_cfun, orig_stmt,
2404 id->eh_map, id->eh_lp_nr);
2406 gsi_next (&copy_gsi);
2408 while (!gsi_end_p (copy_gsi));
2410 copy_gsi = gsi_last_bb (copy_basic_block);
2413 return copy_basic_block;
2416 /* Inserting Single Entry Multiple Exit region in SSA form into code in SSA
2417 form is quite easy, since dominator relationship for old basic blocks does
2418 not change.
2420 There is however an exception where inlining might change the dominator
2421 relation across EH edges from basic blocks within inlined functions
2422 destined to landing pads in the function we inline into.
2424 The function fills in PHI_RESULTs of such PHI nodes if they refer
2425 to gimple regs. Otherwise, the function marks the PHI_RESULT of such
2426 PHI nodes for renaming. For non-gimple regs, renaming is safe: the
2427 EH edges are abnormal and SSA_NAME_OCCURS_IN_ABNORMAL_PHI must be
2428 set, and this means that there will be no overlapping live ranges
2429 for the underlying symbol.
2431 This might change in the future if we allow redirecting of EH edges and
2432 we might want to change the way we build the CFG pre-inlining to include
2433 all the possible edges then. */
2434 static void
2435 update_ssa_across_abnormal_edges (basic_block bb, basic_block ret_bb,
2436 bool can_throw, bool nonlocal_goto)
2438 edge e;
2439 edge_iterator ei;
2441 FOR_EACH_EDGE (e, ei, bb->succs)
2442 if (!e->dest->aux
2443 || ((basic_block)e->dest->aux)->index == ENTRY_BLOCK)
2445 gphi *phi;
2446 gphi_iterator si;
2448 if (!nonlocal_goto)
2449 gcc_assert (e->flags & EDGE_EH);
2451 if (!can_throw)
2452 gcc_assert (!(e->flags & EDGE_EH));
2454 for (si = gsi_start_phis (e->dest); !gsi_end_p (si); gsi_next (&si))
2456 edge re;
2458 phi = si.phi ();
2460 /* For abnormal goto/call edges the receiver can be the
2461 ENTRY_BLOCK. Do not assert this cannot happen. */
2463 gcc_assert ((e->flags & EDGE_EH)
2464 || SSA_NAME_OCCURS_IN_ABNORMAL_PHI (PHI_RESULT (phi)));
2466 re = find_edge (ret_bb, e->dest);
2467 gcc_checking_assert (re);
2468 gcc_assert ((re->flags & (EDGE_EH | EDGE_ABNORMAL))
2469 == (e->flags & (EDGE_EH | EDGE_ABNORMAL)));
2471 SET_USE (PHI_ARG_DEF_PTR_FROM_EDGE (phi, e),
2472 USE_FROM_PTR (PHI_ARG_DEF_PTR_FROM_EDGE (phi, re)));
2477 /* Insert clobbers for automatic variables of inlined ID->src_fn
2478 function at the start of basic block ID->eh_landing_pad_dest. */
2480 static void
2481 add_clobbers_to_eh_landing_pad (copy_body_data *id)
2483 tree var;
2484 basic_block bb = id->eh_landing_pad_dest;
2485 live_vars_map *vars = NULL;
2486 unsigned int cnt = 0;
2487 unsigned int i;
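/* First collect the automatic variables of the inlined function whose
   remapped copies are also non-register automatics in the destination,
   and give each a dense index for the liveness bitmaps computed below.  */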
2488 FOR_EACH_VEC_SAFE_ELT (id->src_cfun->local_decls, i, var)
2489 if (VAR_P (var)
2490 && !DECL_HARD_REGISTER (var)
2491 && !TREE_THIS_VOLATILE (var)
2492 && !DECL_HAS_VALUE_EXPR_P (var)
2493 && !is_gimple_reg (var)
2494 && auto_var_in_fn_p (var, id->src_fn)
2495 && !lookup_attribute ("omp simd array", DECL_ATTRIBUTES (var)))
2497 tree *t = id->decl_map->get (var);
2498 if (!t)
2499 continue;
2500 tree new_var = *t;
2501 if (VAR_P (new_var)
2502 && !DECL_HARD_REGISTER (new_var)
2503 && !TREE_THIS_VOLATILE (new_var)
2504 && !DECL_HAS_VALUE_EXPR_P (new_var)
2505 && !is_gimple_reg (new_var)
2506 && auto_var_in_fn_p (new_var, id->dst_fn))
2508 if (vars == NULL)
2509 vars = new live_vars_map;
2510 vars->put (DECL_UID (var), cnt++);
2513 if (vars == NULL)
2514 return;
2516 vec<bitmap_head> live = compute_live_vars (id->src_cfun, vars);
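/* Then emit a clobber at the start of the landing pad for each collected
   variable that is still live in some inlined block with an EH edge into
   the pad.  */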
2517 FOR_EACH_VEC_SAFE_ELT (id->src_cfun->local_decls, i, var)
2518 if (VAR_P (var))
2520 edge e;
2521 edge_iterator ei;
2522 bool needed = false;
2523 unsigned int *v = vars->get (DECL_UID (var));
2524 if (v == NULL)
2525 continue;
2526 FOR_EACH_EDGE (e, ei, bb->preds)
2527 if ((e->flags & EDGE_EH) != 0
2528 && e->src->index >= id->add_clobbers_to_eh_landing_pads)
2530 basic_block src_bb = (basic_block) e->src->aux;
2532 if (bitmap_bit_p (&live[src_bb->index], *v))
2534 needed = true;
2535 break;
2538 if (needed)
2540 tree new_var = *id->decl_map->get (var);
2541 gimple_stmt_iterator gsi = gsi_after_labels (bb);
2542 tree clobber = build_clobber (TREE_TYPE (new_var));
2543 gimple *clobber_stmt = gimple_build_assign (new_var, clobber);
2544 gsi_insert_before (&gsi, clobber_stmt, GSI_NEW_STMT);
2547 destroy_live_vars (live);
2548 delete vars;
2551 /* Copy edges from BB into its copy constructed earlier, scale profile
2552 accordingly. Edges will be taken care of later. Assume aux
2553 pointers point to the copies of each BB. Return true if any
2554 debug stmts are left after a statement that must end the basic block. */
2556 static bool
2557 copy_edges_for_bb (basic_block bb, profile_count num, profile_count den,
2558 basic_block ret_bb, basic_block abnormal_goto_dest,
2559 copy_body_data *id)
2561 basic_block new_bb = (basic_block) bb->aux;
2562 edge_iterator ei;
2563 edge old_edge;
2564 gimple_stmt_iterator si;
2565 bool need_debug_cleanup = false;
2567 /* Use the indices from the original blocks to create edges for the
2568 new ones. */
2569 FOR_EACH_EDGE (old_edge, ei, bb->succs)
2570 if (!(old_edge->flags & EDGE_EH))
2572 edge new_edge;
2573 int flags = old_edge->flags;
2574 location_t locus = old_edge->goto_locus;
2576 /* Return edges do get a FALLTHRU flag when they get inlined. */
2577 if (old_edge->dest->index == EXIT_BLOCK
2578 && !(flags & (EDGE_TRUE_VALUE|EDGE_FALSE_VALUE|EDGE_FAKE))
2579 && old_edge->dest->aux != EXIT_BLOCK_PTR_FOR_FN (cfun))
2580 flags |= EDGE_FALLTHRU;
2582 new_edge
2583 = make_edge (new_bb, (basic_block) old_edge->dest->aux, flags);
2584 new_edge->probability = old_edge->probability;
2585 if (!id->reset_location)
2586 new_edge->goto_locus = remap_location (locus, id);
2589 if (bb->index == ENTRY_BLOCK || bb->index == EXIT_BLOCK)
2590 return false;
2592 /* When doing function splitting, we must decrease the count of the return
2593 block which was previously reachable by blocks we did not copy. */
2594 if (single_succ_p (bb) && single_succ_edge (bb)->dest->index == EXIT_BLOCK)
2595 FOR_EACH_EDGE (old_edge, ei, bb->preds)
2596 if (old_edge->src->index != ENTRY_BLOCK
2597 && !old_edge->src->aux)
2598 new_bb->count -= old_edge->count ().apply_scale (num, den);
2600 /* Walk stmts from end to start so that splitting will adjust the BB
2601 pointer for each stmt at most once, even when we split the block
2602 multiple times. */
2603 bool seen_nondebug = false;
2604 for (si = gsi_last_bb (new_bb); !gsi_end_p (si);)
2606 bool can_throw, nonlocal_goto;
2607 gimple *copy_stmt = gsi_stmt (si);
2609 /* Do this before the possible split_block. */
2610 gsi_prev (&si);
2612 /* If this tree could throw an exception, there are two
2613 cases where we need to add abnormal edge(s): the
2614 tree wasn't in a region and there is a "current
2615 region" in the caller; or the original tree had
2616 EH edges. In both cases split the block after the tree,
2617 and add abnormal edge(s) as needed; we need both
2618 those from the callee and the caller.
2619 We check whether the copy can throw, because the const
2620 propagation can change an INDIRECT_REF which throws
2621 into a COMPONENT_REF which doesn't. If the copy
2622 can throw, the original could also throw. */
2623 can_throw = stmt_can_throw_internal (cfun, copy_stmt);
2624 nonlocal_goto
2625 = (stmt_can_make_abnormal_goto (copy_stmt)
2626 && !computed_goto_p (copy_stmt));
2628 if (can_throw || nonlocal_goto)
2630 /* If there are only debug insns after copy_stmt, don't split
2631 the block but instead mark the block for cleanup. */
2632 if (!seen_nondebug)
2633 need_debug_cleanup = true;
2634 else
2636 /* Note that bb's predecessor edges aren't necessarily
2637 right at this point; split_block doesn't care. */
2638 edge e = split_block (new_bb, copy_stmt);
2639 e->dest->aux = new_bb->aux;
2640 seen_nondebug = false;
2644 if (!is_gimple_debug (copy_stmt))
2645 seen_nondebug = true;
2647 bool update_probs = false;
2649 if (gimple_code (copy_stmt) == GIMPLE_EH_DISPATCH)
2651 make_eh_dispatch_edges (as_a <geh_dispatch *> (copy_stmt));
2652 update_probs = true;
2654 else if (can_throw)
2656 make_eh_edge (copy_stmt);
2657 update_probs = true;
2660 /* EH edges may not match old edges. Copy as much as possible. */
2661 if (update_probs)
2663 edge e;
2664 edge_iterator ei;
2665 basic_block copy_stmt_bb = gimple_bb (copy_stmt);
2667 FOR_EACH_EDGE (old_edge, ei, bb->succs)
2668 if ((old_edge->flags & EDGE_EH)
2669 && (e = find_edge (copy_stmt_bb,
2670 (basic_block) old_edge->dest->aux))
2671 && (e->flags & EDGE_EH))
2672 e->probability = old_edge->probability;
2674 FOR_EACH_EDGE (e, ei, copy_stmt_bb->succs)
2675 if (e->flags & EDGE_EH)
2677 if (!e->probability.initialized_p ())
2678 e->probability = profile_probability::never ();
2679 if (e->dest->index < id->add_clobbers_to_eh_landing_pads)
2681 if (id->eh_landing_pad_dest == NULL)
2682 id->eh_landing_pad_dest = e->dest;
2683 else
2684 gcc_assert (id->eh_landing_pad_dest == e->dest);
2690 /* If the call we inline cannot make abnormal goto do not add
2691 additional abnormal edges but only retain those already present
2692 in the original function body. */
2693 if (abnormal_goto_dest == NULL)
2694 nonlocal_goto = false;
2695 if (nonlocal_goto)
2697 basic_block copy_stmt_bb = gimple_bb (copy_stmt);
2699 if (get_abnormal_succ_dispatcher (copy_stmt_bb))
2700 nonlocal_goto = false;
2701 /* ABNORMAL_DISPATCHER (1) is for longjmp/setjmp or nonlocal gotos
2702 in OpenMP regions which aren't allowed to be left abnormally.
2703 So, no need to add abnormal edge in that case. */
2704 else if (is_gimple_call (copy_stmt)
2705 && gimple_call_internal_p (copy_stmt)
2706 && (gimple_call_internal_fn (copy_stmt)
2707 == IFN_ABNORMAL_DISPATCHER)
2708 && gimple_call_arg (copy_stmt, 0) == boolean_true_node)
2709 nonlocal_goto = false;
2710 else
2711 make_single_succ_edge (copy_stmt_bb, abnormal_goto_dest,
2712 EDGE_ABNORMAL);
2715 if ((can_throw || nonlocal_goto)
2716 && gimple_in_ssa_p (cfun))
2717 update_ssa_across_abnormal_edges (gimple_bb (copy_stmt), ret_bb,
2718 can_throw, nonlocal_goto);
2720 return need_debug_cleanup;
2723 /* Copy the PHIs. All blocks and edges are copied, some blocks
2724 were possibly split and new outgoing EH edges inserted.
2725 BB points to the block of the original function and AUX pointers link
2726 the original and newly copied blocks. */
2728 static void
2729 copy_phis_for_bb (basic_block bb, copy_body_data *id)
2731 basic_block const new_bb = (basic_block) bb->aux;
2732 edge_iterator ei;
2733 gphi *phi;
2734 gphi_iterator si;
2735 edge new_edge;
2736 bool inserted = false;
2738 for (si = gsi_start_phis (bb); !gsi_end_p (si); gsi_next (&si))
2740 tree res, new_res;
2741 gphi *new_phi;
2743 phi = si.phi ();
2744 res = PHI_RESULT (phi);
2745 new_res = res;
2746 if (!virtual_operand_p (res)
2747 && (!id->param_body_adjs
2748 || !id->param_body_adjs->m_dead_stmts.contains (phi)))
2750 walk_tree (&new_res, copy_tree_body_r, id, NULL);
2751 if (EDGE_COUNT (new_bb->preds) == 0)
2753 /* Technically we'd want a SSA_DEFAULT_DEF here... */
2754 SSA_NAME_DEF_STMT (new_res) = gimple_build_nop ();
2756 else
2758 new_phi = create_phi_node (new_res, new_bb);
2759 FOR_EACH_EDGE (new_edge, ei, new_bb->preds)
2761 edge old_edge = find_edge ((basic_block) new_edge->src->aux,
2762 bb);
2763 tree arg;
2764 tree new_arg;
2765 edge_iterator ei2;
2766 location_t locus;
2768 /* When doing partial cloning, we allow PHIs on the entry
2769 block as long as all the arguments are the same.
2770 Find any input edge to see which argument to copy. */
2771 if (!old_edge)
2772 FOR_EACH_EDGE (old_edge, ei2, bb->preds)
2773 if (!old_edge->src->aux)
2774 break;
2776 arg = PHI_ARG_DEF_FROM_EDGE (phi, old_edge);
2777 new_arg = arg;
2778 walk_tree (&new_arg, copy_tree_body_r, id, NULL);
2779 gcc_assert (new_arg);
2780 /* With return slot optimization we can end up with
2781 non-gimple (foo *)&this->m, fix that here. */
2782 if (TREE_CODE (new_arg) != SSA_NAME
2783 && TREE_CODE (new_arg) != FUNCTION_DECL
2784 && !is_gimple_val (new_arg))
2786 gimple_seq stmts = NULL;
2787 new_arg = force_gimple_operand (new_arg, &stmts, true,
2788 NULL);
2789 gsi_insert_seq_on_edge (new_edge, stmts);
2790 inserted = true;
2792 locus = gimple_phi_arg_location_from_edge (phi, old_edge);
2793 if (id->reset_location)
2794 locus = input_location;
2795 else
2796 locus = remap_location (locus, id);
2797 add_phi_arg (new_phi, new_arg, new_edge, locus);
2803 /* Commit the delayed edge insertions. */
2804 if (inserted)
2805 FOR_EACH_EDGE (new_edge, ei, new_bb->preds)
2806 gsi_commit_one_edge_insert (new_edge, NULL);
2810 /* Wrapper for remap_decl so it can be used as a callback. */
2812 static tree
2813 remap_decl_1 (tree decl, void *data)
2815 return remap_decl (decl, (copy_body_data *) data);
2818 /* Build struct function and associated data structures for the new clone
2819 NEW_FNDECL to be built. CALLEE_FNDECL is the original. The function changes
2820 cfun to the function of NEW_FNDECL (and current_function_decl too). */
2822 static void
2823 initialize_cfun (tree new_fndecl, tree callee_fndecl, profile_count count)
2825 struct function *src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
2827 /* Register specific tree functions. */
2828 gimple_register_cfg_hooks ();
2830 /* Get clean struct function. */
2831 push_struct_function (new_fndecl, true);
2832 targetm.target_option.relayout_function (new_fndecl);
2834 /* We will rebuild these, so just sanity check that they are empty. */
2835 gcc_assert (VALUE_HISTOGRAMS (cfun) == NULL);
2836 gcc_assert (cfun->local_decls == NULL);
2837 gcc_assert (cfun->cfg == NULL);
2838 gcc_assert (cfun->decl == new_fndecl);
2840 /* Copy items we preserve during cloning. */
2841 cfun->static_chain_decl = src_cfun->static_chain_decl;
2842 cfun->nonlocal_goto_save_area = src_cfun->nonlocal_goto_save_area;
2843 cfun->function_end_locus = src_cfun->function_end_locus;
2844 cfun->curr_properties = src_cfun->curr_properties;
2845 cfun->last_verified = src_cfun->last_verified;
2846 cfun->va_list_gpr_size = src_cfun->va_list_gpr_size;
2847 cfun->va_list_fpr_size = src_cfun->va_list_fpr_size;
2848 cfun->has_nonlocal_label = src_cfun->has_nonlocal_label;
2849 cfun->calls_eh_return = src_cfun->calls_eh_return;
2850 cfun->stdarg = src_cfun->stdarg;
2851 cfun->after_inlining = src_cfun->after_inlining;
2852 cfun->can_throw_non_call_exceptions
2853 = src_cfun->can_throw_non_call_exceptions;
2854 cfun->can_delete_dead_exceptions = src_cfun->can_delete_dead_exceptions;
2855 cfun->returns_struct = src_cfun->returns_struct;
2856 cfun->returns_pcc_struct = src_cfun->returns_pcc_struct;
2858 init_empty_tree_cfg ();
2860 profile_status_for_fn (cfun) = profile_status_for_fn (src_cfun);
2861 cfun->cfg->full_profile = src_cfun->cfg->full_profile;
2863 profile_count num = count;
2864 profile_count den = ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count;
2865 profile_count::adjust_for_ipa_scaling (&num, &den);
2867 ENTRY_BLOCK_PTR_FOR_FN (cfun)->count =
2868 ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count.apply_scale (count,
2869 ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count);
2870 EXIT_BLOCK_PTR_FOR_FN (cfun)->count =
2871 EXIT_BLOCK_PTR_FOR_FN (src_cfun)->count.apply_scale (count,
2872 ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count);
2873 if (src_cfun->eh)
2874 init_eh_for_function ();
2876 if (src_cfun->gimple_df)
2878 init_tree_ssa (cfun);
2879 cfun->gimple_df->in_ssa_p = src_cfun->gimple_df->in_ssa_p;
2880 if (cfun->gimple_df->in_ssa_p)
2881 init_ssa_operands (cfun);
2885 /* Helper function for copy_cfg_body. Move debug stmts from the end
2886 of NEW_BB to the beginning of successor basic blocks when needed. If the
2887 successor has multiple predecessors, reset the values of the moved
2888 debug stmts; otherwise keep them. */
2890 static void
2891 maybe_move_debug_stmts_to_successors (copy_body_data *id, basic_block new_bb)
2893 edge e;
2894 edge_iterator ei;
2895 gimple_stmt_iterator si = gsi_last_nondebug_bb (new_bb);
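/* There is nothing to do unless the last non-debug stmt may throw
   internally or make an abnormal goto and is followed by debug stmts
   that copy_edges_for_bb could not split into a separate block.  */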
2897 if (gsi_end_p (si)
2898 || gsi_one_before_end_p (si)
2899 || !(stmt_can_throw_internal (cfun, gsi_stmt (si))
2900 || stmt_can_make_abnormal_goto (gsi_stmt (si))))
2901 return;
2903 FOR_EACH_EDGE (e, ei, new_bb->succs)
2905 gimple_stmt_iterator ssi = gsi_last_bb (new_bb);
2906 gimple_stmt_iterator dsi = gsi_after_labels (e->dest);
2907 while (is_gimple_debug (gsi_stmt (ssi)))
2909 gimple *stmt = gsi_stmt (ssi);
2910 gdebug *new_stmt;
2911 tree var;
2912 tree value;
2914 /* For the last edge move the debug stmts instead of copying
2915 them. */
2916 if (ei_one_before_end_p (ei))
2918 si = ssi;
2919 gsi_prev (&ssi);
2920 if (!single_pred_p (e->dest) && gimple_debug_bind_p (stmt))
2922 gimple_debug_bind_reset_value (stmt);
2923 gimple_set_location (stmt, UNKNOWN_LOCATION);
2925 gsi_remove (&si, false);
2926 gsi_insert_before (&dsi, stmt, GSI_NEW_STMT);
2927 continue;
2930 if (gimple_debug_bind_p (stmt))
2932 var = gimple_debug_bind_get_var (stmt);
2933 if (single_pred_p (e->dest))
2935 value = gimple_debug_bind_get_value (stmt);
2936 value = unshare_expr (value);
2937 new_stmt = gimple_build_debug_bind (var, value, stmt);
2939 else
2940 new_stmt = gimple_build_debug_bind (var, NULL_TREE, NULL);
2942 else if (gimple_debug_source_bind_p (stmt))
2944 var = gimple_debug_source_bind_get_var (stmt);
2945 value = gimple_debug_source_bind_get_value (stmt);
2946 new_stmt = gimple_build_debug_source_bind (var, value, stmt);
2948 else if (gimple_debug_nonbind_marker_p (stmt))
2949 new_stmt = as_a <gdebug *> (gimple_copy (stmt));
2950 else
2951 gcc_unreachable ();
2952 gsi_insert_before (&dsi, new_stmt, GSI_NEW_STMT);
2953 id->debug_stmts.safe_push (new_stmt);
2954 gsi_prev (&ssi);
2959 /* Make a copy of the sub-loops of SRC_PARENT and place them
2960 as siblings of DEST_PARENT. */
2962 static void
2963 copy_loops (copy_body_data *id,
2964 class loop *dest_parent, class loop *src_parent)
2966 class loop *src_loop = src_parent->inner;
2967 while (src_loop)
2969 if (!id->blocks_to_copy
2970 || bitmap_bit_p (id->blocks_to_copy, src_loop->header->index))
2972 class loop *dest_loop = alloc_loop ();
2974 /* Assign the new loop its header and latch and associate
2975 those with the new loop. */
2976 dest_loop->header = (basic_block)src_loop->header->aux;
2977 dest_loop->header->loop_father = dest_loop;
2978 if (src_loop->latch != NULL)
2980 dest_loop->latch = (basic_block)src_loop->latch->aux;
2981 dest_loop->latch->loop_father = dest_loop;
2984 /* Copy loop meta-data. */
2985 copy_loop_info (src_loop, dest_loop);
2986 if (dest_loop->unroll)
2987 cfun->has_unroll = true;
2988 if (dest_loop->force_vectorize)
2989 cfun->has_force_vectorize_loops = true;
2990 if (id->src_cfun->last_clique != 0)
2991 dest_loop->owned_clique
2992 = remap_dependence_clique (id,
2993 src_loop->owned_clique
2994 ? src_loop->owned_clique : 1);
2996 /* Finally place it into the loop array and the loop tree. */
2997 place_new_loop (cfun, dest_loop);
2998 flow_loop_tree_node_add (dest_parent, dest_loop);
3000 if (src_loop->simduid)
3002 dest_loop->simduid = remap_decl (src_loop->simduid, id);
3003 cfun->has_simduid_loops = true;
3006 /* Recurse. */
3007 copy_loops (id, dest_loop, src_loop);
3009 src_loop = src_loop->next;
3013 /* Call redirect_call_stmt_to_callee on all calls in BB. */
3015 void
3016 redirect_all_calls (copy_body_data * id, basic_block bb)
3018 gimple_stmt_iterator si;
3019 gimple *last = last_nondebug_stmt (bb);
3020 for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
3022 gimple *stmt = gsi_stmt (si);
3023 if (is_gimple_call (stmt))
3025 struct cgraph_edge *edge = id->dst_node->get_edge (stmt);
3026 if (edge)
3028 if (!id->killed_new_ssa_names)
3029 id->killed_new_ssa_names = new hash_set<tree> (16);
3030 cgraph_edge::redirect_call_stmt_to_callee (edge,
3031 id->killed_new_ssa_names);
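/* When inlining, the last stmt of BB may have stopped throwing
   (e.g. after call redirection); if so, purge its now-dead EH edges.  */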
3033 if (stmt == last && id->call_stmt && maybe_clean_eh_stmt (stmt))
3034 gimple_purge_dead_eh_edges (bb);
3040 /* Make a copy of the body of FN so that it can be inserted inline in
3041 another function. Walks FN via CFG, returns new fndecl. */
3043 static tree
3044 copy_cfg_body (copy_body_data * id,
3045 basic_block entry_block_map, basic_block exit_block_map,
3046 basic_block new_entry)
3048 tree callee_fndecl = id->src_fn;
3049 /* Original cfun for the callee, doesn't change. */
3050 struct function *src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
3051 struct function *cfun_to_copy;
3052 basic_block bb;
3053 tree new_fndecl = NULL;
3054 bool need_debug_cleanup = false;
3055 int last;
3056 profile_count den = ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count;
3057 profile_count num = entry_block_map->count;
3059 cfun_to_copy = id->src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
3061 /* Register specific tree functions. */
3062 gimple_register_cfg_hooks ();
3064 /* If we are inlining just a region of the function, make sure to connect
3065 the new entry to ENTRY_BLOCK_PTR_FOR_FN (cfun). Since the new entry can be
3066 part of a loop, we must compute the frequency and probability of
3067 ENTRY_BLOCK_PTR_FOR_FN (cfun) based on the frequencies and
3068 probabilities of edges incoming from the nonduplicated region. */
3069 if (new_entry)
3071 edge e;
3072 edge_iterator ei;
3073 den = profile_count::zero ();
3075 FOR_EACH_EDGE (e, ei, new_entry->preds)
3076 if (!e->src->aux)
3077 den += e->count ();
3078 ENTRY_BLOCK_PTR_FOR_FN (cfun)->count = den;
3081 profile_count::adjust_for_ipa_scaling (&num, &den);
3083 /* Must have a CFG here at this point. */
3084 gcc_assert (ENTRY_BLOCK_PTR_FOR_FN
3085 (DECL_STRUCT_FUNCTION (callee_fndecl)));
3088 ENTRY_BLOCK_PTR_FOR_FN (cfun_to_copy)->aux = entry_block_map;
3089 EXIT_BLOCK_PTR_FOR_FN (cfun_to_copy)->aux = exit_block_map;
3090 entry_block_map->aux = ENTRY_BLOCK_PTR_FOR_FN (cfun_to_copy);
3091 exit_block_map->aux = EXIT_BLOCK_PTR_FOR_FN (cfun_to_copy);
3093 /* Duplicate any exception-handling regions. */
3094 if (cfun->eh)
3095 id->eh_map = duplicate_eh_regions (cfun_to_copy, NULL, id->eh_lp_nr,
3096 remap_decl_1, id);
3098 /* Use aux pointers to map the original blocks to their copies. */
3099 FOR_EACH_BB_FN (bb, cfun_to_copy)
3100 if (!id->blocks_to_copy || bitmap_bit_p (id->blocks_to_copy, bb->index))
3102 basic_block new_bb = copy_bb (id, bb, num, den);
3103 bb->aux = new_bb;
3104 new_bb->aux = bb;
3105 new_bb->loop_father = entry_block_map->loop_father;
3108 last = last_basic_block_for_fn (cfun);
3110 /* Now that we've duplicated the blocks, duplicate their edges. */
3111 basic_block abnormal_goto_dest = NULL;
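/* If the call being inlined ends its block and can make an abnormal
   goto, the copied blocks may need edges to the caller's abnormal
   dispatcher; look that destination up for copy_edges_for_bb.  */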
3112 if (id->call_stmt
3113 && stmt_can_make_abnormal_goto (id->call_stmt))
3115 gimple_stmt_iterator gsi = gsi_for_stmt (id->call_stmt);
3117 bb = gimple_bb (id->call_stmt);
3118 gsi_next (&gsi);
3119 if (gsi_end_p (gsi))
3120 abnormal_goto_dest = get_abnormal_succ_dispatcher (bb);
3122 FOR_ALL_BB_FN (bb, cfun_to_copy)
3123 if (!id->blocks_to_copy
3124 || (bb->index > 0 && bitmap_bit_p (id->blocks_to_copy, bb->index)))
3125 need_debug_cleanup |= copy_edges_for_bb (bb, num, den, exit_block_map,
3126 abnormal_goto_dest, id);
3128 if (id->eh_landing_pad_dest)
3130 add_clobbers_to_eh_landing_pad (id);
3131 id->eh_landing_pad_dest = NULL;
3134 if (new_entry)
3136 edge e = make_edge (entry_block_map, (basic_block)new_entry->aux,
3137 EDGE_FALLTHRU);
3138 e->probability = profile_probability::always ();
3141 /* Duplicate the loop tree, if available and wanted. */
3142 if (loops_for_fn (src_cfun) != NULL
3143 && current_loops != NULL)
3145 copy_loops (id, entry_block_map->loop_father,
3146 get_loop (src_cfun, 0));
3147 /* Defer to cfgcleanup to update loop-father fields of basic-blocks. */
3148 loops_state_set (LOOPS_NEED_FIXUP);
3151 /* If the loop tree in the source function needed fixup, mark the
3152 destination loop tree for fixup, too. */
3153 if (loops_for_fn (src_cfun)->state & LOOPS_NEED_FIXUP)
3154 loops_state_set (LOOPS_NEED_FIXUP);
3156 if (gimple_in_ssa_p (cfun))
3157 FOR_ALL_BB_FN (bb, cfun_to_copy)
3158 if (!id->blocks_to_copy
3159 || (bb->index > 0 && bitmap_bit_p (id->blocks_to_copy, bb->index)))
3160 copy_phis_for_bb (bb, id);
3162 FOR_ALL_BB_FN (bb, cfun_to_copy)
3163 if (bb->aux)
3165 if (need_debug_cleanup
3166 && bb->index != ENTRY_BLOCK
3167 && bb->index != EXIT_BLOCK)
3168 maybe_move_debug_stmts_to_successors (id, (basic_block) bb->aux);
3169 /* Update call edge destinations. This cannot be done before loop
3170 info is updated, because we may split basic blocks. */
3171 if (id->transform_call_graph_edges == CB_CGE_DUPLICATE
3172 && bb->index != ENTRY_BLOCK
3173 && bb->index != EXIT_BLOCK)
3174 redirect_all_calls (id, (basic_block)bb->aux);
3175 ((basic_block)bb->aux)->aux = NULL;
3176 bb->aux = NULL;
3179 /* Zero out AUX fields of newly created blocks during EH edge
3180 insertion. */
3181 for (; last < last_basic_block_for_fn (cfun); last++)
3183 if (need_debug_cleanup)
3184 maybe_move_debug_stmts_to_successors (id,
3185 BASIC_BLOCK_FOR_FN (cfun, last));
3186 BASIC_BLOCK_FOR_FN (cfun, last)->aux = NULL;
3187 /* Update call edge destinations. This cannot be done before loop
3188 info is updated, because we may split basic blocks. */
3189 if (id->transform_call_graph_edges == CB_CGE_DUPLICATE)
3190 redirect_all_calls (id, BASIC_BLOCK_FOR_FN (cfun, last));
3192 entry_block_map->aux = NULL;
3193 exit_block_map->aux = NULL;
3195 if (id->eh_map)
3197 delete id->eh_map;
3198 id->eh_map = NULL;
3200 if (id->dependence_map)
3202 delete id->dependence_map;
3203 id->dependence_map = NULL;
3206 return new_fndecl;
3209 /* Copy the debug STMT using ID. We deal with these statements in a
3210 special way: if any variable in their VALUE expression wasn't
3211 remapped yet, we won't remap it, because that would get decl uids
3212 out of sync, causing codegen differences between -g and -g0. If
3213 this arises, we drop the VALUE expression altogether. */
3215 static void
3216 copy_debug_stmt (gdebug *stmt, copy_body_data *id)
3218 tree t, *n;
3219 struct walk_stmt_info wi;
3221 if (tree block = gimple_block (stmt))
3223 n = id->decl_map->get (block);
3224 gimple_set_block (stmt, n ? *n : id->block);
3227 if (gimple_debug_nonbind_marker_p (stmt))
3229 if (id->call_stmt && !gimple_block (stmt))
3231 gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
3232 gsi_remove (&gsi, true);
3234 return;
3237 /* Remap all the operands in STMT. */
3238 memset (&wi, 0, sizeof (wi));
3239 wi.info = id;
3241 processing_debug_stmt = 1;
3243 if (gimple_debug_source_bind_p (stmt))
3244 t = gimple_debug_source_bind_get_var (stmt);
3245 else if (gimple_debug_bind_p (stmt))
3246 t = gimple_debug_bind_get_var (stmt);
3247 else
3248 gcc_unreachable ();
3250 if (TREE_CODE (t) == PARM_DECL
3251 && id->debug_map
3252 && (n = id->debug_map->get (t)))
3254 gcc_assert (VAR_P (*n));
3255 t = *n;
3257 else if (VAR_P (t) && !is_global_var (t) && !id->decl_map->get (t))
3258 /* T is a non-localized variable. */;
3259 else
3260 walk_tree (&t, remap_gimple_op_r, &wi, NULL);
3262 if (gimple_debug_bind_p (stmt))
3264 gimple_debug_bind_set_var (stmt, t);
3266 if (gimple_debug_bind_has_value_p (stmt))
3267 walk_tree (gimple_debug_bind_get_value_ptr (stmt),
3268 remap_gimple_op_r, &wi, NULL);
3270 /* Punt if any decl couldn't be remapped. */
3271 if (processing_debug_stmt < 0)
3272 gimple_debug_bind_reset_value (stmt);
3274 else if (gimple_debug_source_bind_p (stmt))
3276 gimple_debug_source_bind_set_var (stmt, t);
3277 /* When inlining and the source bind refers to one of the optimized
3278 away parameters, change the source bind into a normal debug bind
3279 referring to the corresponding DEBUG_EXPR_DECL that should have
3280 been bound before the call stmt. */
3281 t = gimple_debug_source_bind_get_value (stmt);
3282 if (t != NULL_TREE
3283 && TREE_CODE (t) == PARM_DECL
3284 && id->call_stmt)
3286 vec<tree, va_gc> **debug_args = decl_debug_args_lookup (id->src_fn);
3287 unsigned int i;
3288 if (debug_args != NULL)
3290 for (i = 0; i < vec_safe_length (*debug_args); i += 2)
3291 if ((**debug_args)[i] == DECL_ORIGIN (t)
3292 && TREE_CODE ((**debug_args)[i + 1]) == DEBUG_EXPR_DECL)
3294 t = (**debug_args)[i + 1];
3295 stmt->subcode = GIMPLE_DEBUG_BIND;
3296 gimple_debug_bind_set_value (stmt, t);
3297 break;
3301 if (gimple_debug_source_bind_p (stmt))
3302 walk_tree (gimple_debug_source_bind_get_value_ptr (stmt),
3303 remap_gimple_op_r, &wi, NULL);
3306 processing_debug_stmt = 0;
3308 update_stmt (stmt);
3311 /* Process deferred debug stmts. In order to give values better odds
3312 of being successfully remapped, we delay the processing of debug
3313 stmts until all other stmts that might require remapping are
3314 processed. */
3316 static void
3317 copy_debug_stmts (copy_body_data *id)
3319 if (!id->debug_stmts.exists ())
3320 return;
3322 for (gdebug *stmt : id->debug_stmts)
3323 copy_debug_stmt (stmt, id);
3325 id->debug_stmts.release ();
3328 /* Make a copy of the body of SRC_FN so that it can be inserted inline in
3329 another function. */
3331 static tree
3332 copy_tree_body (copy_body_data *id)
3334 tree fndecl = id->src_fn;
3335 tree body = DECL_SAVED_TREE (fndecl);
3337 walk_tree (&body, copy_tree_body_r, id, NULL);
3339 return body;
3342 /* Make a copy of the body of FN so that it can be inserted inline in
3343 another function. */
3345 static tree
3346 copy_body (copy_body_data *id,
3347 basic_block entry_block_map, basic_block exit_block_map,
3348 basic_block new_entry)
3350 tree fndecl = id->src_fn;
3351 tree body;
3353 /* If this body has a CFG, walk CFG and copy. */
3354 gcc_assert (ENTRY_BLOCK_PTR_FOR_FN (DECL_STRUCT_FUNCTION (fndecl)));
3355 body = copy_cfg_body (id, entry_block_map, exit_block_map,
3356 new_entry);
3357 copy_debug_stmts (id);
3358 if (id->killed_new_ssa_names)
3360 ipa_release_ssas_in_hash (id->killed_new_ssa_names);
3361 delete id->killed_new_ssa_names;
3362 id->killed_new_ssa_names = NULL;
3365 return body;
3368 /* Return true if VALUE is an ADDR_EXPR of an automatic variable
3369 defined in function FN, or of a data member thereof. */
3371 static bool
3372 self_inlining_addr_expr (tree value, tree fn)
3374 tree var;
3376 if (TREE_CODE (value) != ADDR_EXPR)
3377 return false;
3379 var = get_base_address (TREE_OPERAND (value, 0));
3381 return var && auto_var_in_fn_p (var, fn);
3384 /* Append to BB a debug annotation that binds VAR to VALUE, inheriting
3385 lexical block and line number information from base_stmt, if given,
3386 or from the last stmt of the block otherwise. */
3388 static gimple *
3389 insert_init_debug_bind (copy_body_data *id,
3390 basic_block bb, tree var, tree value,
3391 gimple *base_stmt)
3393 gimple *note;
3394 gimple_stmt_iterator gsi;
3395 tree tracked_var;
3397 if (!gimple_in_ssa_p (id->src_cfun))
3398 return NULL;
3400 if (!opt_for_fn (id->dst_fn, flag_var_tracking_assignments))
3401 return NULL;
3403 tracked_var = target_for_debug_bind (var);
3404 if (!tracked_var)
3405 return NULL;
3407 if (bb)
3409 gsi = gsi_last_bb (bb);
3410 if (!base_stmt && !gsi_end_p (gsi))
3411 base_stmt = gsi_stmt (gsi);
3414 note = gimple_build_debug_bind (tracked_var,
3415 value == error_mark_node
3416 ? NULL_TREE : unshare_expr (value),
3417 base_stmt);
3419 if (bb)
3421 if (!gsi_end_p (gsi))
3422 gsi_insert_after (&gsi, note, GSI_SAME_STMT);
3423 else
3424 gsi_insert_before (&gsi, note, GSI_SAME_STMT);
3427 return note;
3430 static void
3431 insert_init_stmt (copy_body_data *id, basic_block bb, gimple *init_stmt)
3433 /* If VAR represents a zero-sized variable, it's possible that the
3434 assignment statement may result in no gimple statements. */
3435 if (init_stmt)
3437 gimple_stmt_iterator si = gsi_last_bb (bb);
3439 /* We can end up with init statements that store to a non-register
3440 from a rhs with a conversion. Handle that here by forcing the
3441 rhs into a temporary. gimple_regimplify_operands is not
3442 prepared to do this for us. */
3443 if (!is_gimple_debug (init_stmt)
3444 && !is_gimple_reg (gimple_assign_lhs (init_stmt))
3445 && is_gimple_reg_type (TREE_TYPE (gimple_assign_lhs (init_stmt)))
3446 && gimple_assign_rhs_class (init_stmt) == GIMPLE_UNARY_RHS)
3448 tree rhs = build1 (gimple_assign_rhs_code (init_stmt),
3449 TREE_TYPE (gimple_assign_lhs (init_stmt)),
3450 gimple_assign_rhs1 (init_stmt));
3451 rhs = force_gimple_operand_gsi (&si, rhs, true, NULL_TREE, false,
3452 GSI_NEW_STMT);
3453 gimple_assign_set_rhs_code (init_stmt, TREE_CODE (rhs));
3454 gimple_assign_set_rhs1 (init_stmt, rhs);
3456 gsi_insert_after (&si, init_stmt, GSI_NEW_STMT);
3457 if (!is_gimple_debug (init_stmt))
3459 gimple_regimplify_operands (init_stmt, &si);
3461 tree def = gimple_assign_lhs (init_stmt);
3462 insert_init_debug_bind (id, bb, def, def, init_stmt);
3467 /* Deal with mismatched formal/actual parameters, in a rather brute-force way
3468 if need be (which should only be necessary for invalid programs). Attempt
3469 to convert VALUE to TYPE and return the result if it is possible; otherwise
3470 just return a zero constant of the given type. */
3472 tree
3473 force_value_to_type (tree type, tree value)
3475 /* If we can match up types by promotion/demotion do so. */
3476 if (fold_convertible_p (type, value))
3477 return fold_convert (type, value);
3479 /* ??? For valid programs we should not end up here.
3480 Still if we end up with truly mismatched types here, fall back
3481 to using a VIEW_CONVERT_EXPR or a literal zero to not leak invalid
3482 GIMPLE to the following passes. */
3483 if (TREE_CODE (value) == WITH_SIZE_EXPR)
3484 return error_mark_node;
3485 else if (!is_gimple_reg_type (TREE_TYPE (value))
3486 || TYPE_SIZE (type) == TYPE_SIZE (TREE_TYPE (value)))
3487 return fold_build1 (VIEW_CONVERT_EXPR, type, value);
3488 else
3489 return build_zero_cst (type);
3492 /* Initialize parameter P with VALUE. If needed, produce the init statement
3493 at the end of BB. When BB is NULL, we return the init statement to be
3494 output later. */
3495 static gimple *
3496 setup_one_parameter (copy_body_data *id, tree p, tree value, tree fn,
3497 basic_block bb, tree *vars)
3499 gimple *init_stmt = NULL;
3500 tree var;
3501 tree def = (gimple_in_ssa_p (cfun)
3502 ? ssa_default_def (id->src_cfun, p) : NULL);
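/* DEF is the SSA default definition of P in the inlined body, if any;
   it is only present when the incoming value of the parameter is used.  */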
3504 /* Make an equivalent VAR_DECL. Note that we must NOT remap the type
3505 here since the type of this decl must be visible to the calling
3506 function. */
3507 var = copy_decl_to_var (p, id);
3509 /* Declare this new variable. */
3510 DECL_CHAIN (var) = *vars;
3511 *vars = var;
3513 /* Make gimplifier happy about this variable. */
3514 DECL_SEEN_IN_BIND_EXPR_P (var) = 1;
3516 /* If the parameter is never assigned to and has no SSA_NAMEs created,
3517 we would not need to create a new variable here at all, if it
3518 weren't for debug info. Still, we can just use the argument
3519 value. */
3520 if (TREE_READONLY (p)
3521 && !TREE_ADDRESSABLE (p)
3522 && value
3523 && !TREE_SIDE_EFFECTS (value)
3524 && !def)
3526 /* We may produce non-gimple trees by adding NOPs or introduce invalid
3527 sharing when the value is not constant or DECL. And we need to make
3528 sure that it cannot be modified from another path in the callee. */
3529 if (((is_gimple_min_invariant (value)
3530 /* When the parameter is used in a context that forces it to
3531 not be a GIMPLE register avoid substituting something that
3532 is not a decl there. */
3533 && ! DECL_NOT_GIMPLE_REG_P (p))
3534 || (DECL_P (value) && TREE_READONLY (value))
3535 || (auto_var_in_fn_p (value, id->dst_fn)
3536 && !TREE_ADDRESSABLE (value)))
3537 && useless_type_conversion_p (TREE_TYPE (p), TREE_TYPE (value))
3538 /* We have to be very careful about ADDR_EXPR. Make sure
3539 the base variable isn't a local variable of the inlined
3540 function, e.g., when doing recursive inlining, direct or
3541 mutually-recursive or whatever, which is why we don't
3542 just test whether fn == current_function_decl. */
3543 && ! self_inlining_addr_expr (value, fn))
3545 insert_decl_map (id, p, value);
3546 if (!id->debug_map)
3547 id->debug_map = new hash_map<tree, tree>;
3548 id->debug_map->put (p, var);
3549 return insert_init_debug_bind (id, bb, var, value, NULL);
3553 /* Register the VAR_DECL as the equivalent for the PARM_DECL;
3554 that way, when the PARM_DECL is encountered, it will be
3555 automatically replaced by the VAR_DECL. */
3556 insert_decl_map (id, p, var);
3558 /* Even if P was TREE_READONLY, the new VAR should not be. In the original
3559 code, we would have constructed a temporary, and then the function body
3560 would have never changed the value of P. However, now, we will be
3561 constructing VAR directly. Therefore, it must not be TREE_READONLY. */
3562 TREE_READONLY (var) = 0;
3564 tree rhs = value;
3565 if (value
3566 && value != error_mark_node
3567 && !useless_type_conversion_p (TREE_TYPE (p), TREE_TYPE (value)))
3568 rhs = force_value_to_type (TREE_TYPE (p), value);
3570 /* If there is no setup required and we are in SSA, take the easy route
3571 replacing all SSA names representing the function parameter by the
3572 SSA name passed to the function.
3574 We need to construct a map for the variable anyway as it might be used
3575 in different SSA names when the parameter is set in the function.
3577 Do the replacement at -O0 for const arguments replaced by a constant.
3578 This is important for builtin_constant_p and other constructs requiring
3579 a constant argument to be visible in the inlined function body. */
3580 if (gimple_in_ssa_p (cfun) && rhs && def && is_gimple_reg (p)
3581 && (optimize
3582 || (TREE_READONLY (p)
3583 && is_gimple_min_invariant (rhs)))
3584 && (TREE_CODE (rhs) == SSA_NAME
3585 || is_gimple_min_invariant (rhs))
3586 && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (def))
3588 insert_decl_map (id, def, rhs);
3589 return insert_init_debug_bind (id, bb, var, rhs, NULL);
3592 /* If the value of the argument is never used, don't bother initializing
3593 it. */
3594 if (optimize && gimple_in_ssa_p (cfun) && !def && is_gimple_reg (p))
3596 /* When there's a gross type mismatch between the passed value
3597 and the declared argument type, drop it on the floor and do
3598 not bother to insert a debug bind. */
3599 if (value && !is_gimple_reg_type (TREE_TYPE (value)))
3600 return NULL;
3601 return insert_init_debug_bind (id, bb, var, rhs, NULL);
3604 /* Initialize this VAR_DECL from the equivalent argument. Convert
3605 the argument to the proper type in case it was promoted. */
3606 if (value)
3608 if (rhs == error_mark_node)
3610 insert_decl_map (id, p, var);
3611 return insert_init_debug_bind (id, bb, var, rhs, NULL);
3614 STRIP_USELESS_TYPE_CONVERSION (rhs);
3616 /* If we are in SSA form properly remap the default definition. */
3617 if (gimple_in_ssa_p (cfun) && is_gimple_reg (p))
3619 if (def)
3621 def = remap_ssa_name (def, id);
3622 init_stmt = gimple_build_assign (def, rhs);
3623 SSA_NAME_IS_DEFAULT_DEF (def) = 0;
3624 set_ssa_default_def (cfun, var, NULL);
3627 else if (!is_empty_type (TREE_TYPE (var)))
3628 init_stmt = gimple_build_assign (var, rhs);
3630 if (bb && init_stmt)
3631 insert_init_stmt (id, bb, init_stmt);
3633 return init_stmt;
3636 /* Generate code to initialize the parameters of the function at the
3637 top of the stack in ID from the GIMPLE_CALL STMT. */
3639 static void
3640 initialize_inlined_parameters (copy_body_data *id, gimple *stmt,
3641 tree fn, basic_block bb)
3643 tree parms;
3644 size_t i;
3645 tree p;
3646 tree vars = NULL_TREE;
3647 tree static_chain = gimple_call_chain (stmt);
3649 /* Figure out what the parameters are. */
3650 parms = DECL_ARGUMENTS (fn);
3652 /* Loop through the parameter declarations, replacing each with an
3653 equivalent VAR_DECL, appropriately initialized. */
3654 for (p = parms, i = 0; p; p = DECL_CHAIN (p), i++)
3656 tree val;
3657 val = i < gimple_call_num_args (stmt) ? gimple_call_arg (stmt, i) : NULL;
3658 setup_one_parameter (id, p, val, fn, bb, &vars);
3660 /* After remapping parameters remap their types. This has to be done
3661 in a second loop over all parameters to appropriately remap
3662 variable sized arrays when the size is specified in a
3663 parameter following the array. */
3664 for (p = parms, i = 0; p; p = DECL_CHAIN (p), i++)
3666 tree *varp = id->decl_map->get (p);
3667 if (varp && VAR_P (*varp))
3669 tree def = (gimple_in_ssa_p (cfun) && is_gimple_reg (p)
3670 ? ssa_default_def (id->src_cfun, p) : NULL);
3671 tree var = *varp;
3672 TREE_TYPE (var) = remap_type (TREE_TYPE (var), id);
3673 /* Also remap the default definition if it was remapped
3674 to the default definition of the parameter replacement
3675 by the parameter setup. */
3676 if (def)
3678 tree *defp = id->decl_map->get (def);
3679 if (defp
3680 && TREE_CODE (*defp) == SSA_NAME
3681 && SSA_NAME_VAR (*defp) == var)
3682 TREE_TYPE (*defp) = TREE_TYPE (var);
3684 /* When not optimizing and the parameter is unused, assign to
3685 a dummy SSA name. Do this after remapping the type above. */
3686 else if (!optimize
3687 && is_gimple_reg (p)
3688 && i < gimple_call_num_args (stmt))
3690 tree val = gimple_call_arg (stmt, i);
3691 if (val != error_mark_node)
3693 if (!useless_type_conversion_p (TREE_TYPE (p),
3694 TREE_TYPE (val)))
3695 val = force_value_to_type (TREE_TYPE (p), val);
3696 def = make_ssa_name (var);
3697 gimple *init_stmt = gimple_build_assign (def, val);
3698 insert_init_stmt (id, bb, init_stmt);
3704 /* Initialize the static chain. */
3705 p = DECL_STRUCT_FUNCTION (fn)->static_chain_decl;
3706 gcc_assert (fn != current_function_decl);
3707 if (p)
3709 /* No static chain? Seems like a bug in tree-nested.cc. */
3710 gcc_assert (static_chain);
3712 setup_one_parameter (id, p, static_chain, fn, bb, &vars);
3715 /* Reverse so the variables appear in the correct order in DWARF
3716 debug info. */
3717 vars = nreverse (vars);
3719 declare_inline_vars (id->block, vars);
3723 /* Declare a return variable to replace the RESULT_DECL for the
3724 function we are calling. An appropriate DECL_STMT is returned.
3725 The USE_STMT is filled to contain a use of the declaration to
3726 indicate the return value of the function.
3728 RETURN_SLOT, if non-null, is the place where to store the result. It
3729 is set only for CALL_EXPR_RETURN_SLOT_OPT. MODIFY_DEST, if non-null,
3730 was the LHS of the MODIFY_EXPR to which this call is the RHS.
3732 The return value is a (possibly null) value that holds the result
3733 as seen by the caller. */
3735 static tree
3736 declare_return_variable (copy_body_data *id, tree return_slot, tree modify_dest,
3737 basic_block entry_bb)
3739 tree callee = id->src_fn;
3740 tree result = DECL_RESULT (callee);
3741 tree callee_type = TREE_TYPE (result);
3742 tree caller_type;
3743 tree var, use;
3745 /* Handle type-mismatches in the function declaration return type
3746 vs. the call expression. */
3747 if (modify_dest)
3748 caller_type = TREE_TYPE (modify_dest);
3749 else if (return_slot)
3750 caller_type = TREE_TYPE (return_slot);
3751 else /* No LHS on the call. */
3752 caller_type = TREE_TYPE (TREE_TYPE (callee));
3754 /* We don't need to do anything for functions that don't return anything. */
3755 if (VOID_TYPE_P (callee_type))
3756 return NULL_TREE;
3758 /* If there was a return slot, then the return value is the
3759 dereferenced address of that object. */
3760 if (return_slot)
3762 /* The front end shouldn't have used both return_slot and
3763 a modify expression. */
3764 gcc_assert (!modify_dest);
3765 if (DECL_BY_REFERENCE (result))
3767 tree return_slot_addr = build_fold_addr_expr (return_slot);
3768 STRIP_USELESS_TYPE_CONVERSION (return_slot_addr);
3770 /* We are going to construct *&return_slot and we can't do that
3771 for variables believed to be not addressable.
3773 FIXME: This check can possibly trigger, because values returned
3774 via the return slot optimization are not believed to have their
3775 address taken by alias analysis. */
3776 gcc_assert (TREE_CODE (return_slot) != SSA_NAME);
3777 var = return_slot_addr;
3778 mark_addressable (return_slot);
3780 else
3782 var = return_slot;
3783 gcc_assert (TREE_CODE (var) != SSA_NAME);
3784 if (TREE_ADDRESSABLE (result))
3785 mark_addressable (var);
3787 if (DECL_NOT_GIMPLE_REG_P (result)
3788 && DECL_P (var))
3789 DECL_NOT_GIMPLE_REG_P (var) = 1;
3791 if (!useless_type_conversion_p (callee_type, caller_type))
3792 var = build1 (VIEW_CONVERT_EXPR, callee_type, var);
3794 use = NULL;
3795 goto done;
3798 /* All types requiring non-trivial constructors should have been handled. */
3799 gcc_assert (!TREE_ADDRESSABLE (callee_type));
3801 /* Attempt to avoid creating a new temporary variable. */
3802 if (modify_dest
3803 && TREE_CODE (modify_dest) != SSA_NAME)
3805 bool use_it = false;
3807 /* We can't use MODIFY_DEST if there's type promotion involved. */
3808 if (!useless_type_conversion_p (callee_type, caller_type))
3809 use_it = false;
3811 /* ??? If we're assigning to a variable sized type, then we must
3812 reuse the destination variable, because we've no good way to
3813 create variable sized temporaries at this point. */
3814 else if (!poly_int_tree_p (TYPE_SIZE_UNIT (caller_type)))
3815 use_it = true;
3817 /* If the callee cannot possibly modify MODIFY_DEST, then we can
3818 reuse it as the result of the call directly. Don't do this if
3819 it would promote MODIFY_DEST to addressable. */
3820 else if (TREE_ADDRESSABLE (result))
3821 use_it = false;
3822 else
3824 tree base_m = get_base_address (modify_dest);
3826 /* If the base isn't a decl, then it's a pointer, and we don't
3827 know where that's going to go. */
3828 if (!DECL_P (base_m))
3829 use_it = false;
3830 else if (is_global_var (base_m))
3831 use_it = false;
3832 else if (DECL_NOT_GIMPLE_REG_P (result)
3833 && !DECL_NOT_GIMPLE_REG_P (base_m))
3834 use_it = false;
3835 else if (!TREE_ADDRESSABLE (base_m))
3836 use_it = true;
3839 if (use_it)
3841 var = modify_dest;
3842 use = NULL;
3843 goto done;
3847 gcc_assert (poly_int_tree_p (TYPE_SIZE_UNIT (callee_type)));
3849 var = copy_result_decl_to_var (result, id);
3850 DECL_SEEN_IN_BIND_EXPR_P (var) = 1;
3852 /* Do not have the rest of GCC warn about this variable as it should
3853 not be visible to the user. */
3854 suppress_warning (var /* OPT_Wuninitialized? */);
3856 declare_inline_vars (id->block, var);
3858 /* Build the use expr. If the return type of the function was
3859 promoted, convert it back to the expected type. */
3860 use = var;
3861 if (!useless_type_conversion_p (caller_type, TREE_TYPE (var)))
3863 /* If we can match up types by promotion/demotion do so. */
3864 if (fold_convertible_p (caller_type, var))
3865 use = fold_convert (caller_type, var);
3866 else
3868 /* ??? For valid programs we should not end up here.
3869 Still if we end up with truly mismatched types here, fall back
3870 to using a MEM_REF to not leak invalid GIMPLE to the following
3871 passes. */
3872 /* Prevent var from being written into SSA form. */
3873 if (is_gimple_reg_type (TREE_TYPE (var)))
3874 DECL_NOT_GIMPLE_REG_P (var) = true;
3875 use = fold_build2 (MEM_REF, caller_type,
3876 build_fold_addr_expr (var),
3877 build_int_cst (ptr_type_node, 0));
3881 STRIP_USELESS_TYPE_CONVERSION (use);
3883 if (DECL_BY_REFERENCE (result))
3885 TREE_ADDRESSABLE (var) = 1;
3886 var = build_fold_addr_expr (var);
3889 done:
3890 /* Register the VAR_DECL as the equivalent for the RESULT_DECL; that
3891 way, when the RESULT_DECL is encountered, it will be
3892 automatically replaced by the VAR_DECL.
3894 When returning by reference, ensure that RESULT_DECL remaps to
3895 gimple_val. */
3896 if (DECL_BY_REFERENCE (result)
3897 && !is_gimple_val (var))
3899 tree temp = create_tmp_var (TREE_TYPE (result), "retvalptr");
3900 insert_decl_map (id, result, temp);
3901 /* When RESULT_DECL is in SSA form, we need to remap and initialize
3902 its default_def SSA_NAME. */
3903 if (gimple_in_ssa_p (id->src_cfun)
3904 && is_gimple_reg (result))
3905 if (tree default_def = ssa_default_def (id->src_cfun, result))
3907 temp = make_ssa_name (temp);
3908 insert_decl_map (id, default_def, temp);
3910 insert_init_stmt (id, entry_bb, gimple_build_assign (temp, var));
3912 else
3913 insert_decl_map (id, result, var);
3915 /* Remember this so we can ignore it in remap_decls. */
3916 id->retvar = var;
3917 return use;
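/* Illustrative sketch (not part of the sources) of how the return value is
   wired up for a call such as

     struct big { int a[32]; };
     struct big make (void);                  // callee being inlined
     void caller (void) { struct big b = make (); }

   With CALL_EXPR_RETURN_SLOT_OPT, the caller's object "b" is the
   RETURN_SLOT, so the inlined body stores directly into "b" and no
   temporary is created.  Without a return slot, if "b" (MODIFY_DEST)
   cannot be clobbered by the callee, it is reused directly; otherwise a
   fresh temporary VAR_DECL is declared here and the caller later emits
   "b = tmp" after the inlined body.  */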
3920 /* Determine if the function can be copied. If so return NULL. If
3921 not return a string describing the reason for failure. */
3923 const char *
3924 copy_forbidden (struct function *fun)
3926 const char *reason = fun->cannot_be_copied_reason;
3928 /* Only examine the function once. */
3929 if (fun->cannot_be_copied_set)
3930 return reason;
3932 /* We cannot copy a function that receives a non-local goto
3933 because we cannot remap the destination label used in the
3934 function that is performing the non-local goto. */
3935 /* ??? Actually, this should be possible, if we work at it.
3936 No doubt there's just a handful of places that simply
3937 assume it doesn't happen and don't substitute properly. */
3938 if (fun->has_nonlocal_label)
3940 reason = G_("function %q+F can never be copied "
3941 "because it receives a non-local goto");
3942 goto fail;
3945 if (fun->has_forced_label_in_static)
3947 reason = G_("function %q+F can never be copied because it saves "
3948 "address of local label in a static variable");
3949 goto fail;
3952 fail:
3953 fun->cannot_be_copied_reason = reason;
3954 fun->cannot_be_copied_set = true;
3955 return reason;
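/* Illustrative GNU C sketch (relies on GCC's nested-function and local-label
   extensions) of a function copy_forbidden rejects: the parent receives a
   non-local goto from its nested function, so fun->has_nonlocal_label is
   set and the body can never be duplicated.

     int parent (int x)
     {
       __label__ bail;
       void check (void) { if (x < 0) goto bail; }   // non-local goto
       check ();
       return x;
     bail:
       return -1;
     }
*/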
3959 static const char *inline_forbidden_reason;
3961 /* A callback for walk_gimple_seq to handle statements. Returns non-null
3962 iff a function cannot be inlined. Also sets the reason why. */
3964 static tree
3965 inline_forbidden_p_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
3966 struct walk_stmt_info *wip)
3968 tree fn = (tree) wip->info;
3969 tree t;
3970 gimple *stmt = gsi_stmt (*gsi);
3972 switch (gimple_code (stmt))
3974 case GIMPLE_CALL:
3975 /* Refuse to inline an alloca call unless the user explicitly forced it, as
3976 this may change the program's memory overhead drastically when the
3977 function using alloca is called in a loop. In the GCC included in
3978 SPEC2000, inlining into schedule_block caused it to require 2GB of
3979 RAM instead of 256MB. Don't do so for alloca calls emitted for
3980 VLA objects, as those can't cause unbounded growth (they're always
3981 wrapped inside stack_save/stack_restore regions). */
3982 if (gimple_maybe_alloca_call_p (stmt)
3983 && !gimple_call_alloca_for_var_p (as_a <gcall *> (stmt))
3984 && !lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn)))
3986 inline_forbidden_reason
3987 = G_("function %q+F can never be inlined because it uses "
3988 "alloca (override using the always_inline attribute)");
3989 *handled_ops_p = true;
3990 return fn;
3993 t = gimple_call_fndecl (stmt);
3994 if (t == NULL_TREE)
3995 break;
3997 /* We cannot inline functions that call setjmp. */
3998 if (setjmp_call_p (t))
4000 inline_forbidden_reason
4001 = G_("function %q+F can never be inlined because it uses setjmp");
4002 *handled_ops_p = true;
4003 return t;
4006 if (DECL_BUILT_IN_CLASS (t) == BUILT_IN_NORMAL)
4007 switch (DECL_FUNCTION_CODE (t))
4009 /* We cannot inline functions that take a variable number of
4010 arguments. */
4011 case BUILT_IN_VA_START:
4012 case BUILT_IN_NEXT_ARG:
4013 case BUILT_IN_VA_END:
4014 inline_forbidden_reason
4015 = G_("function %q+F can never be inlined because it "
4016 "uses variable argument lists");
4017 *handled_ops_p = true;
4018 return t;
4020 case BUILT_IN_LONGJMP:
4021 /* We can't inline functions that call __builtin_longjmp at
4022 all. The non-local goto machinery really requires the
4023 destination be in a different function. If we allow the
4024 function calling __builtin_longjmp to be inlined into the
4025 function calling __builtin_setjmp, Things will Go Awry. */
4026 inline_forbidden_reason
4027 = G_("function %q+F can never be inlined because "
4028 "it uses setjmp-longjmp exception handling");
4029 *handled_ops_p = true;
4030 return t;
4032 case BUILT_IN_NONLOCAL_GOTO:
4033 /* Similarly. */
4034 inline_forbidden_reason
4035 = G_("function %q+F can never be inlined because "
4036 "it uses non-local goto");
4037 *handled_ops_p = true;
4038 return t;
4040 case BUILT_IN_RETURN:
4041 case BUILT_IN_APPLY_ARGS:
4042 /* If a __builtin_apply_args caller would be inlined,
4043 it would be saving arguments of the function it has
4044 been inlined into. Similarly __builtin_return would
4045 return from the function the inline has been inlined into. */
4046 inline_forbidden_reason
4047 = G_("function %q+F can never be inlined because "
4048 "it uses %<__builtin_return%> or %<__builtin_apply_args%>");
4049 *handled_ops_p = true;
4050 return t;
4052 default:
4053 break;
4055 break;
4057 case GIMPLE_GOTO:
4058 t = gimple_goto_dest (stmt);
4060 /* We will not inline a function which uses computed goto. The
4061 addresses of its local labels, which may be tucked into
4062 global storage, are of course not constant across
4063 instantiations, which causes unexpected behavior. */
4064 if (TREE_CODE (t) != LABEL_DECL)
4066 inline_forbidden_reason
4067 = G_("function %q+F can never be inlined "
4068 "because it contains a computed goto");
4069 *handled_ops_p = true;
4070 return t;
4072 break;
4074 default:
4075 break;
4078 *handled_ops_p = false;
4079 return NULL_TREE;
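/* Illustrative GNU C sketch (not part of the sources) of statements this
   walker flags:

     #include <setjmp.h>
     static jmp_buf env;
     static inline int uses_setjmp (void)
     { return setjmp (env); }                  // "uses setjmp"

     static inline int dispatch (int i)
     {
       static void *tbl[] = { &&a, &&b };      // labels as values
       goto *tbl[i & 1];                       // computed goto
     a: return 0;
     b: return 1;
     }

   Both functions end up with DECL_UNINLINABLE set once
   tree_inlinable_function_p below has looked at them.  */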
4082 /* Return true if FNDECL is a function that cannot be inlined into
4083 another one. */
4085 static bool
4086 inline_forbidden_p (tree fndecl)
4088 struct function *fun = DECL_STRUCT_FUNCTION (fndecl);
4089 struct walk_stmt_info wi;
4090 basic_block bb;
4091 bool forbidden_p = false;
4093 /* First check for shared reasons not to copy the code. */
4094 inline_forbidden_reason = copy_forbidden (fun);
4095 if (inline_forbidden_reason != NULL)
4096 return true;
4098 /* Next, walk the statements of the function looking for
4099 constructs we can't handle, or that are non-optimal for inlining. */
4100 hash_set<tree> visited_nodes;
4101 memset (&wi, 0, sizeof (wi));
4102 wi.info = (void *) fndecl;
4103 wi.pset = &visited_nodes;
4105 /* We cannot inline a function with a variable-sized parameter because we
4106 cannot materialize a temporary of such a type in the caller if need be.
4107 Note that the return case is not symmetrical because we can guarantee
4108 that a temporary is not needed by means of CALL_EXPR_RETURN_SLOT_OPT. */
4109 for (tree parm = DECL_ARGUMENTS (fndecl); parm; parm = DECL_CHAIN (parm))
4110 if (!poly_int_tree_p (DECL_SIZE (parm)))
4112 inline_forbidden_reason
4113 = G_("function %q+F can never be inlined because "
4114 "it has a VLA argument");
4115 return true;
4118 FOR_EACH_BB_FN (bb, fun)
4120 gimple *ret;
4121 gimple_seq seq = bb_seq (bb);
4122 ret = walk_gimple_seq (seq, inline_forbidden_p_stmt, NULL, &wi);
4123 forbidden_p = (ret != NULL);
4124 if (forbidden_p)
4125 break;
4128 return forbidden_p;
4131 /* Return false if the function FNDECL cannot be inlined on account of its
4132 attributes, true otherwise. */
4133 static bool
4134 function_attribute_inlinable_p (const_tree fndecl)
4136 for (auto scoped_attributes : targetm.attribute_table)
4138 const_tree a;
4140 for (a = DECL_ATTRIBUTES (fndecl); a; a = TREE_CHAIN (a))
4142 const_tree name = get_attribute_name (a);
4144 for (const attribute_spec &attribute : scoped_attributes->attributes)
4145 if (is_attribute_p (attribute.name, name))
4146 return targetm.function_attribute_inlinable_p (fndecl);
4150 return true;
4153 /* Returns nonzero if FN is a function that does not have any
4154 fundamental inline blocking properties. */
4156 bool
4157 tree_inlinable_function_p (tree fn)
4159 bool inlinable = true;
4160 bool do_warning;
4161 tree always_inline;
4163 /* If we've already decided this function shouldn't be inlined,
4164 there's no need to check again. */
4165 if (DECL_UNINLINABLE (fn))
4166 return false;
4168 /* We only warn for functions declared `inline' by the user. */
4169 do_warning = (opt_for_fn (fn, warn_inline)
4170 && DECL_DECLARED_INLINE_P (fn)
4171 && !DECL_NO_INLINE_WARNING_P (fn)
4172 && !DECL_IN_SYSTEM_HEADER (fn));
4174 always_inline = lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn));
4176 if (flag_no_inline
4177 && always_inline == NULL)
4179 if (do_warning)
4180 warning (OPT_Winline, "function %q+F can never be inlined because it "
4181 "is suppressed using %<-fno-inline%>", fn);
4182 inlinable = false;
4185 else if (!function_attribute_inlinable_p (fn))
4187 if (do_warning)
4188 warning (OPT_Winline, "function %q+F can never be inlined because it "
4189 "uses attributes conflicting with inlining", fn);
4190 inlinable = false;
4193 else if (inline_forbidden_p (fn))
4195 /* See if we should warn about uninlinable functions. Previously,
4196 some of these warnings would be issued while trying to expand
4197 the function inline, but that would cause multiple warnings
4198 about functions that would for example call alloca. But since
4199 this is a property of the function, just one warning is enough.
4200 As a bonus we can now give more details about the reason why a
4201 function is not inlinable. */
4202 if (always_inline)
4203 error (inline_forbidden_reason, fn);
4204 else if (do_warning)
4205 warning (OPT_Winline, inline_forbidden_reason, fn);
4207 inlinable = false;
4210 /* Squirrel away the result so that we don't have to check again. */
4211 DECL_UNINLINABLE (fn) = !inlinable;
4213 return inlinable;
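/* Illustrative sketch (not part of the sources) of the diagnostics this
   predicate feeds.  Compiling

     inline void *grab (unsigned n)
     { return __builtin_alloca (n); }          // alloca blocks inlining

     __attribute__ ((always_inline)) inline void *grab2 (unsigned n)
     { return __builtin_alloca (n); }          // attribute overrides the check

   with -O2 -Winline typically warns for grab ("function ... can never be
   inlined because it uses alloca") while grab2 is still inlined, because
   the always_inline attribute bypasses the alloca check in
   inline_forbidden_p_stmt.  Exact wording depends on the GCC version.  */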
4216 /* Estimate the cost of a memory move of type TYPE. Use the machine-dependent
4217 word size, take a possible memcpy call into account, and return the
4218 cost based on whether we optimize for size or speed according to SPEED_P. */
4220 static int
4221 estimate_move_cost (tree type, bool ARG_UNUSED (speed_p))
4223 HOST_WIDE_INT size;
4225 gcc_assert (!VOID_TYPE_P (type));
4227 if (VECTOR_TYPE_P (type))
4229 scalar_mode inner = SCALAR_TYPE_MODE (TREE_TYPE (type));
4230 machine_mode simd = targetm.vectorize.preferred_simd_mode (inner);
4231 int orig_mode_size
4232 = estimated_poly_value (GET_MODE_SIZE (TYPE_MODE (type)));
4233 int simd_mode_size = estimated_poly_value (GET_MODE_SIZE (simd));
4234 return ((orig_mode_size + simd_mode_size - 1)
4235 / simd_mode_size);
4238 size = int_size_in_bytes (type);
4240 if (size < 0 || size > MOVE_MAX_PIECES * MOVE_RATIO (speed_p))
4241 /* Cost of a memcpy call, 3 arguments and the call. */
4242 return 4;
4243 else
4244 return ((size + MOVE_MAX_PIECES - 1) / MOVE_MAX_PIECES);
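/* Worked example (illustrative; the actual numbers are target-dependent):
   with MOVE_MAX_PIECES == 8 and MOVE_RATIO (speed_p) == 4, a 20-byte
   struct costs (20 + 8 - 1) / 8 == 3 move units, while a 64-byte struct
   exceeds 8 * 4 == 32 bytes and is costed as a memcpy call, i.e. 4.  For
   a vector type the cost is the number of preferred-SIMD-width pieces
   needed to cover it, e.g. a 32-byte vector on a target whose preferred
   SIMD mode is 16 bytes wide costs (32 + 16 - 1) / 16 == 2.  */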
4247 /* Returns the cost of operation CODE, according to WEIGHTS. */
4249 static int
4250 estimate_operator_cost (enum tree_code code, eni_weights *weights,
4251 tree op1 ATTRIBUTE_UNUSED, tree op2)
4253 switch (code)
4255 /* These are "free" conversions, or their presumed cost
4256 is folded into other operations. */
4257 case RANGE_EXPR:
4258 CASE_CONVERT:
4259 case COMPLEX_EXPR:
4260 case PAREN_EXPR:
4261 case VIEW_CONVERT_EXPR:
4262 return 0;
4264 /* Assign cost of 1 to usual operations.
4265 ??? We may consider mapping RTL costs to this. */
4266 case COND_EXPR:
4267 case VEC_COND_EXPR:
4268 case VEC_PERM_EXPR:
4270 case PLUS_EXPR:
4271 case POINTER_PLUS_EXPR:
4272 case POINTER_DIFF_EXPR:
4273 case MINUS_EXPR:
4274 case MULT_EXPR:
4275 case MULT_HIGHPART_EXPR:
4277 case ADDR_SPACE_CONVERT_EXPR:
4278 case FIXED_CONVERT_EXPR:
4279 case FIX_TRUNC_EXPR:
4281 case NEGATE_EXPR:
4282 case FLOAT_EXPR:
4283 case MIN_EXPR:
4284 case MAX_EXPR:
4285 case ABS_EXPR:
4286 case ABSU_EXPR:
4288 case LSHIFT_EXPR:
4289 case RSHIFT_EXPR:
4290 case LROTATE_EXPR:
4291 case RROTATE_EXPR:
4293 case BIT_IOR_EXPR:
4294 case BIT_XOR_EXPR:
4295 case BIT_AND_EXPR:
4296 case BIT_NOT_EXPR:
4298 case TRUTH_ANDIF_EXPR:
4299 case TRUTH_ORIF_EXPR:
4300 case TRUTH_AND_EXPR:
4301 case TRUTH_OR_EXPR:
4302 case TRUTH_XOR_EXPR:
4303 case TRUTH_NOT_EXPR:
4305 case LT_EXPR:
4306 case LE_EXPR:
4307 case GT_EXPR:
4308 case GE_EXPR:
4309 case EQ_EXPR:
4310 case NE_EXPR:
4311 case ORDERED_EXPR:
4312 case UNORDERED_EXPR:
4314 case UNLT_EXPR:
4315 case UNLE_EXPR:
4316 case UNGT_EXPR:
4317 case UNGE_EXPR:
4318 case UNEQ_EXPR:
4319 case LTGT_EXPR:
4321 case CONJ_EXPR:
4323 case PREDECREMENT_EXPR:
4324 case PREINCREMENT_EXPR:
4325 case POSTDECREMENT_EXPR:
4326 case POSTINCREMENT_EXPR:
4328 case REALIGN_LOAD_EXPR:
4330 case WIDEN_SUM_EXPR:
4331 case WIDEN_MULT_EXPR:
4332 case DOT_PROD_EXPR:
4333 case SAD_EXPR:
4334 case WIDEN_MULT_PLUS_EXPR:
4335 case WIDEN_MULT_MINUS_EXPR:
4336 case WIDEN_LSHIFT_EXPR:
4338 case VEC_WIDEN_MULT_HI_EXPR:
4339 case VEC_WIDEN_MULT_LO_EXPR:
4340 case VEC_WIDEN_MULT_EVEN_EXPR:
4341 case VEC_WIDEN_MULT_ODD_EXPR:
4342 case VEC_UNPACK_HI_EXPR:
4343 case VEC_UNPACK_LO_EXPR:
4344 case VEC_UNPACK_FLOAT_HI_EXPR:
4345 case VEC_UNPACK_FLOAT_LO_EXPR:
4346 case VEC_UNPACK_FIX_TRUNC_HI_EXPR:
4347 case VEC_UNPACK_FIX_TRUNC_LO_EXPR:
4348 case VEC_PACK_TRUNC_EXPR:
4349 case VEC_PACK_SAT_EXPR:
4350 case VEC_PACK_FIX_TRUNC_EXPR:
4351 case VEC_PACK_FLOAT_EXPR:
4352 case VEC_WIDEN_LSHIFT_HI_EXPR:
4353 case VEC_WIDEN_LSHIFT_LO_EXPR:
4354 case VEC_DUPLICATE_EXPR:
4355 case VEC_SERIES_EXPR:
4357 return 1;
4359 /* A few special cases of expensive operations. This is useful
4360 to avoid inlining functions that have too many of these. */
4361 case TRUNC_DIV_EXPR:
4362 case CEIL_DIV_EXPR:
4363 case FLOOR_DIV_EXPR:
4364 case ROUND_DIV_EXPR:
4365 case EXACT_DIV_EXPR:
4366 case TRUNC_MOD_EXPR:
4367 case CEIL_MOD_EXPR:
4368 case FLOOR_MOD_EXPR:
4369 case ROUND_MOD_EXPR:
4370 case RDIV_EXPR:
4371 if (TREE_CODE (op2) != INTEGER_CST)
4372 return weights->div_mod_cost;
4373 return 1;
4375 /* Bit-field insertion needs several shift and mask operations. */
4376 case BIT_INSERT_EXPR:
4377 return 3;
4379 default:
4380 /* We expect a copy assignment with no operator. */
4381 gcc_assert (get_gimple_rhs_class (code) == GIMPLE_SINGLE_RHS);
4382 return 0;
4387 /* Estimate number of instructions that will be created by expanding
4388 the statements in the statement sequence STMTS.
4389 WEIGHTS contains weights attributed to various constructs. */
4391 int
4392 estimate_num_insns_seq (gimple_seq stmts, eni_weights *weights)
4394 int cost;
4395 gimple_stmt_iterator gsi;
4397 cost = 0;
4398 for (gsi = gsi_start (stmts); !gsi_end_p (gsi); gsi_next (&gsi))
4399 cost += estimate_num_insns (gsi_stmt (gsi), weights);
4401 return cost;
4405 /* Estimate number of instructions that will be created by expanding STMT.
4406 WEIGHTS contains weights attributed to various constructs. */
4408 int
4409 estimate_num_insns (gimple *stmt, eni_weights *weights)
4411 unsigned cost, i;
4412 enum gimple_code code = gimple_code (stmt);
4413 tree lhs;
4414 tree rhs;
4416 switch (code)
4418 case GIMPLE_ASSIGN:
4419 /* Try to estimate the cost of assignments. We have two cases to
4420 deal with:
4421 1) Simple assignments to registers;
4422 2) Stores to things that must live in memory. This includes
4423 "normal" stores to scalars, but also assignments of large
4424 structures, or constructors of big arrays;
4426 Let us look at the first two cases, assuming we have "a = b + C":
4427 <GIMPLE_ASSIGN <var_decl "a">
4428 <plus_expr <var_decl "b"> <constant C>>
4429 If "a" is a GIMPLE register, the assignment to it is free on almost
4430 any target, because "a" usually ends up in a real register. Hence
4431 the only cost of this expression comes from the PLUS_EXPR, and we
4432 can ignore the GIMPLE_ASSIGN.
4433 If "a" is not a GIMPLE register, the assignment to "a" will most
4434 likely be a real store, so the cost of the GIMPLE_ASSIGN is the cost
4435 of moving something into "a", which we compute using the function
4436 estimate_move_cost. */
4437 if (gimple_clobber_p (stmt))
4438 return 0; /* ={v} {CLOBBER} stmt expands to nothing. */
4440 lhs = gimple_assign_lhs (stmt);
4441 rhs = gimple_assign_rhs1 (stmt);
4443 cost = 0;
4445 /* Account for the cost of moving to / from memory. */
4446 if (gimple_store_p (stmt))
4447 cost += estimate_move_cost (TREE_TYPE (lhs), weights->time_based);
4448 if (gimple_assign_load_p (stmt))
4449 cost += estimate_move_cost (TREE_TYPE (rhs), weights->time_based);
4451 cost += estimate_operator_cost (gimple_assign_rhs_code (stmt), weights,
4452 gimple_assign_rhs1 (stmt),
4453 get_gimple_rhs_class (gimple_assign_rhs_code (stmt))
4454 == GIMPLE_BINARY_RHS
4455 ? gimple_assign_rhs2 (stmt) : NULL);
4456 break;
4458 case GIMPLE_COND:
4459 cost = 1 + estimate_operator_cost (gimple_cond_code (stmt), weights,
4460 gimple_op (stmt, 0),
4461 gimple_op (stmt, 1));
4462 break;
4464 case GIMPLE_SWITCH:
4466 gswitch *switch_stmt = as_a <gswitch *> (stmt);
4467 /* Take into account cost of the switch + guess 2 conditional jumps for
4468 each case label.
4470 TODO: once the switch expansion logic is sufficiently separated, we can
4471 do a better job of estimating the cost of the switch. */
4472 if (weights->time_based)
4473 cost = floor_log2 (gimple_switch_num_labels (switch_stmt)) * 2;
4474 else
4475 cost = gimple_switch_num_labels (switch_stmt) * 2;
4477 break;
4479 case GIMPLE_CALL:
4481 tree decl;
4483 if (gimple_call_internal_p (stmt))
4484 return 0;
4485 else if ((decl = gimple_call_fndecl (stmt))
4486 && fndecl_built_in_p (decl))
4488 /* Do not special case builtins where we see the body.
4489 This just confuses the inliner. */
4490 struct cgraph_node *node;
4491 if ((node = cgraph_node::get (decl))
4492 && node->definition)
4494 /* For builtins that are likely expanded to nothing or
4495 inlined, do not account operand costs. */
4496 else if (is_simple_builtin (decl))
4497 return 0;
4498 else if (is_inexpensive_builtin (decl))
4499 return weights->target_builtin_call_cost;
4500 else if (gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
4502 /* We canonicalize x * x to pow (x, 2.0) with -ffast-math, so
4503 specialize the cheap expansion we do here.
4504 ??? This asks for a more general solution. */
4505 switch (DECL_FUNCTION_CODE (decl))
4507 case BUILT_IN_POW:
4508 case BUILT_IN_POWF:
4509 case BUILT_IN_POWL:
4510 if (TREE_CODE (gimple_call_arg (stmt, 1)) == REAL_CST
4511 && (real_equal
4512 (&TREE_REAL_CST (gimple_call_arg (stmt, 1)),
4513 &dconst2)))
4514 return estimate_operator_cost
4515 (MULT_EXPR, weights, gimple_call_arg (stmt, 0),
4516 gimple_call_arg (stmt, 0));
4517 break;
4519 default:
4520 break;
4525 cost = decl ? weights->call_cost : weights->indirect_call_cost;
4526 if (gimple_call_lhs (stmt))
4527 cost += estimate_move_cost (TREE_TYPE (gimple_call_lhs (stmt)),
4528 weights->time_based);
4529 for (i = 0; i < gimple_call_num_args (stmt); i++)
4531 tree arg = gimple_call_arg (stmt, i);
4532 cost += estimate_move_cost (TREE_TYPE (arg),
4533 weights->time_based);
4535 break;
4538 case GIMPLE_RETURN:
4539 return weights->return_cost;
4541 case GIMPLE_GOTO:
4542 case GIMPLE_LABEL:
4543 case GIMPLE_NOP:
4544 case GIMPLE_PHI:
4545 case GIMPLE_PREDICT:
4546 case GIMPLE_DEBUG:
4547 return 0;
4549 case GIMPLE_ASM:
4551 int count = asm_str_count (gimple_asm_string (as_a <gasm *> (stmt)));
4552 /* 1000 means infinity. This avoids overflows later
4553 with very long asm statements. */
4554 if (count > 1000)
4555 count = 1000;
4556 /* If this asm is asm inline, count anything as minimum size. */
4557 if (gimple_asm_inline_p (as_a <gasm *> (stmt)))
4558 count = MIN (1, count);
4559 return MAX (1, count);
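/* Worked example (illustrative): asm ("insn1; insn2; insn3" : :) has an
   asm_str_count of roughly 3 and is costed as 3, whereas the same
   statement marked "asm inline" is costed as 1 regardless of the length
   of its template string.  */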
4562 case GIMPLE_RESX:
4563 /* This is either going to be an external function call with one
4564 argument, or two register copy statements plus a goto. */
4565 return 2;
4567 case GIMPLE_EH_DISPATCH:
4568 /* ??? This is going to turn into a switch statement. Ideally
4569 we'd have a look at the eh region and estimate the number of
4570 edges involved. */
4571 return 10;
4573 case GIMPLE_BIND:
4574 return estimate_num_insns_seq (
4575 gimple_bind_body (as_a <gbind *> (stmt)),
4576 weights);
4578 case GIMPLE_EH_FILTER:
4579 return estimate_num_insns_seq (gimple_eh_filter_failure (stmt), weights);
4581 case GIMPLE_CATCH:
4582 return estimate_num_insns_seq (gimple_catch_handler (
4583 as_a <gcatch *> (stmt)),
4584 weights);
4586 case GIMPLE_TRY:
4587 return (estimate_num_insns_seq (gimple_try_eval (stmt), weights)
4588 + estimate_num_insns_seq (gimple_try_cleanup (stmt), weights));
4590 /* OMP directives are generally very expensive. */
4592 case GIMPLE_OMP_RETURN:
4593 case GIMPLE_OMP_SECTIONS_SWITCH:
4594 case GIMPLE_OMP_ATOMIC_STORE:
4595 case GIMPLE_OMP_CONTINUE:
4596 /* ...except these, which are cheap. */
4597 return 0;
4599 case GIMPLE_OMP_ATOMIC_LOAD:
4600 return weights->omp_cost;
4602 case GIMPLE_OMP_FOR:
4603 return (weights->omp_cost
4604 + estimate_num_insns_seq (gimple_omp_body (stmt), weights)
4605 + estimate_num_insns_seq (gimple_omp_for_pre_body (stmt), weights));
4607 case GIMPLE_OMP_PARALLEL:
4608 case GIMPLE_OMP_TASK:
4609 case GIMPLE_OMP_CRITICAL:
4610 case GIMPLE_OMP_MASTER:
4611 case GIMPLE_OMP_MASKED:
4612 case GIMPLE_OMP_SCOPE:
4613 case GIMPLE_OMP_TASKGROUP:
4614 case GIMPLE_OMP_ORDERED:
4615 case GIMPLE_OMP_SCAN:
4616 case GIMPLE_OMP_SECTION:
4617 case GIMPLE_OMP_SECTIONS:
4618 case GIMPLE_OMP_STRUCTURED_BLOCK:
4619 case GIMPLE_OMP_SINGLE:
4620 case GIMPLE_OMP_TARGET:
4621 case GIMPLE_OMP_TEAMS:
4622 return (weights->omp_cost
4623 + estimate_num_insns_seq (gimple_omp_body (stmt), weights));
4625 case GIMPLE_TRANSACTION:
4626 return (weights->tm_cost
4627 + estimate_num_insns_seq (gimple_transaction_body (
4628 as_a <gtransaction *> (stmt)),
4629 weights));
4631 default:
4632 gcc_unreachable ();
4635 return cost;
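/* Worked example (illustrative): for the GIMPLE sequence

     _1 = b_2(D) / c_3(D);   // division by a non-constant
     a = _1;                 // "a" lives in memory
     return a;

   eni_size_weights charges div_mod_cost (1) for the division, the
   estimate_move_cost of a's type for the store, and return_cost (1) for
   the return; under eni_time_weights the division alone contributes 10.
   A GIMPLE_SWITCH with 16 case labels adds floor_log2 (16) * 2 == 8 when
   time-based and 16 * 2 == 32 when size-based.  */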
4638 /* Estimate number of instructions that will be created by expanding
4639 function FNDECL. WEIGHTS contains weights attributed to various
4640 constructs. */
4642 int
4643 estimate_num_insns_fn (tree fndecl, eni_weights *weights)
4645 struct function *my_function = DECL_STRUCT_FUNCTION (fndecl);
4646 gimple_stmt_iterator bsi;
4647 basic_block bb;
4648 int n = 0;
4650 gcc_assert (my_function && my_function->cfg);
4651 FOR_EACH_BB_FN (bb, my_function)
4653 for (bsi = gsi_start_bb (bb); !gsi_end_p (bsi); gsi_next (&bsi))
4654 n += estimate_num_insns (gsi_stmt (bsi), weights);
4657 return n;
4661 /* Initializes weights used by estimate_num_insns. */
4663 void
4664 init_inline_once (void)
4666 eni_size_weights.call_cost = 1;
4667 eni_size_weights.indirect_call_cost = 3;
4668 eni_size_weights.target_builtin_call_cost = 1;
4669 eni_size_weights.div_mod_cost = 1;
4670 eni_size_weights.omp_cost = 40;
4671 eni_size_weights.tm_cost = 10;
4672 eni_size_weights.time_based = false;
4673 eni_size_weights.return_cost = 1;
4675 /* Estimating time for call is difficult, since we have no idea what the
4676 called function does. In the current uses of eni_time_weights,
4677 underestimating the cost does less harm than overestimating it, so
4678 we choose a rather small value here. */
4679 eni_time_weights.call_cost = 10;
4680 eni_time_weights.indirect_call_cost = 15;
4681 eni_time_weights.target_builtin_call_cost = 1;
4682 eni_time_weights.div_mod_cost = 10;
4683 eni_time_weights.omp_cost = 40;
4684 eni_time_weights.tm_cost = 40;
4685 eni_time_weights.time_based = true;
4686 eni_time_weights.return_cost = 2;
4690 /* Install new lexical TREE_BLOCK underneath 'current_block'. */
4692 static void
4693 prepend_lexical_block (tree current_block, tree new_block)
4695 BLOCK_CHAIN (new_block) = BLOCK_SUBBLOCKS (current_block);
4696 BLOCK_SUBBLOCKS (current_block) = new_block;
4697 BLOCK_SUPERCONTEXT (new_block) = current_block;
4700 /* Add local variables from CALLEE to CALLER. */
4702 static inline void
4703 add_local_variables (struct function *callee, struct function *caller,
4704 copy_body_data *id)
4706 tree var;
4707 unsigned ix;
4709 FOR_EACH_LOCAL_DECL (callee, ix, var)
4710 if (!can_be_nonlocal (var, id))
4712 tree new_var = remap_decl (var, id);
4714 /* Remap debug-expressions. */
4715 if (VAR_P (new_var)
4716 && DECL_HAS_DEBUG_EXPR_P (var)
4717 && new_var != var)
4719 tree tem = DECL_DEBUG_EXPR (var);
4720 bool old_regimplify = id->regimplify;
4721 id->remapping_type_depth++;
4722 walk_tree (&tem, copy_tree_body_r, id, NULL);
4723 id->remapping_type_depth--;
4724 id->regimplify = old_regimplify;
4725 SET_DECL_DEBUG_EXPR (new_var, tem);
4726 DECL_HAS_DEBUG_EXPR_P (new_var) = 1;
4728 add_local_decl (caller, new_var);
4732 /* Add to BINDINGS a debug stmt resetting SRCVAR if inlining might
4733 have brought in or introduced any debug stmts for SRCVAR. */
4735 static inline void
4736 reset_debug_binding (copy_body_data *id, tree srcvar, gimple_seq *bindings)
4738 tree *remappedvarp = id->decl_map->get (srcvar);
4740 if (!remappedvarp)
4741 return;
4743 if (!VAR_P (*remappedvarp))
4744 return;
4746 if (*remappedvarp == id->retvar)
4747 return;
4749 tree tvar = target_for_debug_bind (*remappedvarp);
4750 if (!tvar)
4751 return;
4753 gdebug *stmt = gimple_build_debug_bind (tvar, NULL_TREE,
4754 id->call_stmt);
4755 gimple_seq_add_stmt (bindings, stmt);
4758 /* For each inlined variable for which we may have debug bind stmts,
4759 add before GSI a final debug stmt resetting it, marking the end of
4760 its life, so that var-tracking knows it doesn't have to compute
4761 further locations for it. */
4763 static inline void
4764 reset_debug_bindings (copy_body_data *id, gimple_stmt_iterator gsi)
4766 tree var;
4767 unsigned ix;
4768 gimple_seq bindings = NULL;
4770 if (!gimple_in_ssa_p (id->src_cfun))
4771 return;
4773 if (!opt_for_fn (id->dst_fn, flag_var_tracking_assignments))
4774 return;
4776 for (var = DECL_ARGUMENTS (id->src_fn);
4777 var; var = DECL_CHAIN (var))
4778 reset_debug_binding (id, var, &bindings);
4780 FOR_EACH_LOCAL_DECL (id->src_cfun, ix, var)
4781 reset_debug_binding (id, var, &bindings);
4783 gsi_insert_seq_before_without_update (&gsi, bindings, GSI_SAME_STMT);
4786 /* If STMT is a GIMPLE_CALL, replace it with its inline expansion. */
4788 static bool
4789 expand_call_inline (basic_block bb, gimple *stmt, copy_body_data *id,
4790 bitmap to_purge)
4792 tree use_retvar;
4793 tree fn;
4794 hash_map<tree, tree> *dst;
4795 hash_map<tree, tree> *st = NULL;
4796 tree return_slot;
4797 tree modify_dest;
4798 struct cgraph_edge *cg_edge;
4799 cgraph_inline_failed_t reason;
4800 basic_block return_block;
4801 edge e;
4802 gimple_stmt_iterator gsi, stmt_gsi;
4803 bool successfully_inlined = false;
4804 bool purge_dead_abnormal_edges;
4805 gcall *call_stmt;
4806 unsigned int prop_mask, src_properties;
4807 struct function *dst_cfun;
4808 tree simduid;
4809 use_operand_p use;
4810 gimple *simtenter_stmt = NULL;
4811 vec<tree> *simtvars_save;
4812 tree save_stack = NULL_TREE;
4814 /* The gimplifier uses input_location in too many places, such as
4815 internal_get_tmp_var (). */
4816 location_t saved_location = input_location;
4817 input_location = gimple_location (stmt);
4819 /* From here on, we're only interested in CALL_EXPRs. */
4820 call_stmt = dyn_cast <gcall *> (stmt);
4821 if (!call_stmt)
4822 goto egress;
4824 cg_edge = id->dst_node->get_edge (stmt);
4825 gcc_checking_assert (cg_edge);
4826 /* First, see if we can figure out what function is being called.
4827 If we cannot, then there is no hope of inlining the function. */
4828 if (cg_edge->indirect_unknown_callee)
4829 goto egress;
4830 fn = cg_edge->callee->decl;
4831 gcc_checking_assert (fn);
4833 /* If FN is a declaration of a function in a nested scope that was
4834 globally declared inline, we don't set its DECL_INITIAL.
4835 However, we can't blindly follow DECL_ABSTRACT_ORIGIN because the
4836 C++ front-end uses it for cdtors to refer to their internal
4837 declarations, that are not real functions. Fortunately those
4838 don't have trees to be saved, so we can tell by checking their
4839 gimple_body. */
4840 if (!DECL_INITIAL (fn)
4841 && DECL_ABSTRACT_ORIGIN (fn)
4842 && gimple_has_body_p (DECL_ABSTRACT_ORIGIN (fn)))
4843 fn = DECL_ABSTRACT_ORIGIN (fn);
4845 /* Don't try to inline functions that are not well-suited to inlining. */
4846 if (cg_edge->inline_failed)
4848 reason = cg_edge->inline_failed;
4849 /* If this call was originally indirect, we do not want to emit any
4850 inlining related warnings or sorry messages because there are no
4851 guarantees regarding those. */
4852 if (cg_edge->indirect_inlining_edge)
4853 goto egress;
4855 if (lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn))
4856 /* For extern inline functions that get redefined we always
4857 silently ignore the always_inline flag. Better behavior would
4858 be to be able to keep both bodies and use extern inline body
4859 for inlining, but we can't do that because frontends overwrite
4860 the body. */
4861 && !cg_edge->callee->redefined_extern_inline
4862 /* During early inline pass, report only when optimization is
4863 not turned on. */
4864 && (symtab->global_info_ready
4865 || !optimize
4866 || cgraph_inline_failed_type (reason) == CIF_FINAL_ERROR)
4867 /* PR 20090218-1_0.c. Body can be provided by another module. */
4868 && (reason != CIF_BODY_NOT_AVAILABLE || !flag_generate_lto))
4870 error ("inlining failed in call to %<always_inline%> %q+F: %s", fn,
4871 cgraph_inline_failed_string (reason));
4872 if (gimple_location (stmt) != UNKNOWN_LOCATION)
4873 inform (gimple_location (stmt), "called from here");
4874 else if (DECL_SOURCE_LOCATION (cfun->decl) != UNKNOWN_LOCATION)
4875 inform (DECL_SOURCE_LOCATION (cfun->decl),
4876 "called from this function");
4878 else if (opt_for_fn (fn, warn_inline)
4879 && DECL_DECLARED_INLINE_P (fn)
4880 && !DECL_NO_INLINE_WARNING_P (fn)
4881 && !DECL_IN_SYSTEM_HEADER (fn)
4882 && reason != CIF_UNSPECIFIED
4883 && !lookup_attribute ("noinline", DECL_ATTRIBUTES (fn))
4884 /* Do not warn about not inlined recursive calls. */
4885 && !cg_edge->recursive_p ()
4886 /* Avoid warnings during early inline pass. */
4887 && symtab->global_info_ready)
4889 auto_diagnostic_group d;
4890 if (warning (OPT_Winline, "inlining failed in call to %q+F: %s",
4891 fn, _(cgraph_inline_failed_string (reason))))
4893 if (gimple_location (stmt) != UNKNOWN_LOCATION)
4894 inform (gimple_location (stmt), "called from here");
4895 else if (DECL_SOURCE_LOCATION (cfun->decl) != UNKNOWN_LOCATION)
4896 inform (DECL_SOURCE_LOCATION (cfun->decl),
4897 "called from this function");
4900 goto egress;
4902 id->src_node = cg_edge->callee;
4904 /* If the callee is a thunk, all we need is to adjust the THIS pointer
4905 and redirect to the function being thunked. */
4906 if (id->src_node->thunk)
4908 cgraph_edge *edge;
4909 tree virtual_offset = NULL;
4910 profile_count count = cg_edge->count;
4911 tree op;
4912 gimple_stmt_iterator iter = gsi_for_stmt (stmt);
4913 thunk_info *info = thunk_info::get (id->src_node);
4915 cgraph_edge::remove (cg_edge);
4916 edge = id->src_node->callees->clone (id->dst_node, call_stmt,
4917 gimple_uid (stmt),
4918 profile_count::one (),
4919 profile_count::one (),
4920 true);
4921 edge->count = count;
4922 if (info->virtual_offset_p)
4923 virtual_offset = size_int (info->virtual_value);
4924 op = create_tmp_reg_fn (cfun, TREE_TYPE (gimple_call_arg (stmt, 0)),
4925 NULL);
4926 gsi_insert_before (&iter, gimple_build_assign (op,
4927 gimple_call_arg (stmt, 0)),
4928 GSI_NEW_STMT);
4929 gcc_assert (info->this_adjusting);
4930 op = thunk_adjust (&iter, op, 1, info->fixed_offset,
4931 virtual_offset, info->indirect_offset);
4933 gimple_call_set_arg (stmt, 0, op);
4934 gimple_call_set_fndecl (stmt, edge->callee->decl);
4935 update_stmt (stmt);
4936 id->src_node->remove ();
4937 successfully_inlined = expand_call_inline (bb, stmt, id, to_purge);
4938 maybe_remove_unused_call_args (cfun, stmt);
4939 /* This used to return true even though we do fail to inline in
4940 some cases. See PR98525. */
4941 goto egress;
4943 fn = cg_edge->callee->decl;
4944 cg_edge->callee->get_untransformed_body ();
4946 if (flag_checking && cg_edge->callee->decl != id->dst_node->decl)
4947 cg_edge->callee->verify ();
4949 /* We will be inlining this callee. */
4950 id->eh_lp_nr = lookup_stmt_eh_lp (stmt);
4952 /* Update the callers EH personality. */
4953 if (DECL_FUNCTION_PERSONALITY (fn))
4954 DECL_FUNCTION_PERSONALITY (cg_edge->caller->decl)
4955 = DECL_FUNCTION_PERSONALITY (fn);
4957 /* Split the block before the GIMPLE_CALL. */
4958 stmt_gsi = gsi_for_stmt (stmt);
4959 gsi_prev (&stmt_gsi);
4960 e = split_block (bb, gsi_end_p (stmt_gsi) ? NULL : gsi_stmt (stmt_gsi));
4961 bb = e->src;
4962 return_block = e->dest;
4963 remove_edge (e);
4965 /* If the GIMPLE_CALL was in the last statement of BB, it may have
4966 been the source of abnormal edges. In this case, schedule
4967 the removal of dead abnormal edges. */
4968 gsi = gsi_start_bb (return_block);
4969 gsi_next (&gsi);
4970 purge_dead_abnormal_edges = gsi_end_p (gsi);
4972 stmt_gsi = gsi_start_bb (return_block);
4974 /* Build a block containing code to initialize the arguments, the
4975 actual inline expansion of the body, and a label for the return
4976 statements within the function to jump to. The type of the
4977 statement expression is the return type of the function call.
4978 ??? If the call does not have an associated block then we will
4979 remap all callee blocks to NULL, effectively dropping most of
4980 its debug information. This should only happen for calls to
4981 artificial decls inserted by the compiler itself. We need to
4982 either link the inlined blocks into the caller block tree or
4983 not refer to them in any way to not break GC for locations. */
4984 if (tree block = gimple_block (stmt))
4986 /* We do want to assign a BLOCK_SOURCE_LOCATION other than UNKNOWN_LOCATION
4987 to make inlined_function_outer_scope_p return true on this BLOCK. */
4988 location_t loc = LOCATION_LOCUS (gimple_location (stmt));
4989 if (loc == UNKNOWN_LOCATION)
4990 loc = LOCATION_LOCUS (DECL_SOURCE_LOCATION (fn));
4991 if (loc == UNKNOWN_LOCATION)
4992 loc = BUILTINS_LOCATION;
4993 id->block = make_node (BLOCK);
4994 BLOCK_ABSTRACT_ORIGIN (id->block) = DECL_ORIGIN (fn);
4995 BLOCK_SOURCE_LOCATION (id->block) = loc;
4996 prepend_lexical_block (block, id->block);
4999 /* Local declarations will be replaced by their equivalents in this map. */
5000 st = id->decl_map;
5001 id->decl_map = new hash_map<tree, tree>;
5002 dst = id->debug_map;
5003 id->debug_map = NULL;
5004 if (flag_stack_reuse != SR_NONE)
5005 id->add_clobbers_to_eh_landing_pads = last_basic_block_for_fn (cfun);
5007 /* Record the function we are about to inline. */
5008 id->src_fn = fn;
5009 id->src_cfun = DECL_STRUCT_FUNCTION (fn);
5010 id->reset_location = DECL_IGNORED_P (fn);
5011 id->call_stmt = call_stmt;
5012 cfun->cfg->full_profile &= id->src_cfun->cfg->full_profile;
5014 /* When inlining into an OpenMP SIMD-on-SIMT loop, arrange for new automatic
5015 variables to be added to IFN_GOMP_SIMT_ENTER argument list. */
5016 dst_cfun = DECL_STRUCT_FUNCTION (id->dst_fn);
5017 simtvars_save = id->dst_simt_vars;
5018 if (!(dst_cfun->curr_properties & PROP_gimple_lomp_dev)
5019 && (simduid = bb->loop_father->simduid) != NULL_TREE
5020 && (simduid = ssa_default_def (dst_cfun, simduid)) != NULL_TREE
5021 && single_imm_use (simduid, &use, &simtenter_stmt)
5022 && is_gimple_call (simtenter_stmt)
5023 && gimple_call_internal_p (simtenter_stmt, IFN_GOMP_SIMT_ENTER))
5024 vec_alloc (id->dst_simt_vars, 0);
5025 else
5026 id->dst_simt_vars = NULL;
5028 if (profile_status_for_fn (id->src_cfun) == PROFILE_ABSENT)
5029 profile_status_for_fn (dst_cfun) = PROFILE_ABSENT;
5031 /* If the src function contains an IFN_VA_ARG, then so will the dst
5032 function after inlining. Likewise for IFN_GOMP_USE_SIMT. */
5033 prop_mask = PROP_gimple_lva | PROP_gimple_lomp_dev;
5034 src_properties = id->src_cfun->curr_properties & prop_mask;
5035 if (src_properties != prop_mask)
5036 dst_cfun->curr_properties &= src_properties | ~prop_mask;
5037 dst_cfun->calls_eh_return |= id->src_cfun->calls_eh_return;
5038 id->dst_node->calls_declare_variant_alt
5039 |= id->src_node->calls_declare_variant_alt;
5041 gcc_assert (!id->src_cfun->after_inlining);
5043 id->entry_bb = bb;
5044 if (lookup_attribute ("cold", DECL_ATTRIBUTES (fn)))
5046 gimple_stmt_iterator si = gsi_last_bb (bb);
5047 gsi_insert_after (&si, gimple_build_predict (PRED_COLD_FUNCTION,
5048 NOT_TAKEN),
5049 GSI_NEW_STMT);
5051 initialize_inlined_parameters (id, stmt, fn, bb);
5052 if (debug_nonbind_markers_p && debug_inline_points && id->block
5053 && inlined_function_outer_scope_p (id->block))
5055 gimple_stmt_iterator si = gsi_last_bb (bb);
5056 gsi_insert_after (&si, gimple_build_debug_inline_entry
5057 (id->block, DECL_SOURCE_LOCATION (id->src_fn)),
5058 GSI_NEW_STMT);
5061 /* If function to be inlined calls alloca, wrap the inlined function
5062 in between save_stack = __builtin_stack_save (); and
5063 __builtin_stack_restore (save_stack); calls. */
5064 if (id->src_cfun->calls_alloca && !gimple_call_noreturn_p (stmt))
5065 /* Don't do this for VLA allocations though, just for user alloca
5066 calls. */
5067 for (struct cgraph_edge *e = id->src_node->callees; e; e = e->next_callee)
5068 if (gimple_maybe_alloca_call_p (e->call_stmt)
5069 && !gimple_call_alloca_for_var_p (e->call_stmt))
5071 tree fn = builtin_decl_implicit (BUILT_IN_STACK_SAVE);
5072 gcall *call = gimple_build_call (fn, 0);
5073 save_stack = make_ssa_name (ptr_type_node);
5074 gimple_call_set_lhs (call, save_stack);
5075 gimple_stmt_iterator si = gsi_last_bb (bb);
5076 gsi_insert_after (&si, call, GSI_NEW_STMT);
5077 struct cgraph_node *dest = cgraph_node::get_create (fn);
5078 id->dst_node->create_edge (dest, call, bb->count)->inline_failed
5079 = CIF_BODY_NOT_AVAILABLE;
5080 break;
5083 if (DECL_INITIAL (fn))
5085 if (gimple_block (stmt))
5087 tree *var;
5089 prepend_lexical_block (id->block,
5090 remap_blocks (DECL_INITIAL (fn), id));
5091 gcc_checking_assert (BLOCK_SUBBLOCKS (id->block)
5092 && (BLOCK_CHAIN (BLOCK_SUBBLOCKS (id->block))
5093 == NULL_TREE));
5094 /* Move vars for PARM_DECLs from the DECL_INITIAL block to id->block,
5095 otherwise in DWARF the DW_TAG_formal_parameter DIEs will not be children of
5096 DW_TAG_inlined_subroutine, but of a DW_TAG_lexical_block
5097 under it. The parameters can then be evaluated in the debugger,
5098 but don't show up in backtraces. */
5099 for (var = &BLOCK_VARS (BLOCK_SUBBLOCKS (id->block)); *var; )
5100 if (TREE_CODE (DECL_ORIGIN (*var)) == PARM_DECL)
5102 tree v = *var;
5103 *var = TREE_CHAIN (v);
5104 TREE_CHAIN (v) = BLOCK_VARS (id->block);
5105 BLOCK_VARS (id->block) = v;
5107 else
5108 var = &TREE_CHAIN (*var);
5110 else
5111 remap_blocks_to_null (DECL_INITIAL (fn), id);
5114 /* Return statements in the function body will be replaced by jumps
5115 to the RET_LABEL. */
5116 gcc_assert (DECL_INITIAL (fn));
5117 gcc_assert (TREE_CODE (DECL_INITIAL (fn)) == BLOCK);
5119 /* Find the LHS to which the result of this call is assigned. */
5120 return_slot = NULL;
5121 if (gimple_call_lhs (stmt))
5123 modify_dest = gimple_call_lhs (stmt);
5125 /* The function which we are inlining might not return a value,
5126 in which case we should issue a warning that the function
5127 does not return a value. In that case the optimizers will
5128 see that the variable to which the value is assigned was not
5129 initialized. We do not want to issue a warning about that
5130 uninitialized variable. */
5131 if (DECL_P (modify_dest))
5132 suppress_warning (modify_dest, OPT_Wuninitialized);
5134 /* If we have a return slot, we can assign it the result directly,
5135 except in the case where it is a global variable that is only
5136 written to because, the callee being permitted to read or take
5137 the address of its DECL_RESULT, this could invalidate the flag
5138 on the global variable; instead we preventively remove the store,
5139 which would have happened later if the call was not inlined. */
5140 if (gimple_call_return_slot_opt_p (call_stmt))
5142 tree base = get_base_address (modify_dest);
5144 if (VAR_P (base)
5145 && (TREE_STATIC (base) || DECL_EXTERNAL (base))
5146 && varpool_node::get (base)->writeonly)
5147 return_slot = NULL;
5148 else
5149 return_slot = modify_dest;
5151 modify_dest = NULL;
5154 else
5155 modify_dest = NULL;
5157 /* If we are inlining a call to the C++ operator new, we don't want
5158 to use type based alias analysis on the return value. Otherwise
5159 we may get confused if the compiler sees that the inlined new
5160 function returns a pointer which was just deleted. See bug
5161 33407. */
5162 if (DECL_IS_OPERATOR_NEW_P (fn))
5164 return_slot = NULL;
5165 modify_dest = NULL;
5168 /* Declare the return variable for the function. */
5169 use_retvar = declare_return_variable (id, return_slot, modify_dest, bb);
5171 /* Add local vars in this inlined callee to caller. */
5172 add_local_variables (id->src_cfun, cfun, id);
5174 if (dump_enabled_p ())
5176 char buf[128];
5177 snprintf (buf, sizeof(buf), "%4.2f",
5178 cg_edge->sreal_frequency ().to_double ());
5179 dump_printf_loc (MSG_NOTE | MSG_PRIORITY_INTERNALS,
5180 call_stmt,
5181 "Inlining %C to %C with frequency %s\n",
5182 id->src_node, id->dst_node, buf);
5183 if (dump_file && (dump_flags & TDF_DETAILS))
5185 id->src_node->dump (dump_file);
5186 id->dst_node->dump (dump_file);
5190 /* This is it. Duplicate the callee body. Assume callee is
5191 pre-gimplified. Note that we must not alter the caller
5192 function in any way before this point, as this CALL_EXPR may be
5193 a self-referential call; if we're calling ourselves, we need to
5194 duplicate our body before altering anything. */
5195 copy_body (id, bb, return_block, NULL);
5197 reset_debug_bindings (id, stmt_gsi);
5199 if (flag_stack_reuse != SR_NONE)
5200 for (tree p = DECL_ARGUMENTS (id->src_fn); p; p = DECL_CHAIN (p))
5201 if (!TREE_THIS_VOLATILE (p))
5203 /* The value associated with P is a local temporary only if
5204 there is no value associated with P in the debug map. */
5205 tree *varp = id->decl_map->get (p);
5206 if (varp
5207 && VAR_P (*varp)
5208 && !is_gimple_reg (*varp)
5209 && !(id->debug_map && id->debug_map->get (p)))
5211 tree clobber = build_clobber (TREE_TYPE (*varp),
5212 CLOBBER_STORAGE_END);
5213 gimple *clobber_stmt;
5214 clobber_stmt = gimple_build_assign (*varp, clobber);
5215 gimple_set_location (clobber_stmt, gimple_location (stmt));
5216 gsi_insert_before (&stmt_gsi, clobber_stmt, GSI_SAME_STMT);
5220 if (save_stack)
5222 tree fn = builtin_decl_implicit (BUILT_IN_STACK_RESTORE);
5223 gcall *call = gimple_build_call (fn, 1, save_stack);
5224 gsi_insert_before (&stmt_gsi, call, GSI_SAME_STMT);
5225 struct cgraph_node *dest = cgraph_node::get_create (fn);
5226 id->dst_node->create_edge (dest, call,
5227 return_block->count)->inline_failed
5228 = CIF_BODY_NOT_AVAILABLE;
5231 /* Reset the escaped solution. */
5232 if (cfun->gimple_df)
5234 pt_solution_reset (&cfun->gimple_df->escaped);
5235 pt_solution_reset (&cfun->gimple_df->escaped_return);
5238 /* Add new automatic variables to IFN_GOMP_SIMT_ENTER arguments. */
5239 if (id->dst_simt_vars && id->dst_simt_vars->length () > 0)
5241 size_t nargs = gimple_call_num_args (simtenter_stmt);
5242 vec<tree> *vars = id->dst_simt_vars;
5243 auto_vec<tree> newargs (nargs + vars->length ());
5244 for (size_t i = 0; i < nargs; i++)
5245 newargs.quick_push (gimple_call_arg (simtenter_stmt, i));
5246 for (tree *pvar = vars->begin (); pvar != vars->end (); pvar++)
5248 tree ptrtype = build_pointer_type (TREE_TYPE (*pvar));
5249 newargs.quick_push (build1 (ADDR_EXPR, ptrtype, *pvar));
5251 gcall *g = gimple_build_call_internal_vec (IFN_GOMP_SIMT_ENTER, newargs);
5252 gimple_call_set_lhs (g, gimple_call_lhs (simtenter_stmt));
5253 gimple_stmt_iterator gsi = gsi_for_stmt (simtenter_stmt);
5254 gsi_replace (&gsi, g, false);
5256 vec_free (id->dst_simt_vars);
5257 id->dst_simt_vars = simtvars_save;
5259 /* Clean up. */
5260 if (id->debug_map)
5262 delete id->debug_map;
5263 id->debug_map = dst;
5265 delete id->decl_map;
5266 id->decl_map = st;
5268 /* Unlink the call's virtual operands before replacing it. */
5269 unlink_stmt_vdef (stmt);
5270 if (gimple_vdef (stmt)
5271 && TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
5272 release_ssa_name (gimple_vdef (stmt));
5274 /* If the inlined function returns a result that we care about,
5275 substitute the GIMPLE_CALL with an assignment of the return
5276 variable to the LHS of the call. That is, if STMT was
5277 'a = foo (...)', substitute the call with 'a = USE_RETVAR'. */
5278 if (use_retvar && gimple_call_lhs (stmt))
5280 gimple *old_stmt = stmt;
5281 stmt = gimple_build_assign (gimple_call_lhs (stmt), use_retvar);
5282 gimple_set_location (stmt, gimple_location (old_stmt));
5283 gsi_replace (&stmt_gsi, stmt, false);
5284 maybe_clean_or_replace_eh_stmt (old_stmt, stmt);
5285 /* Append a clobber for id->retvar if easily possible. */
5286 if (flag_stack_reuse != SR_NONE
5287 && id->retvar
5288 && VAR_P (id->retvar)
5289 && id->retvar != return_slot
5290 && id->retvar != modify_dest
5291 && !TREE_THIS_VOLATILE (id->retvar)
5292 && !is_gimple_reg (id->retvar)
5293 && !stmt_ends_bb_p (stmt))
5295 tree clobber = build_clobber (TREE_TYPE (id->retvar),
5296 CLOBBER_STORAGE_END);
5297 gimple *clobber_stmt;
5298 clobber_stmt = gimple_build_assign (id->retvar, clobber);
5299 gimple_set_location (clobber_stmt, gimple_location (old_stmt));
5300 gsi_insert_after (&stmt_gsi, clobber_stmt, GSI_SAME_STMT);
5303 else
5305 /* Handle the case of inlining a function with no return
5306 statement, which causes the return value to become undefined. */
5307 if (gimple_call_lhs (stmt)
5308 && TREE_CODE (gimple_call_lhs (stmt)) == SSA_NAME)
5310 tree name = gimple_call_lhs (stmt);
5311 tree var = SSA_NAME_VAR (name);
5312 tree def = var ? ssa_default_def (cfun, var) : NULL;
5314 if (def)
5316 /* If the variable is used undefined, make this name
5317 undefined via a move. */
5318 stmt = gimple_build_assign (gimple_call_lhs (stmt), def);
5319 gsi_replace (&stmt_gsi, stmt, true);
5321 else
5323 if (!var)
5325 var = create_tmp_reg_fn (cfun, TREE_TYPE (name), NULL);
5326 SET_SSA_NAME_VAR_OR_IDENTIFIER (name, var);
5328 /* Otherwise make this variable undefined. */
5329 gsi_remove (&stmt_gsi, true);
5330 set_ssa_default_def (cfun, var, name);
5331 SSA_NAME_DEF_STMT (name) = gimple_build_nop ();
5334 /* Replace with a clobber for id->retvar. */
5335 else if (flag_stack_reuse != SR_NONE
5336 && id->retvar
5337 && VAR_P (id->retvar)
5338 && id->retvar != return_slot
5339 && id->retvar != modify_dest
5340 && !TREE_THIS_VOLATILE (id->retvar)
5341 && !is_gimple_reg (id->retvar))
5343 tree clobber = build_clobber (TREE_TYPE (id->retvar));
5344 gimple *clobber_stmt;
5345 clobber_stmt = gimple_build_assign (id->retvar, clobber);
5346 gimple_set_location (clobber_stmt, gimple_location (stmt));
5347 gsi_replace (&stmt_gsi, clobber_stmt, false);
5348 maybe_clean_or_replace_eh_stmt (stmt, clobber_stmt);
5350 else
5351 gsi_remove (&stmt_gsi, true);
5354 if (purge_dead_abnormal_edges)
5355 bitmap_set_bit (to_purge, return_block->index);
5357 /* If the value of the new expression is ignored, that's OK. We
5358 don't warn about this for CALL_EXPRs, so we shouldn't warn about
5359 the equivalent inlined version either. */
5360 if (is_gimple_assign (stmt))
5362 gcc_assert (gimple_assign_single_p (stmt)
5363 || CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt)));
5364 TREE_USED (gimple_assign_rhs1 (stmt)) = 1;
5367 id->add_clobbers_to_eh_landing_pads = 0;
5369 /* Output the inlining info for this abstract function, since it has been
5370 inlined. If we don't do this now, we can lose the information about the
5371 variables in the function when the blocks get blown away as soon as we
5372 remove the cgraph node. */
5373 if (gimple_block (stmt))
5374 (*debug_hooks->outlining_inline_function) (fn);
5376 /* Update callgraph if needed. */
5377 cg_edge->callee->remove ();
5379 id->block = NULL_TREE;
5380 id->retvar = NULL_TREE;
5381 successfully_inlined = true;
5383 egress:
5384 input_location = saved_location;
5385 return successfully_inlined;
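/* Illustrative before/after sketch (not from the sources) of what
   expand_call_inline does to the caller's CFG for "a = foo (x);":

     before:  <bb 2>:  ...
                       a = foo (x);
                       ...rest...

     after:   <bb 2>:  ...
                       p.1 = x;            // initialize_inlined_parameters
              <blocks copied from foo's body; its returns become jumps to bb 3>
              <bb 3>:  a = retval.2;       // use_retvar replaces the call
                       ...rest...

   The names p.1 and retval.2 are purely illustrative.  The original call
   statement is either replaced by the assignment from the return
   variable, replaced by a clobber of the return variable, or removed.  */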
5388 /* Expand call statements reachable from STMT_P.
5389 We can only have CALL_EXPRs as the "toplevel" tree code or nested
5390 in a MODIFY_EXPR. */
5392 static bool
5393 gimple_expand_calls_inline (basic_block bb, copy_body_data *id,
5394 bitmap to_purge)
5396 gimple_stmt_iterator gsi;
5397 bool inlined = false;
5399 for (gsi = gsi_last_bb (bb); !gsi_end_p (gsi);)
5401 gimple *stmt = gsi_stmt (gsi);
5402 gsi_prev (&gsi);
5404 if (is_gimple_call (stmt)
5405 && !gimple_call_internal_p (stmt))
5406 inlined |= expand_call_inline (bb, stmt, id, to_purge);
5409 return inlined;
5413 /* Walk all basic blocks created after FIRST and try to fold every statement
5414 in the STATEMENTS pointer set. */
5416 static void
5417 fold_marked_statements (int first, hash_set<gimple *> *statements)
5419 auto_bitmap to_purge;
5421 auto_vec<edge, 20> stack (n_basic_blocks_for_fn (cfun) + 2);
5422 auto_sbitmap visited (last_basic_block_for_fn (cfun));
5423 bitmap_clear (visited);
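/* Do a depth-first walk of the CFG from the entry block, folding marked
   statements in blocks created after FIRST as we go; when a block's
   controlling condition has folded to a constant, only the taken outgoing
   edge is followed (see below), so statements that became unreachable are
   never folded. */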
5425 stack.quick_push (single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
5426 while (!stack.is_empty ())
5428 /* Look at the edge on the top of the stack. */
5429 edge e = stack.pop ();
5430 basic_block dest = e->dest;
5432 if (dest == EXIT_BLOCK_PTR_FOR_FN (cfun)
5433 || bitmap_bit_p (visited, dest->index))
5434 continue;
5436 bitmap_set_bit (visited, dest->index);
5438 if (dest->index >= first)
5439 for (gimple_stmt_iterator gsi = gsi_start_bb (dest);
5440 !gsi_end_p (gsi); gsi_next (&gsi))
5442 if (!statements->contains (gsi_stmt (gsi)))
5443 continue;
5445 gimple *old_stmt = gsi_stmt (gsi);
5446 tree old_decl = (is_gimple_call (old_stmt)
5447 ? gimple_call_fndecl (old_stmt) : 0);
5448 if (old_decl && fndecl_built_in_p (old_decl))
5450 /* Folding builtins can create multiple instructions;
5451 we need to look at all of them. */
5452 gimple_stmt_iterator i2 = gsi;
5453 gsi_prev (&i2);
5454 if (fold_stmt (&gsi))
5456 gimple *new_stmt;
5457 /* If a builtin at the end of a bb folded into nothing,
5458 the following loop won't work. */
5459 if (gsi_end_p (gsi))
5461 cgraph_update_edges_for_call_stmt (old_stmt,
5462 old_decl, NULL);
5463 break;
5465 if (gsi_end_p (i2))
5466 i2 = gsi_start_bb (dest);
5467 else
5468 gsi_next (&i2);
5469 while (1)
5471 new_stmt = gsi_stmt (i2);
5472 update_stmt (new_stmt);
5473 cgraph_update_edges_for_call_stmt (old_stmt, old_decl,
5474 new_stmt);
5476 if (new_stmt == gsi_stmt (gsi))
5478 /* It is okay to check only for the very last
5479 of these statements. If it is a throwing
5480 statement nothing will change. If it isn't,
5481 this can remove EH edges. The only problematic
5482 case would be if some intermediate stmt threw
5483 but not the last one; that would mean
5484 we'd have to split the block, which we can't
5485 do here, and we'd lose anyway. And as builtins
5486 probably never throw, this all
5487 is moot anyway. */
5488 if (maybe_clean_or_replace_eh_stmt (old_stmt,
5489 new_stmt))
5490 bitmap_set_bit (to_purge, dest->index);
5491 break;
5493 gsi_next (&i2);
5497 else if (fold_stmt (&gsi))
5499 /* Re-read the statement from GSI as fold_stmt() may
5500 have changed it. */
5501 gimple *new_stmt = gsi_stmt (gsi);
5502 update_stmt (new_stmt);
5504 if (is_gimple_call (old_stmt)
5505 || is_gimple_call (new_stmt))
5506 cgraph_update_edges_for_call_stmt (old_stmt, old_decl,
5507 new_stmt);
5509 if (maybe_clean_or_replace_eh_stmt (old_stmt, new_stmt))
5510 bitmap_set_bit (to_purge, dest->index);
5514 if (EDGE_COUNT (dest->succs) > 0)
5516 /* Avoid warnings emitted from folding statements that
5517 became unreachable because of inlined function parameter
5518 propagation. */
5519 e = find_taken_edge (dest, NULL_TREE);
5520 if (e)
5521 stack.quick_push (e);
5522 else
5524 edge_iterator ei;
5525 FOR_EACH_EDGE (e, ei, dest->succs)
5526 stack.safe_push (e);
5531 gimple_purge_all_dead_eh_edges (to_purge);
5534 /* Expand calls to inline functions in the body of FN. */
5536 unsigned int
5537 optimize_inline_calls (tree fn)
5539 copy_body_data id;
5540 basic_block bb;
5541 int last = n_basic_blocks_for_fn (cfun);
5542 bool inlined_p = false;
5544 /* Clear out ID. */
5545 memset (&id, 0, sizeof (id));
5547 id.src_node = id.dst_node = cgraph_node::get (fn);
5548 gcc_assert (id.dst_node->definition);
5549 id.dst_fn = fn;
5550 /* Or any functions that aren't finished yet. */
5551 if (current_function_decl)
5552 id.dst_fn = current_function_decl;
5554 id.copy_decl = copy_decl_maybe_to_var;
5555 id.transform_call_graph_edges = CB_CGE_DUPLICATE;
5556 id.transform_new_cfg = false;
5557 id.transform_return_to_modify = true;
5558 id.transform_parameter = true;
5559 id.statements_to_fold = new hash_set<gimple *>;
5561 push_gimplify_context ();
5563 /* We make no attempts to keep dominance info up-to-date. */
5564 free_dominance_info (CDI_DOMINATORS);
5565 free_dominance_info (CDI_POST_DOMINATORS);
5567 /* Register specific gimple functions. */
5568 gimple_register_cfg_hooks ();
5570 /* Reach the trees by walking over the CFG, and note the
5571 enclosing basic-blocks in the call edges. */
5572 /* We walk the blocks going forward, because inlined function bodies
5573 will split id->current_basic_block, and the new blocks will
5574 follow it; we'll trudge through them, processing their CALL_EXPRs
5575 along the way. */
5576 auto_bitmap to_purge;
5577 FOR_EACH_BB_FN (bb, cfun)
5578 inlined_p |= gimple_expand_calls_inline (bb, &id, to_purge);
5580 pop_gimplify_context (NULL);
5582 if (flag_checking)
5584 struct cgraph_edge *e;
5586 id.dst_node->verify ();
5588 /* Double check that we inlined everything we are supposed to inline. */
5589 for (e = id.dst_node->callees; e; e = e->next_callee)
5590 gcc_assert (e->inline_failed);
5593 /* If we didn't inline into the function there is nothing to do. */
5594 if (!inlined_p)
5596 delete id.statements_to_fold;
5597 return 0;
5600 /* Fold queued statements. */
5601 update_max_bb_count ();
5602 fold_marked_statements (last, id.statements_to_fold);
5603 delete id.statements_to_fold;
5605 /* Finally purge EH and abnormal edges from the call stmts we inlined.
5606 We need to do this after fold_marked_statements since that may walk
5607 the SSA use-def chain. */
5608 unsigned i;
5609 bitmap_iterator bi;
5610 EXECUTE_IF_SET_IN_BITMAP (to_purge, 0, i, bi)
5612 basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
5613 if (bb)
5615 gimple_purge_dead_eh_edges (bb);
5616 gimple_purge_dead_abnormal_call_edges (bb);
5620 gcc_assert (!id.debug_stmts.exists ());
5622 /* Renumber the lexical scoping (non-code) blocks consecutively. */
5623 number_blocks (fn);
5625 delete_unreachable_blocks_update_callgraph (id.dst_node, false);
5626 id.dst_node->calls_comdat_local = id.dst_node->check_calls_comdat_local_p ();
5628 if (flag_checking)
5629 id.dst_node->verify ();
5631 /* It would be nice to check SSA/CFG/statement consistency here, but it is
5632 not possible yet - the IPA passes might make various functions no longer
5633 throw and they don't care to proactively update local EH info. This is
5634 done later in the fixup_cfg pass that also executes the verification. */
5635 return (TODO_update_ssa
5636 | TODO_cleanup_cfg
5637 | (gimple_in_ssa_p (cfun) ? TODO_remove_unused_locals : 0)
5638 | (gimple_in_ssa_p (cfun) ? TODO_update_address_taken : 0));
5641 /* Passed to walk_tree. Copies the node pointed to, if appropriate. */
5643 tree
5644 copy_tree_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
5646 enum tree_code code = TREE_CODE (*tp);
5647 enum tree_code_class cl = TREE_CODE_CLASS (code);
5649 /* We make copies of most nodes. */
5650 if (IS_EXPR_CODE_CLASS (cl)
5651 || code == TREE_LIST
5652 || code == TREE_VEC
5653 || code == TYPE_DECL
5654 || code == OMP_CLAUSE)
5656 /* Because the chain gets clobbered when we make a copy, we save it
5657 here. */
5658 tree chain = NULL_TREE, new_tree;
5660 if (CODE_CONTAINS_STRUCT (code, TS_COMMON))
5661 chain = TREE_CHAIN (*tp);
5663 /* Copy the node. */
5664 new_tree = copy_node (*tp);
5666 *tp = new_tree;
5668 /* Now, restore the chain, if appropriate. That will cause
5669 walk_tree to walk into the chain as well. */
5670 if (code == PARM_DECL
5671 || code == TREE_LIST
5672 || code == OMP_CLAUSE)
5673 TREE_CHAIN (*tp) = chain;
5675 /* For now, we don't update BLOCKs when we make copies. So, we
5676 have to nullify all BIND_EXPRs. */
5677 if (TREE_CODE (*tp) == BIND_EXPR)
5678 BIND_EXPR_BLOCK (*tp) = NULL_TREE;
5680 else if (code == CONSTRUCTOR)
5682 /* CONSTRUCTOR nodes need special handling because
5683 we need to duplicate the vector of elements. */
5684 tree new_tree;
5686 new_tree = copy_node (*tp);
5687 CONSTRUCTOR_ELTS (new_tree) = vec_safe_copy (CONSTRUCTOR_ELTS (*tp));
5688 *tp = new_tree;
5690 else if (code == STATEMENT_LIST)
5691 /* We used to just abort on STATEMENT_LIST, but we can run into them
5692 with statement-expressions (c++/40975). */
5693 copy_statement_list (tp);
5694 else if (TREE_CODE_CLASS (code) == tcc_type)
5695 *walk_subtrees = 0;
5696 else if (TREE_CODE_CLASS (code) == tcc_declaration)
5697 *walk_subtrees = 0;
5698 else if (TREE_CODE_CLASS (code) == tcc_constant)
5699 *walk_subtrees = 0;
5700 return NULL_TREE;
5703 /* The SAVE_EXPR pointed to by TP is being copied. If ST contains
5704 information indicating to what new SAVE_EXPR this one should be mapped,
5705 use that one. Otherwise, create a new node and enter it in ST. FN is
5706 the function into which the copy will be placed. */
5708 static void
5709 remap_save_expr (tree *tp, hash_map<tree, tree> *st, int *walk_subtrees)
5711 tree *n;
5712 tree t;
5714 /* See if we already encountered this SAVE_EXPR. */
5715 n = st->get (*tp);
5717 /* If we didn't already remap this SAVE_EXPR, do so now. */
5718 if (!n)
5720 t = copy_node (*tp);
5722 /* Remember this SAVE_EXPR. */
5723 st->put (*tp, t);
5724 /* Make sure we don't remap an already-remapped SAVE_EXPR. */
5725 st->put (t, t);
5727 else
5729 /* We've already walked into this SAVE_EXPR; don't do it again. */
5730 *walk_subtrees = 0;
5731 t = *n;
5734 /* Replace this SAVE_EXPR with the copy. */
5735 *tp = t;
5738 /* Called via walk_gimple_seq. If *GSIP points to a GIMPLE_LABEL for a local
5739 label, copies the declaration and enters it in the splay_tree in DATA (which
5740 is really a 'copy_body_data *'). */
5742 static tree
5743 mark_local_labels_stmt (gimple_stmt_iterator *gsip,
5744 bool *handled_ops_p ATTRIBUTE_UNUSED,
5745 struct walk_stmt_info *wi)
5747 copy_body_data *id = (copy_body_data *) wi->info;
5748 glabel *stmt = dyn_cast <glabel *> (gsi_stmt (*gsip));
5750 if (stmt)
5752 tree decl = gimple_label_label (stmt);
5754 /* Copy the decl and remember the copy. */
5755 insert_decl_map (id, decl, id->copy_decl (decl, id));
5758 return NULL_TREE;
5761 static gimple_seq duplicate_remap_omp_clause_seq (gimple_seq seq,
5762 struct walk_stmt_info *wi);
5764 /* Called via walk_gimple_seq by copy_gimple_seq_and_replace_locals.
5765 Using the map pointed to by ST (which is really a 'hash_map<tree, tree>'),
5766 remaps all local declarations to appropriate replacements in gimple
5767 operands. */
5769 static tree
5770 replace_locals_op (tree *tp, int *walk_subtrees, void *data)
5772 struct walk_stmt_info *wi = (struct walk_stmt_info*) data;
5773 copy_body_data *id = (copy_body_data *) wi->info;
5774 hash_map<tree, tree> *st = id->decl_map;
5775 tree *n;
5776 tree expr = *tp;
5778 /* For recursive invocations this is no longer the LHS itself. */
5779 bool is_lhs = wi->is_lhs;
5780 wi->is_lhs = false;
5782 if (TREE_CODE (expr) == SSA_NAME)
5784 *tp = remap_ssa_name (*tp, id);
5785 *walk_subtrees = 0;
5786 if (is_lhs)
5787 SSA_NAME_DEF_STMT (*tp) = gsi_stmt (wi->gsi);
5789 /* Only a local declaration (variable or label). */
5790 else if ((VAR_P (expr) && !TREE_STATIC (expr))
5791 || TREE_CODE (expr) == LABEL_DECL)
5793 /* Lookup the declaration. */
5794 n = st->get (expr);
5796 /* If it's there, remap it. */
5797 if (n)
5798 *tp = *n;
5799 *walk_subtrees = 0;
5801 else if (TREE_CODE (expr) == STATEMENT_LIST
5802 || TREE_CODE (expr) == BIND_EXPR
5803 || TREE_CODE (expr) == SAVE_EXPR)
5804 gcc_unreachable ();
5805 else if (TREE_CODE (expr) == TARGET_EXPR)
5807 /* Don't mess with a TARGET_EXPR that hasn't been expanded.
5808 It's OK for this to happen if it was part of a subtree that
5809 isn't immediately expanded, such as operand 2 of another
5810 TARGET_EXPR. */
5811 if (!TREE_OPERAND (expr, 1))
5813 TREE_OPERAND (expr, 1) = TREE_OPERAND (expr, 3);
5814 TREE_OPERAND (expr, 3) = NULL_TREE;
5817 else if (TREE_CODE (expr) == OMP_CLAUSE)
5819 /* Before the omplower pass completes, some OMP clauses can contain
5820 sequences that are neither copied by gimple_seq_copy nor walked by
5821 walk_gimple_seq. To make copy_gimple_seq_and_replace_locals work even
5822 in those situations, we have to copy and process them explicitly. */
5824 if (OMP_CLAUSE_CODE (expr) == OMP_CLAUSE_LASTPRIVATE)
5826 gimple_seq seq = OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (expr);
5827 seq = duplicate_remap_omp_clause_seq (seq, wi);
5828 OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (expr) = seq;
5830 else if (OMP_CLAUSE_CODE (expr) == OMP_CLAUSE_LINEAR)
5832 gimple_seq seq = OMP_CLAUSE_LINEAR_GIMPLE_SEQ (expr);
5833 seq = duplicate_remap_omp_clause_seq (seq, wi);
5834 OMP_CLAUSE_LINEAR_GIMPLE_SEQ (expr) = seq;
5836 else if (OMP_CLAUSE_CODE (expr) == OMP_CLAUSE_REDUCTION)
5838 gimple_seq seq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (expr);
5839 seq = duplicate_remap_omp_clause_seq (seq, wi);
5840 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (expr) = seq;
5841 seq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (expr);
5842 seq = duplicate_remap_omp_clause_seq (seq, wi);
5843 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (expr) = seq;
5847 /* Keep iterating. */
5848 return NULL_TREE;
5852 /* Called via walk_gimple_seq by copy_gimple_seq_and_replace_locals.
5853 Using the map pointed to by ST (which is really a 'hash_map<tree, tree>'),
5854 remaps all local declarations to appropriate replacements in gimple
5855 statements. */
5857 static tree
5858 replace_locals_stmt (gimple_stmt_iterator *gsip,
5859 bool *handled_ops_p ATTRIBUTE_UNUSED,
5860 struct walk_stmt_info *wi)
5862 copy_body_data *id = (copy_body_data *) wi->info;
5863 gimple *gs = gsi_stmt (*gsip);
5865 if (gbind *stmt = dyn_cast <gbind *> (gs))
5867 tree block = gimple_bind_block (stmt);
5869 if (block)
5871 remap_block (&block, id);
5872 gimple_bind_set_block (stmt, block);
5875 /* This will remap a lot of the same decls again, but this should be
5876 harmless. */
5877 if (gimple_bind_vars (stmt))
5879 tree old_var, decls = gimple_bind_vars (stmt);
5881 for (old_var = decls; old_var; old_var = DECL_CHAIN (old_var))
5882 if (!can_be_nonlocal (old_var, id)
5883 && ! variably_modified_type_p (TREE_TYPE (old_var), id->src_fn))
5884 remap_decl (old_var, id);
5886 gcc_checking_assert (!id->prevent_decl_creation_for_types);
5887 id->prevent_decl_creation_for_types = true;
5888 gimple_bind_set_vars (stmt, remap_decls (decls, NULL, id));
5889 id->prevent_decl_creation_for_types = false;
5893 /* Keep iterating. */
5894 return NULL_TREE;
5897 /* Create a copy of SEQ and remap all decls in it. */
5899 static gimple_seq
5900 duplicate_remap_omp_clause_seq (gimple_seq seq, struct walk_stmt_info *wi)
5902 if (!seq)
5903 return NULL;
5905 /* Any labels in OMP sequences can only be referred to from within the
5906 sequence itself, so we can both mark them and remap them here. */
5907 walk_gimple_seq (seq, mark_local_labels_stmt, NULL, wi);
5908 gimple_seq copy = gimple_seq_copy (seq);
5909 walk_gimple_seq (copy, replace_locals_stmt, replace_locals_op, wi);
5910 return copy;
5913 /* Copies everything in SEQ and replaces variables and labels local to
5914 current_function_decl. */
5916 gimple_seq
5917 copy_gimple_seq_and_replace_locals (gimple_seq seq)
5919 copy_body_data id;
5920 struct walk_stmt_info wi;
5921 gimple_seq copy;
5923 /* There's nothing to do for NULL_TREE. */
5924 if (seq == NULL)
5925 return seq;
5927 /* Set up ID. */
5928 memset (&id, 0, sizeof (id));
5929 id.src_fn = current_function_decl;
5930 id.dst_fn = current_function_decl;
5931 id.src_cfun = cfun;
5932 id.decl_map = new hash_map<tree, tree>;
5933 id.debug_map = NULL;
5935 id.copy_decl = copy_decl_no_change;
5936 id.transform_call_graph_edges = CB_CGE_DUPLICATE;
5937 id.transform_new_cfg = false;
5938 id.transform_return_to_modify = false;
5939 id.transform_parameter = false;
5941 /* Walk the tree once to find local labels. */
5942 memset (&wi, 0, sizeof (wi));
5943 hash_set<tree> visited;
5944 wi.info = &id;
5945 wi.pset = &visited;
5946 walk_gimple_seq (seq, mark_local_labels_stmt, NULL, &wi);
5948 copy = gimple_seq_copy (seq);
5950 /* Walk the copy, remapping decls. */
5951 memset (&wi, 0, sizeof (wi));
5952 wi.info = &id;
5953 walk_gimple_seq (copy, replace_locals_stmt, replace_locals_op, &wi);
5955 /* Clean up. */
5956 delete id.decl_map;
5957 if (id.debug_map)
5958 delete id.debug_map;
5959 if (id.dependence_map)
5961 delete id.dependence_map;
5962 id.dependence_map = NULL;
5965 return copy;
5969 /* Allow someone to determine if SEARCH is a child of TOP from gdb. */
5971 static tree
5972 debug_find_tree_1 (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED, void *data)
5974 if (*tp == data)
5975 return (tree) data;
5976 else
5977 return NULL;
5980 DEBUG_FUNCTION bool
5981 debug_find_tree (tree top, tree search)
5983 return walk_tree_without_duplicates (&top, debug_find_tree_1, search) != 0;
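/* Illustrative use from the debugger (names are hypothetical):
     (gdb) call debug_find_tree (block_tree, some_decl)
   returns true iff SOME_DECL occurs somewhere below BLOCK_TREE. */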
5987 /* Declare the variables created by the inliner. Add all the variables in
5988 VARS to BIND_EXPR. */
5990 static void
5991 declare_inline_vars (tree block, tree vars)
5993 tree t;
5994 for (t = vars; t; t = DECL_CHAIN (t))
5996 DECL_SEEN_IN_BIND_EXPR_P (t) = 1;
5997 gcc_assert (!TREE_STATIC (t) && !TREE_ASM_WRITTEN (t));
5998 add_local_decl (cfun, t);
6001 if (block)
6002 BLOCK_VARS (block) = chainon (BLOCK_VARS (block), vars);
6005 /* Finish the copy COPY of DECL: the DECL originally lived in ID->src_fn,
6006 but the copy will belong to ID->dst_fn. Shared by the copy_decl_*
6007 routines below. */
6009 tree
6010 copy_decl_for_dup_finish (copy_body_data *id, tree decl, tree copy)
6012 /* Don't generate debug information for the copy if we wouldn't have
6013 generated it for the original either. */
6014 DECL_ARTIFICIAL (copy) = DECL_ARTIFICIAL (decl);
6015 DECL_IGNORED_P (copy) = DECL_IGNORED_P (decl);
6017 /* Set the DECL_ABSTRACT_ORIGIN so the debugging routines know what
6018 declaration inspired this copy. */
6019 DECL_ABSTRACT_ORIGIN (copy) = DECL_ORIGIN (decl);
6021 /* The new variable/label has no RTL, yet. */
6022 if (HAS_RTL_P (copy)
6023 && !TREE_STATIC (copy) && !DECL_EXTERNAL (copy))
6024 SET_DECL_RTL (copy, 0);
6025 /* For vector typed decls make sure to update DECL_MODE according
6026 to the new function context. */
6027 if (VECTOR_TYPE_P (TREE_TYPE (copy)))
6028 SET_DECL_MODE (copy, TYPE_MODE (TREE_TYPE (copy)));
6030 /* These args would always appear unused, if not for this. */
6031 TREE_USED (copy) = 1;
6033 /* Set the context for the new declaration. */
6034 if (!DECL_CONTEXT (decl))
6035 /* Globals stay global. */
6037 else if (DECL_CONTEXT (decl) != id->src_fn)
6038 /* Things that weren't in the scope of the function we're inlining
6039 from aren't in the scope we're inlining to, either. */
6041 else if (TREE_STATIC (decl))
6042 /* Function-scoped static variables should stay in the original
6043 function. */
6045 else
6047 /* Ordinary automatic local variables are now in the scope of the
6048 new function. */
6049 DECL_CONTEXT (copy) = id->dst_fn;
6050 if (VAR_P (copy) && id->dst_simt_vars && !is_gimple_reg (copy))
6052 if (!lookup_attribute ("omp simt private", DECL_ATTRIBUTES (copy)))
6053 DECL_ATTRIBUTES (copy)
6054 = tree_cons (get_identifier ("omp simt private"), NULL,
6055 DECL_ATTRIBUTES (copy));
6056 id->dst_simt_vars->safe_push (copy);
6060 return copy;
6063 /* Create a new VAR_DECL that is identical in all respects to DECL except
6064 that DECL can be either a PARM_DECL or a RESULT_DECL. The original
6065 DECL must come from ID->src_fn and the copy will be part of ID->dst_fn. */
6067 tree
6068 copy_decl_to_var (tree decl, copy_body_data *id)
6070 tree copy, type;
6072 gcc_assert (TREE_CODE (decl) == PARM_DECL
6073 || TREE_CODE (decl) == RESULT_DECL);
6075 type = TREE_TYPE (decl);
6077 copy = build_decl (DECL_SOURCE_LOCATION (id->dst_fn),
6078 VAR_DECL, DECL_NAME (decl), type);
6079 if (DECL_PT_UID_SET_P (decl))
6080 SET_DECL_PT_UID (copy, DECL_PT_UID (decl));
6081 TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (decl);
6082 TREE_READONLY (copy) = TREE_READONLY (decl);
6083 TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (decl);
6084 DECL_NOT_GIMPLE_REG_P (copy) = DECL_NOT_GIMPLE_REG_P (decl);
6085 DECL_BY_REFERENCE (copy) = DECL_BY_REFERENCE (decl);
6087 return copy_decl_for_dup_finish (id, decl, copy);
6090 /* Like copy_decl_to_var, but create a return slot object instead of a
6091 pointer variable for return by invisible reference. */
6093 static tree
6094 copy_result_decl_to_var (tree decl, copy_body_data *id)
6096 tree copy, type;
6098 gcc_assert (TREE_CODE (decl) == PARM_DECL
6099 || TREE_CODE (decl) == RESULT_DECL);
6101 type = TREE_TYPE (decl);
6102 if (DECL_BY_REFERENCE (decl))
6103 type = TREE_TYPE (type);
6105 copy = build_decl (DECL_SOURCE_LOCATION (id->dst_fn),
6106 VAR_DECL, DECL_NAME (decl), type);
6107 if (DECL_PT_UID_SET_P (decl))
6108 SET_DECL_PT_UID (copy, DECL_PT_UID (decl));
6109 TREE_READONLY (copy) = TREE_READONLY (decl);
6110 TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (decl);
6111 if (!DECL_BY_REFERENCE (decl))
6113 TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (decl);
6114 DECL_NOT_GIMPLE_REG_P (copy)
6115 = (DECL_NOT_GIMPLE_REG_P (decl)
6116 /* RESULT_DECLs are treated specially by needs_to_live_in_memory;
6117 mirror that to the created VAR_DECL. */
6118 || (TREE_CODE (decl) == RESULT_DECL
6119 && aggregate_value_p (decl, id->src_fn)));
6122 return copy_decl_for_dup_finish (id, decl, copy);
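/* Copy DECL verbatim for use in ID->dst_fn, leaving its TREE_CODE
   unchanged; only the bookkeeping done by copy_decl_for_dup_finish
   (context, abstract origin, RTL, ...) is applied, and label copies
   additionally get TREE_ADDRESSABLE and LABEL_DECL_UID reset. */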
6125 tree
6126 copy_decl_no_change (tree decl, copy_body_data *id)
6128 tree copy;
6130 copy = copy_node (decl);
6132 /* The COPY is not abstract; it will be generated in DST_FN. */
6133 DECL_ABSTRACT_P (copy) = false;
6134 lang_hooks.dup_lang_specific_decl (copy);
6136 /* TREE_ADDRESSABLE isn't used to indicate that a label's address has
6137 been taken; it's for internal bookkeeping in expand_goto_internal. */
6138 if (TREE_CODE (copy) == LABEL_DECL)
6140 TREE_ADDRESSABLE (copy) = 0;
6141 LABEL_DECL_UID (copy) = -1;
6144 return copy_decl_for_dup_finish (id, decl, copy);
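/* Copy DECL for use in ID->dst_fn, turning PARM_DECLs and RESULT_DECLs
   into VAR_DECLs and copying everything else unchanged.  This is the
   copy_decl hook used by optimize_inline_calls above. */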
6147 static tree
6148 copy_decl_maybe_to_var (tree decl, copy_body_data *id)
6150 if (TREE_CODE (decl) == PARM_DECL || TREE_CODE (decl) == RESULT_DECL)
6151 return copy_decl_to_var (decl, id);
6152 else
6153 return copy_decl_no_change (decl, id);
6156 /* Return a copy of the function's argument tree without any modifications. */
6158 static tree
6159 copy_arguments_nochange (tree orig_parm, copy_body_data * id)
6161 tree arg, *parg;
6162 tree new_parm = NULL;
6164 parg = &new_parm;
6165 for (arg = orig_parm; arg; arg = DECL_CHAIN (arg))
6167 tree new_tree = remap_decl (arg, id);
6168 if (TREE_CODE (new_tree) != PARM_DECL)
6169 new_tree = id->copy_decl (arg, id);
6170 lang_hooks.dup_lang_specific_decl (new_tree);
6171 *parg = new_tree;
6172 parg = &DECL_CHAIN (new_tree);
6174 return new_parm;
6177 /* Return a copy of the function's static chain. */
6178 static tree
6179 copy_static_chain (tree static_chain, copy_body_data * id)
6181 tree *chain_copy, *pvar;
6183 chain_copy = &static_chain;
6184 for (pvar = chain_copy; *pvar; pvar = &DECL_CHAIN (*pvar))
6186 tree new_tree = remap_decl (*pvar, id);
6187 lang_hooks.dup_lang_specific_decl (new_tree);
6188 DECL_CHAIN (new_tree) = DECL_CHAIN (*pvar);
6189 *pvar = new_tree;
6191 return static_chain;
6194 /* Return true if the function is allowed to be versioned.
6195 This is a guard for the versioning functionality. */
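/* For instance (illustrative), a function declared

     __attribute__ ((noclone)) int f (int);

   is never versioned, nor is one for which copy_forbidden reports a
   reason, such as receiving a non-local goto. */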
6197 bool
6198 tree_versionable_function_p (tree fndecl)
6200 return (!lookup_attribute ("noclone", DECL_ATTRIBUTES (fndecl))
6201 && copy_forbidden (DECL_STRUCT_FUNCTION (fndecl)) == NULL);
6204 /* Update clone info after duplication. */
6206 static void
6207 update_clone_info (copy_body_data * id)
6209 struct cgraph_node *this_node = id->dst_node;
6210 if (!this_node->clones)
6211 return;
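/* Walk the entire clone tree of THIS_NODE in preorder: descend into
   ->clones first, then visit ->next_sibling_clone, backing up through
   ->clone_of once a subtree is exhausted. */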
6212 for (cgraph_node *node = this_node->clones; node != this_node;)
6214 /* First update replace maps to match the new body. */
6215 clone_info *info = clone_info::get (node);
6216 if (info && info->tree_map)
6218 unsigned int i;
6219 for (i = 0; i < vec_safe_length (info->tree_map); i++)
6221 struct ipa_replace_map *replace_info;
6222 replace_info = (*info->tree_map)[i];
6223 walk_tree (&replace_info->new_tree, copy_tree_body_r, id, NULL);
6227 if (node->clones)
6228 node = node->clones;
6229 else if (node->next_sibling_clone)
6230 node = node->next_sibling_clone;
6231 else
6233 while (node != id->dst_node && !node->next_sibling_clone)
6234 node = node->clone_of;
6235 if (node != id->dst_node)
6236 node = node->next_sibling_clone;
6241 /* Create a copy of a function's tree.
6242 OLD_DECL and NEW_DECL are FUNCTION_DECL tree nodes
6243 of the original function and the new copied function
6244 respectively. In case we want to replace a DECL
6245 tree with another tree while duplicating the function's
6246 body, TREE_MAP represents the mapping between these
6247 trees. If UPDATE_CLONES is set, the call_stmt fields
6248 of edges of clones of the function will be updated.
6250 If non-NULL, PARAM_ADJUSTMENTS determines how the function prototype (i.e.
6251 the function parameters and return value) should be modified.
6252 If non-NULL, BLOCKS_TO_COPY determines what basic blocks to copy.
6253 If non-NULL, NEW_ENTRY determines the new entry BB of the clone.
6255 void
6256 tree_function_versioning (tree old_decl, tree new_decl,
6257 vec<ipa_replace_map *, va_gc> *tree_map,
6258 ipa_param_adjustments *param_adjustments,
6259 bool update_clones, bitmap blocks_to_copy,
6260 basic_block new_entry)
6262 struct cgraph_node *old_version_node;
6263 struct cgraph_node *new_version_node;
6264 copy_body_data id;
6265 tree p;
6266 unsigned i;
6267 struct ipa_replace_map *replace_info;
6268 basic_block old_entry_block, bb;
6269 auto_vec<gimple *, 10> init_stmts;
6270 tree vars = NULL_TREE;
6272 /* We can get called recursively from expand_call_inline via clone
6273 materialization. While expand_call_inline maintains input_location,
6274 we cannot allow it to leak into the materialized clone. */
6275 location_t saved_location = input_location;
6276 input_location = UNKNOWN_LOCATION;
6278 gcc_assert (TREE_CODE (old_decl) == FUNCTION_DECL
6279 && TREE_CODE (new_decl) == FUNCTION_DECL);
6280 DECL_POSSIBLY_INLINED (old_decl) = 1;
6282 old_version_node = cgraph_node::get (old_decl);
6283 gcc_checking_assert (old_version_node);
6284 new_version_node = cgraph_node::get (new_decl);
6285 gcc_checking_assert (new_version_node);
6287 /* Copy over debug args. */
6288 if (DECL_HAS_DEBUG_ARGS_P (old_decl))
6290 vec<tree, va_gc> **new_debug_args, **old_debug_args;
6291 gcc_checking_assert (decl_debug_args_lookup (new_decl) == NULL);
6292 DECL_HAS_DEBUG_ARGS_P (new_decl) = 0;
6293 old_debug_args = decl_debug_args_lookup (old_decl);
6294 if (old_debug_args)
6296 new_debug_args = decl_debug_args_insert (new_decl);
6297 *new_debug_args = vec_safe_copy (*old_debug_args);
6301 /* Output the inlining info for this abstract function, since it has been
6302 inlined. If we don't do this now, we can lose the information about the
6303 variables in the function when the blocks get blown away as soon as we
6304 remove the cgraph node. */
6305 (*debug_hooks->outlining_inline_function) (old_decl);
6307 DECL_ARTIFICIAL (new_decl) = 1;
6308 DECL_ABSTRACT_ORIGIN (new_decl) = DECL_ORIGIN (old_decl);
6309 if (DECL_ORIGIN (old_decl) == old_decl)
6310 old_version_node->used_as_abstract_origin = true;
6311 DECL_FUNCTION_PERSONALITY (new_decl) = DECL_FUNCTION_PERSONALITY (old_decl);
6313 /* Prepare the data structures for the tree copy. */
6314 memset (&id, 0, sizeof (id));
6316 /* Allocate the set of statements to be folded after copying the body. */
6317 id.statements_to_fold = new hash_set<gimple *>;
6319 id.decl_map = new hash_map<tree, tree>;
6320 id.debug_map = NULL;
6321 id.src_fn = old_decl;
6322 id.dst_fn = new_decl;
6323 id.src_node = old_version_node;
6324 id.dst_node = new_version_node;
6325 id.src_cfun = DECL_STRUCT_FUNCTION (old_decl);
6326 id.blocks_to_copy = blocks_to_copy;
6328 id.copy_decl = copy_decl_no_change;
6329 id.transform_call_graph_edges
6330 = update_clones ? CB_CGE_MOVE_CLONES : CB_CGE_MOVE;
6331 id.transform_new_cfg = true;
6332 id.transform_return_to_modify = false;
6333 id.transform_parameter = false;
6335 old_entry_block = ENTRY_BLOCK_PTR_FOR_FN (DECL_STRUCT_FUNCTION (old_decl));
6336 DECL_RESULT (new_decl) = DECL_RESULT (old_decl);
6337 DECL_ARGUMENTS (new_decl) = DECL_ARGUMENTS (old_decl);
6338 initialize_cfun (new_decl, old_decl,
6339 new_entry ? new_entry->count : old_entry_block->count);
6340 new_version_node->calls_declare_variant_alt
6341 = old_version_node->calls_declare_variant_alt;
6342 if (DECL_STRUCT_FUNCTION (new_decl)->gimple_df)
6343 DECL_STRUCT_FUNCTION (new_decl)->gimple_df->ipa_pta
6344 = id.src_cfun->gimple_df->ipa_pta;
6346 /* Copy the function's static chain. */
6347 p = DECL_STRUCT_FUNCTION (old_decl)->static_chain_decl;
6348 if (p)
6349 DECL_STRUCT_FUNCTION (new_decl)->static_chain_decl
6350 = copy_static_chain (p, &id);
6352 auto_vec<int, 16> new_param_indices;
6353 clone_info *info = clone_info::get (old_version_node);
6354 ipa_param_adjustments *old_param_adjustments
6355 = info ? info->param_adjustments : NULL;
6356 if (old_param_adjustments)
6357 old_param_adjustments->get_updated_indices (&new_param_indices);
6359 /* If there's a tree_map, prepare for substitution. */
6360 if (tree_map)
6361 for (i = 0; i < tree_map->length (); i++)
6363 gimple *init;
6364 replace_info = (*tree_map)[i];
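/* replace_info->parm_num indexes the original parameter list; if the
   source node was itself created with parameter adjustments, translate
   it to the position in the current DECL_ARGUMENTS chain first. */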
6366 int p = replace_info->parm_num;
6367 if (old_param_adjustments)
6368 p = new_param_indices[p];
6370 tree parm;
6371 for (parm = DECL_ARGUMENTS (old_decl); p;
6372 parm = DECL_CHAIN (parm))
6373 p--;
6374 gcc_assert (parm);
6375 init = setup_one_parameter (&id, parm, replace_info->new_tree,
6376 id.src_fn, NULL, &vars);
6377 if (init)
6378 init_stmts.safe_push (init);
6381 ipa_param_body_adjustments *param_body_adjs = NULL;
6382 if (param_adjustments)
6384 param_body_adjs = new ipa_param_body_adjustments (param_adjustments,
6385 new_decl, old_decl,
6386 &id, &vars, tree_map);
6387 id.param_body_adjs = param_body_adjs;
6388 DECL_ARGUMENTS (new_decl) = param_body_adjs->get_new_param_chain ();
6390 else if (DECL_ARGUMENTS (old_decl) != NULL_TREE)
6391 DECL_ARGUMENTS (new_decl)
6392 = copy_arguments_nochange (DECL_ARGUMENTS (old_decl), &id);
6394 DECL_INITIAL (new_decl) = remap_blocks (DECL_INITIAL (id.src_fn), &id);
6395 BLOCK_SUPERCONTEXT (DECL_INITIAL (new_decl)) = new_decl;
6397 declare_inline_vars (DECL_INITIAL (new_decl), vars);
6399 if (!vec_safe_is_empty (DECL_STRUCT_FUNCTION (old_decl)->local_decls))
6400 /* Add local vars. */
6401 add_local_variables (DECL_STRUCT_FUNCTION (old_decl), cfun, &id);
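/* Set up DECL_RESULT of the new function: leave it absent when the
   original had none, replace it with a local variable plus a void
   result when the return value is dropped by the parameter adjustments,
   and remap it normally otherwise. */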
6403 if (DECL_RESULT (old_decl) == NULL_TREE)
6405 else if (param_adjustments && param_adjustments->m_skip_return
6406 && !VOID_TYPE_P (TREE_TYPE (DECL_RESULT (old_decl))))
6408 tree resdecl_repl = copy_result_decl_to_var (DECL_RESULT (old_decl),
6409 &id);
6410 declare_inline_vars (NULL, resdecl_repl);
6411 if (DECL_BY_REFERENCE (DECL_RESULT (old_decl)))
6412 resdecl_repl = build_fold_addr_expr (resdecl_repl);
6413 insert_decl_map (&id, DECL_RESULT (old_decl), resdecl_repl);
6415 DECL_RESULT (new_decl)
6416 = build_decl (DECL_SOURCE_LOCATION (DECL_RESULT (old_decl)),
6417 RESULT_DECL, NULL_TREE, void_type_node);
6418 DECL_CONTEXT (DECL_RESULT (new_decl)) = new_decl;
6419 DECL_IS_MALLOC (new_decl) = false;
6420 cfun->returns_struct = 0;
6421 cfun->returns_pcc_struct = 0;
6423 else
6425 tree old_name;
6426 DECL_RESULT (new_decl) = remap_decl (DECL_RESULT (old_decl), &id);
6427 lang_hooks.dup_lang_specific_decl (DECL_RESULT (new_decl));
6428 if (gimple_in_ssa_p (id.src_cfun)
6429 && DECL_BY_REFERENCE (DECL_RESULT (old_decl))
6430 && (old_name = ssa_default_def (id.src_cfun, DECL_RESULT (old_decl))))
6432 tree new_name = make_ssa_name (DECL_RESULT (new_decl));
6433 insert_decl_map (&id, old_name, new_name);
6434 SSA_NAME_DEF_STMT (new_name) = gimple_build_nop ();
6435 set_ssa_default_def (cfun, DECL_RESULT (new_decl), new_name);
6439 /* Set up the destination function's loop tree. */
6440 if (loops_for_fn (DECL_STRUCT_FUNCTION (old_decl)) != NULL)
6442 cfun->curr_properties &= ~PROP_loops;
6443 loop_optimizer_init (AVOID_CFG_MODIFICATIONS);
6444 cfun->curr_properties |= PROP_loops;
6447 /* Copy the Function's body. */
6448 copy_body (&id, ENTRY_BLOCK_PTR_FOR_FN (cfun), EXIT_BLOCK_PTR_FOR_FN (cfun),
6449 new_entry);
6451 /* Renumber the lexical scoping (non-code) blocks consecutively. */
6452 number_blocks (new_decl);
6454 /* We want to create the BB unconditionally, so that the addition of
6455 debug stmts doesn't affect BB count, which may in the end cause
6456 codegen differences. */
6457 bb = split_edge (single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
6458 while (init_stmts.length ())
6459 insert_init_stmt (&id, bb, init_stmts.pop ());
6460 if (param_body_adjs)
6461 param_body_adjs->append_init_stmts (bb);
6462 update_clone_info (&id);
6464 /* Remap the nonlocal_goto_save_area, if any. */
6465 if (cfun->nonlocal_goto_save_area)
6467 struct walk_stmt_info wi;
6469 memset (&wi, 0, sizeof (wi));
6470 wi.info = &id;
6471 walk_tree (&cfun->nonlocal_goto_save_area, remap_gimple_op_r, &wi, NULL);
6474 /* Clean up. */
6475 delete id.decl_map;
6476 if (id.debug_map)
6477 delete id.debug_map;
6478 free_dominance_info (CDI_DOMINATORS);
6479 free_dominance_info (CDI_POST_DOMINATORS);
6481 update_max_bb_count ();
6482 fold_marked_statements (0, id.statements_to_fold);
6483 delete id.statements_to_fold;
6484 delete_unreachable_blocks_update_callgraph (id.dst_node, update_clones);
6485 if (id.dst_node->definition)
6486 cgraph_edge::rebuild_references ();
6487 if (loops_state_satisfies_p (LOOPS_NEED_FIXUP))
6489 calculate_dominance_info (CDI_DOMINATORS);
6490 fix_loop_structure (NULL);
6492 update_ssa (TODO_update_ssa);
6494 /* After partial cloning we need to rescale frequencies, so they are
6495 within proper range in the cloned function. */
6496 if (new_entry)
6498 struct cgraph_edge *e;
6499 rebuild_frequencies ();
6501 new_version_node->count = ENTRY_BLOCK_PTR_FOR_FN (cfun)->count;
6502 for (e = new_version_node->callees; e; e = e->next_callee)
6504 basic_block bb = gimple_bb (e->call_stmt);
6505 e->count = bb->count;
6507 for (e = new_version_node->indirect_calls; e; e = e->next_callee)
6509 basic_block bb = gimple_bb (e->call_stmt);
6510 e->count = bb->count;
6514 if (param_body_adjs && MAY_HAVE_DEBUG_BIND_STMTS)
6516 vec<tree, va_gc> **debug_args = NULL;
6517 unsigned int len = 0;
6518 unsigned reset_len = param_body_adjs->m_reset_debug_decls.length ();
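/* For each parameter whose debug info had to be reset, create a debug
   expression decl and record the (origin parm, debug decl) pair in the
   new function's debug args vector. */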
6520 for (i = 0; i < reset_len; i++)
6522 tree parm = param_body_adjs->m_reset_debug_decls[i];
6523 gcc_assert (is_gimple_reg (parm));
6524 tree ddecl;
6526 if (debug_args == NULL)
6528 debug_args = decl_debug_args_insert (new_decl);
6529 len = vec_safe_length (*debug_args);
6531 ddecl = build_debug_expr_decl (TREE_TYPE (parm));
6532 /* FIXME: Is setting the mode really necessary? */
6533 SET_DECL_MODE (ddecl, DECL_MODE (parm));
6534 vec_safe_push (*debug_args, DECL_ORIGIN (parm));
6535 vec_safe_push (*debug_args, ddecl);
6537 if (debug_args != NULL)
6539 /* On the callee side, add
6540 DEBUG D#Y s=> parm
6541 DEBUG var => D#Y
6542 stmts to the first bb where var is a VAR_DECL created for the
6543 optimized away parameter in the DECL_INITIAL block. This hints
6544 in the debug info that var (whose DECL_ORIGIN is the parm
6545 PARM_DECL) is optimized away, but could be looked up at the
6546 call site as the value of D#X there. */
6547 gimple_stmt_iterator cgsi
6548 = gsi_after_labels (single_succ (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
6549 gimple *def_temp;
6550 tree var = vars;
6551 i = vec_safe_length (*debug_args);
6554 tree vexpr = NULL_TREE;
6555 i -= 2;
6556 while (var != NULL_TREE
6557 && DECL_ABSTRACT_ORIGIN (var) != (**debug_args)[i])
6558 var = TREE_CHAIN (var);
6559 if (var == NULL_TREE)
6560 break;
6561 tree parm = (**debug_args)[i];
6562 if (tree parm_ddef = ssa_default_def (id.src_cfun, parm))
6563 if (tree *d
6564 = param_body_adjs->m_dead_ssa_debug_equiv.get (parm_ddef))
6565 vexpr = *d;
6566 if (!vexpr)
6568 vexpr = build_debug_expr_decl (TREE_TYPE (parm));
6569 /* FIXME: Is setting the mode really necessary? */
6570 SET_DECL_MODE (vexpr, DECL_MODE (parm));
6572 def_temp = gimple_build_debug_bind (var, vexpr, NULL);
6573 gsi_insert_before (&cgsi, def_temp, GSI_NEW_STMT);
6574 def_temp = gimple_build_debug_source_bind (vexpr, parm, NULL);
6575 gsi_insert_before (&cgsi, def_temp, GSI_NEW_STMT);
6577 while (i > len);
6580 delete param_body_adjs;
6581 free_dominance_info (CDI_DOMINATORS);
6582 free_dominance_info (CDI_POST_DOMINATORS);
6584 gcc_assert (!id.debug_stmts.exists ());
6585 pop_cfun ();
6586 input_location = saved_location;
6587 return;
6590 /* EXP is CALL_EXPR present in a GENERIC expression tree. Try to integrate
6591 the callee and return the inlined body on success. */
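/* For instance (illustrative), with

     static int sq (int) __attribute__ ((const));
     static int sq (int x) { return x * x; }

   a GENERIC call 'sq (3)' can be replaced by the right-hand side of the
   single MODIFY_EXPR that copying the body produces. */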
6593 tree
6594 maybe_inline_call_in_expr (tree exp)
6596 tree fn = get_callee_fndecl (exp);
6598 /* We can only try to inline "const" functions. */
6599 if (fn && TREE_READONLY (fn) && DECL_SAVED_TREE (fn))
6601 call_expr_arg_iterator iter;
6602 copy_body_data id;
6603 tree param, arg, t;
6604 hash_map<tree, tree> decl_map;
6606 /* Remap the parameters. */
6607 for (param = DECL_ARGUMENTS (fn), arg = first_call_expr_arg (exp, &iter);
6608 param;
6609 param = DECL_CHAIN (param), arg = next_call_expr_arg (&iter))
6610 decl_map.put (param, arg);
6612 memset (&id, 0, sizeof (id));
6613 id.src_fn = fn;
6614 id.dst_fn = current_function_decl;
6615 id.src_cfun = DECL_STRUCT_FUNCTION (fn);
6616 id.decl_map = &decl_map;
6618 id.copy_decl = copy_decl_no_change;
6619 id.transform_call_graph_edges = CB_CGE_DUPLICATE;
6620 id.transform_new_cfg = false;
6621 id.transform_return_to_modify = true;
6622 id.transform_parameter = true;
6624 /* Make sure not to unshare trees behind the front-end's back
6625 since front-end specific mechanisms may rely on sharing. */
6626 id.regimplify = false;
6627 id.do_not_unshare = true;
6629 /* We're not inside any EH region. */
6630 id.eh_lp_nr = 0;
6632 t = copy_tree_body (&id);
6634 /* We can only return something suitable for use in a GENERIC
6635 expression tree. */
6636 if (TREE_CODE (t) == MODIFY_EXPR)
6637 return TREE_OPERAND (t, 1);
6640 return NULL_TREE;
6643 /* Duplicate a type, fields and all. */
6645 tree
6646 build_duplicate_type (tree type)
6648 struct copy_body_data id;
6650 memset (&id, 0, sizeof (id));
6651 id.src_fn = current_function_decl;
6652 id.dst_fn = current_function_decl;
6653 id.src_cfun = cfun;
6654 id.decl_map = new hash_map<tree, tree>;
6655 id.debug_map = NULL;
6656 id.copy_decl = copy_decl_no_change;
6658 type = remap_type_1 (type, &id);
6660 delete id.decl_map;
6661 if (id.debug_map)
6662 delete id.debug_map;
6664 TYPE_CANONICAL (type) = type;
6666 return type;
6669 /* Unshare the entire DECL_SAVED_TREE of FN and return the remapped
6670 parameters and RESULT_DECL in PARMS and RESULT. Used by C++ constexpr
6671 evaluation. */
6673 tree
6674 copy_fn (tree fn, tree& parms, tree& result)
6676 copy_body_data id;
6677 tree param;
6678 hash_map<tree, tree> decl_map;
6680 tree *p = &parms;
6681 *p = NULL_TREE;
6683 memset (&id, 0, sizeof (id));
6684 id.src_fn = fn;
6685 id.dst_fn = current_function_decl;
6686 id.src_cfun = DECL_STRUCT_FUNCTION (fn);
6687 id.decl_map = &decl_map;
6689 id.copy_decl = [] (tree decl, copy_body_data *id)
6691 if (TREE_CODE (decl) == TYPE_DECL || TREE_CODE (decl) == CONST_DECL)
6692 /* Don't make copies of local types or injected enumerators,
6693 the C++ constexpr evaluator doesn't need them and they
6694 confuse modules streaming. */
6695 return decl;
6696 return copy_decl_no_change (decl, id);
6698 id.transform_call_graph_edges = CB_CGE_DUPLICATE;
6699 id.transform_new_cfg = false;
6700 id.transform_return_to_modify = false;
6701 id.transform_parameter = true;
6703 /* Make sure not to unshare trees behind the front-end's back
6704 since front-end specific mechanisms may rely on sharing. */
6705 id.regimplify = false;
6706 id.do_not_unshare = true;
6707 id.do_not_fold = true;
6709 /* We're not inside any EH region. */
6710 id.eh_lp_nr = 0;
6712 /* Remap the parameters and result and return them to the caller. */
6713 for (param = DECL_ARGUMENTS (fn);
6714 param;
6715 param = DECL_CHAIN (param))
6717 *p = remap_decl (param, &id);
6718 p = &DECL_CHAIN (*p);
6721 if (DECL_RESULT (fn))
6722 result = remap_decl (DECL_RESULT (fn), &id);
6723 else
6724 result = NULL_TREE;
6726 return copy_tree_body (&id);