[dragonfly.git] / contrib / gcc-5.0 / gcc / tree-inline.c
blob 83fb5d3eb39aedf6951d56a826a9774fbb517090
1 /* Tree inlining.
2 Copyright (C) 2001-2015 Free Software Foundation, Inc.
3 Contributed by Alexandre Oliva <aoliva@redhat.com>
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
10 any later version.
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "tm.h"
25 #include "diagnostic-core.h"
26 #include "hash-set.h"
27 #include "machmode.h"
28 #include "vec.h"
29 #include "double-int.h"
30 #include "input.h"
31 #include "alias.h"
32 #include "symtab.h"
33 #include "wide-int.h"
34 #include "inchash.h"
35 #include "tree.h"
36 #include "fold-const.h"
37 #include "stor-layout.h"
38 #include "calls.h"
39 #include "tree-inline.h"
40 #include "flags.h"
41 #include "params.h"
42 #include "insn-config.h"
43 #include "hashtab.h"
44 #include "langhooks.h"
45 #include "predict.h"
46 #include "hard-reg-set.h"
47 #include "function.h"
48 #include "dominance.h"
49 #include "cfg.h"
50 #include "cfganal.h"
51 #include "basic-block.h"
52 #include "tree-iterator.h"
53 #include "intl.h"
54 #include "tree-ssa-alias.h"
55 #include "internal-fn.h"
56 #include "gimple-fold.h"
57 #include "tree-eh.h"
58 #include "gimple-expr.h"
59 #include "is-a.h"
60 #include "gimple.h"
61 #include "gimplify.h"
62 #include "gimple-iterator.h"
63 #include "gimplify-me.h"
64 #include "gimple-walk.h"
65 #include "gimple-ssa.h"
66 #include "tree-cfg.h"
67 #include "tree-phinodes.h"
68 #include "ssa-iterators.h"
69 #include "stringpool.h"
70 #include "tree-ssanames.h"
71 #include "tree-into-ssa.h"
72 #include "rtl.h"
73 #include "statistics.h"
74 #include "real.h"
75 #include "fixed-value.h"
76 #include "expmed.h"
77 #include "dojump.h"
78 #include "explow.h"
79 #include "emit-rtl.h"
80 #include "varasm.h"
81 #include "stmt.h"
82 #include "expr.h"
83 #include "tree-dfa.h"
84 #include "tree-ssa.h"
85 #include "tree-pretty-print.h"
86 #include "except.h"
87 #include "debug.h"
88 #include "hash-map.h"
89 #include "plugin-api.h"
90 #include "ipa-ref.h"
91 #include "cgraph.h"
92 #include "alloc-pool.h"
93 #include "symbol-summary.h"
94 #include "ipa-prop.h"
95 #include "value-prof.h"
96 #include "tree-pass.h"
97 #include "target.h"
98 #include "cfgloop.h"
99 #include "builtins.h"
100 #include "tree-chkp.h"
102 #include "rtl.h" /* FIXME: For asm_str_count. */
104 /* I'm not really happy about this, but we need to handle gimple and
105 non-gimple trees. */
107 /* Inlining, Cloning, Versioning, Parallelization
109 Inlining: a function body is duplicated, but the PARM_DECLs are
110 remapped into VAR_DECLs, and non-void RETURN_EXPRs become
111 MODIFY_EXPRs that store to a dedicated returned-value variable.
112 The duplicated eh_region info of the copy will later be appended
113 to the info for the caller; the eh_region info in copied throwing
114 statements and RESX statements are adjusted accordingly.
116 Cloning: (only in C++) We have one body for a con/de/structor, and
117 multiple function decls, each with a unique parameter list.
118 Duplicate the body, using the given splay tree; some parameters
119 will become constants (like 0 or 1).
121 Versioning: a function body is duplicated and the result is a new
122 function, rather than being merged into the blocks of an existing
123 function as with inlining. Some parameters will become constants.
125 Parallelization: a region of a function is duplicated resulting in
126 a new function. Variables may be replaced with complex expressions
127 to enable shared variable semantics.
129 All of these will simultaneously look up any callgraph edges. If
130 we're going to inline the duplicated function body, and the given
131 function has some cloned callgraph nodes (one for each place this
132 function will be inlined) those callgraph edges will be duplicated.
133 If we're cloning the body, those callgraph edges will be
134 updated to point into the new body. (Note that the original
135 callgraph node and edge list will not be altered.)
137 See the CALL_EXPR handling case in copy_tree_body_r (). */
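/* Purely illustrative sketch (not from these sources) of the inlining
   transformation described above, at the source level.  For a callee
   and call site such as

       static int sq (int x) { return x * x; }
       ...
       y = sq (a + 1);

   the inlined body conceptually becomes

       x.1 = a + 1;             <- PARM_DECL x remapped to a local VAR_DECL
       retval.2 = x.1 * x.1;    <- RETURN_EXPR turned into a MODIFY_EXPR
       y = retval.2;            <- caller reads the declared return variable

   where x.1 and retval.2 are hypothetical names for the copies created
   by remap_decl and declare_return_variable below.  */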
139 /* To Do:
141 o In order to make inlining-on-trees work, we pessimized
142 function-local static constants. In particular, they are now
143 always output, even when not addressed. Fix this by treating
144 function-local static constants just like global static
145 constants; the back-end already knows not to output them if they
146 are not needed.
148 o Provide heuristics to clamp inlining of recursive template
149 calls? */
152 /* Weights that estimate_num_insns uses to estimate the size of the
153 produced code. */
155 eni_weights eni_size_weights;
157 /* Weights that estimate_num_insns uses to estimate the time necessary
158 to execute the produced code. */
160 eni_weights eni_time_weights;
162 /* Prototypes. */
164 static tree declare_return_variable (copy_body_data *, tree, tree, tree,
165 basic_block);
166 static void remap_block (tree *, copy_body_data *);
167 static void copy_bind_expr (tree *, int *, copy_body_data *);
168 static void declare_inline_vars (tree, tree);
169 static void remap_save_expr (tree *, hash_map<tree, tree> *, int *);
170 static void prepend_lexical_block (tree current_block, tree new_block);
171 static tree copy_decl_to_var (tree, copy_body_data *);
172 static tree copy_result_decl_to_var (tree, copy_body_data *);
173 static tree copy_decl_maybe_to_var (tree, copy_body_data *);
174 static gimple_seq remap_gimple_stmt (gimple, copy_body_data *);
175 static bool delete_unreachable_blocks_update_callgraph (copy_body_data *id);
176 static void insert_init_stmt (copy_body_data *, basic_block, gimple);
178 /* Insert a tree->tree mapping for ID. Although the name suggests
179 that the trees should be variables, it is used for more than that. */
181 void
182 insert_decl_map (copy_body_data *id, tree key, tree value)
184 id->decl_map->put (key, value);
186 /* Always insert an identity map as well. If we see this same new
187 node again, we won't want to duplicate it a second time. */
188 if (key != value)
189 id->decl_map->put (value, value);
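/* Illustrative example with hypothetical identifiers: after

     insert_decl_map (id, old_parm, new_var);

   the map holds both old_parm -> new_var and new_var -> new_var, so a
   later remap_decl (new_var, id) hits the identity entry and hands back
   new_var itself instead of manufacturing yet another copy.  */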
192 /* Insert a tree->tree mapping for ID. This is only used for
193 variables. */
195 static void
196 insert_debug_decl_map (copy_body_data *id, tree key, tree value)
198 if (!gimple_in_ssa_p (id->src_cfun))
199 return;
201 if (!opt_for_fn (id->dst_fn, flag_var_tracking_assignments))
202 return;
204 if (!target_for_debug_bind (key))
205 return;
207 gcc_assert (TREE_CODE (key) == PARM_DECL);
208 gcc_assert (TREE_CODE (value) == VAR_DECL);
210 if (!id->debug_map)
211 id->debug_map = new hash_map<tree, tree>;
213 id->debug_map->put (key, value);
216 /* If nonzero, we're remapping the contents of inlined debug
217 statements. If negative, an error has occurred, such as a
218 reference to a variable that isn't available in the inlined
219 context. */
220 static int processing_debug_stmt = 0;
222 /* Construct new SSA name for old NAME. ID is the inline context. */
224 static tree
225 remap_ssa_name (tree name, copy_body_data *id)
227 tree new_tree, var;
228 tree *n;
230 gcc_assert (TREE_CODE (name) == SSA_NAME);
232 n = id->decl_map->get (name);
233 if (n)
234 return unshare_expr (*n);
236 if (processing_debug_stmt)
238 if (SSA_NAME_IS_DEFAULT_DEF (name)
239 && TREE_CODE (SSA_NAME_VAR (name)) == PARM_DECL
240 && id->entry_bb == NULL
241 && single_succ_p (ENTRY_BLOCK_PTR_FOR_FN (cfun)))
243 tree vexpr = make_node (DEBUG_EXPR_DECL);
244 gimple def_temp;
245 gimple_stmt_iterator gsi;
246 tree val = SSA_NAME_VAR (name);
248 n = id->decl_map->get (val);
249 if (n != NULL)
250 val = *n;
251 if (TREE_CODE (val) != PARM_DECL)
253 processing_debug_stmt = -1;
254 return name;
256 def_temp = gimple_build_debug_source_bind (vexpr, val, NULL);
257 DECL_ARTIFICIAL (vexpr) = 1;
258 TREE_TYPE (vexpr) = TREE_TYPE (name);
259 DECL_MODE (vexpr) = DECL_MODE (SSA_NAME_VAR (name));
260 gsi = gsi_after_labels (single_succ (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
261 gsi_insert_before (&gsi, def_temp, GSI_SAME_STMT);
262 return vexpr;
265 processing_debug_stmt = -1;
266 return name;
269 /* Remap anonymous SSA names or SSA names of anonymous decls. */
270 var = SSA_NAME_VAR (name);
271 if (!var
272 || (!SSA_NAME_IS_DEFAULT_DEF (name)
273 && TREE_CODE (var) == VAR_DECL
274 && !VAR_DECL_IS_VIRTUAL_OPERAND (var)
275 && DECL_ARTIFICIAL (var)
276 && DECL_IGNORED_P (var)
277 && !DECL_NAME (var)))
279 struct ptr_info_def *pi;
280 new_tree = make_ssa_name (remap_type (TREE_TYPE (name), id));
281 if (!var && SSA_NAME_IDENTIFIER (name))
282 SET_SSA_NAME_VAR_OR_IDENTIFIER (new_tree, SSA_NAME_IDENTIFIER (name));
283 insert_decl_map (id, name, new_tree);
284 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (new_tree)
285 = SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name);
286 /* At least IPA points-to info can be directly transferred. */
287 if (id->src_cfun->gimple_df
288 && id->src_cfun->gimple_df->ipa_pta
289 && (pi = SSA_NAME_PTR_INFO (name))
290 && !pi->pt.anything)
292 struct ptr_info_def *new_pi = get_ptr_info (new_tree);
293 new_pi->pt = pi->pt;
295 return new_tree;
298 /* Do not set DEF_STMT yet; the statement is not copied yet. We do that
299 in copy_bb. */
300 new_tree = remap_decl (var, id);
302 /* We might've substituted a constant or another SSA_NAME for
303 the variable.
305 Replace the SSA name representing RESULT_DECL by the variable during
306 inlining: this saves us from the need to introduce a PHI node in case
307 the return value is only partly initialized. */
308 if ((TREE_CODE (new_tree) == VAR_DECL || TREE_CODE (new_tree) == PARM_DECL)
309 && (!SSA_NAME_VAR (name)
310 || TREE_CODE (SSA_NAME_VAR (name)) != RESULT_DECL
311 || !id->transform_return_to_modify))
313 struct ptr_info_def *pi;
314 new_tree = make_ssa_name (new_tree);
315 insert_decl_map (id, name, new_tree);
316 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (new_tree)
317 = SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name);
318 /* At least IPA points-to info can be directly transferred. */
319 if (id->src_cfun->gimple_df
320 && id->src_cfun->gimple_df->ipa_pta
321 && (pi = SSA_NAME_PTR_INFO (name))
322 && !pi->pt.anything)
324 struct ptr_info_def *new_pi = get_ptr_info (new_tree);
325 new_pi->pt = pi->pt;
327 if (SSA_NAME_IS_DEFAULT_DEF (name))
329 /* By inlining a function having an uninitialized variable, we might
330 extend its lifetime (the variable might get reused). This causes an
331 ICE in case we end up extending the lifetime of an SSA name across an
332 abnormal edge, and it also increases register pressure.
334 We simply initialize all uninitialized vars to 0, except
335 when we are inlining into the very first BB. We could avoid
336 this for all BBs that are not inside strongly connected
337 regions of the CFG, but this is expensive to test. */
338 if (id->entry_bb
339 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name)
340 && (!SSA_NAME_VAR (name)
341 || TREE_CODE (SSA_NAME_VAR (name)) != PARM_DECL)
342 && (id->entry_bb != EDGE_SUCC (ENTRY_BLOCK_PTR_FOR_FN (cfun),
343 0)->dest
344 || EDGE_COUNT (id->entry_bb->preds) != 1))
346 gimple_stmt_iterator gsi = gsi_last_bb (id->entry_bb);
347 gimple init_stmt;
348 tree zero = build_zero_cst (TREE_TYPE (new_tree));
350 init_stmt = gimple_build_assign (new_tree, zero);
351 gsi_insert_after (&gsi, init_stmt, GSI_NEW_STMT);
352 SSA_NAME_IS_DEFAULT_DEF (new_tree) = 0;
354 else
356 SSA_NAME_DEF_STMT (new_tree) = gimple_build_nop ();
357 set_ssa_default_def (cfun, SSA_NAME_VAR (new_tree), new_tree);
361 else
362 insert_decl_map (id, name, new_tree);
363 return new_tree;
366 /* Remap DECL during the copying of the BLOCK tree for the function. */
368 tree
369 remap_decl (tree decl, copy_body_data *id)
371 tree *n;
373 /* We only remap local variables in the current function. */
375 /* See if we have remapped this declaration. */
377 n = id->decl_map->get (decl);
379 if (!n && processing_debug_stmt)
381 processing_debug_stmt = -1;
382 return decl;
385 /* If we didn't already have an equivalent for this declaration,
386 create one now. */
387 if (!n)
389 /* Make a copy of the variable or label. */
390 tree t = id->copy_decl (decl, id);
392 /* Remember it, so that if we encounter this local entity again
393 we can reuse this copy. Do this early because remap_type may
394 need this decl for TYPE_STUB_DECL. */
395 insert_decl_map (id, decl, t);
397 if (!DECL_P (t))
398 return t;
400 /* Remap types, if necessary. */
401 TREE_TYPE (t) = remap_type (TREE_TYPE (t), id);
402 if (TREE_CODE (t) == TYPE_DECL)
403 DECL_ORIGINAL_TYPE (t) = remap_type (DECL_ORIGINAL_TYPE (t), id);
405 /* Remap sizes as necessary. */
406 walk_tree (&DECL_SIZE (t), copy_tree_body_r, id, NULL);
407 walk_tree (&DECL_SIZE_UNIT (t), copy_tree_body_r, id, NULL);
409 /* If fields, do likewise for offset and qualifier. */
410 if (TREE_CODE (t) == FIELD_DECL)
412 walk_tree (&DECL_FIELD_OFFSET (t), copy_tree_body_r, id, NULL);
413 if (TREE_CODE (DECL_CONTEXT (t)) == QUAL_UNION_TYPE)
414 walk_tree (&DECL_QUALIFIER (t), copy_tree_body_r, id, NULL);
417 return t;
420 if (id->do_not_unshare)
421 return *n;
422 else
423 return unshare_expr (*n);
426 static tree
427 remap_type_1 (tree type, copy_body_data *id)
429 tree new_tree, t;
431 /* We do need a copy. Build and register it now. If this is a pointer or
432 reference type, remap the designated type and make a new pointer or
433 reference type. */
434 if (TREE_CODE (type) == POINTER_TYPE)
436 new_tree = build_pointer_type_for_mode (remap_type (TREE_TYPE (type), id),
437 TYPE_MODE (type),
438 TYPE_REF_CAN_ALIAS_ALL (type));
439 if (TYPE_ATTRIBUTES (type) || TYPE_QUALS (type))
440 new_tree = build_type_attribute_qual_variant (new_tree,
441 TYPE_ATTRIBUTES (type),
442 TYPE_QUALS (type));
443 insert_decl_map (id, type, new_tree);
444 return new_tree;
446 else if (TREE_CODE (type) == REFERENCE_TYPE)
448 new_tree = build_reference_type_for_mode (remap_type (TREE_TYPE (type), id),
449 TYPE_MODE (type),
450 TYPE_REF_CAN_ALIAS_ALL (type));
451 if (TYPE_ATTRIBUTES (type) || TYPE_QUALS (type))
452 new_tree = build_type_attribute_qual_variant (new_tree,
453 TYPE_ATTRIBUTES (type),
454 TYPE_QUALS (type));
455 insert_decl_map (id, type, new_tree);
456 return new_tree;
458 else
459 new_tree = copy_node (type);
461 insert_decl_map (id, type, new_tree);
463 /* This is a new type, not a copy of an old type. Need to reassociate
464 variants. We can handle everything except the main variant lazily. */
465 t = TYPE_MAIN_VARIANT (type);
466 if (type != t)
468 t = remap_type (t, id);
469 TYPE_MAIN_VARIANT (new_tree) = t;
470 TYPE_NEXT_VARIANT (new_tree) = TYPE_NEXT_VARIANT (t);
471 TYPE_NEXT_VARIANT (t) = new_tree;
473 else
475 TYPE_MAIN_VARIANT (new_tree) = new_tree;
476 TYPE_NEXT_VARIANT (new_tree) = NULL;
479 if (TYPE_STUB_DECL (type))
480 TYPE_STUB_DECL (new_tree) = remap_decl (TYPE_STUB_DECL (type), id);
482 /* Lazily create pointer and reference types. */
483 TYPE_POINTER_TO (new_tree) = NULL;
484 TYPE_REFERENCE_TO (new_tree) = NULL;
486 /* Copy all types that may contain references to local variables; be sure to
487 preserve sharing between the type and its main variant when possible. */
488 switch (TREE_CODE (new_tree))
490 case INTEGER_TYPE:
491 case REAL_TYPE:
492 case FIXED_POINT_TYPE:
493 case ENUMERAL_TYPE:
494 case BOOLEAN_TYPE:
495 if (TYPE_MAIN_VARIANT (new_tree) != new_tree)
497 gcc_checking_assert (TYPE_MIN_VALUE (type) == TYPE_MIN_VALUE (TYPE_MAIN_VARIANT (type)));
498 gcc_checking_assert (TYPE_MAX_VALUE (type) == TYPE_MAX_VALUE (TYPE_MAIN_VARIANT (type)));
500 TYPE_MIN_VALUE (new_tree) = TYPE_MIN_VALUE (TYPE_MAIN_VARIANT (new_tree));
501 TYPE_MAX_VALUE (new_tree) = TYPE_MAX_VALUE (TYPE_MAIN_VARIANT (new_tree));
503 else
505 t = TYPE_MIN_VALUE (new_tree);
506 if (t && TREE_CODE (t) != INTEGER_CST)
507 walk_tree (&TYPE_MIN_VALUE (new_tree), copy_tree_body_r, id, NULL);
509 t = TYPE_MAX_VALUE (new_tree);
510 if (t && TREE_CODE (t) != INTEGER_CST)
511 walk_tree (&TYPE_MAX_VALUE (new_tree), copy_tree_body_r, id, NULL);
513 return new_tree;
515 case FUNCTION_TYPE:
516 if (TYPE_MAIN_VARIANT (new_tree) != new_tree
517 && TREE_TYPE (type) == TREE_TYPE (TYPE_MAIN_VARIANT (type)))
518 TREE_TYPE (new_tree) = TREE_TYPE (TYPE_MAIN_VARIANT (new_tree));
519 else
520 TREE_TYPE (new_tree) = remap_type (TREE_TYPE (new_tree), id);
521 if (TYPE_MAIN_VARIANT (new_tree) != new_tree
522 && TYPE_ARG_TYPES (type) == TYPE_ARG_TYPES (TYPE_MAIN_VARIANT (type)))
523 TYPE_ARG_TYPES (new_tree) = TYPE_ARG_TYPES (TYPE_MAIN_VARIANT (new_tree));
524 else
525 walk_tree (&TYPE_ARG_TYPES (new_tree), copy_tree_body_r, id, NULL);
526 return new_tree;
528 case ARRAY_TYPE:
529 if (TYPE_MAIN_VARIANT (new_tree) != new_tree
530 && TREE_TYPE (type) == TREE_TYPE (TYPE_MAIN_VARIANT (type)))
531 TREE_TYPE (new_tree) = TREE_TYPE (TYPE_MAIN_VARIANT (new_tree));
532 else
533 TREE_TYPE (new_tree) = remap_type (TREE_TYPE (new_tree), id);
535 if (TYPE_MAIN_VARIANT (new_tree) != new_tree)
537 gcc_checking_assert (TYPE_DOMAIN (type) == TYPE_DOMAIN (TYPE_MAIN_VARIANT (type)));
538 TYPE_DOMAIN (new_tree) = TYPE_DOMAIN (TYPE_MAIN_VARIANT (new_tree));
540 else
541 TYPE_DOMAIN (new_tree) = remap_type (TYPE_DOMAIN (new_tree), id);
542 break;
544 case RECORD_TYPE:
545 case UNION_TYPE:
546 case QUAL_UNION_TYPE:
547 if (TYPE_MAIN_VARIANT (type) != type
548 && TYPE_FIELDS (type) == TYPE_FIELDS (TYPE_MAIN_VARIANT (type)))
549 TYPE_FIELDS (new_tree) = TYPE_FIELDS (TYPE_MAIN_VARIANT (new_tree));
550 else
552 tree f, nf = NULL;
554 for (f = TYPE_FIELDS (new_tree); f ; f = DECL_CHAIN (f))
556 t = remap_decl (f, id);
557 DECL_CONTEXT (t) = new_tree;
558 DECL_CHAIN (t) = nf;
559 nf = t;
561 TYPE_FIELDS (new_tree) = nreverse (nf);
563 break;
565 case OFFSET_TYPE:
566 default:
567 /* Shouldn't have been thought variable sized. */
568 gcc_unreachable ();
571 /* All variants of the type share the same size, so use the already remapped data. */
572 if (TYPE_MAIN_VARIANT (new_tree) != new_tree)
574 gcc_checking_assert (TYPE_SIZE (type) == TYPE_SIZE (TYPE_MAIN_VARIANT (type)));
575 gcc_checking_assert (TYPE_SIZE_UNIT (type) == TYPE_SIZE_UNIT (TYPE_MAIN_VARIANT (type)));
577 TYPE_SIZE (new_tree) = TYPE_SIZE (TYPE_MAIN_VARIANT (new_tree));
578 TYPE_SIZE_UNIT (new_tree) = TYPE_SIZE_UNIT (TYPE_MAIN_VARIANT (new_tree));
580 else
582 walk_tree (&TYPE_SIZE (new_tree), copy_tree_body_r, id, NULL);
583 walk_tree (&TYPE_SIZE_UNIT (new_tree), copy_tree_body_r, id, NULL);
586 return new_tree;
589 tree
590 remap_type (tree type, copy_body_data *id)
592 tree *node;
593 tree tmp;
595 if (type == NULL)
596 return type;
598 /* See if we have remapped this type. */
599 node = id->decl_map->get (type);
600 if (node)
601 return *node;
603 /* The type only needs remapping if it's variably modified. */
604 if (! variably_modified_type_p (type, id->src_fn))
606 insert_decl_map (id, type, type);
607 return type;
610 id->remapping_type_depth++;
611 tmp = remap_type_1 (type, id);
612 id->remapping_type_depth--;
614 return tmp;
617 /* Decide if DECL can be put into BLOCK_NONLOCAL_VARs. */
619 static bool
620 can_be_nonlocal (tree decl, copy_body_data *id)
622 /* We cannot duplicate function decls. */
623 if (TREE_CODE (decl) == FUNCTION_DECL)
624 return true;
626 /* Local static vars must be non-local or we get multiple declaration
627 problems. */
628 if (TREE_CODE (decl) == VAR_DECL
629 && !auto_var_in_fn_p (decl, id->src_fn))
630 return true;
632 return false;
635 static tree
636 remap_decls (tree decls, vec<tree, va_gc> **nonlocalized_list,
637 copy_body_data *id)
639 tree old_var;
640 tree new_decls = NULL_TREE;
642 /* Remap its variables. */
643 for (old_var = decls; old_var; old_var = DECL_CHAIN (old_var))
645 tree new_var;
647 if (can_be_nonlocal (old_var, id))
649 /* We need to add this variable to the local decls as otherwise
650 nothing else will do so. */
651 if (TREE_CODE (old_var) == VAR_DECL
652 && ! DECL_EXTERNAL (old_var)
653 && cfun)
654 add_local_decl (cfun, old_var);
655 if ((!optimize || debug_info_level > DINFO_LEVEL_TERSE)
656 && !DECL_IGNORED_P (old_var)
657 && nonlocalized_list)
658 vec_safe_push (*nonlocalized_list, old_var);
659 continue;
662 /* Remap the variable. */
663 new_var = remap_decl (old_var, id);
665 /* If we didn't remap this variable, we can't mess with its
666 TREE_CHAIN. If we remapped this variable to the return slot, it's
667 already declared somewhere else, so don't declare it here. */
669 if (new_var == id->retvar)
671 else if (!new_var)
673 if ((!optimize || debug_info_level > DINFO_LEVEL_TERSE)
674 && !DECL_IGNORED_P (old_var)
675 && nonlocalized_list)
676 vec_safe_push (*nonlocalized_list, old_var);
678 else
680 gcc_assert (DECL_P (new_var));
681 DECL_CHAIN (new_var) = new_decls;
682 new_decls = new_var;
684 /* Also copy value-expressions. */
685 if (TREE_CODE (new_var) == VAR_DECL
686 && DECL_HAS_VALUE_EXPR_P (new_var))
688 tree tem = DECL_VALUE_EXPR (new_var);
689 bool old_regimplify = id->regimplify;
690 id->remapping_type_depth++;
691 walk_tree (&tem, copy_tree_body_r, id, NULL);
692 id->remapping_type_depth--;
693 id->regimplify = old_regimplify;
694 SET_DECL_VALUE_EXPR (new_var, tem);
699 return nreverse (new_decls);
702 /* Copy the BLOCK to contain remapped versions of the variables
703 therein. And hook the new block into the block-tree. */
705 static void
706 remap_block (tree *block, copy_body_data *id)
708 tree old_block;
709 tree new_block;
711 /* Make the new block. */
712 old_block = *block;
713 new_block = make_node (BLOCK);
714 TREE_USED (new_block) = TREE_USED (old_block);
715 BLOCK_ABSTRACT_ORIGIN (new_block) = old_block;
716 BLOCK_SOURCE_LOCATION (new_block) = BLOCK_SOURCE_LOCATION (old_block);
717 BLOCK_NONLOCALIZED_VARS (new_block)
718 = vec_safe_copy (BLOCK_NONLOCALIZED_VARS (old_block));
719 *block = new_block;
721 /* Remap its variables. */
722 BLOCK_VARS (new_block) = remap_decls (BLOCK_VARS (old_block),
723 &BLOCK_NONLOCALIZED_VARS (new_block),
724 id);
726 if (id->transform_lang_insert_block)
727 id->transform_lang_insert_block (new_block);
729 /* Remember the remapped block. */
730 insert_decl_map (id, old_block, new_block);
733 /* Copy the whole block tree and root it in id->block. */
734 static tree
735 remap_blocks (tree block, copy_body_data *id)
737 tree t;
738 tree new_tree = block;
740 if (!block)
741 return NULL;
743 remap_block (&new_tree, id);
744 gcc_assert (new_tree != block);
745 for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
746 prepend_lexical_block (new_tree, remap_blocks (t, id));
747 /* Blocks are in arbitrary order, but make things slightly prettier and do
748 not swap order when producing a copy. */
749 BLOCK_SUBBLOCKS (new_tree) = blocks_nreverse (BLOCK_SUBBLOCKS (new_tree));
750 return new_tree;
753 /* Remap the block tree rooted at BLOCK to nothing. */
754 static void
755 remap_blocks_to_null (tree block, copy_body_data *id)
757 tree t;
758 insert_decl_map (id, block, NULL_TREE);
759 for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
760 remap_blocks_to_null (t, id);
763 static void
764 copy_statement_list (tree *tp)
766 tree_stmt_iterator oi, ni;
767 tree new_tree;
769 new_tree = alloc_stmt_list ();
770 ni = tsi_start (new_tree);
771 oi = tsi_start (*tp);
772 TREE_TYPE (new_tree) = TREE_TYPE (*tp);
773 *tp = new_tree;
775 for (; !tsi_end_p (oi); tsi_next (&oi))
777 tree stmt = tsi_stmt (oi);
778 if (TREE_CODE (stmt) == STATEMENT_LIST)
779 /* This copy is not redundant; tsi_link_after will smash this
780 STATEMENT_LIST into the end of the one we're building, and we
781 don't want to do that with the original. */
782 copy_statement_list (&stmt);
783 tsi_link_after (&ni, stmt, TSI_CONTINUE_LINKING);
787 static void
788 copy_bind_expr (tree *tp, int *walk_subtrees, copy_body_data *id)
790 tree block = BIND_EXPR_BLOCK (*tp);
791 /* Copy (and replace) the statement. */
792 copy_tree_r (tp, walk_subtrees, NULL);
793 if (block)
795 remap_block (&block, id);
796 BIND_EXPR_BLOCK (*tp) = block;
799 if (BIND_EXPR_VARS (*tp))
800 /* This will remap a lot of the same decls again, but this should be
801 harmless. */
802 BIND_EXPR_VARS (*tp) = remap_decls (BIND_EXPR_VARS (*tp), NULL, id);
806 /* Create a new gimple_seq by remapping all the statements in BODY
807 using the inlining information in ID. */
809 static gimple_seq
810 remap_gimple_seq (gimple_seq body, copy_body_data *id)
812 gimple_stmt_iterator si;
813 gimple_seq new_body = NULL;
815 for (si = gsi_start (body); !gsi_end_p (si); gsi_next (&si))
817 gimple_seq new_stmts = remap_gimple_stmt (gsi_stmt (si), id);
818 gimple_seq_add_seq (&new_body, new_stmts);
821 return new_body;
825 /* Copy a GIMPLE_BIND statement STMT, remapping all the symbols in its
826 block using the mapping information in ID. */
828 static gimple
829 copy_gimple_bind (gbind *stmt, copy_body_data *id)
831 gimple new_bind;
832 tree new_block, new_vars;
833 gimple_seq body, new_body;
835 /* Copy the statement. Note that we purposely don't use copy_stmt
836 here because we need to remap statements as we copy. */
837 body = gimple_bind_body (stmt);
838 new_body = remap_gimple_seq (body, id);
840 new_block = gimple_bind_block (stmt);
841 if (new_block)
842 remap_block (&new_block, id);
844 /* This will remap a lot of the same decls again, but this should be
845 harmless. */
846 new_vars = gimple_bind_vars (stmt);
847 if (new_vars)
848 new_vars = remap_decls (new_vars, NULL, id);
850 new_bind = gimple_build_bind (new_vars, new_body, new_block);
852 return new_bind;
855 /* Return true if DECL is a parameter or a SSA_NAME for a parameter. */
857 static bool
858 is_parm (tree decl)
860 if (TREE_CODE (decl) == SSA_NAME)
862 decl = SSA_NAME_VAR (decl);
863 if (!decl)
864 return false;
867 return (TREE_CODE (decl) == PARM_DECL);
870 /* Remap the dependence CLIQUE from the source to the destination function
871 as specified in ID. */
873 static unsigned short
874 remap_dependence_clique (copy_body_data *id, unsigned short clique)
876 if (clique == 0 || processing_debug_stmt)
877 return 0;
878 if (!id->dependence_map)
879 id->dependence_map
880 = new hash_map<unsigned short, unsigned short, dependence_hasher>;
881 bool existed;
882 unsigned short &newc = id->dependence_map->get_or_insert (clique, &existed);
883 if (!existed)
884 newc = ++cfun->last_clique;
885 return newc;
888 /* Remap the GIMPLE operand pointed to by *TP. DATA is really a
889 'struct walk_stmt_info *'. DATA->INFO is a 'copy_body_data *'.
890 WALK_SUBTREES is used to tell walk_gimple_op whether to keep
891 recursing into the children nodes of *TP. */
893 static tree
894 remap_gimple_op_r (tree *tp, int *walk_subtrees, void *data)
896 struct walk_stmt_info *wi_p = (struct walk_stmt_info *) data;
897 copy_body_data *id = (copy_body_data *) wi_p->info;
898 tree fn = id->src_fn;
900 if (TREE_CODE (*tp) == SSA_NAME)
902 *tp = remap_ssa_name (*tp, id);
903 *walk_subtrees = 0;
904 return NULL;
906 else if (auto_var_in_fn_p (*tp, fn))
908 /* Local variables and labels need to be replaced by equivalent
909 variables. We don't want to copy static variables; there's
910 only one of those, no matter how many times we inline the
911 containing function. Similarly for globals from an outer
912 function. */
913 tree new_decl;
915 /* Remap the declaration. */
916 new_decl = remap_decl (*tp, id);
917 gcc_assert (new_decl);
918 /* Replace this variable with the copy. */
919 STRIP_TYPE_NOPS (new_decl);
920 /* ??? The C++ frontend uses void * pointer zero to initialize
921 any other type. This confuses the middle-end type verification.
922 As cloned bodies do not go through gimplification again, the fixup
923 there doesn't trigger. */
924 if (TREE_CODE (new_decl) == INTEGER_CST
925 && !useless_type_conversion_p (TREE_TYPE (*tp), TREE_TYPE (new_decl)))
926 new_decl = fold_convert (TREE_TYPE (*tp), new_decl);
927 *tp = new_decl;
928 *walk_subtrees = 0;
930 else if (TREE_CODE (*tp) == STATEMENT_LIST)
931 gcc_unreachable ();
932 else if (TREE_CODE (*tp) == SAVE_EXPR)
933 gcc_unreachable ();
934 else if (TREE_CODE (*tp) == LABEL_DECL
935 && (!DECL_CONTEXT (*tp)
936 || decl_function_context (*tp) == id->src_fn))
937 /* These may need to be remapped for EH handling. */
938 *tp = remap_decl (*tp, id);
939 else if (TREE_CODE (*tp) == FIELD_DECL)
941 /* If the enclosing record type is variably_modified_type_p, the field
942 has already been remapped. Otherwise, it need not be. */
943 tree *n = id->decl_map->get (*tp);
944 if (n)
945 *tp = *n;
946 *walk_subtrees = 0;
948 else if (TYPE_P (*tp))
949 /* Types may need remapping as well. */
950 *tp = remap_type (*tp, id);
951 else if (CONSTANT_CLASS_P (*tp))
953 /* If this is a constant, we have to copy the node iff the type
954 will be remapped. copy_tree_r will not copy a constant. */
955 tree new_type = remap_type (TREE_TYPE (*tp), id);
957 if (new_type == TREE_TYPE (*tp))
958 *walk_subtrees = 0;
960 else if (TREE_CODE (*tp) == INTEGER_CST)
961 *tp = wide_int_to_tree (new_type, *tp);
962 else
964 *tp = copy_node (*tp);
965 TREE_TYPE (*tp) = new_type;
968 else
970 /* Otherwise, just copy the node. Note that copy_tree_r already
971 knows not to copy VAR_DECLs, etc., so this is safe. */
973 if (TREE_CODE (*tp) == MEM_REF)
975 /* We need to re-canonicalize MEM_REFs from inline substitutions
976 that can happen when a pointer argument is an ADDR_EXPR.
977 Recurse here manually to allow that. */
978 tree ptr = TREE_OPERAND (*tp, 0);
979 tree type = remap_type (TREE_TYPE (*tp), id);
980 tree old = *tp;
981 walk_tree (&ptr, remap_gimple_op_r, data, NULL);
982 *tp = fold_build2 (MEM_REF, type, ptr, TREE_OPERAND (*tp, 1));
983 TREE_THIS_VOLATILE (*tp) = TREE_THIS_VOLATILE (old);
984 TREE_SIDE_EFFECTS (*tp) = TREE_SIDE_EFFECTS (old);
985 TREE_NO_WARNING (*tp) = TREE_NO_WARNING (old);
986 if (MR_DEPENDENCE_CLIQUE (old) != 0)
988 MR_DEPENDENCE_CLIQUE (*tp)
989 = remap_dependence_clique (id, MR_DEPENDENCE_CLIQUE (old));
990 MR_DEPENDENCE_BASE (*tp) = MR_DEPENDENCE_BASE (old);
992 /* We cannot propagate the TREE_THIS_NOTRAP flag if we have
993 remapped a parameter as the property might be valid only
994 for the parameter itself. */
995 if (TREE_THIS_NOTRAP (old)
996 && (!is_parm (TREE_OPERAND (old, 0))
997 || (!id->transform_parameter && is_parm (ptr))))
998 TREE_THIS_NOTRAP (*tp) = 1;
999 *walk_subtrees = 0;
1000 return NULL;
1003 /* Here is the "usual case". Copy this tree node, and then
1004 tweak some special cases. */
1005 copy_tree_r (tp, walk_subtrees, NULL);
1007 if (TREE_CODE (*tp) != OMP_CLAUSE)
1008 TREE_TYPE (*tp) = remap_type (TREE_TYPE (*tp), id);
1010 if (TREE_CODE (*tp) == TARGET_EXPR && TREE_OPERAND (*tp, 3))
1012 /* The copied TARGET_EXPR has never been expanded, even if the
1013 original node was expanded already. */
1014 TREE_OPERAND (*tp, 1) = TREE_OPERAND (*tp, 3);
1015 TREE_OPERAND (*tp, 3) = NULL_TREE;
1017 else if (TREE_CODE (*tp) == ADDR_EXPR)
1019 /* Variable substitution need not be simple. In particular,
1020 the MEM_REF substitution above. Make sure that
1021 TREE_CONSTANT and friends are up-to-date. */
1022 int invariant = is_gimple_min_invariant (*tp);
1023 walk_tree (&TREE_OPERAND (*tp, 0), remap_gimple_op_r, data, NULL);
1024 recompute_tree_invariant_for_addr_expr (*tp);
1026 /* If this used to be invariant, but is not any longer,
1027 then regimplification is probably needed. */
1028 if (invariant && !is_gimple_min_invariant (*tp))
1029 id->regimplify = true;
1031 *walk_subtrees = 0;
1035 /* Update the TREE_BLOCK for the cloned expr. */
1036 if (EXPR_P (*tp))
1038 tree new_block = id->remapping_type_depth == 0 ? id->block : NULL;
1039 tree old_block = TREE_BLOCK (*tp);
1040 if (old_block)
1042 tree *n;
1043 n = id->decl_map->get (TREE_BLOCK (*tp));
1044 if (n)
1045 new_block = *n;
1047 TREE_SET_BLOCK (*tp, new_block);
1050 /* Keep iterating. */
1051 return NULL_TREE;
1055 /* Called from copy_body_id via walk_tree. DATA is really a
1056 `copy_body_data *'. */
1058 tree
1059 copy_tree_body_r (tree *tp, int *walk_subtrees, void *data)
1061 copy_body_data *id = (copy_body_data *) data;
1062 tree fn = id->src_fn;
1063 tree new_block;
1065 /* Begin by recognizing trees that we'll completely rewrite for the
1066 inlining context. Our output for these trees is completely
1067 different from our input (e.g. RETURN_EXPR is deleted and morphs
1068 into an edge). Further down, we'll handle trees that get
1069 duplicated and/or tweaked. */
1071 /* When requested, RETURN_EXPRs should be transformed to just the
1072 contained MODIFY_EXPR. The branch semantics of the return will
1073 be handled elsewhere by manipulating the CFG rather than a statement. */
1074 if (TREE_CODE (*tp) == RETURN_EXPR && id->transform_return_to_modify)
1076 tree assignment = TREE_OPERAND (*tp, 0);
1078 /* If we're returning something, just turn that into an
1079 assignment into the equivalent of the original RESULT_DECL.
1080 If the "assignment" is just the result decl, the result
1081 decl has already been set (e.g. a recent "foo (&result_decl,
1082 ...)"); just toss the entire RETURN_EXPR. */
1083 if (assignment && TREE_CODE (assignment) == MODIFY_EXPR)
1085 /* Replace the RETURN_EXPR with (a copy of) the
1086 MODIFY_EXPR hanging underneath. */
1087 *tp = copy_node (assignment);
1089 else /* Else the RETURN_EXPR returns no value. */
1091 *tp = NULL;
1092 return (tree) (void *)1;
1095 else if (TREE_CODE (*tp) == SSA_NAME)
1097 *tp = remap_ssa_name (*tp, id);
1098 *walk_subtrees = 0;
1099 return NULL;
1102 /* Local variables and labels need to be replaced by equivalent
1103 variables. We don't want to copy static variables; there's only
1104 one of those, no matter how many times we inline the containing
1105 function. Similarly for globals from an outer function. */
1106 else if (auto_var_in_fn_p (*tp, fn))
1108 tree new_decl;
1110 /* Remap the declaration. */
1111 new_decl = remap_decl (*tp, id);
1112 gcc_assert (new_decl);
1113 /* Replace this variable with the copy. */
1114 STRIP_TYPE_NOPS (new_decl);
1115 *tp = new_decl;
1116 *walk_subtrees = 0;
1118 else if (TREE_CODE (*tp) == STATEMENT_LIST)
1119 copy_statement_list (tp);
1120 else if (TREE_CODE (*tp) == SAVE_EXPR
1121 || TREE_CODE (*tp) == TARGET_EXPR)
1122 remap_save_expr (tp, id->decl_map, walk_subtrees);
1123 else if (TREE_CODE (*tp) == LABEL_DECL
1124 && (! DECL_CONTEXT (*tp)
1125 || decl_function_context (*tp) == id->src_fn))
1126 /* These may need to be remapped for EH handling. */
1127 *tp = remap_decl (*tp, id);
1128 else if (TREE_CODE (*tp) == BIND_EXPR)
1129 copy_bind_expr (tp, walk_subtrees, id);
1130 /* Types may need remapping as well. */
1131 else if (TYPE_P (*tp))
1132 *tp = remap_type (*tp, id);
1134 /* If this is a constant, we have to copy the node iff the type will be
1135 remapped. copy_tree_r will not copy a constant. */
1136 else if (CONSTANT_CLASS_P (*tp))
1138 tree new_type = remap_type (TREE_TYPE (*tp), id);
1140 if (new_type == TREE_TYPE (*tp))
1141 *walk_subtrees = 0;
1143 else if (TREE_CODE (*tp) == INTEGER_CST)
1144 *tp = wide_int_to_tree (new_type, *tp);
1145 else
1147 *tp = copy_node (*tp);
1148 TREE_TYPE (*tp) = new_type;
1152 /* Otherwise, just copy the node. Note that copy_tree_r already
1153 knows not to copy VAR_DECLs, etc., so this is safe. */
1154 else
1156 /* Here we handle trees that are not completely rewritten.
1157 First we detect some inlining-induced bogosities for
1158 discarding. */
1159 if (TREE_CODE (*tp) == MODIFY_EXPR
1160 && TREE_OPERAND (*tp, 0) == TREE_OPERAND (*tp, 1)
1161 && (auto_var_in_fn_p (TREE_OPERAND (*tp, 0), fn)))
1163 /* Some assignments VAR = VAR; don't generate any rtl code
1164 and thus don't count as variable modification. Avoid
1165 keeping bogosities like 0 = 0. */
1166 tree decl = TREE_OPERAND (*tp, 0), value;
1167 tree *n;
1169 n = id->decl_map->get (decl);
1170 if (n)
1172 value = *n;
1173 STRIP_TYPE_NOPS (value);
1174 if (TREE_CONSTANT (value) || TREE_READONLY (value))
1176 *tp = build_empty_stmt (EXPR_LOCATION (*tp));
1177 return copy_tree_body_r (tp, walk_subtrees, data);
1181 else if (TREE_CODE (*tp) == INDIRECT_REF)
1183 /* Get rid of *& from inline substitutions that can happen when a
1184 pointer argument is an ADDR_EXPR. */
1185 tree decl = TREE_OPERAND (*tp, 0);
1186 tree *n = id->decl_map->get (decl);
1187 if (n)
1189 /* If we happen to get an ADDR_EXPR in n->value, strip
1190 it manually here as we'll eventually get ADDR_EXPRs
1191 which lie about their types pointed to. In this case
1192 build_fold_indirect_ref wouldn't strip the INDIRECT_REF,
1193 but we absolutely rely on that. As fold_indirect_ref
1194 does other useful transformations, try that first, though. */
1195 tree type = TREE_TYPE (*tp);
1196 tree ptr = id->do_not_unshare ? *n : unshare_expr (*n);
1197 tree old = *tp;
1198 *tp = gimple_fold_indirect_ref (ptr);
1199 if (! *tp)
1201 if (TREE_CODE (ptr) == ADDR_EXPR)
1204 *tp = fold_indirect_ref_1 (EXPR_LOCATION (ptr), type, ptr);
1205 /* ??? We should either assert here or build
1206 a VIEW_CONVERT_EXPR instead of blindly leaking
1207 incompatible types to our IL. */
1208 if (! *tp)
1209 *tp = TREE_OPERAND (ptr, 0);
1211 else
1213 *tp = build1 (INDIRECT_REF, type, ptr);
1214 TREE_THIS_VOLATILE (*tp) = TREE_THIS_VOLATILE (old);
1215 TREE_SIDE_EFFECTS (*tp) = TREE_SIDE_EFFECTS (old);
1216 TREE_READONLY (*tp) = TREE_READONLY (old);
1217 /* We cannot propagate the TREE_THIS_NOTRAP flag if we
1218 have remapped a parameter as the property might be
1219 valid only for the parameter itself. */
1220 if (TREE_THIS_NOTRAP (old)
1221 && (!is_parm (TREE_OPERAND (old, 0))
1222 || (!id->transform_parameter && is_parm (ptr))))
1223 TREE_THIS_NOTRAP (*tp) = 1;
1226 *walk_subtrees = 0;
1227 return NULL;
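/* Illustrative sketch (hypothetical code) of the *& elimination above:
   when inlining  void f (int *p) { ... *p ... }  at a call  f (&v),
   the parameter p is mapped to &v, so the INDIRECT_REF *p would
   naively become *&v; the folding above reduces it back to plain v.  */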
1230 else if (TREE_CODE (*tp) == MEM_REF)
1232 /* We need to re-canonicalize MEM_REFs from inline substitutions
1233 that can happen when a pointer argument is an ADDR_EXPR.
1234 Recurse here manually to allow that. */
1235 tree ptr = TREE_OPERAND (*tp, 0);
1236 tree type = remap_type (TREE_TYPE (*tp), id);
1237 tree old = *tp;
1238 walk_tree (&ptr, copy_tree_body_r, data, NULL);
1239 *tp = fold_build2 (MEM_REF, type, ptr, TREE_OPERAND (*tp, 1));
1240 TREE_THIS_VOLATILE (*tp) = TREE_THIS_VOLATILE (old);
1241 TREE_SIDE_EFFECTS (*tp) = TREE_SIDE_EFFECTS (old);
1242 TREE_NO_WARNING (*tp) = TREE_NO_WARNING (old);
1243 if (MR_DEPENDENCE_CLIQUE (old) != 0)
1245 MR_DEPENDENCE_CLIQUE (*tp)
1246 = remap_dependence_clique (id, MR_DEPENDENCE_CLIQUE (old));
1247 MR_DEPENDENCE_BASE (*tp) = MR_DEPENDENCE_BASE (old);
1249 /* We cannot propagate the TREE_THIS_NOTRAP flag if we have
1250 remapped a parameter as the property might be valid only
1251 for the parameter itself. */
1252 if (TREE_THIS_NOTRAP (old)
1253 && (!is_parm (TREE_OPERAND (old, 0))
1254 || (!id->transform_parameter && is_parm (ptr))))
1255 TREE_THIS_NOTRAP (*tp) = 1;
1256 *walk_subtrees = 0;
1257 return NULL;
1260 /* Here is the "usual case". Copy this tree node, and then
1261 tweak some special cases. */
1262 copy_tree_r (tp, walk_subtrees, NULL);
1264 /* If EXPR has a block defined, map it to the newly constructed block.
1265 When inlining, we want EXPRs without a block to appear in the block
1266 of the function call if we are not remapping a type. */
1267 if (EXPR_P (*tp))
1269 new_block = id->remapping_type_depth == 0 ? id->block : NULL;
1270 if (TREE_BLOCK (*tp))
1272 tree *n;
1273 n = id->decl_map->get (TREE_BLOCK (*tp));
1274 if (n)
1275 new_block = *n;
1277 TREE_SET_BLOCK (*tp, new_block);
1280 if (TREE_CODE (*tp) != OMP_CLAUSE)
1281 TREE_TYPE (*tp) = remap_type (TREE_TYPE (*tp), id);
1283 /* The copied TARGET_EXPR has never been expanded, even if the
1284 original node was expanded already. */
1285 if (TREE_CODE (*tp) == TARGET_EXPR && TREE_OPERAND (*tp, 3))
1287 TREE_OPERAND (*tp, 1) = TREE_OPERAND (*tp, 3);
1288 TREE_OPERAND (*tp, 3) = NULL_TREE;
1291 /* Variable substitution need not be simple. In particular, the
1292 INDIRECT_REF substitution above. Make sure that TREE_CONSTANT
1293 and friends are up-to-date. */
1294 else if (TREE_CODE (*tp) == ADDR_EXPR)
1296 int invariant = is_gimple_min_invariant (*tp);
1297 walk_tree (&TREE_OPERAND (*tp, 0), copy_tree_body_r, id, NULL);
1299 /* Handle the case where we substituted an INDIRECT_REF
1300 into the operand of the ADDR_EXPR. */
1301 if (TREE_CODE (TREE_OPERAND (*tp, 0)) == INDIRECT_REF)
1302 *tp = TREE_OPERAND (TREE_OPERAND (*tp, 0), 0);
1303 else
1304 recompute_tree_invariant_for_addr_expr (*tp);
1306 /* If this used to be invariant, but is not any longer,
1307 then regimplification is probably needed. */
1308 if (invariant && !is_gimple_min_invariant (*tp))
1309 id->regimplify = true;
1311 *walk_subtrees = 0;
1315 /* Keep iterating. */
1316 return NULL_TREE;
1319 /* Helper for remap_gimple_stmt. Given an EH region number for the
1320 source function, map that to the duplicate EH region number in
1321 the destination function. */
1323 static int
1324 remap_eh_region_nr (int old_nr, copy_body_data *id)
1326 eh_region old_r, new_r;
1328 old_r = get_eh_region_from_number_fn (id->src_cfun, old_nr);
1329 new_r = static_cast<eh_region> (*id->eh_map->get (old_r));
1331 return new_r->index;
1334 /* Similar, but operate on INTEGER_CSTs. */
1336 static tree
1337 remap_eh_region_tree_nr (tree old_t_nr, copy_body_data *id)
1339 int old_nr, new_nr;
1341 old_nr = tree_to_shwi (old_t_nr);
1342 new_nr = remap_eh_region_nr (old_nr, id);
1344 return build_int_cst (integer_type_node, new_nr);
1347 /* Helper for copy_bb. Remap statement STMT using the inlining
1348 information in ID. Return the new statement copy. */
1350 static gimple_seq
1351 remap_gimple_stmt (gimple stmt, copy_body_data *id)
1353 gimple copy = NULL;
1354 struct walk_stmt_info wi;
1355 bool skip_first = false;
1356 gimple_seq stmts = NULL;
1358 if (is_gimple_debug (stmt)
1359 && !opt_for_fn (id->dst_fn, flag_var_tracking_assignments))
1360 return stmts;
1362 /* Begin by recognizing trees that we'll completely rewrite for the
1363 inlining context. Our output for these trees is completely
1364 different from our input (e.g. RETURN_EXPR is deleted and morphs
1365 into an edge). Further down, we'll handle trees that get
1366 duplicated and/or tweaked. */
1368 /* When requested, GIMPLE_RETURNs should be transformed to just the
1369 contained GIMPLE_ASSIGN. The branch semantics of the return will
1370 be handled elsewhere by manipulating the CFG rather than the
1371 statement. */
1372 if (gimple_code (stmt) == GIMPLE_RETURN && id->transform_return_to_modify)
1374 tree retval = gimple_return_retval (as_a <greturn *> (stmt));
1375 tree retbnd = gimple_return_retbnd (stmt);
1376 tree bndslot = id->retbnd;
1378 if (retbnd && bndslot)
1380 gimple bndcopy = gimple_build_assign (bndslot, retbnd);
1381 memset (&wi, 0, sizeof (wi));
1382 wi.info = id;
1383 walk_gimple_op (bndcopy, remap_gimple_op_r, &wi);
1384 gimple_seq_add_stmt (&stmts, bndcopy);
1387 /* If we're returning something, just turn that into an
1388 assignment into the equivalent of the original RESULT_DECL.
1389 If RETVAL is just the result decl, the result decl has
1390 already been set (e.g. a recent "foo (&result_decl, ...)");
1391 just toss the entire GIMPLE_RETURN. */
1392 if (retval
1393 && (TREE_CODE (retval) != RESULT_DECL
1394 && (TREE_CODE (retval) != SSA_NAME
1395 || ! SSA_NAME_VAR (retval)
1396 || TREE_CODE (SSA_NAME_VAR (retval)) != RESULT_DECL)))
1398 copy = gimple_build_assign (id->do_not_unshare
1399 ? id->retvar : unshare_expr (id->retvar),
1400 retval);
1401 /* id->retvar is already substituted. Skip it on later remapping. */
1402 skip_first = true;
1404 /* We need to copy bounds if we return a structure with pointers when
1405 inlining into an instrumented function. */
1406 if (chkp_function_instrumented_p (id->dst_fn)
1407 && !bndslot
1408 && !BOUNDED_P (id->retvar)
1409 && chkp_type_has_pointer (TREE_TYPE (id->retvar)))
1410 id->assign_stmts.safe_push (copy);
1413 else
1414 return stmts;
1416 else if (gimple_has_substatements (stmt))
1418 gimple_seq s1, s2;
1420 /* When cloning bodies from the C++ front end, we will be handed bodies
1421 in High GIMPLE form. Handle here all the High GIMPLE statements that
1422 have embedded statements. */
1423 switch (gimple_code (stmt))
1425 case GIMPLE_BIND:
1426 copy = copy_gimple_bind (as_a <gbind *> (stmt), id);
1427 break;
1429 case GIMPLE_CATCH:
1431 gcatch *catch_stmt = as_a <gcatch *> (stmt);
1432 s1 = remap_gimple_seq (gimple_catch_handler (catch_stmt), id);
1433 copy = gimple_build_catch (gimple_catch_types (catch_stmt), s1);
1435 break;
1437 case GIMPLE_EH_FILTER:
1438 s1 = remap_gimple_seq (gimple_eh_filter_failure (stmt), id);
1439 copy = gimple_build_eh_filter (gimple_eh_filter_types (stmt), s1);
1440 break;
1442 case GIMPLE_TRY:
1443 s1 = remap_gimple_seq (gimple_try_eval (stmt), id);
1444 s2 = remap_gimple_seq (gimple_try_cleanup (stmt), id);
1445 copy = gimple_build_try (s1, s2, gimple_try_kind (stmt));
1446 break;
1448 case GIMPLE_WITH_CLEANUP_EXPR:
1449 s1 = remap_gimple_seq (gimple_wce_cleanup (stmt), id);
1450 copy = gimple_build_wce (s1);
1451 break;
1453 case GIMPLE_OMP_PARALLEL:
1455 gomp_parallel *omp_par_stmt = as_a <gomp_parallel *> (stmt);
1456 s1 = remap_gimple_seq (gimple_omp_body (omp_par_stmt), id);
1457 copy = gimple_build_omp_parallel
1458 (s1,
1459 gimple_omp_parallel_clauses (omp_par_stmt),
1460 gimple_omp_parallel_child_fn (omp_par_stmt),
1461 gimple_omp_parallel_data_arg (omp_par_stmt));
1463 break;
1465 case GIMPLE_OMP_TASK:
1466 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1467 copy = gimple_build_omp_task
1468 (s1,
1469 gimple_omp_task_clauses (stmt),
1470 gimple_omp_task_child_fn (stmt),
1471 gimple_omp_task_data_arg (stmt),
1472 gimple_omp_task_copy_fn (stmt),
1473 gimple_omp_task_arg_size (stmt),
1474 gimple_omp_task_arg_align (stmt));
1475 break;
1477 case GIMPLE_OMP_FOR:
1478 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1479 s2 = remap_gimple_seq (gimple_omp_for_pre_body (stmt), id);
1480 copy = gimple_build_omp_for (s1, gimple_omp_for_kind (stmt),
1481 gimple_omp_for_clauses (stmt),
1482 gimple_omp_for_collapse (stmt), s2);
1484 size_t i;
1485 for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
1487 gimple_omp_for_set_index (copy, i,
1488 gimple_omp_for_index (stmt, i));
1489 gimple_omp_for_set_initial (copy, i,
1490 gimple_omp_for_initial (stmt, i));
1491 gimple_omp_for_set_final (copy, i,
1492 gimple_omp_for_final (stmt, i));
1493 gimple_omp_for_set_incr (copy, i,
1494 gimple_omp_for_incr (stmt, i));
1495 gimple_omp_for_set_cond (copy, i,
1496 gimple_omp_for_cond (stmt, i));
1499 break;
1501 case GIMPLE_OMP_MASTER:
1502 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1503 copy = gimple_build_omp_master (s1);
1504 break;
1506 case GIMPLE_OMP_TASKGROUP:
1507 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1508 copy = gimple_build_omp_taskgroup (s1);
1509 break;
1511 case GIMPLE_OMP_ORDERED:
1512 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1513 copy = gimple_build_omp_ordered (s1);
1514 break;
1516 case GIMPLE_OMP_SECTION:
1517 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1518 copy = gimple_build_omp_section (s1);
1519 break;
1521 case GIMPLE_OMP_SECTIONS:
1522 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1523 copy = gimple_build_omp_sections
1524 (s1, gimple_omp_sections_clauses (stmt));
1525 break;
1527 case GIMPLE_OMP_SINGLE:
1528 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1529 copy = gimple_build_omp_single
1530 (s1, gimple_omp_single_clauses (stmt));
1531 break;
1533 case GIMPLE_OMP_TARGET:
1534 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1535 copy = gimple_build_omp_target
1536 (s1, gimple_omp_target_kind (stmt),
1537 gimple_omp_target_clauses (stmt));
1538 break;
1540 case GIMPLE_OMP_TEAMS:
1541 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1542 copy = gimple_build_omp_teams
1543 (s1, gimple_omp_teams_clauses (stmt));
1544 break;
1546 case GIMPLE_OMP_CRITICAL:
1547 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1548 copy = gimple_build_omp_critical (s1,
1549 gimple_omp_critical_name (
1550 as_a <gomp_critical *> (stmt)));
1551 break;
1553 case GIMPLE_TRANSACTION:
1555 gtransaction *old_trans_stmt = as_a <gtransaction *> (stmt);
1556 gtransaction *new_trans_stmt;
1557 s1 = remap_gimple_seq (gimple_transaction_body (old_trans_stmt),
1558 id);
1559 copy = new_trans_stmt
1560 = gimple_build_transaction (s1,
1562 gimple_transaction_label (old_trans_stmt));
1563 gimple_transaction_set_subcode (
1564 new_trans_stmt,
1565 gimple_transaction_subcode (old_trans_stmt));
1567 break;
1569 default:
1570 gcc_unreachable ();
1573 else
1575 if (gimple_assign_copy_p (stmt)
1576 && gimple_assign_lhs (stmt) == gimple_assign_rhs1 (stmt)
1577 && auto_var_in_fn_p (gimple_assign_lhs (stmt), id->src_fn))
1579 /* Here we handle statements that are not completely rewritten.
1580 First we detect some inlining-induced bogosities for
1581 discarding. */
1583 /* Some assignments VAR = VAR; don't generate any rtl code
1584 and thus don't count as variable modification. Avoid
1585 keeping bogosities like 0 = 0. */
1586 tree decl = gimple_assign_lhs (stmt), value;
1587 tree *n;
1589 n = id->decl_map->get (decl);
1590 if (n)
1592 value = *n;
1593 STRIP_TYPE_NOPS (value);
1594 if (TREE_CONSTANT (value) || TREE_READONLY (value))
1595 return NULL;
1599 /* For *ptr_N ={v} {CLOBBER}, if ptr_N is SSA_NAME defined
1600 in a block that we aren't copying during tree_function_versioning,
1601 just drop the clobber stmt. */
1602 if (id->blocks_to_copy && gimple_clobber_p (stmt))
1604 tree lhs = gimple_assign_lhs (stmt);
1605 if (TREE_CODE (lhs) == MEM_REF
1606 && TREE_CODE (TREE_OPERAND (lhs, 0)) == SSA_NAME)
1608 gimple def_stmt = SSA_NAME_DEF_STMT (TREE_OPERAND (lhs, 0));
1609 if (gimple_bb (def_stmt)
1610 && !bitmap_bit_p (id->blocks_to_copy,
1611 gimple_bb (def_stmt)->index))
1612 return NULL;
1616 if (gimple_debug_bind_p (stmt))
1618 gdebug *copy
1619 = gimple_build_debug_bind (gimple_debug_bind_get_var (stmt),
1620 gimple_debug_bind_get_value (stmt),
1621 stmt);
1622 id->debug_stmts.safe_push (copy);
1623 gimple_seq_add_stmt (&stmts, copy);
1624 return stmts;
1626 if (gimple_debug_source_bind_p (stmt))
1628 gdebug *copy = gimple_build_debug_source_bind
1629 (gimple_debug_source_bind_get_var (stmt),
1630 gimple_debug_source_bind_get_value (stmt),
1631 stmt);
1632 id->debug_stmts.safe_push (copy);
1633 gimple_seq_add_stmt (&stmts, copy);
1634 return stmts;
1637 /* Create a new deep copy of the statement. */
1638 copy = gimple_copy (stmt);
1640 /* Clear flags that need revisiting. */
1641 if (gcall *call_stmt = dyn_cast <gcall *> (copy))
1643 if (gimple_call_tail_p (call_stmt))
1644 gimple_call_set_tail (call_stmt, false);
1645 if (gimple_call_from_thunk_p (call_stmt))
1646 gimple_call_set_from_thunk (call_stmt, false);
1649 /* Remap the region numbers for __builtin_eh_{pointer,filter},
1650 RESX and EH_DISPATCH. */
1651 if (id->eh_map)
1652 switch (gimple_code (copy))
1654 case GIMPLE_CALL:
1656 tree r, fndecl = gimple_call_fndecl (copy);
1657 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
1658 switch (DECL_FUNCTION_CODE (fndecl))
1660 case BUILT_IN_EH_COPY_VALUES:
1661 r = gimple_call_arg (copy, 1);
1662 r = remap_eh_region_tree_nr (r, id);
1663 gimple_call_set_arg (copy, 1, r);
1664 /* FALLTHRU */
1666 case BUILT_IN_EH_POINTER:
1667 case BUILT_IN_EH_FILTER:
1668 r = gimple_call_arg (copy, 0);
1669 r = remap_eh_region_tree_nr (r, id);
1670 gimple_call_set_arg (copy, 0, r);
1671 break;
1673 default:
1674 break;
1677 /* Reset alias info if we didn't apply measures to
1678 keep it valid over inlining by setting DECL_PT_UID. */
1679 if (!id->src_cfun->gimple_df
1680 || !id->src_cfun->gimple_df->ipa_pta)
1681 gimple_call_reset_alias_info (as_a <gcall *> (copy));
1683 break;
1685 case GIMPLE_RESX:
1687 gresx *resx_stmt = as_a <gresx *> (copy);
1688 int r = gimple_resx_region (resx_stmt);
1689 r = remap_eh_region_nr (r, id);
1690 gimple_resx_set_region (resx_stmt, r);
1692 break;
1694 case GIMPLE_EH_DISPATCH:
1696 geh_dispatch *eh_dispatch = as_a <geh_dispatch *> (copy);
1697 int r = gimple_eh_dispatch_region (eh_dispatch);
1698 r = remap_eh_region_nr (r, id);
1699 gimple_eh_dispatch_set_region (eh_dispatch, r);
1701 break;
1703 default:
1704 break;
1708 /* If STMT has a block defined, map it to the newly constructed
1709 block. */
1710 if (gimple_block (copy))
1712 tree *n;
1713 n = id->decl_map->get (gimple_block (copy));
1714 gcc_assert (n);
1715 gimple_set_block (copy, *n);
1718 if (gimple_debug_bind_p (copy) || gimple_debug_source_bind_p (copy))
1720 gimple_seq_add_stmt (&stmts, copy);
1721 return stmts;
1724 /* Remap all the operands in COPY. */
1725 memset (&wi, 0, sizeof (wi));
1726 wi.info = id;
1727 if (skip_first)
1728 walk_tree (gimple_op_ptr (copy, 1), remap_gimple_op_r, &wi, NULL);
1729 else
1730 walk_gimple_op (copy, remap_gimple_op_r, &wi);
1732 /* Clear the copied virtual operands. We are not remapping them here
1733 but are going to recreate them from scratch. */
1734 if (gimple_has_mem_ops (copy))
1736 gimple_set_vdef (copy, NULL_TREE);
1737 gimple_set_vuse (copy, NULL_TREE);
1740 gimple_seq_add_stmt (&stmts, copy);
1741 return stmts;
1745 /* Copy a basic block, scaling the profile accordingly. Edges will be taken
1746 care of later. */
1748 static basic_block
1749 copy_bb (copy_body_data *id, basic_block bb, int frequency_scale,
1750 gcov_type count_scale)
1752 gimple_stmt_iterator gsi, copy_gsi, seq_gsi;
1753 basic_block copy_basic_block;
1754 tree decl;
1755 gcov_type freq;
1756 basic_block prev;
1758 /* Search for previous copied basic block. */
1759 prev = bb->prev_bb;
1760 while (!prev->aux)
1761 prev = prev->prev_bb;
1763 /* create_basic_block() will append every new block to
1764 basic_block_info automatically. */
1765 copy_basic_block = create_basic_block (NULL, (void *) 0,
1766 (basic_block) prev->aux);
1767 copy_basic_block->count = apply_scale (bb->count, count_scale);
1769 /* We are going to rebuild frequencies from scratch. These values
1770 are only of minor importance for driving canonicalize_loop_headers. */
1771 freq = apply_scale ((gcov_type)bb->frequency, frequency_scale);
1773 /* We recompute frequencies after inlining, so this is quite safe. */
1774 if (freq > BB_FREQ_MAX)
1775 freq = BB_FREQ_MAX;
1776 copy_basic_block->frequency = freq;
1778 copy_gsi = gsi_start_bb (copy_basic_block);
1780 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
1782 gimple_seq stmts;
1783 gimple stmt = gsi_stmt (gsi);
1784 gimple orig_stmt = stmt;
1785 gimple_stmt_iterator stmts_gsi;
1786 bool stmt_added = false;
1788 id->regimplify = false;
1789 stmts = remap_gimple_stmt (stmt, id);
1791 if (gimple_seq_empty_p (stmts))
1792 continue;
1794 seq_gsi = copy_gsi;
1796 for (stmts_gsi = gsi_start (stmts);
1797 !gsi_end_p (stmts_gsi); )
1799 stmt = gsi_stmt (stmts_gsi);
1801 /* Advance iterator now before stmt is moved to seq_gsi. */
1802 gsi_next (&stmts_gsi);
1804 if (gimple_nop_p (stmt))
1805 continue;
1807 gimple_duplicate_stmt_histograms (cfun, stmt, id->src_cfun,
1808 orig_stmt);
1810 /* With return slot optimization we can end up with
1811 non-gimple (foo *)&this->m, fix that here. */
1812 if (is_gimple_assign (stmt)
1813 && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt))
1814 && !is_gimple_val (gimple_assign_rhs1 (stmt)))
1816 tree new_rhs;
1817 new_rhs = force_gimple_operand_gsi (&seq_gsi,
1818 gimple_assign_rhs1 (stmt),
1819 true, NULL, false,
1820 GSI_CONTINUE_LINKING);
1821 gimple_assign_set_rhs1 (stmt, new_rhs);
1822 id->regimplify = false;
1825 gsi_insert_after (&seq_gsi, stmt, GSI_NEW_STMT);
1827 if (id->regimplify)
1828 gimple_regimplify_operands (stmt, &seq_gsi);
1830 stmt_added = true;
1833 if (!stmt_added)
1834 continue;
1836 /* If copy_basic_block was empty at the start of this iteration,
1837 call gsi_start_bb again to get at the newly added statements. */
1838 if (gsi_end_p (copy_gsi))
1839 copy_gsi = gsi_start_bb (copy_basic_block);
1840 else
1841 gsi_next (&copy_gsi);
1843 /* Process the new statement. The call to gimple_regimplify_operands
1844 possibly turned the statement into multiple statements; we
1845 need to process all of them. */
1848 tree fn;
1849 gcall *call_stmt;
1851 stmt = gsi_stmt (copy_gsi);
1852 call_stmt = dyn_cast <gcall *> (stmt);
1853 if (call_stmt
1854 && gimple_call_va_arg_pack_p (call_stmt)
1855 && id->call_stmt)
1857 /* __builtin_va_arg_pack () should be replaced by
1858 all arguments corresponding to ... in the caller. */
1859 tree p;
1860 gcall *new_call;
1861 vec<tree> argarray;
1862 size_t nargs = gimple_call_num_args (id->call_stmt);
1863 size_t n, i, nargs_to_copy;
1864 bool remove_bounds = false;
1866 for (p = DECL_ARGUMENTS (id->src_fn); p; p = DECL_CHAIN (p))
1867 nargs--;
1869 /* Bounds should be removed from the arg pack in case
1870 we are handling a non-instrumented call in an
1871 instrumented function. */
1872 nargs_to_copy = nargs;
1873 if (gimple_call_with_bounds_p (id->call_stmt)
1874 && !gimple_call_with_bounds_p (stmt))
1876 for (i = gimple_call_num_args (id->call_stmt) - nargs;
1877 i < gimple_call_num_args (id->call_stmt);
1878 i++)
1879 if (POINTER_BOUNDS_P (gimple_call_arg (id->call_stmt, i)))
1880 nargs_to_copy--;
1881 remove_bounds = true;
1884 /* Create the new array of arguments. */
1885 n = nargs_to_copy + gimple_call_num_args (call_stmt);
1886 argarray.create (n);
1887 argarray.safe_grow_cleared (n);
1889 /* Copy all the arguments before '...' */
1890 memcpy (argarray.address (),
1891 gimple_call_arg_ptr (call_stmt, 0),
1892 gimple_call_num_args (call_stmt) * sizeof (tree));
1894 if (remove_bounds)
1896 /* Append the rest of the arguments, removing bounds. */
1897 unsigned cur = gimple_call_num_args (call_stmt);
1898 i = gimple_call_num_args (id->call_stmt) - nargs;
1899 for (i = gimple_call_num_args (id->call_stmt) - nargs;
1900 i < gimple_call_num_args (id->call_stmt);
1901 i++)
1902 if (!POINTER_BOUNDS_P (gimple_call_arg (id->call_stmt, i)))
1903 argarray[cur++] = gimple_call_arg (id->call_stmt, i);
1904 gcc_assert (cur == n);
1906 else
1908 /* Append the arguments passed in '...' */
1909 memcpy (argarray.address () + gimple_call_num_args (call_stmt),
1910 gimple_call_arg_ptr (id->call_stmt, 0)
1911 + (gimple_call_num_args (id->call_stmt) - nargs),
1912 nargs * sizeof (tree));
1915 new_call = gimple_build_call_vec (gimple_call_fn (call_stmt),
1916 argarray);
1918 argarray.release ();
1920 /* Copy all GIMPLE_CALL flags, location and block, except
1921 GF_CALL_VA_ARG_PACK. */
1922 gimple_call_copy_flags (new_call, call_stmt);
1923 gimple_call_set_va_arg_pack (new_call, false);
1924 gimple_set_location (new_call, gimple_location (stmt));
1925 gimple_set_block (new_call, gimple_block (stmt));
1926 gimple_call_set_lhs (new_call, gimple_call_lhs (call_stmt));
1928 gsi_replace (&copy_gsi, new_call, false);
1929 stmt = new_call;
1931 else if (call_stmt
1932 && id->call_stmt
1933 && (decl = gimple_call_fndecl (stmt))
1934 && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
1935 && DECL_FUNCTION_CODE (decl) == BUILT_IN_VA_ARG_PACK_LEN)
1937 /* __builtin_va_arg_pack_len () should be replaced by
1938 the number of anonymous arguments. */
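/* In the myprintf example above that would simply be the constant 2
for the call myprintf ("%d %d", a, b). */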
1939 size_t nargs = gimple_call_num_args (id->call_stmt), i;
1940 tree count, p;
1941 gimple new_stmt;
1943 for (p = DECL_ARGUMENTS (id->src_fn); p; p = DECL_CHAIN (p))
1944 nargs--;
1946 /* For instrumented calls we should ignore bounds. */
1947 for (i = gimple_call_num_args (id->call_stmt) - nargs;
1948 i < gimple_call_num_args (id->call_stmt);
1949 i++)
1950 if (POINTER_BOUNDS_P (gimple_call_arg (id->call_stmt, i)))
1951 nargs--;
1953 count = build_int_cst (integer_type_node, nargs);
1954 new_stmt = gimple_build_assign (gimple_call_lhs (stmt), count);
1955 gsi_replace (&copy_gsi, new_stmt, false);
1956 stmt = new_stmt;
1958 else if (call_stmt
1959 && id->call_stmt
1960 && gimple_call_internal_p (stmt)
1961 && gimple_call_internal_fn (stmt) == IFN_TSAN_FUNC_EXIT)
1963 /* Drop TSAN_FUNC_EXIT () internal calls during inlining. */
1964 gsi_remove (&copy_gsi, false);
1965 continue;
1968 /* Statements produced by inlining can be unfolded, especially
1969 when we have constant propagated some operands. We can't fold
1970 them right now for two reasons:
1971 1) folding requires SSA_NAME_DEF_STMTs to be correct
1972 2) we can't change function calls to builtins.
1973 So we just mark the statement for later folding. We mark
1974 all new statements, instead of just the statements that changed
1975 by some nontrivial substitution, so even statements made
1976 foldable indirectly are updated. If this turns out to be
1977 expensive, copy_body can be told to watch for nontrivial
1978 changes. */
1979 if (id->statements_to_fold)
1980 id->statements_to_fold->add (stmt);
1982 /* We're duplicating a CALL_EXPR. Find any corresponding
1983 callgraph edges and update or duplicate them. */
1984 if (gcall *call_stmt = dyn_cast <gcall *> (stmt))
1986 struct cgraph_edge *edge;
1988 switch (id->transform_call_graph_edges)
1990 case CB_CGE_DUPLICATE:
1991 edge = id->src_node->get_edge (orig_stmt);
1992 if (edge)
1994 int edge_freq = edge->frequency;
1995 int new_freq;
1996 struct cgraph_edge *old_edge = edge;
1997 edge = edge->clone (id->dst_node, call_stmt,
1998 gimple_uid (stmt),
1999 REG_BR_PROB_BASE, CGRAPH_FREQ_BASE,
2000 true);
2001 /* We could also just rescale the frequency, but
2002 doing so would introduce roundoff errors and make
2003 the verifier unhappy. */
2004 new_freq = compute_call_stmt_bb_frequency (id->dst_node->decl,
2005 copy_basic_block);
2007 /* Speculative calls consist of two edges - direct and indirect.
2008 Duplicate the whole thing and distribute frequencies accordingly. */
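/* The rescaling below preserves the old direct/indirect split: e.g. if
the original direct edge had frequency 300 and the indirect one 100,
a new block frequency of 200 is distributed as 150 and 50. */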
2009 if (edge->speculative)
2011 struct cgraph_edge *direct, *indirect;
2012 struct ipa_ref *ref;
2014 gcc_assert (!edge->indirect_unknown_callee);
2015 old_edge->speculative_call_info (direct, indirect, ref);
2016 indirect = indirect->clone (id->dst_node, call_stmt,
2017 gimple_uid (stmt),
2018 REG_BR_PROB_BASE, CGRAPH_FREQ_BASE,
2019 true);
2020 if (old_edge->frequency + indirect->frequency)
2022 edge->frequency = MIN (RDIV ((gcov_type)new_freq * old_edge->frequency,
2023 (old_edge->frequency + indirect->frequency)),
2024 CGRAPH_FREQ_MAX);
2025 indirect->frequency = MIN (RDIV ((gcov_type)new_freq * indirect->frequency,
2026 (old_edge->frequency + indirect->frequency)),
2027 CGRAPH_FREQ_MAX);
2029 id->dst_node->clone_reference (ref, stmt);
2031 else
2033 edge->frequency = new_freq;
2034 if (dump_file
2035 && profile_status_for_fn (cfun) != PROFILE_ABSENT
2036 && (edge_freq > edge->frequency + 10
2037 || edge_freq < edge->frequency - 10))
2039 fprintf (dump_file, "Edge frequency estimated by "
2040 "cgraph %i diverge from inliner's estimate %i\n",
2041 edge_freq,
2042 edge->frequency);
2043 fprintf (dump_file,
2044 "Orig bb: %i, orig bb freq %i, new bb freq %i\n",
2045 bb->index,
2046 bb->frequency,
2047 copy_basic_block->frequency);
2051 break;
2053 case CB_CGE_MOVE_CLONES:
2054 id->dst_node->set_call_stmt_including_clones (orig_stmt,
2055 call_stmt);
2056 edge = id->dst_node->get_edge (stmt);
2057 break;
2059 case CB_CGE_MOVE:
2060 edge = id->dst_node->get_edge (orig_stmt);
2061 if (edge)
2062 edge->set_call_stmt (call_stmt);
2063 break;
2065 default:
2066 gcc_unreachable ();
2069 /* Constant propagation on arguments done during inlining
2070 may create a new direct call. Produce an edge for it. */
2071 if ((!edge
2072 || (edge->indirect_inlining_edge
2073 && id->transform_call_graph_edges == CB_CGE_MOVE_CLONES))
2074 && id->dst_node->definition
2075 && (fn = gimple_call_fndecl (stmt)) != NULL)
2077 struct cgraph_node *dest = cgraph_node::get (fn);
2079 /* We have a missing edge in the callgraph. This can happen
2080 when previous inlining turned an indirect call into a
2081 direct call by constant propagating arguments or when we are
2082 producing a dead clone (for further cloning). In all
2083 other cases we hit a bug (incorrect node sharing is the
2084 most common reason for missing edges). */
2085 gcc_assert (!dest->definition
2086 || dest->address_taken
2087 || !id->src_node->definition
2088 || !id->dst_node->definition);
2089 if (id->transform_call_graph_edges == CB_CGE_MOVE_CLONES)
2090 id->dst_node->create_edge_including_clones
2091 (dest, orig_stmt, call_stmt, bb->count,
2092 compute_call_stmt_bb_frequency (id->dst_node->decl,
2093 copy_basic_block),
2094 CIF_ORIGINALLY_INDIRECT_CALL);
2095 else
2096 id->dst_node->create_edge (dest, call_stmt,
2097 bb->count,
2098 compute_call_stmt_bb_frequency
2099 (id->dst_node->decl,
2100 copy_basic_block))->inline_failed
2101 = CIF_ORIGINALLY_INDIRECT_CALL;
2102 if (dump_file)
2104 fprintf (dump_file, "Created new direct edge to %s\n",
2105 dest->name ());
2109 notice_special_calls (as_a <gcall *> (stmt));
2112 maybe_duplicate_eh_stmt_fn (cfun, stmt, id->src_cfun, orig_stmt,
2113 id->eh_map, id->eh_lp_nr);
2115 if (gimple_in_ssa_p (cfun) && !is_gimple_debug (stmt))
2117 ssa_op_iter i;
2118 tree def;
2120 FOR_EACH_SSA_TREE_OPERAND (def, stmt, i, SSA_OP_DEF)
2121 if (TREE_CODE (def) == SSA_NAME)
2122 SSA_NAME_DEF_STMT (def) = stmt;
2125 gsi_next (&copy_gsi);
2127 while (!gsi_end_p (copy_gsi));
2129 copy_gsi = gsi_last_bb (copy_basic_block);
2132 return copy_basic_block;
2135 /* Inserting a Single Entry Multiple Exit region in SSA form into code in SSA
2136 form is quite easy, since the dominator relationship for the old basic
2137 blocks does not change.
2139 There is, however, an exception: inlining might change the dominator relation
2140 across EH edges from basic blocks within the inlined function
2141 to landing pads in the function we inline into.
2143 The function fills in the PHI_RESULTs of such PHI nodes if they refer
2144 to gimple regs. Otherwise, it marks the PHI_RESULT of such
2145 PHI nodes for renaming. For non-gimple regs, renaming is safe: the
2146 EH edges are abnormal and SSA_NAME_OCCURS_IN_ABNORMAL_PHI must be
2147 set, and this means that there will be no overlapping live ranges
2148 for the underlying symbol.
2150 This might change in the future if we allow redirecting of EH edges, and
2151 in that case we might want to change the way the CFG is built pre-inlining
2152 to include all the possible edges. */
2153 static void
2154 update_ssa_across_abnormal_edges (basic_block bb, basic_block ret_bb,
2155 bool can_throw, bool nonlocal_goto)
2157 edge e;
2158 edge_iterator ei;
2160 FOR_EACH_EDGE (e, ei, bb->succs)
2161 if (!e->dest->aux
2162 || ((basic_block)e->dest->aux)->index == ENTRY_BLOCK)
2164 gphi *phi;
2165 gphi_iterator si;
2167 if (!nonlocal_goto)
2168 gcc_assert (e->flags & EDGE_EH);
2170 if (!can_throw)
2171 gcc_assert (!(e->flags & EDGE_EH));
2173 for (si = gsi_start_phis (e->dest); !gsi_end_p (si); gsi_next (&si))
2175 edge re;
2177 phi = si.phi ();
2179 /* For abnormal goto/call edges the receiver can be the
2180 ENTRY_BLOCK. Do not assert this cannot happen. */
2182 gcc_assert ((e->flags & EDGE_EH)
2183 || SSA_NAME_OCCURS_IN_ABNORMAL_PHI (PHI_RESULT (phi)));
2185 re = find_edge (ret_bb, e->dest);
2186 gcc_checking_assert (re);
2187 gcc_assert ((re->flags & (EDGE_EH | EDGE_ABNORMAL))
2188 == (e->flags & (EDGE_EH | EDGE_ABNORMAL)));
2190 SET_USE (PHI_ARG_DEF_PTR_FROM_EDGE (phi, e),
2191 USE_FROM_PTR (PHI_ARG_DEF_PTR_FROM_EDGE (phi, re)));
2197 /* Copy edges from BB into its copy constructed earlier, scale profile
2198 accordingly. Edges will be taken care of later. Assume the aux
2199 pointers point to the copies of each BB. Return true if any
2200 debug stmts are left after a statement that must end the basic block. */
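/* Note that EH edges are intentionally not copied here; they are rebuilt
below by make_eh_edges / make_eh_dispatch_edges once we know which copied
statements can still throw. */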
2202 static bool
2203 copy_edges_for_bb (basic_block bb, gcov_type count_scale, basic_block ret_bb,
2204 basic_block abnormal_goto_dest)
2206 basic_block new_bb = (basic_block) bb->aux;
2207 edge_iterator ei;
2208 edge old_edge;
2209 gimple_stmt_iterator si;
2210 int flags;
2211 bool need_debug_cleanup = false;
2213 /* Use the indices from the original blocks to create edges for the
2214 new ones. */
2215 FOR_EACH_EDGE (old_edge, ei, bb->succs)
2216 if (!(old_edge->flags & EDGE_EH))
2218 edge new_edge;
2220 flags = old_edge->flags;
2222 /* Return edges do get a FALLTHRU flag when they get inlined. */
2223 if (old_edge->dest->index == EXIT_BLOCK
2224 && !(old_edge->flags & (EDGE_TRUE_VALUE|EDGE_FALSE_VALUE|EDGE_FAKE))
2225 && old_edge->dest->aux != EXIT_BLOCK_PTR_FOR_FN (cfun))
2226 flags |= EDGE_FALLTHRU;
2227 new_edge = make_edge (new_bb, (basic_block) old_edge->dest->aux, flags);
2228 new_edge->count = apply_scale (old_edge->count, count_scale);
2229 new_edge->probability = old_edge->probability;
2232 if (bb->index == ENTRY_BLOCK || bb->index == EXIT_BLOCK)
2233 return false;
2235 for (si = gsi_start_bb (new_bb); !gsi_end_p (si);)
2237 gimple copy_stmt;
2238 bool can_throw, nonlocal_goto;
2240 copy_stmt = gsi_stmt (si);
2241 if (!is_gimple_debug (copy_stmt))
2242 update_stmt (copy_stmt);
2244 /* Do this before the possible split_block. */
2245 gsi_next (&si);
2247 /* If this tree could throw an exception, there are two
2248 cases where we need to add abnormal edge(s): the
2249 tree wasn't in a region and there is a "current
2250 region" in the caller; or the original tree had
2251 EH edges. In both cases split the block after the tree,
2252 and add abnormal edge(s) as needed; we need both
2253 those from the callee and the caller.
2254 We check whether the copy can throw, because the const
2255 propagation can change an INDIRECT_REF which throws
2256 into a COMPONENT_REF which doesn't. If the copy
2257 can throw, the original could also throw. */
2258 can_throw = stmt_can_throw_internal (copy_stmt);
2259 nonlocal_goto
2260 = (stmt_can_make_abnormal_goto (copy_stmt)
2261 && !computed_goto_p (copy_stmt));
2263 if (can_throw || nonlocal_goto)
2265 if (!gsi_end_p (si))
2267 while (!gsi_end_p (si) && is_gimple_debug (gsi_stmt (si)))
2268 gsi_next (&si);
2269 if (gsi_end_p (si))
2270 need_debug_cleanup = true;
2272 if (!gsi_end_p (si))
2273 /* Note that bb's predecessor edges aren't necessarily
2274 right at this point; split_block doesn't care. */
2276 edge e = split_block (new_bb, copy_stmt);
2278 new_bb = e->dest;
2279 new_bb->aux = e->src->aux;
2280 si = gsi_start_bb (new_bb);
2284 if (gimple_code (copy_stmt) == GIMPLE_EH_DISPATCH)
2285 make_eh_dispatch_edges (as_a <geh_dispatch *> (copy_stmt));
2286 else if (can_throw)
2287 make_eh_edges (copy_stmt);
2289 /* If the call we inline cannot make an abnormal goto, do not add
2290 additional abnormal edges but only retain those already present
2291 in the original function body. */
2292 if (abnormal_goto_dest == NULL)
2293 nonlocal_goto = false;
2294 if (nonlocal_goto)
2296 basic_block copy_stmt_bb = gimple_bb (copy_stmt);
2298 if (get_abnormal_succ_dispatcher (copy_stmt_bb))
2299 nonlocal_goto = false;
2300 /* ABNORMAL_DISPATCHER (1) is for longjmp/setjmp or nonlocal gotos
2301 in OpenMP regions which aren't allowed to be left abnormally.
2302 So, no need to add abnormal edge in that case. */
2303 else if (is_gimple_call (copy_stmt)
2304 && gimple_call_internal_p (copy_stmt)
2305 && (gimple_call_internal_fn (copy_stmt)
2306 == IFN_ABNORMAL_DISPATCHER)
2307 && gimple_call_arg (copy_stmt, 0) == boolean_true_node)
2308 nonlocal_goto = false;
2309 else
2310 make_edge (copy_stmt_bb, abnormal_goto_dest, EDGE_ABNORMAL);
2313 if ((can_throw || nonlocal_goto)
2314 && gimple_in_ssa_p (cfun))
2315 update_ssa_across_abnormal_edges (gimple_bb (copy_stmt), ret_bb,
2316 can_throw, nonlocal_goto);
2318 return need_debug_cleanup;
2321 /* Copy the PHIs. All blocks and edges are copied, some blocks
2322 were possibly split and new outgoing EH edges inserted.
2323 BB points to the block of the original function and AUX pointers link
2324 the original and newly copied blocks. */
2326 static void
2327 copy_phis_for_bb (basic_block bb, copy_body_data *id)
2329 basic_block const new_bb = (basic_block) bb->aux;
2330 edge_iterator ei;
2331 gphi *phi;
2332 gphi_iterator si;
2333 edge new_edge;
2334 bool inserted = false;
2336 for (si = gsi_start_phis (bb); !gsi_end_p (si); gsi_next (&si))
2338 tree res, new_res;
2339 gphi *new_phi;
2341 phi = si.phi ();
2342 res = PHI_RESULT (phi);
2343 new_res = res;
2344 if (!virtual_operand_p (res))
2346 walk_tree (&new_res, copy_tree_body_r, id, NULL);
2347 new_phi = create_phi_node (new_res, new_bb);
2348 FOR_EACH_EDGE (new_edge, ei, new_bb->preds)
2350 edge old_edge = find_edge ((basic_block) new_edge->src->aux, bb);
2351 tree arg;
2352 tree new_arg;
2353 edge_iterator ei2;
2354 location_t locus;
2356 /* When doing partial cloning, we allow PHIs on the entry block
2357 as long as all the arguments are the same. Find any input
2358 edge to see the argument to copy. */
2359 if (!old_edge)
2360 FOR_EACH_EDGE (old_edge, ei2, bb->preds)
2361 if (!old_edge->src->aux)
2362 break;
2364 arg = PHI_ARG_DEF_FROM_EDGE (phi, old_edge);
2365 new_arg = arg;
2366 walk_tree (&new_arg, copy_tree_body_r, id, NULL);
2367 gcc_assert (new_arg);
2368 /* With return slot optimization we can end up with
2369 non-gimple (foo *)&this->m, fix that here. */
2370 if (TREE_CODE (new_arg) != SSA_NAME
2371 && TREE_CODE (new_arg) != FUNCTION_DECL
2372 && !is_gimple_val (new_arg))
2374 gimple_seq stmts = NULL;
2375 new_arg = force_gimple_operand (new_arg, &stmts, true, NULL);
2376 gsi_insert_seq_on_edge (new_edge, stmts);
2377 inserted = true;
2379 locus = gimple_phi_arg_location_from_edge (phi, old_edge);
2380 if (LOCATION_BLOCK (locus))
2382 tree *n;
2383 n = id->decl_map->get (LOCATION_BLOCK (locus));
2384 gcc_assert (n);
2385 if (*n)
2386 locus = COMBINE_LOCATION_DATA (line_table, locus, *n);
2387 else
2388 locus = LOCATION_LOCUS (locus);
2390 else
2391 locus = LOCATION_LOCUS (locus);
2393 add_phi_arg (new_phi, new_arg, new_edge, locus);
2398 /* Commit the delayed edge insertions. */
2399 if (inserted)
2400 FOR_EACH_EDGE (new_edge, ei, new_bb->preds)
2401 gsi_commit_one_edge_insert (new_edge, NULL);
2405 /* Wrapper for remap_decl so it can be used as a callback. */
2407 static tree
2408 remap_decl_1 (tree decl, void *data)
2410 return remap_decl (decl, (copy_body_data *) data);
2413 /* Build the struct function and associated datastructures for the new clone
2414 NEW_FNDECL to be built. CALLEE_FNDECL is the original. The function changes
2415 cfun to the function of new_fndecl (and current_function_decl too). */
2417 static void
2418 initialize_cfun (tree new_fndecl, tree callee_fndecl, gcov_type count)
2420 struct function *src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
2421 gcov_type count_scale;
2423 if (!DECL_ARGUMENTS (new_fndecl))
2424 DECL_ARGUMENTS (new_fndecl) = DECL_ARGUMENTS (callee_fndecl);
2425 if (!DECL_RESULT (new_fndecl))
2426 DECL_RESULT (new_fndecl) = DECL_RESULT (callee_fndecl);
2428 if (ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count)
2429 count_scale
2430 = GCOV_COMPUTE_SCALE (count,
2431 ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count);
2432 else
2433 count_scale = REG_BR_PROB_BASE;
2435 /* Register specific tree functions. */
2436 gimple_register_cfg_hooks ();
2438 /* Get clean struct function. */
2439 push_struct_function (new_fndecl);
2441 /* We will rebuild these, so just sanity check that they are empty. */
2442 gcc_assert (VALUE_HISTOGRAMS (cfun) == NULL);
2443 gcc_assert (cfun->local_decls == NULL);
2444 gcc_assert (cfun->cfg == NULL);
2445 gcc_assert (cfun->decl == new_fndecl);
2447 /* Copy items we preserve during cloning. */
2448 cfun->static_chain_decl = src_cfun->static_chain_decl;
2449 cfun->nonlocal_goto_save_area = src_cfun->nonlocal_goto_save_area;
2450 cfun->function_end_locus = src_cfun->function_end_locus;
2451 cfun->curr_properties = src_cfun->curr_properties;
2452 cfun->last_verified = src_cfun->last_verified;
2453 cfun->va_list_gpr_size = src_cfun->va_list_gpr_size;
2454 cfun->va_list_fpr_size = src_cfun->va_list_fpr_size;
2455 cfun->has_nonlocal_label = src_cfun->has_nonlocal_label;
2456 cfun->stdarg = src_cfun->stdarg;
2457 cfun->after_inlining = src_cfun->after_inlining;
2458 cfun->can_throw_non_call_exceptions
2459 = src_cfun->can_throw_non_call_exceptions;
2460 cfun->can_delete_dead_exceptions = src_cfun->can_delete_dead_exceptions;
2461 cfun->returns_struct = src_cfun->returns_struct;
2462 cfun->returns_pcc_struct = src_cfun->returns_pcc_struct;
2464 init_empty_tree_cfg ();
2466 profile_status_for_fn (cfun) = profile_status_for_fn (src_cfun);
2467 ENTRY_BLOCK_PTR_FOR_FN (cfun)->count =
2468 (ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count * count_scale /
2469 REG_BR_PROB_BASE);
2470 ENTRY_BLOCK_PTR_FOR_FN (cfun)->frequency
2471 = ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->frequency;
2472 EXIT_BLOCK_PTR_FOR_FN (cfun)->count =
2473 (EXIT_BLOCK_PTR_FOR_FN (src_cfun)->count * count_scale /
2474 REG_BR_PROB_BASE);
2475 EXIT_BLOCK_PTR_FOR_FN (cfun)->frequency =
2476 EXIT_BLOCK_PTR_FOR_FN (src_cfun)->frequency;
2477 if (src_cfun->eh)
2478 init_eh_for_function ();
2480 if (src_cfun->gimple_df)
2482 init_tree_ssa (cfun);
2483 cfun->gimple_df->in_ssa_p = true;
2484 init_ssa_operands (cfun);
2488 /* Helper function for copy_cfg_body. Move debug stmts from the end
2489 of NEW_BB to the beginning of successor basic blocks when needed. If the
2490 successor has multiple predecessors, reset them, otherwise keep
2491 their value. */
2493 static void
2494 maybe_move_debug_stmts_to_successors (copy_body_data *id, basic_block new_bb)
2496 edge e;
2497 edge_iterator ei;
2498 gimple_stmt_iterator si = gsi_last_nondebug_bb (new_bb);
2500 if (gsi_end_p (si)
2501 || gsi_one_before_end_p (si)
2502 || !(stmt_can_throw_internal (gsi_stmt (si))
2503 || stmt_can_make_abnormal_goto (gsi_stmt (si))))
2504 return;
2506 FOR_EACH_EDGE (e, ei, new_bb->succs)
2508 gimple_stmt_iterator ssi = gsi_last_bb (new_bb);
2509 gimple_stmt_iterator dsi = gsi_after_labels (e->dest);
2510 while (is_gimple_debug (gsi_stmt (ssi)))
2512 gimple stmt = gsi_stmt (ssi);
2513 gdebug *new_stmt;
2514 tree var;
2515 tree value;
2517 /* For the last edge move the debug stmts instead of copying
2518 them. */
2519 if (ei_one_before_end_p (ei))
2521 si = ssi;
2522 gsi_prev (&ssi);
2523 if (!single_pred_p (e->dest) && gimple_debug_bind_p (stmt))
2524 gimple_debug_bind_reset_value (stmt);
2525 gsi_remove (&si, false);
2526 gsi_insert_before (&dsi, stmt, GSI_SAME_STMT);
2527 continue;
2530 if (gimple_debug_bind_p (stmt))
2532 var = gimple_debug_bind_get_var (stmt);
2533 if (single_pred_p (e->dest))
2535 value = gimple_debug_bind_get_value (stmt);
2536 value = unshare_expr (value);
2538 else
2539 value = NULL_TREE;
2540 new_stmt = gimple_build_debug_bind (var, value, stmt);
2542 else if (gimple_debug_source_bind_p (stmt))
2544 var = gimple_debug_source_bind_get_var (stmt);
2545 value = gimple_debug_source_bind_get_value (stmt);
2546 new_stmt = gimple_build_debug_source_bind (var, value, stmt);
2548 else
2549 gcc_unreachable ();
2550 gsi_insert_before (&dsi, new_stmt, GSI_SAME_STMT);
2551 id->debug_stmts.safe_push (new_stmt);
2552 gsi_prev (&ssi);
2557 /* Make a copy of the sub-loops of SRC_PARENT and place them
2558 as sub-loops of DEST_PARENT. */
2560 static void
2561 copy_loops (copy_body_data *id,
2562 struct loop *dest_parent, struct loop *src_parent)
2564 struct loop *src_loop = src_parent->inner;
2565 while (src_loop)
2567 if (!id->blocks_to_copy
2568 || bitmap_bit_p (id->blocks_to_copy, src_loop->header->index))
2570 struct loop *dest_loop = alloc_loop ();
2572 /* Assign the new loop its header and latch and associate
2573 those with the new loop. */
2574 dest_loop->header = (basic_block)src_loop->header->aux;
2575 dest_loop->header->loop_father = dest_loop;
2576 if (src_loop->latch != NULL)
2578 dest_loop->latch = (basic_block)src_loop->latch->aux;
2579 dest_loop->latch->loop_father = dest_loop;
2582 /* Copy loop meta-data. */
2583 copy_loop_info (src_loop, dest_loop);
2585 /* Finally place it into the loop array and the loop tree. */
2586 place_new_loop (cfun, dest_loop);
2587 flow_loop_tree_node_add (dest_parent, dest_loop);
2589 dest_loop->safelen = src_loop->safelen;
2590 dest_loop->dont_vectorize = src_loop->dont_vectorize;
2591 if (src_loop->force_vectorize)
2593 dest_loop->force_vectorize = true;
2594 cfun->has_force_vectorize_loops = true;
2596 if (src_loop->simduid)
2598 dest_loop->simduid = remap_decl (src_loop->simduid, id);
2599 cfun->has_simduid_loops = true;
2602 /* Recurse. */
2603 copy_loops (id, dest_loop, src_loop);
2605 src_loop = src_loop->next;
2609 /* Call cgraph_redirect_edge_call_stmt_to_callee on all calls in BB */
2611 void
2612 redirect_all_calls (copy_body_data * id, basic_block bb)
2614 gimple_stmt_iterator si;
2615 gimple last = last_stmt (bb);
2616 for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
2618 gimple stmt = gsi_stmt (si);
2619 if (is_gimple_call (stmt))
2621 struct cgraph_edge *edge = id->dst_node->get_edge (stmt);
2622 if (edge)
2624 edge->redirect_call_stmt_to_callee ();
2625 if (stmt == last && id->call_stmt && maybe_clean_eh_stmt (stmt))
2626 gimple_purge_dead_eh_edges (bb);
2632 /* Convert estimated frequencies into counts for NODE, scaling COUNT
2633 with each bb's frequency. Used when NODE has a 0-weight entry
2634 but we are about to inline it into a non-zero count call bb.
2635 See the comments for handle_missing_profiles() in predict.c for
2636 when this can happen for COMDATs. */
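/* Roughly, each basic block ends up with count * bb->frequency / BB_FREQ_MAX,
and each outgoing edge then gets its source block's count scaled by the
edge probability. */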
2638 void
2639 freqs_to_counts (struct cgraph_node *node, gcov_type count)
2641 basic_block bb;
2642 edge_iterator ei;
2643 edge e;
2644 struct function *fn = DECL_STRUCT_FUNCTION (node->decl);
2646 FOR_ALL_BB_FN(bb, fn)
2648 bb->count = apply_scale (count,
2649 GCOV_COMPUTE_SCALE (bb->frequency, BB_FREQ_MAX));
2650 FOR_EACH_EDGE (e, ei, bb->succs)
2651 e->count = apply_probability (e->src->count, e->probability);
2655 /* Make a copy of the body of FN so that it can be inserted inline in
2656 another function. Walks FN via CFG, returns new fndecl. */
2658 static tree
2659 copy_cfg_body (copy_body_data * id, gcov_type count, int frequency_scale,
2660 basic_block entry_block_map, basic_block exit_block_map,
2661 basic_block new_entry)
2663 tree callee_fndecl = id->src_fn;
2664 /* Original cfun for the callee, doesn't change. */
2665 struct function *src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
2666 struct function *cfun_to_copy;
2667 basic_block bb;
2668 tree new_fndecl = NULL;
2669 bool need_debug_cleanup = false;
2670 gcov_type count_scale;
2671 int last;
2672 int incoming_frequency = 0;
2673 gcov_type incoming_count = 0;
2675 /* This can happen for COMDAT routines that end up with 0 counts
2676 despite being called (see the comments for handle_missing_profiles()
2677 in predict.c as to why). Apply counts to the blocks in the callee
2678 before inlining, using the guessed edge frequencies, so that we don't
2679 end up with a 0-count inline body which can confuse downstream
2680 optimizations such as function splitting. */
2681 if (!ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count && count)
2683 /* Apply the larger of the call bb count and the total incoming
2684 call edge count to the callee. */
2685 gcov_type in_count = 0;
2686 struct cgraph_edge *in_edge;
2687 for (in_edge = id->src_node->callers; in_edge;
2688 in_edge = in_edge->next_caller)
2689 in_count += in_edge->count;
2690 freqs_to_counts (id->src_node, count > in_count ? count : in_count);
2693 if (ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count)
2694 count_scale
2695 = GCOV_COMPUTE_SCALE (count,
2696 ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count);
2697 else
2698 count_scale = REG_BR_PROB_BASE;
2700 /* Register specific tree functions. */
2701 gimple_register_cfg_hooks ();
2703 /* If we are inlining just a region of the function, make sure to connect
2704 the new entry to ENTRY_BLOCK_PTR_FOR_FN (cfun). Since the new entry can be
2705 part of a loop, we must compute the frequency and probability of
2706 ENTRY_BLOCK_PTR_FOR_FN (cfun) based on the frequencies and
2707 probabilities of edges incoming from the nonduplicated region. */
2708 if (new_entry)
2710 edge e;
2711 edge_iterator ei;
2713 FOR_EACH_EDGE (e, ei, new_entry->preds)
2714 if (!e->src->aux)
2716 incoming_frequency += EDGE_FREQUENCY (e);
2717 incoming_count += e->count;
2719 incoming_count = apply_scale (incoming_count, count_scale);
2720 incoming_frequency
2721 = apply_scale ((gcov_type)incoming_frequency, frequency_scale);
2722 ENTRY_BLOCK_PTR_FOR_FN (cfun)->count = incoming_count;
2723 ENTRY_BLOCK_PTR_FOR_FN (cfun)->frequency = incoming_frequency;
2726 /* Must have a CFG here at this point. */
2727 gcc_assert (ENTRY_BLOCK_PTR_FOR_FN
2728 (DECL_STRUCT_FUNCTION (callee_fndecl)));
2730 cfun_to_copy = id->src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
2732 ENTRY_BLOCK_PTR_FOR_FN (cfun_to_copy)->aux = entry_block_map;
2733 EXIT_BLOCK_PTR_FOR_FN (cfun_to_copy)->aux = exit_block_map;
2734 entry_block_map->aux = ENTRY_BLOCK_PTR_FOR_FN (cfun_to_copy);
2735 exit_block_map->aux = EXIT_BLOCK_PTR_FOR_FN (cfun_to_copy);
2737 /* Duplicate any exception-handling regions. */
2738 if (cfun->eh)
2739 id->eh_map = duplicate_eh_regions (cfun_to_copy, NULL, id->eh_lp_nr,
2740 remap_decl_1, id);
2742 /* Use aux pointers to map the original blocks to their copies. */
2743 FOR_EACH_BB_FN (bb, cfun_to_copy)
2744 if (!id->blocks_to_copy || bitmap_bit_p (id->blocks_to_copy, bb->index))
2746 basic_block new_bb = copy_bb (id, bb, frequency_scale, count_scale);
2747 bb->aux = new_bb;
2748 new_bb->aux = bb;
2749 new_bb->loop_father = entry_block_map->loop_father;
2752 last = last_basic_block_for_fn (cfun);
2754 /* Now that we've duplicated the blocks, duplicate their edges. */
2755 basic_block abnormal_goto_dest = NULL;
2756 if (id->call_stmt
2757 && stmt_can_make_abnormal_goto (id->call_stmt))
2759 gimple_stmt_iterator gsi = gsi_for_stmt (id->call_stmt);
2761 bb = gimple_bb (id->call_stmt);
2762 gsi_next (&gsi);
2763 if (gsi_end_p (gsi))
2764 abnormal_goto_dest = get_abnormal_succ_dispatcher (bb);
2766 FOR_ALL_BB_FN (bb, cfun_to_copy)
2767 if (!id->blocks_to_copy
2768 || (bb->index > 0 && bitmap_bit_p (id->blocks_to_copy, bb->index)))
2769 need_debug_cleanup |= copy_edges_for_bb (bb, count_scale, exit_block_map,
2770 abnormal_goto_dest);
2772 if (new_entry)
2774 edge e = make_edge (entry_block_map, (basic_block)new_entry->aux, EDGE_FALLTHRU);
2775 e->probability = REG_BR_PROB_BASE;
2776 e->count = incoming_count;
2779 /* Duplicate the loop tree, if available and wanted. */
2780 if (loops_for_fn (src_cfun) != NULL
2781 && current_loops != NULL)
2783 copy_loops (id, entry_block_map->loop_father,
2784 get_loop (src_cfun, 0));
2785 /* Defer to cfgcleanup to update loop-father fields of basic-blocks. */
2786 loops_state_set (LOOPS_NEED_FIXUP);
2789 /* If the loop tree in the source function needed fixup, mark the
2790 destination loop tree for fixup, too. */
2791 if (loops_for_fn (src_cfun)->state & LOOPS_NEED_FIXUP)
2792 loops_state_set (LOOPS_NEED_FIXUP);
2794 if (gimple_in_ssa_p (cfun))
2795 FOR_ALL_BB_FN (bb, cfun_to_copy)
2796 if (!id->blocks_to_copy
2797 || (bb->index > 0 && bitmap_bit_p (id->blocks_to_copy, bb->index)))
2798 copy_phis_for_bb (bb, id);
2800 FOR_ALL_BB_FN (bb, cfun_to_copy)
2801 if (bb->aux)
2803 if (need_debug_cleanup
2804 && bb->index != ENTRY_BLOCK
2805 && bb->index != EXIT_BLOCK)
2806 maybe_move_debug_stmts_to_successors (id, (basic_block) bb->aux);
2807 /* Update call edge destinations. This cannot be done before loop
2808 info is updated, because we may split basic blocks. */
2809 if (id->transform_call_graph_edges == CB_CGE_DUPLICATE
2810 && bb->index != ENTRY_BLOCK
2811 && bb->index != EXIT_BLOCK)
2812 redirect_all_calls (id, (basic_block)bb->aux);
2813 ((basic_block)bb->aux)->aux = NULL;
2814 bb->aux = NULL;
2817 /* Zero out AUX fields of blocks newly created during EH edge
2818 insertion. */
2819 for (; last < last_basic_block_for_fn (cfun); last++)
2821 if (need_debug_cleanup)
2822 maybe_move_debug_stmts_to_successors (id,
2823 BASIC_BLOCK_FOR_FN (cfun, last));
2824 BASIC_BLOCK_FOR_FN (cfun, last)->aux = NULL;
2825 /* Update call edge destinations. This cannot be done before loop
2826 info is updated, because we may split basic blocks. */
2827 if (id->transform_call_graph_edges == CB_CGE_DUPLICATE)
2828 redirect_all_calls (id, BASIC_BLOCK_FOR_FN (cfun, last));
2830 entry_block_map->aux = NULL;
2831 exit_block_map->aux = NULL;
2833 if (id->eh_map)
2835 delete id->eh_map;
2836 id->eh_map = NULL;
2838 if (id->dependence_map)
2840 delete id->dependence_map;
2841 id->dependence_map = NULL;
2844 return new_fndecl;
2847 /* Copy the debug STMT using ID. We deal with these statements in a
2848 special way: if any variable in their VALUE expression wasn't
2849 remapped yet, we won't remap it, because that would get decl uids
2850 out of sync, causing codegen differences between -g and -g0. If
2851 this arises, we drop the VALUE expression altogether. */
2853 static void
2854 copy_debug_stmt (gdebug *stmt, copy_body_data *id)
2856 tree t, *n;
2857 struct walk_stmt_info wi;
2859 if (gimple_block (stmt))
2861 n = id->decl_map->get (gimple_block (stmt));
2862 gimple_set_block (stmt, n ? *n : id->block);
2865 /* Remap all the operands in COPY. */
2866 memset (&wi, 0, sizeof (wi));
2867 wi.info = id;
2869 processing_debug_stmt = 1;
2871 if (gimple_debug_source_bind_p (stmt))
2872 t = gimple_debug_source_bind_get_var (stmt);
2873 else
2874 t = gimple_debug_bind_get_var (stmt);
2876 if (TREE_CODE (t) == PARM_DECL && id->debug_map
2877 && (n = id->debug_map->get (t)))
2879 gcc_assert (TREE_CODE (*n) == VAR_DECL);
2880 t = *n;
2882 else if (TREE_CODE (t) == VAR_DECL
2883 && !is_global_var (t)
2884 && !id->decl_map->get (t))
2885 /* T is a non-localized variable. */;
2886 else
2887 walk_tree (&t, remap_gimple_op_r, &wi, NULL);
2889 if (gimple_debug_bind_p (stmt))
2891 gimple_debug_bind_set_var (stmt, t);
2893 if (gimple_debug_bind_has_value_p (stmt))
2894 walk_tree (gimple_debug_bind_get_value_ptr (stmt),
2895 remap_gimple_op_r, &wi, NULL);
2897 /* Punt if any decl couldn't be remapped. */
2898 if (processing_debug_stmt < 0)
2899 gimple_debug_bind_reset_value (stmt);
2901 else if (gimple_debug_source_bind_p (stmt))
2903 gimple_debug_source_bind_set_var (stmt, t);
2904 /* When inlining and the source bind refers to one of the optimized
2905 away parameters, change the source bind into a normal debug bind
2906 referring to the corresponding DEBUG_EXPR_DECL that should have
2907 been bound before the call stmt. */
2908 t = gimple_debug_source_bind_get_value (stmt);
2909 if (t != NULL_TREE
2910 && TREE_CODE (t) == PARM_DECL
2911 && id->call_stmt)
2913 vec<tree, va_gc> **debug_args = decl_debug_args_lookup (id->src_fn);
2914 unsigned int i;
2915 if (debug_args != NULL)
2917 for (i = 0; i < vec_safe_length (*debug_args); i += 2)
2918 if ((**debug_args)[i] == DECL_ORIGIN (t)
2919 && TREE_CODE ((**debug_args)[i + 1]) == DEBUG_EXPR_DECL)
2921 t = (**debug_args)[i + 1];
2922 stmt->subcode = GIMPLE_DEBUG_BIND;
2923 gimple_debug_bind_set_value (stmt, t);
2924 break;
2928 if (gimple_debug_source_bind_p (stmt))
2929 walk_tree (gimple_debug_source_bind_get_value_ptr (stmt),
2930 remap_gimple_op_r, &wi, NULL);
2933 processing_debug_stmt = 0;
2935 update_stmt (stmt);
2938 /* Process deferred debug stmts. In order to give values better odds
2939 of being successfully remapped, we delay the processing of debug
2940 stmts until all other stmts that might require remapping are
2941 processed. */
2943 static void
2944 copy_debug_stmts (copy_body_data *id)
2946 size_t i;
2947 gdebug *stmt;
2949 if (!id->debug_stmts.exists ())
2950 return;
2952 FOR_EACH_VEC_ELT (id->debug_stmts, i, stmt)
2953 copy_debug_stmt (stmt, id);
2955 id->debug_stmts.release ();
2958 /* Make a copy of the body of SRC_FN so that it can be inserted inline in
2959 another function. */
2961 static tree
2962 copy_tree_body (copy_body_data *id)
2964 tree fndecl = id->src_fn;
2965 tree body = DECL_SAVED_TREE (fndecl);
2967 walk_tree (&body, copy_tree_body_r, id, NULL);
2969 return body;
2972 /* Make a copy of the body of FN so that it can be inserted inline in
2973 another function. */
2975 static tree
2976 copy_body (copy_body_data *id, gcov_type count, int frequency_scale,
2977 basic_block entry_block_map, basic_block exit_block_map,
2978 basic_block new_entry)
2980 tree fndecl = id->src_fn;
2981 tree body;
2983 /* If this body has a CFG, walk CFG and copy. */
2984 gcc_assert (ENTRY_BLOCK_PTR_FOR_FN (DECL_STRUCT_FUNCTION (fndecl)));
2985 body = copy_cfg_body (id, count, frequency_scale, entry_block_map, exit_block_map,
2986 new_entry);
2987 copy_debug_stmts (id);
2989 return body;
2992 /* Return true if VALUE is an ADDR_EXPR of an automatic variable
2993 defined in function FN, or of a data member thereof. */
2995 static bool
2996 self_inlining_addr_expr (tree value, tree fn)
2998 tree var;
3000 if (TREE_CODE (value) != ADDR_EXPR)
3001 return false;
3003 var = get_base_address (TREE_OPERAND (value, 0));
3005 return var && auto_var_in_fn_p (var, fn);
3008 /* Append to BB a debug annotation that binds VAR to VALUE, inheriting
3009 lexical block and line number information from base_stmt, if given,
3010 or from the last stmt of the block otherwise. */
3012 static gimple
3013 insert_init_debug_bind (copy_body_data *id,
3014 basic_block bb, tree var, tree value,
3015 gimple base_stmt)
3017 gimple note;
3018 gimple_stmt_iterator gsi;
3019 tree tracked_var;
3021 if (!gimple_in_ssa_p (id->src_cfun))
3022 return NULL;
3024 if (!opt_for_fn (id->dst_fn, flag_var_tracking_assignments))
3025 return NULL;
3027 tracked_var = target_for_debug_bind (var);
3028 if (!tracked_var)
3029 return NULL;
3031 if (bb)
3033 gsi = gsi_last_bb (bb);
3034 if (!base_stmt && !gsi_end_p (gsi))
3035 base_stmt = gsi_stmt (gsi);
3038 note = gimple_build_debug_bind (tracked_var, unshare_expr (value), base_stmt);
3040 if (bb)
3042 if (!gsi_end_p (gsi))
3043 gsi_insert_after (&gsi, note, GSI_SAME_STMT);
3044 else
3045 gsi_insert_before (&gsi, note, GSI_SAME_STMT);
3048 return note;
3051 static void
3052 insert_init_stmt (copy_body_data *id, basic_block bb, gimple init_stmt)
3054 /* If VAR represents a zero-sized variable, it's possible that the
3055 assignment statement may result in no gimple statements. */
3056 if (init_stmt)
3058 gimple_stmt_iterator si = gsi_last_bb (bb);
3060 /* We can end up with init statements that store to a non-register
3061 from a rhs with a conversion. Handle that here by forcing the
3062 rhs into a temporary. gimple_regimplify_operands is not
3063 prepared to do this for us. */
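/* E.g. an init statement of the form mem = (type) reg_1 is rewritten below
as tmp_2 = (type) reg_1; mem = tmp_2, so the store itself stays a simple
copy. */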
3064 if (!is_gimple_debug (init_stmt)
3065 && !is_gimple_reg (gimple_assign_lhs (init_stmt))
3066 && is_gimple_reg_type (TREE_TYPE (gimple_assign_lhs (init_stmt)))
3067 && gimple_assign_rhs_class (init_stmt) == GIMPLE_UNARY_RHS)
3069 tree rhs = build1 (gimple_assign_rhs_code (init_stmt),
3070 gimple_expr_type (init_stmt),
3071 gimple_assign_rhs1 (init_stmt));
3072 rhs = force_gimple_operand_gsi (&si, rhs, true, NULL_TREE, false,
3073 GSI_NEW_STMT);
3074 gimple_assign_set_rhs_code (init_stmt, TREE_CODE (rhs));
3075 gimple_assign_set_rhs1 (init_stmt, rhs);
3077 gsi_insert_after (&si, init_stmt, GSI_NEW_STMT);
3078 gimple_regimplify_operands (init_stmt, &si);
3080 if (!is_gimple_debug (init_stmt))
3082 tree def = gimple_assign_lhs (init_stmt);
3083 insert_init_debug_bind (id, bb, def, def, init_stmt);
3088 /* Initialize parameter P with VALUE. If needed, produce init statement
3089 at the end of BB. When BB is NULL, we return the init statement to be
3090 output later. */
3091 static gimple
3092 setup_one_parameter (copy_body_data *id, tree p, tree value, tree fn,
3093 basic_block bb, tree *vars)
3095 gimple init_stmt = NULL;
3096 tree var;
3097 tree rhs = value;
3098 tree def = (gimple_in_ssa_p (cfun)
3099 ? ssa_default_def (id->src_cfun, p) : NULL);
3101 if (value
3102 && value != error_mark_node
3103 && !useless_type_conversion_p (TREE_TYPE (p), TREE_TYPE (value)))
3105 /* If we can match up types by promotion/demotion do so. */
3106 if (fold_convertible_p (TREE_TYPE (p), value))
3107 rhs = fold_convert (TREE_TYPE (p), value);
3108 else
3110 /* ??? For valid programs we should not end up here.
3111 Still if we end up with truly mismatched types here, fall back
3112 to using a VIEW_CONVERT_EXPR or a literal zero to not leak invalid
3113 GIMPLE to the following passes. */
3114 if (!is_gimple_reg_type (TREE_TYPE (value))
3115 || TYPE_SIZE (TREE_TYPE (p)) == TYPE_SIZE (TREE_TYPE (value)))
3116 rhs = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (p), value);
3117 else
3118 rhs = build_zero_cst (TREE_TYPE (p));
3122 /* Make an equivalent VAR_DECL. Note that we must NOT remap the type
3123 here since the type of this decl must be visible to the calling
3124 function. */
3125 var = copy_decl_to_var (p, id);
3127 /* Declare this new variable. */
3128 DECL_CHAIN (var) = *vars;
3129 *vars = var;
3131 /* Make gimplifier happy about this variable. */
3132 DECL_SEEN_IN_BIND_EXPR_P (var) = 1;
3134 /* If the parameter is never assigned to and has no SSA_NAMEs created,
3135 we would not need to create a new variable here at all, if it
3136 weren't for debug info. Still, we can just use the argument
3137 value. */
3138 if (TREE_READONLY (p)
3139 && !TREE_ADDRESSABLE (p)
3140 && value && !TREE_SIDE_EFFECTS (value)
3141 && !def)
3143 /* We may produce non-gimple trees by adding NOPs or introduce
3144 invalid sharing when the operand is not really constant.
3145 It is not a big deal to prohibit constant propagation here as
3146 we will constant propagate in the DOM1 pass anyway. */
3147 if (is_gimple_min_invariant (value)
3148 && useless_type_conversion_p (TREE_TYPE (p),
3149 TREE_TYPE (value))
3150 /* We have to be very careful about ADDR_EXPR. Make sure
3151 the base variable isn't a local variable of the inlined
3152 function, e.g., when doing recursive inlining, direct or
3153 mutually-recursive or whatever, which is why we don't
3154 just test whether fn == current_function_decl. */
3155 && ! self_inlining_addr_expr (value, fn))
3157 insert_decl_map (id, p, value);
3158 insert_debug_decl_map (id, p, var);
3159 return insert_init_debug_bind (id, bb, var, value, NULL);
3163 /* Register the VAR_DECL as the equivalent for the PARM_DECL;
3164 that way, when the PARM_DECL is encountered, it will be
3165 automatically replaced by the VAR_DECL. */
3166 insert_decl_map (id, p, var);
3168 /* Even if P was TREE_READONLY, the new VAR should not be.
3169 In the original code, we would have constructed a
3170 temporary, and then the function body would have never
3171 changed the value of P. However, now, we will be
3172 constructing VAR directly. The constructor body may
3173 change its value multiple times as it is being
3174 constructed. Therefore, it must not be TREE_READONLY;
3175 the back-end assumes that a TREE_READONLY variable is
3176 assigned to only once. */
3177 if (TYPE_NEEDS_CONSTRUCTING (TREE_TYPE (p)))
3178 TREE_READONLY (var) = 0;
3180 /* If there is no setup required and we are in SSA, take the easy route
3181 replacing all SSA names representing the function parameter by the
3182 SSA name passed to the function.
3184 We need to construct a map for the variable anyway as it might be used
3185 in different SSA names when the parameter is set in the function.
3187 Do the replacement at -O0 for const arguments replaced by a constant.
3188 This is important for builtin_constant_p and other constructs requiring
3189 the constant argument to be visible in the inlined function body. */
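/* E.g. when inlining foo (5) where foo's parameter p is declared const
(TREE_READONLY), the SSA default definition of p is mapped directly to
the constant 5 below, so a use like __builtin_constant_p (p) can still
fold to true in the inlined body even at -O0. */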
3190 if (gimple_in_ssa_p (cfun) && rhs && def && is_gimple_reg (p)
3191 && (optimize
3192 || (TREE_READONLY (p)
3193 && is_gimple_min_invariant (rhs)))
3194 && (TREE_CODE (rhs) == SSA_NAME
3195 || is_gimple_min_invariant (rhs))
3196 && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (def))
3198 insert_decl_map (id, def, rhs);
3199 return insert_init_debug_bind (id, bb, var, rhs, NULL);
3202 /* If the value of the argument is never used, don't bother initializing
3203 it. */
3204 if (optimize && gimple_in_ssa_p (cfun) && !def && is_gimple_reg (p))
3206 gcc_assert (!value || !TREE_SIDE_EFFECTS (value));
3207 return insert_init_debug_bind (id, bb, var, rhs, NULL);
3210 /* Initialize this VAR_DECL from the equivalent argument. Convert
3211 the argument to the proper type in case it was promoted. */
3212 if (value)
3214 if (rhs == error_mark_node)
3216 insert_decl_map (id, p, var);
3217 return insert_init_debug_bind (id, bb, var, rhs, NULL);
3220 STRIP_USELESS_TYPE_CONVERSION (rhs);
3222 /* If we are in SSA form properly remap the default definition
3223 or assign to a dummy SSA name if the parameter is unused and
3224 we are not optimizing. */
3225 if (gimple_in_ssa_p (cfun) && is_gimple_reg (p))
3227 if (def)
3229 def = remap_ssa_name (def, id);
3230 init_stmt = gimple_build_assign (def, rhs);
3231 SSA_NAME_IS_DEFAULT_DEF (def) = 0;
3232 set_ssa_default_def (cfun, var, NULL);
3234 else if (!optimize)
3236 def = make_ssa_name (var);
3237 init_stmt = gimple_build_assign (def, rhs);
3240 else
3241 init_stmt = gimple_build_assign (var, rhs);
3243 if (bb && init_stmt)
3244 insert_init_stmt (id, bb, init_stmt);
3246 return init_stmt;
3249 /* Generate code to initialize the parameters of the function at the
3250 top of the stack in ID from the GIMPLE_CALL STMT. */
3252 static void
3253 initialize_inlined_parameters (copy_body_data *id, gimple stmt,
3254 tree fn, basic_block bb)
3256 tree parms;
3257 size_t i;
3258 tree p;
3259 tree vars = NULL_TREE;
3260 tree static_chain = gimple_call_chain (stmt);
3262 /* Figure out what the parameters are. */
3263 parms = DECL_ARGUMENTS (fn);
3265 /* Loop through the parameter declarations, replacing each with an
3266 equivalent VAR_DECL, appropriately initialized. */
3267 for (p = parms, i = 0; p; p = DECL_CHAIN (p), i++)
3269 tree val;
3270 val = i < gimple_call_num_args (stmt) ? gimple_call_arg (stmt, i) : NULL;
3271 setup_one_parameter (id, p, val, fn, bb, &vars);
3273 /* After remapping parameters remap their types. This has to be done
3274 in a second loop over all parameters to appropriately remap
3275 variable sized arrays when the size is specified in a
3276 parameter following the array. */
3277 for (p = parms, i = 0; p; p = DECL_CHAIN (p), i++)
3279 tree *varp = id->decl_map->get (p);
3280 if (varp
3281 && TREE_CODE (*varp) == VAR_DECL)
3283 tree def = (gimple_in_ssa_p (cfun) && is_gimple_reg (p)
3284 ? ssa_default_def (id->src_cfun, p) : NULL);
3285 tree var = *varp;
3286 TREE_TYPE (var) = remap_type (TREE_TYPE (var), id);
3287 /* Also remap the default definition if it was remapped
3288 to the default definition of the parameter replacement
3289 by the parameter setup. */
3290 if (def)
3292 tree *defp = id->decl_map->get (def);
3293 if (defp
3294 && TREE_CODE (*defp) == SSA_NAME
3295 && SSA_NAME_VAR (*defp) == var)
3296 TREE_TYPE (*defp) = TREE_TYPE (var);
3301 /* Initialize the static chain. */
3302 p = DECL_STRUCT_FUNCTION (fn)->static_chain_decl;
3303 gcc_assert (fn != current_function_decl);
3304 if (p)
3306 /* No static chain? Seems like a bug in tree-nested.c. */
3307 gcc_assert (static_chain);
3309 setup_one_parameter (id, p, static_chain, fn, bb, &vars);
3312 declare_inline_vars (id->block, vars);
3316 /* Declare a return variable to replace the RESULT_DECL for the
3317 function we are calling. An appropriate DECL_STMT is returned.
3318 The USE_STMT is filled to contain a use of the declaration to
3319 indicate the return value of the function.
3321 RETURN_SLOT, if non-null, is the place in which to store the result. It
3322 is set only for CALL_EXPR_RETURN_SLOT_OPT. MODIFY_DEST, if non-null,
3323 was the LHS of the MODIFY_EXPR to which this call is the RHS.
3325 RETURN_BOUNDS holds a destination for returned bounds.
3327 The return value is a (possibly null) value that holds the result
3328 as seen by the caller. */
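/* For instance, when inlining the call x = foo (), MODIFY_DEST is x; if x
can safely be reused, the callee's RESULT_DECL simply maps to it and no
temporary is created, otherwise a fresh result variable is declared and
the returned use expression tells the caller where to read the value. */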
3330 static tree
3331 declare_return_variable (copy_body_data *id, tree return_slot, tree modify_dest,
3332 tree return_bounds, basic_block entry_bb)
3334 tree callee = id->src_fn;
3335 tree result = DECL_RESULT (callee);
3336 tree callee_type = TREE_TYPE (result);
3337 tree caller_type;
3338 tree var, use;
3340 /* Handle type-mismatches in the function declaration return type
3341 vs. the call expression. */
3342 if (modify_dest)
3343 caller_type = TREE_TYPE (modify_dest);
3344 else
3345 caller_type = TREE_TYPE (TREE_TYPE (callee));
3347 /* We don't need to do anything for functions that don't return anything. */
3348 if (VOID_TYPE_P (callee_type))
3349 return NULL_TREE;
3351 /* If there was a return slot, then the return value is the
3352 dereferenced address of that object. */
3353 if (return_slot)
3355 /* The front end shouldn't have used both return_slot and
3356 a modify expression. */
3357 gcc_assert (!modify_dest);
3358 if (DECL_BY_REFERENCE (result))
3360 tree return_slot_addr = build_fold_addr_expr (return_slot);
3361 STRIP_USELESS_TYPE_CONVERSION (return_slot_addr);
3363 /* We are going to construct *&return_slot and we can't do that
3364 for variables believed to be not addressable.
3366 FIXME: This check can possibly trigger, because values returned
3367 via return slot optimization are not believed to have their address
3368 taken by alias analysis. */
3369 gcc_assert (TREE_CODE (return_slot) != SSA_NAME);
3370 var = return_slot_addr;
3372 else
3374 var = return_slot;
3375 gcc_assert (TREE_CODE (var) != SSA_NAME);
3376 if (TREE_ADDRESSABLE (result))
3377 mark_addressable (var);
3379 if ((TREE_CODE (TREE_TYPE (result)) == COMPLEX_TYPE
3380 || TREE_CODE (TREE_TYPE (result)) == VECTOR_TYPE)
3381 && !DECL_GIMPLE_REG_P (result)
3382 && DECL_P (var))
3383 DECL_GIMPLE_REG_P (var) = 0;
3384 use = NULL;
3385 goto done;
3388 /* All types requiring non-trivial constructors should have been handled. */
3389 gcc_assert (!TREE_ADDRESSABLE (callee_type));
3391 /* Attempt to avoid creating a new temporary variable. */
3392 if (modify_dest
3393 && TREE_CODE (modify_dest) != SSA_NAME)
3395 bool use_it = false;
3397 /* We can't use MODIFY_DEST if there's type promotion involved. */
3398 if (!useless_type_conversion_p (callee_type, caller_type))
3399 use_it = false;
3401 /* ??? If we're assigning to a variable sized type, then we must
3402 reuse the destination variable, because we've no good way to
3403 create variable sized temporaries at this point. */
3404 else if (TREE_CODE (TYPE_SIZE_UNIT (caller_type)) != INTEGER_CST)
3405 use_it = true;
3407 /* If the callee cannot possibly modify MODIFY_DEST, then we can
3408 reuse it as the result of the call directly. Don't do this if
3409 it would promote MODIFY_DEST to addressable. */
3410 else if (TREE_ADDRESSABLE (result))
3411 use_it = false;
3412 else
3414 tree base_m = get_base_address (modify_dest);
3416 /* If the base isn't a decl, then it's a pointer, and we don't
3417 know where that's going to go. */
3418 if (!DECL_P (base_m))
3419 use_it = false;
3420 else if (is_global_var (base_m))
3421 use_it = false;
3422 else if ((TREE_CODE (TREE_TYPE (result)) == COMPLEX_TYPE
3423 || TREE_CODE (TREE_TYPE (result)) == VECTOR_TYPE)
3424 && !DECL_GIMPLE_REG_P (result)
3425 && DECL_GIMPLE_REG_P (base_m))
3426 use_it = false;
3427 else if (!TREE_ADDRESSABLE (base_m))
3428 use_it = true;
3431 if (use_it)
3433 var = modify_dest;
3434 use = NULL;
3435 goto done;
3439 gcc_assert (TREE_CODE (TYPE_SIZE_UNIT (callee_type)) == INTEGER_CST);
3441 var = copy_result_decl_to_var (result, id);
3442 DECL_SEEN_IN_BIND_EXPR_P (var) = 1;
3444 /* Do not have the rest of GCC warn about this variable as it should
3445 not be visible to the user. */
3446 TREE_NO_WARNING (var) = 1;
3448 declare_inline_vars (id->block, var);
3450 /* Build the use expr. If the return type of the function was
3451 promoted, convert it back to the expected type. */
3452 use = var;
3453 if (!useless_type_conversion_p (caller_type, TREE_TYPE (var)))
3455 /* If we can match up types by promotion/demotion do so. */
3456 if (fold_convertible_p (caller_type, var))
3457 use = fold_convert (caller_type, var);
3458 else
3460 /* ??? For valid programs we should not end up here.
3461 Still if we end up with truly mismatched types here, fall back
3462 to using a MEM_REF to not leak invalid GIMPLE to the following
3463 passes. */
3464 /* Prevent var from being written into SSA form. */
3465 if (TREE_CODE (TREE_TYPE (var)) == VECTOR_TYPE
3466 || TREE_CODE (TREE_TYPE (var)) == COMPLEX_TYPE)
3467 DECL_GIMPLE_REG_P (var) = false;
3468 else if (is_gimple_reg_type (TREE_TYPE (var)))
3469 TREE_ADDRESSABLE (var) = true;
3470 use = fold_build2 (MEM_REF, caller_type,
3471 build_fold_addr_expr (var),
3472 build_int_cst (ptr_type_node, 0));
3476 STRIP_USELESS_TYPE_CONVERSION (use);
3478 if (DECL_BY_REFERENCE (result))
3480 TREE_ADDRESSABLE (var) = 1;
3481 var = build_fold_addr_expr (var);
3484 done:
3485 /* Register the VAR_DECL as the equivalent for the RESULT_DECL; that
3486 way, when the RESULT_DECL is encountered, it will be
3487 automatically replaced by the VAR_DECL.
3489 When returning by reference, ensure that RESULT_DECL remaps to
3490 gimple_val. */
3491 if (DECL_BY_REFERENCE (result)
3492 && !is_gimple_val (var))
3494 tree temp = create_tmp_var (TREE_TYPE (result), "retvalptr");
3495 insert_decl_map (id, result, temp);
3496 /* When RESULT_DECL is in SSA form, we need to remap and initialize
3497 its default_def SSA_NAME. */
3498 if (gimple_in_ssa_p (id->src_cfun)
3499 && is_gimple_reg (result))
3501 temp = make_ssa_name (temp);
3502 insert_decl_map (id, ssa_default_def (id->src_cfun, result), temp);
3504 insert_init_stmt (id, entry_bb, gimple_build_assign (temp, var));
3506 else
3507 insert_decl_map (id, result, var);
3509 /* Remember this so we can ignore it in remap_decls. */
3510 id->retvar = var;
3512 /* If returned bounds are used, then make var for them. */
3513 if (return_bounds)
3515 tree bndtemp = create_tmp_var (pointer_bounds_type_node, "retbnd");
3516 DECL_SEEN_IN_BIND_EXPR_P (bndtemp) = 1;
3517 TREE_NO_WARNING (bndtemp) = 1;
3518 declare_inline_vars (id->block, bndtemp);
3520 id->retbnd = bndtemp;
3521 insert_init_stmt (id, entry_bb,
3522 gimple_build_assign (bndtemp, chkp_get_zero_bounds_var ()));
3525 return use;
3528 /* Callback through walk_tree. Determine if a DECL_INITIAL makes reference
3529 to a local label. */
3531 static tree
3532 has_label_address_in_static_1 (tree *nodep, int *walk_subtrees, void *fnp)
3534 tree node = *nodep;
3535 tree fn = (tree) fnp;
3537 if (TREE_CODE (node) == LABEL_DECL && DECL_CONTEXT (node) == fn)
3538 return node;
3540 if (TYPE_P (node))
3541 *walk_subtrees = 0;
3543 return NULL_TREE;
3546 /* Determine if the function can be copied. If so, return NULL. If
3547 not, return a string describing the reason for failure. */
3549 const char *
3550 copy_forbidden (struct function *fun, tree fndecl)
3552 const char *reason = fun->cannot_be_copied_reason;
3553 tree decl;
3554 unsigned ix;
3556 /* Only examine the function once. */
3557 if (fun->cannot_be_copied_set)
3558 return reason;
3560 /* We cannot copy a function that receives a non-local goto
3561 because we cannot remap the destination label used in the
3562 function that is performing the non-local goto. */
3563 /* ??? Actually, this should be possible, if we work at it.
3564 No doubt there's just a handful of places that simply
3565 assume it doesn't happen and don't substitute properly. */
3566 if (fun->has_nonlocal_label)
3568 reason = G_("function %q+F can never be copied "
3569 "because it receives a non-local goto");
3570 goto fail;
3573 FOR_EACH_LOCAL_DECL (fun, ix, decl)
3574 if (TREE_CODE (decl) == VAR_DECL
3575 && TREE_STATIC (decl)
3576 && !DECL_EXTERNAL (decl)
3577 && DECL_INITIAL (decl)
3578 && walk_tree_without_duplicates (&DECL_INITIAL (decl),
3579 has_label_address_in_static_1,
3580 fndecl))
3582 reason = G_("function %q+F can never be copied because it saves "
3583 "address of local label in a static variable");
3584 goto fail;
3587 fail:
3588 fun->cannot_be_copied_reason = reason;
3589 fun->cannot_be_copied_set = true;
3590 return reason;
3594 static const char *inline_forbidden_reason;
3596 /* A callback for walk_gimple_seq to handle statements. Returns non-null
3597 iff a function cannot be inlined. Also sets the reason why. */
3599 static tree
3600 inline_forbidden_p_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
3601 struct walk_stmt_info *wip)
3603 tree fn = (tree) wip->info;
3604 tree t;
3605 gimple stmt = gsi_stmt (*gsi);
3607 switch (gimple_code (stmt))
3609 case GIMPLE_CALL:
3610 /* Refuse to inline an alloca call unless the user explicitly forced it, as
3611 this may change the program's memory overhead drastically when the
3612 function using alloca is called in a loop. In the GCC present in
3613 SPEC2000, inlining into schedule_block caused it to require 2GB of
3614 RAM instead of 256MB. Don't do so for alloca calls emitted for
3615 VLA objects as those can't cause unbounded growth (they're always
3616 wrapped inside stack_save/stack_restore regions). */
3617 if (gimple_alloca_call_p (stmt)
3618 && !gimple_call_alloca_for_var_p (as_a <gcall *> (stmt))
3619 && !lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn)))
3621 inline_forbidden_reason
3622 = G_("function %q+F can never be inlined because it uses "
3623 "alloca (override using the always_inline attribute)");
3624 *handled_ops_p = true;
3625 return fn;
3628 t = gimple_call_fndecl (stmt);
3629 if (t == NULL_TREE)
3630 break;
3632 /* We cannot inline functions that call setjmp. */
3633 if (setjmp_call_p (t))
3635 inline_forbidden_reason
3636 = G_("function %q+F can never be inlined because it uses setjmp");
3637 *handled_ops_p = true;
3638 return t;
3641 if (DECL_BUILT_IN_CLASS (t) == BUILT_IN_NORMAL)
3642 switch (DECL_FUNCTION_CODE (t))
3644 /* We cannot inline functions that take a variable number of
3645 arguments. */
3646 case BUILT_IN_VA_START:
3647 case BUILT_IN_NEXT_ARG:
3648 case BUILT_IN_VA_END:
3649 inline_forbidden_reason
3650 = G_("function %q+F can never be inlined because it "
3651 "uses variable argument lists");
3652 *handled_ops_p = true;
3653 return t;
3655 case BUILT_IN_LONGJMP:
3656 /* We can't inline functions that call __builtin_longjmp at
3657 all. The non-local goto machinery really requires the
3658 destination be in a different function. If we allow the
3659 function calling __builtin_longjmp to be inlined into the
3660 function calling __builtin_setjmp, Things will Go Awry. */
3661 inline_forbidden_reason
3662 = G_("function %q+F can never be inlined because "
3663 "it uses setjmp-longjmp exception handling");
3664 *handled_ops_p = true;
3665 return t;
3667 case BUILT_IN_NONLOCAL_GOTO:
3668 /* Similarly. */
3669 inline_forbidden_reason
3670 = G_("function %q+F can never be inlined because "
3671 "it uses non-local goto");
3672 *handled_ops_p = true;
3673 return t;
3675 case BUILT_IN_RETURN:
3676 case BUILT_IN_APPLY_ARGS:
3677 /* If a __builtin_apply_args caller would be inlined,
3678 it would be saving arguments of the function it has
3679 been inlined into. Similarly __builtin_return would
3680 return from the function the inline has been inlined into. */
3681 inline_forbidden_reason
3682 = G_("function %q+F can never be inlined because "
3683 "it uses __builtin_return or __builtin_apply_args");
3684 *handled_ops_p = true;
3685 return t;
3687 default:
3688 break;
3690 break;
3692 case GIMPLE_GOTO:
3693 t = gimple_goto_dest (stmt);
3695 /* We will not inline a function which uses computed goto. The
3696 addresses of its local labels, which may be tucked into
3697 global storage, are of course not constant across
3698 instantiations, which causes unexpected behavior. */
3699 if (TREE_CODE (t) != LABEL_DECL)
3701 inline_forbidden_reason
3702 = G_("function %q+F can never be inlined "
3703 "because it contains a computed goto");
3704 *handled_ops_p = true;
3705 return t;
3707 break;
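/* As an illustration (hypothetical example), a computed goto such as

     static void *tbl[] = { &&a, &&b }; ... goto *tbl[i];

   reaches this point with a destination that is not a LABEL_DECL and
   therefore blocks inlining. */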
3709 default:
3710 break;
3713 *handled_ops_p = false;
3714 return NULL_TREE;
3717 /* Return true if FNDECL is a function that cannot be inlined into
3718 another one. */
3720 static bool
3721 inline_forbidden_p (tree fndecl)
3723 struct function *fun = DECL_STRUCT_FUNCTION (fndecl);
3724 struct walk_stmt_info wi;
3725 basic_block bb;
3726 bool forbidden_p = false;
3728 /* First check for shared reasons not to copy the code. */
3729 inline_forbidden_reason = copy_forbidden (fun, fndecl);
3730 if (inline_forbidden_reason != NULL)
3731 return true;
3733 /* Next, walk the statements of the function looking for
3734 constructs we can't handle, or that are non-optimal for inlining. */
3735 hash_set<tree> visited_nodes;
3736 memset (&wi, 0, sizeof (wi));
3737 wi.info = (void *) fndecl;
3738 wi.pset = &visited_nodes;
3740 FOR_EACH_BB_FN (bb, fun)
3742 gimple ret;
3743 gimple_seq seq = bb_seq (bb);
3744 ret = walk_gimple_seq (seq, inline_forbidden_p_stmt, NULL, &wi);
3745 forbidden_p = (ret != NULL);
3746 if (forbidden_p)
3747 break;
3750 return forbidden_p;
3753 /* Return false if the function FNDECL cannot be inlined on account of its
3754 attributes, true otherwise. */
3755 static bool
3756 function_attribute_inlinable_p (const_tree fndecl)
3758 if (targetm.attribute_table)
3760 const_tree a;
3762 for (a = DECL_ATTRIBUTES (fndecl); a; a = TREE_CHAIN (a))
3764 const_tree name = TREE_PURPOSE (a);
3765 int i;
3767 for (i = 0; targetm.attribute_table[i].name != NULL; i++)
3768 if (is_attribute_p (targetm.attribute_table[i].name, name))
3769 return targetm.function_attribute_inlinable_p (fndecl);
3773 return true;
3776 /* Returns nonzero if FN is a function that does not have any
3777 fundamental inline blocking properties. */
3779 bool
3780 tree_inlinable_function_p (tree fn)
3782 bool inlinable = true;
3783 bool do_warning;
3784 tree always_inline;
3786 /* If we've already decided this function shouldn't be inlined,
3787 there's no need to check again. */
3788 if (DECL_UNINLINABLE (fn))
3789 return false;
3791 /* We only warn for functions declared `inline' by the user. */
3792 do_warning = (warn_inline
3793 && DECL_DECLARED_INLINE_P (fn)
3794 && !DECL_NO_INLINE_WARNING_P (fn)
3795 && !DECL_IN_SYSTEM_HEADER (fn));
3797 always_inline = lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn));
3799 if (flag_no_inline
3800 && always_inline == NULL)
3802 if (do_warning)
3803 warning (OPT_Winline, "function %q+F can never be inlined because it "
3804 "is suppressed using -fno-inline", fn);
3805 inlinable = false;
3808 else if (!function_attribute_inlinable_p (fn))
3810 if (do_warning)
3811 warning (OPT_Winline, "function %q+F can never be inlined because it "
3812 "uses attributes conflicting with inlining", fn);
3813 inlinable = false;
3816 else if (inline_forbidden_p (fn))
3818 /* See if we should warn about uninlinable functions. Previously,
3819 some of these warnings would be issued while trying to expand
3820 the function inline, but that would cause multiple warnings
3821 about functions that would for example call alloca. But since
3822 this is a property of the function, just one warning is enough.
3823 As a bonus we can now give more details about the reason why a
3824 function is not inlinable. */
3825 if (always_inline)
3826 error (inline_forbidden_reason, fn);
3827 else if (do_warning)
3828 warning (OPT_Winline, inline_forbidden_reason, fn);
3830 inlinable = false;
3833 /* Squirrel away the result so that we don't have to check again. */
3834 DECL_UNINLINABLE (fn) = !inlinable;
3836 return inlinable;
3839 /* Estimate the cost of a memory move of type TYPE. Use the machine-dependent
3840 word size, take a possible memcpy call into account, and return the
3841 cost based on whether we optimize for size or speed according to SPEED_P. */
3843 int
3844 estimate_move_cost (tree type, bool ARG_UNUSED (speed_p))
3846 HOST_WIDE_INT size;
3848 gcc_assert (!VOID_TYPE_P (type));
3850 if (TREE_CODE (type) == VECTOR_TYPE)
3852 machine_mode inner = TYPE_MODE (TREE_TYPE (type));
3853 machine_mode simd
3854 = targetm.vectorize.preferred_simd_mode (inner);
3855 int simd_mode_size = GET_MODE_SIZE (simd);
3856 return ((GET_MODE_SIZE (TYPE_MODE (type)) + simd_mode_size - 1)
3857 / simd_mode_size);
3860 size = int_size_in_bytes (type);
3862 if (size < 0 || size > MOVE_MAX_PIECES * MOVE_RATIO (speed_p))
3863 /* Cost of a memcpy call, 3 arguments and the call. */
3864 return 4;
3865 else
3866 return ((size + MOVE_MAX_PIECES - 1) / MOVE_MAX_PIECES);
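/* Worked example with hypothetical target values: if MOVE_MAX_PIECES is 8
   and MOVE_RATIO (speed_p) is 4, the memcpy threshold is 32 bytes.  A
   24-byte structure is then costed at (24 + 7) / 8 = 3 piecewise moves,
   while a 64-byte structure exceeds the threshold and is costed as a
   memcpy call, i.e. 4. */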
3869 /* Returns cost of operation CODE, according to WEIGHTS */
3871 static int
3872 estimate_operator_cost (enum tree_code code, eni_weights *weights,
3873 tree op1 ATTRIBUTE_UNUSED, tree op2)
3875 switch (code)
3877 /* These are "free" conversions, or their presumed cost
3878 is folded into other operations. */
3879 case RANGE_EXPR:
3880 CASE_CONVERT:
3881 case COMPLEX_EXPR:
3882 case PAREN_EXPR:
3883 case VIEW_CONVERT_EXPR:
3884 return 0;
3886 /* Assign cost of 1 to usual operations.
3887 ??? We may consider mapping RTL costs to this. */
3888 case COND_EXPR:
3889 case VEC_COND_EXPR:
3890 case VEC_PERM_EXPR:
3892 case PLUS_EXPR:
3893 case POINTER_PLUS_EXPR:
3894 case MINUS_EXPR:
3895 case MULT_EXPR:
3896 case MULT_HIGHPART_EXPR:
3897 case FMA_EXPR:
3899 case ADDR_SPACE_CONVERT_EXPR:
3900 case FIXED_CONVERT_EXPR:
3901 case FIX_TRUNC_EXPR:
3903 case NEGATE_EXPR:
3904 case FLOAT_EXPR:
3905 case MIN_EXPR:
3906 case MAX_EXPR:
3907 case ABS_EXPR:
3909 case LSHIFT_EXPR:
3910 case RSHIFT_EXPR:
3911 case LROTATE_EXPR:
3912 case RROTATE_EXPR:
3914 case BIT_IOR_EXPR:
3915 case BIT_XOR_EXPR:
3916 case BIT_AND_EXPR:
3917 case BIT_NOT_EXPR:
3919 case TRUTH_ANDIF_EXPR:
3920 case TRUTH_ORIF_EXPR:
3921 case TRUTH_AND_EXPR:
3922 case TRUTH_OR_EXPR:
3923 case TRUTH_XOR_EXPR:
3924 case TRUTH_NOT_EXPR:
3926 case LT_EXPR:
3927 case LE_EXPR:
3928 case GT_EXPR:
3929 case GE_EXPR:
3930 case EQ_EXPR:
3931 case NE_EXPR:
3932 case ORDERED_EXPR:
3933 case UNORDERED_EXPR:
3935 case UNLT_EXPR:
3936 case UNLE_EXPR:
3937 case UNGT_EXPR:
3938 case UNGE_EXPR:
3939 case UNEQ_EXPR:
3940 case LTGT_EXPR:
3942 case CONJ_EXPR:
3944 case PREDECREMENT_EXPR:
3945 case PREINCREMENT_EXPR:
3946 case POSTDECREMENT_EXPR:
3947 case POSTINCREMENT_EXPR:
3949 case REALIGN_LOAD_EXPR:
3951 case REDUC_MAX_EXPR:
3952 case REDUC_MIN_EXPR:
3953 case REDUC_PLUS_EXPR:
3954 case WIDEN_SUM_EXPR:
3955 case WIDEN_MULT_EXPR:
3956 case DOT_PROD_EXPR:
3957 case SAD_EXPR:
3958 case WIDEN_MULT_PLUS_EXPR:
3959 case WIDEN_MULT_MINUS_EXPR:
3960 case WIDEN_LSHIFT_EXPR:
3962 case VEC_WIDEN_MULT_HI_EXPR:
3963 case VEC_WIDEN_MULT_LO_EXPR:
3964 case VEC_WIDEN_MULT_EVEN_EXPR:
3965 case VEC_WIDEN_MULT_ODD_EXPR:
3966 case VEC_UNPACK_HI_EXPR:
3967 case VEC_UNPACK_LO_EXPR:
3968 case VEC_UNPACK_FLOAT_HI_EXPR:
3969 case VEC_UNPACK_FLOAT_LO_EXPR:
3970 case VEC_PACK_TRUNC_EXPR:
3971 case VEC_PACK_SAT_EXPR:
3972 case VEC_PACK_FIX_TRUNC_EXPR:
3973 case VEC_WIDEN_LSHIFT_HI_EXPR:
3974 case VEC_WIDEN_LSHIFT_LO_EXPR:
3976 return 1;
3978 /* A few special cases of expensive operations. This is useful
3979 to avoid inlining functions that contain too many of these. */
3980 case TRUNC_DIV_EXPR:
3981 case CEIL_DIV_EXPR:
3982 case FLOOR_DIV_EXPR:
3983 case ROUND_DIV_EXPR:
3984 case EXACT_DIV_EXPR:
3985 case TRUNC_MOD_EXPR:
3986 case CEIL_MOD_EXPR:
3987 case FLOOR_MOD_EXPR:
3988 case ROUND_MOD_EXPR:
3989 case RDIV_EXPR:
3990 if (TREE_CODE (op2) != INTEGER_CST)
3991 return weights->div_mod_cost;
3992 return 1;
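/* For instance, "x / y" with a non-constant divisor is charged
   weights->div_mod_cost (10 in the time weights set up by
   init_inline_once), whereas "x / 8" is charged only 1, since
   division by a constant is usually expanded to cheaper code. */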
3994 default:
3995 /* We expect a copy assignment with no operator. */
3996 gcc_assert (get_gimple_rhs_class (code) == GIMPLE_SINGLE_RHS);
3997 return 0;
4002 /* Estimate number of instructions that will be created by expanding
4003 the statements in the statement sequence STMTS.
4004 WEIGHTS contains weights attributed to various constructs. */
4006 static
4007 int estimate_num_insns_seq (gimple_seq stmts, eni_weights *weights)
4009 int cost;
4010 gimple_stmt_iterator gsi;
4012 cost = 0;
4013 for (gsi = gsi_start (stmts); !gsi_end_p (gsi); gsi_next (&gsi))
4014 cost += estimate_num_insns (gsi_stmt (gsi), weights);
4016 return cost;
4020 /* Estimate number of instructions that will be created by expanding STMT.
4021 WEIGHTS contains weights attributed to various constructs. */
4023 int
4024 estimate_num_insns (gimple stmt, eni_weights *weights)
4026 unsigned cost, i;
4027 enum gimple_code code = gimple_code (stmt);
4028 tree lhs;
4029 tree rhs;
4031 switch (code)
4033 case GIMPLE_ASSIGN:
4034 /* Try to estimate the cost of assignments. We have two cases to
4035 deal with:
4036 1) Simple assignments to registers;
4037 2) Stores to things that must live in memory. This includes
4038 "normal" stores to scalars, but also assignments of large
4039 structures, or constructors of big arrays;
4041 Let us look at both cases, assuming we have "a = b + C":
4042 <GIMPLE_ASSIGN <var_decl "a">
4043 <plus_expr <var_decl "b"> <constant C>>
4044 If "a" is a GIMPLE register, the assignment to it is free on almost
4045 any target, because "a" usually ends up in a real register. Hence
4046 the only cost of this expression comes from the PLUS_EXPR, and we
4047 can ignore the GIMPLE_ASSIGN.
4048 If "a" is not a GIMPLE register, the assignment to "a" will most
4049 likely be a real store, so the cost of the GIMPLE_ASSIGN is the cost
4050 of moving something into "a", which we compute using the function
4051 estimate_move_cost. */
4052 if (gimple_clobber_p (stmt))
4053 return 0; /* ={v} {CLOBBER} stmt expands to nothing. */
4055 lhs = gimple_assign_lhs (stmt);
4056 rhs = gimple_assign_rhs1 (stmt);
4058 cost = 0;
4060 /* Account for the cost of moving to / from memory. */
4061 if (gimple_store_p (stmt))
4062 cost += estimate_move_cost (TREE_TYPE (lhs), weights->time_based);
4063 if (gimple_assign_load_p (stmt))
4064 cost += estimate_move_cost (TREE_TYPE (rhs), weights->time_based);
4066 cost += estimate_operator_cost (gimple_assign_rhs_code (stmt), weights,
4067 gimple_assign_rhs1 (stmt),
4068 get_gimple_rhs_class (gimple_assign_rhs_code (stmt))
4069 == GIMPLE_BINARY_RHS
4070 ? gimple_assign_rhs2 (stmt) : NULL);
4071 break;
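/* For example, "a = b + c" is costed at just the PLUS_EXPR (1) when "a"
   is a GIMPLE register, plus one more move (typically 1) when "a" must
   be stored to memory. */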
4073 case GIMPLE_COND:
4074 cost = 1 + estimate_operator_cost (gimple_cond_code (stmt), weights,
4075 gimple_op (stmt, 0),
4076 gimple_op (stmt, 1));
4077 break;
4079 case GIMPLE_SWITCH:
4081 gswitch *switch_stmt = as_a <gswitch *> (stmt);
4082 /* Take into account cost of the switch + guess 2 conditional jumps for
4083 each case label.
4085 TODO: once the switch expansion logic is sufficiently separated, we can
4086 do a better job of estimating the cost of the switch. */
4087 if (weights->time_based)
4088 cost = floor_log2 (gimple_switch_num_labels (switch_stmt)) * 2;
4089 else
4090 cost = gimple_switch_num_labels (switch_stmt) * 2;
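/* Illustrative example: a switch with 16 labels (including the default)
   is estimated at floor_log2 (16) * 2 = 8 when optimizing for speed
   (assuming expansion into a balanced decision tree), but at
   16 * 2 = 32 when optimizing for size. */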
4092 break;
4094 case GIMPLE_CALL:
4096 tree decl;
4098 if (gimple_call_internal_p (stmt))
4099 return 0;
4100 else if ((decl = gimple_call_fndecl (stmt))
4101 && DECL_BUILT_IN (decl))
4103 /* Do not special-case builtins where we see the body.
4104 This just confuses the inliner. */
4105 struct cgraph_node *node;
4106 if (!(node = cgraph_node::get (decl))
4107 || node->definition)
4109 /* For builtins that are likely expanded to nothing or
4110 inlined, do not account operand costs. */
4111 else if (is_simple_builtin (decl))
4112 return 0;
4113 else if (is_inexpensive_builtin (decl))
4114 return weights->target_builtin_call_cost;
4115 else if (gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
4117 /* We canonicalize x * x to pow (x, 2.0) with -ffast-math, so
4118 specialize the cheap expansion we do here.
4119 ??? This asks for a more general solution. */
4120 switch (DECL_FUNCTION_CODE (decl))
4122 case BUILT_IN_POW:
4123 case BUILT_IN_POWF:
4124 case BUILT_IN_POWL:
4125 if (TREE_CODE (gimple_call_arg (stmt, 1)) == REAL_CST
4126 && REAL_VALUES_EQUAL
4127 (TREE_REAL_CST (gimple_call_arg (stmt, 1)), dconst2))
4128 return estimate_operator_cost
4129 (MULT_EXPR, weights, gimple_call_arg (stmt, 0),
4130 gimple_call_arg (stmt, 0));
4131 break;
4133 default:
4134 break;
4139 cost = decl ? weights->call_cost : weights->indirect_call_cost;
4140 if (gimple_call_lhs (stmt))
4141 cost += estimate_move_cost (TREE_TYPE (gimple_call_lhs (stmt)),
4142 weights->time_based);
4143 for (i = 0; i < gimple_call_num_args (stmt); i++)
4145 tree arg = gimple_call_arg (stmt, i);
4146 cost += estimate_move_cost (TREE_TYPE (arg),
4147 weights->time_based);
4149 break;
4152 case GIMPLE_RETURN:
4153 return weights->return_cost;
4155 case GIMPLE_GOTO:
4156 case GIMPLE_LABEL:
4157 case GIMPLE_NOP:
4158 case GIMPLE_PHI:
4159 case GIMPLE_PREDICT:
4160 case GIMPLE_DEBUG:
4161 return 0;
4163 case GIMPLE_ASM:
4165 int count = asm_str_count (gimple_asm_string (as_a <gasm *> (stmt)));
4166 /* 1000 means infinity. This avoids overflows later
4167 with very long asm statements. */
4168 if (count > 1000)
4169 count = 1000;
4170 return count;
4173 case GIMPLE_RESX:
4174 /* This is either going to be an external function call with one
4175 argument, or two register copy statements plus a goto. */
4176 return 2;
4178 case GIMPLE_EH_DISPATCH:
4179 /* ??? This is going to turn into a switch statement. Ideally
4180 we'd have a look at the eh region and estimate the number of
4181 edges involved. */
4182 return 10;
4184 case GIMPLE_BIND:
4185 return estimate_num_insns_seq (
4186 gimple_bind_body (as_a <gbind *> (stmt)),
4187 weights);
4189 case GIMPLE_EH_FILTER:
4190 return estimate_num_insns_seq (gimple_eh_filter_failure (stmt), weights);
4192 case GIMPLE_CATCH:
4193 return estimate_num_insns_seq (gimple_catch_handler (
4194 as_a <gcatch *> (stmt)),
4195 weights);
4197 case GIMPLE_TRY:
4198 return (estimate_num_insns_seq (gimple_try_eval (stmt), weights)
4199 + estimate_num_insns_seq (gimple_try_cleanup (stmt), weights));
4201 /* OMP directives are generally very expensive. */
4203 case GIMPLE_OMP_RETURN:
4204 case GIMPLE_OMP_SECTIONS_SWITCH:
4205 case GIMPLE_OMP_ATOMIC_STORE:
4206 case GIMPLE_OMP_CONTINUE:
4207 /* ...except these, which are cheap. */
4208 return 0;
4210 case GIMPLE_OMP_ATOMIC_LOAD:
4211 return weights->omp_cost;
4213 case GIMPLE_OMP_FOR:
4214 return (weights->omp_cost
4215 + estimate_num_insns_seq (gimple_omp_body (stmt), weights)
4216 + estimate_num_insns_seq (gimple_omp_for_pre_body (stmt), weights));
4218 case GIMPLE_OMP_PARALLEL:
4219 case GIMPLE_OMP_TASK:
4220 case GIMPLE_OMP_CRITICAL:
4221 case GIMPLE_OMP_MASTER:
4222 case GIMPLE_OMP_TASKGROUP:
4223 case GIMPLE_OMP_ORDERED:
4224 case GIMPLE_OMP_SECTION:
4225 case GIMPLE_OMP_SECTIONS:
4226 case GIMPLE_OMP_SINGLE:
4227 case GIMPLE_OMP_TARGET:
4228 case GIMPLE_OMP_TEAMS:
4229 return (weights->omp_cost
4230 + estimate_num_insns_seq (gimple_omp_body (stmt), weights));
4232 case GIMPLE_TRANSACTION:
4233 return (weights->tm_cost
4234 + estimate_num_insns_seq (gimple_transaction_body (
4235 as_a <gtransaction *> (stmt)),
4236 weights));
4238 default:
4239 gcc_unreachable ();
4242 return cost;
4245 /* Estimate number of instructions that will be created by expanding
4246 function FNDECL. WEIGHTS contains weights attributed to various
4247 constructs. */
4249 int
4250 estimate_num_insns_fn (tree fndecl, eni_weights *weights)
4252 struct function *my_function = DECL_STRUCT_FUNCTION (fndecl);
4253 gimple_stmt_iterator bsi;
4254 basic_block bb;
4255 int n = 0;
4257 gcc_assert (my_function && my_function->cfg);
4258 FOR_EACH_BB_FN (bb, my_function)
4260 for (bsi = gsi_start_bb (bb); !gsi_end_p (bsi); gsi_next (&bsi))
4261 n += estimate_num_insns (gsi_stmt (bsi), weights);
4264 return n;
4268 /* Initializes weights used by estimate_num_insns. */
4270 void
4271 init_inline_once (void)
4273 eni_size_weights.call_cost = 1;
4274 eni_size_weights.indirect_call_cost = 3;
4275 eni_size_weights.target_builtin_call_cost = 1;
4276 eni_size_weights.div_mod_cost = 1;
4277 eni_size_weights.omp_cost = 40;
4278 eni_size_weights.tm_cost = 10;
4279 eni_size_weights.time_based = false;
4280 eni_size_weights.return_cost = 1;
4282 /* Estimating time for call is difficult, since we have no idea what the
4283 called function does. In the current uses of eni_time_weights,
4284 underestimating the cost does less harm than overestimating it, so
4285 we choose a rather small value here. */
4286 eni_time_weights.call_cost = 10;
4287 eni_time_weights.indirect_call_cost = 15;
4288 eni_time_weights.target_builtin_call_cost = 1;
4289 eni_time_weights.div_mod_cost = 10;
4290 eni_time_weights.omp_cost = 40;
4291 eni_time_weights.tm_cost = 40;
4292 eni_time_weights.time_based = true;
4293 eni_time_weights.return_cost = 2;
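/* With the time weights above, a direct call "r = f (a, b)" with scalar
   arguments and result is estimated at roughly call_cost (10) plus one
   move each for the result and the two arguments, i.e. about 13; under
   the size weights the same call costs about 4. */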
4296 /* Estimate the number of instructions in a gimple_seq. */
4298 int
4299 count_insns_seq (gimple_seq seq, eni_weights *weights)
4301 gimple_stmt_iterator gsi;
4302 int n = 0;
4303 for (gsi = gsi_start (seq); !gsi_end_p (gsi); gsi_next (&gsi))
4304 n += estimate_num_insns (gsi_stmt (gsi), weights);
4306 return n;
4310 /* Install new lexical TREE_BLOCK underneath 'current_block'. */
4312 static void
4313 prepend_lexical_block (tree current_block, tree new_block)
4315 BLOCK_CHAIN (new_block) = BLOCK_SUBBLOCKS (current_block);
4316 BLOCK_SUBBLOCKS (current_block) = new_block;
4317 BLOCK_SUPERCONTEXT (new_block) = current_block;
4320 /* Add local variables from CALLEE to CALLER. */
4322 static inline void
4323 add_local_variables (struct function *callee, struct function *caller,
4324 copy_body_data *id)
4326 tree var;
4327 unsigned ix;
4329 FOR_EACH_LOCAL_DECL (callee, ix, var)
4330 if (!can_be_nonlocal (var, id))
4332 tree new_var = remap_decl (var, id);
4334 /* Remap debug-expressions. */
4335 if (TREE_CODE (new_var) == VAR_DECL
4336 && DECL_HAS_DEBUG_EXPR_P (var)
4337 && new_var != var)
4339 tree tem = DECL_DEBUG_EXPR (var);
4340 bool old_regimplify = id->regimplify;
4341 id->remapping_type_depth++;
4342 walk_tree (&tem, copy_tree_body_r, id, NULL);
4343 id->remapping_type_depth--;
4344 id->regimplify = old_regimplify;
4345 SET_DECL_DEBUG_EXPR (new_var, tem);
4346 DECL_HAS_DEBUG_EXPR_P (new_var) = 1;
4348 add_local_decl (caller, new_var);
4352 /* Add to BINDINGS a debug stmt resetting SRCVAR if inlining might
4353 have brought in or introduced any debug stmts for SRCVAR. */
4355 static inline void
4356 reset_debug_binding (copy_body_data *id, tree srcvar, gimple_seq *bindings)
4358 tree *remappedvarp = id->decl_map->get (srcvar);
4360 if (!remappedvarp)
4361 return;
4363 if (TREE_CODE (*remappedvarp) != VAR_DECL)
4364 return;
4366 if (*remappedvarp == id->retvar || *remappedvarp == id->retbnd)
4367 return;
4369 tree tvar = target_for_debug_bind (*remappedvarp);
4370 if (!tvar)
4371 return;
4373 gdebug *stmt = gimple_build_debug_bind (tvar, NULL_TREE,
4374 id->call_stmt);
4375 gimple_seq_add_stmt (bindings, stmt);
4378 /* For each inlined variable for which we may have debug bind stmts,
4379 add before GSI a final debug stmt resetting it, marking the end of
4380 its life, so that var-tracking knows it doesn't have to compute
4381 further locations for it. */
4383 static inline void
4384 reset_debug_bindings (copy_body_data *id, gimple_stmt_iterator gsi)
4386 tree var;
4387 unsigned ix;
4388 gimple_seq bindings = NULL;
4390 if (!gimple_in_ssa_p (id->src_cfun))
4391 return;
4393 if (!opt_for_fn (id->dst_fn, flag_var_tracking_assignments))
4394 return;
4396 for (var = DECL_ARGUMENTS (id->src_fn);
4397 var; var = DECL_CHAIN (var))
4398 reset_debug_binding (id, var, &bindings);
4400 FOR_EACH_LOCAL_DECL (id->src_cfun, ix, var)
4401 reset_debug_binding (id, var, &bindings);
4403 gsi_insert_seq_before_without_update (&gsi, bindings, GSI_SAME_STMT);
4406 /* If STMT is a GIMPLE_CALL, replace it with its inline expansion. */
4408 static bool
4409 expand_call_inline (basic_block bb, gimple stmt, copy_body_data *id)
4411 tree use_retvar;
4412 tree fn;
4413 hash_map<tree, tree> *dst;
4414 hash_map<tree, tree> *st = NULL;
4415 tree return_slot;
4416 tree modify_dest;
4417 tree return_bounds = NULL;
4418 location_t saved_location;
4419 struct cgraph_edge *cg_edge;
4420 cgraph_inline_failed_t reason;
4421 basic_block return_block;
4422 edge e;
4423 gimple_stmt_iterator gsi, stmt_gsi;
4424 bool successfully_inlined = FALSE;
4425 bool purge_dead_abnormal_edges;
4426 gcall *call_stmt;
4427 unsigned int i;
4429 /* Set input_location here so we get the right instantiation context
4430 if we call instantiate_decl from inlinable_function_p. */
4431 /* FIXME: instantiate_decl isn't called by inlinable_function_p. */
4432 saved_location = input_location;
4433 input_location = gimple_location (stmt);
4435 /* From here on, we're only interested in CALL_EXPRs. */
4436 call_stmt = dyn_cast <gcall *> (stmt);
4437 if (!call_stmt)
4438 goto egress;
4440 cg_edge = id->dst_node->get_edge (stmt);
4441 gcc_checking_assert (cg_edge);
4442 /* First, see if we can figure out what function is being called.
4443 If we cannot, then there is no hope of inlining the function. */
4444 if (cg_edge->indirect_unknown_callee)
4445 goto egress;
4446 fn = cg_edge->callee->decl;
4447 gcc_checking_assert (fn);
4449 /* If FN is a declaration of a function in a nested scope that was
4450 globally declared inline, we don't set its DECL_INITIAL.
4451 However, we can't blindly follow DECL_ABSTRACT_ORIGIN because the
4452 C++ front-end uses it for cdtors to refer to their internal
4453 declarations, which are not real functions. Fortunately those
4454 don't have trees to be saved, so we can tell by checking their
4455 gimple_body. */
4456 if (!DECL_INITIAL (fn)
4457 && DECL_ABSTRACT_ORIGIN (fn)
4458 && gimple_has_body_p (DECL_ABSTRACT_ORIGIN (fn)))
4459 fn = DECL_ABSTRACT_ORIGIN (fn);
4461 /* Don't try to inline functions that are not well-suited to inlining. */
4462 if (cg_edge->inline_failed)
4464 reason = cg_edge->inline_failed;
4465 /* If this call was originally indirect, we do not want to emit any
4466 inlining related warnings or sorry messages because there are no
4467 guarantees regarding those. */
4468 if (cg_edge->indirect_inlining_edge)
4469 goto egress;
4471 if (lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn))
4472 /* For extern inline functions that get redefined we have always
4473 silently ignored the always_inline flag. Better behaviour would
4474 be to keep both bodies and use the extern inline body
4475 for inlining, but we can't do that because frontends overwrite
4476 the body. */
4477 && !cg_edge->callee->local.redefined_extern_inline
4478 /* During early inline pass, report only when optimization is
4479 not turned on. */
4480 && (symtab->global_info_ready
4481 || !optimize
4482 || cgraph_inline_failed_type (reason) == CIF_FINAL_ERROR)
4483 /* PR 20090218-1_0.c. Body can be provided by another module. */
4484 && (reason != CIF_BODY_NOT_AVAILABLE || !flag_generate_lto))
4486 error ("inlining failed in call to always_inline %q+F: %s", fn,
4487 cgraph_inline_failed_string (reason));
4488 error ("called from here");
4490 else if (warn_inline
4491 && DECL_DECLARED_INLINE_P (fn)
4492 && !DECL_NO_INLINE_WARNING_P (fn)
4493 && !DECL_IN_SYSTEM_HEADER (fn)
4494 && reason != CIF_UNSPECIFIED
4495 && !lookup_attribute ("noinline", DECL_ATTRIBUTES (fn))
4496 /* Do not warn about not inlined recursive calls. */
4497 && !cg_edge->recursive_p ()
4498 /* Avoid warnings during early inline pass. */
4499 && symtab->global_info_ready)
4501 warning (OPT_Winline, "inlining failed in call to %q+F: %s",
4502 fn, _(cgraph_inline_failed_string (reason)));
4503 warning (OPT_Winline, "called from here");
4505 goto egress;
4507 fn = cg_edge->callee->decl;
4508 cg_edge->callee->get_untransformed_body ();
4510 #ifdef ENABLE_CHECKING
4511 if (cg_edge->callee->decl != id->dst_node->decl)
4512 cg_edge->callee->verify ();
4513 #endif
4515 /* We will be inlining this callee. */
4516 id->eh_lp_nr = lookup_stmt_eh_lp (stmt);
4517 id->assign_stmts.create (0);
4519 /* Update the callers EH personality. */
4520 if (DECL_FUNCTION_PERSONALITY (cg_edge->callee->decl))
4521 DECL_FUNCTION_PERSONALITY (cg_edge->caller->decl)
4522 = DECL_FUNCTION_PERSONALITY (cg_edge->callee->decl);
4524 /* Split the block holding the GIMPLE_CALL. */
4525 e = split_block (bb, stmt);
4526 bb = e->src;
4527 return_block = e->dest;
4528 remove_edge (e);
4530 /* split_block splits after the statement; work around this by
4531 moving the call into the second block manually. Not pretty,
4532 but seems easier than doing the CFG manipulation by hand
4533 when the GIMPLE_CALL is in the last statement of BB. */
4534 stmt_gsi = gsi_last_bb (bb);
4535 gsi_remove (&stmt_gsi, false);
4537 /* If the GIMPLE_CALL was in the last statement of BB, it may have
4538 been the source of abnormal edges. In this case, schedule
4539 the removal of dead abnormal edges. */
4540 gsi = gsi_start_bb (return_block);
4541 if (gsi_end_p (gsi))
4543 gsi_insert_after (&gsi, stmt, GSI_NEW_STMT);
4544 purge_dead_abnormal_edges = true;
4546 else
4548 gsi_insert_before (&gsi, stmt, GSI_NEW_STMT);
4549 purge_dead_abnormal_edges = false;
4552 stmt_gsi = gsi_start_bb (return_block);
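/* Conceptually, a block that ended in "...; r = f (...); rest" has now
   become BB = "..." and return_block = "r = f (...); rest"; the inlined
   body will be spliced in between the two. */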
4554 /* Build a block containing code to initialize the arguments, the
4555 actual inline expansion of the body, and a label for the return
4556 statements within the function to jump to. The type of the
4557 statement expression is the return type of the function call.
4558 ??? If the call does not have an associated block then we will
4559 remap all callee blocks to NULL, effectively dropping most of
4560 its debug information. This should only happen for calls to
4561 artificial decls inserted by the compiler itself. We need to
4562 either link the inlined blocks into the caller block tree or
4563 not refer to them in any way to not break GC for locations. */
4564 if (gimple_block (stmt))
4566 id->block = make_node (BLOCK);
4567 BLOCK_ABSTRACT_ORIGIN (id->block) = fn;
4568 BLOCK_SOURCE_LOCATION (id->block) = LOCATION_LOCUS (input_location);
4569 prepend_lexical_block (gimple_block (stmt), id->block);
4572 /* Local declarations will be replaced by their equivalents in this
4573 map. */
4574 st = id->decl_map;
4575 id->decl_map = new hash_map<tree, tree>;
4576 dst = id->debug_map;
4577 id->debug_map = NULL;
4579 /* Record the function we are about to inline. */
4580 id->src_fn = fn;
4581 id->src_node = cg_edge->callee;
4582 id->src_cfun = DECL_STRUCT_FUNCTION (fn);
4583 id->call_stmt = stmt;
4585 gcc_assert (!id->src_cfun->after_inlining);
4587 id->entry_bb = bb;
4588 if (lookup_attribute ("cold", DECL_ATTRIBUTES (fn)))
4590 gimple_stmt_iterator si = gsi_last_bb (bb);
4591 gsi_insert_after (&si, gimple_build_predict (PRED_COLD_FUNCTION,
4592 NOT_TAKEN),
4593 GSI_NEW_STMT);
4595 initialize_inlined_parameters (id, stmt, fn, bb);
4597 if (DECL_INITIAL (fn))
4599 if (gimple_block (stmt))
4601 tree *var;
4603 prepend_lexical_block (id->block,
4604 remap_blocks (DECL_INITIAL (fn), id));
4605 gcc_checking_assert (BLOCK_SUBBLOCKS (id->block)
4606 && (BLOCK_CHAIN (BLOCK_SUBBLOCKS (id->block))
4607 == NULL_TREE));
4608 /* Move vars for PARM_DECLs from the DECL_INITIAL block to id->block;
4609 otherwise, for DWARF, DW_TAG_formal_parameter will not be a child of
4610 DW_TAG_inlined_subroutine, but of a DW_TAG_lexical_block
4611 under it. The parameters can then be evaluated in the debugger,
4612 but don't show up in backtraces. */
4613 for (var = &BLOCK_VARS (BLOCK_SUBBLOCKS (id->block)); *var; )
4614 if (TREE_CODE (DECL_ORIGIN (*var)) == PARM_DECL)
4616 tree v = *var;
4617 *var = TREE_CHAIN (v);
4618 TREE_CHAIN (v) = BLOCK_VARS (id->block);
4619 BLOCK_VARS (id->block) = v;
4621 else
4622 var = &TREE_CHAIN (*var);
4624 else
4625 remap_blocks_to_null (DECL_INITIAL (fn), id);
4628 /* Return statements in the function body will be replaced by jumps
4629 to the RET_LABEL. */
4630 gcc_assert (DECL_INITIAL (fn));
4631 gcc_assert (TREE_CODE (DECL_INITIAL (fn)) == BLOCK);
4633 /* Find the LHS to which the result of this call is assigned. */
4634 return_slot = NULL;
4635 if (gimple_call_lhs (stmt))
4637 modify_dest = gimple_call_lhs (stmt);
4639 /* Remember where to copy returned bounds. */
4640 if (gimple_call_with_bounds_p (stmt)
4641 && TREE_CODE (modify_dest) == SSA_NAME)
4643 gcall *retbnd = chkp_retbnd_call_by_val (modify_dest);
4644 if (retbnd)
4646 return_bounds = gimple_call_lhs (retbnd);
4647 /* If returned bounds are not used then just
4648 remove unused call. */
4649 if (!return_bounds)
4651 gimple_stmt_iterator iter = gsi_for_stmt (retbnd);
4652 gsi_remove (&iter, true);
4657 /* The function which we are inlining might not return a value,
4658 in which case we should issue a warning that the function
4659 does not return a value. In that case the optimizers will
4660 see that the variable to which the value is assigned was not
4661 initialized. We do not want to issue a warning about that
4662 uninitialized variable. */
4663 if (DECL_P (modify_dest))
4664 TREE_NO_WARNING (modify_dest) = 1;
4666 if (gimple_call_return_slot_opt_p (call_stmt))
4668 return_slot = modify_dest;
4669 modify_dest = NULL;
4672 else
4673 modify_dest = NULL;
4675 /* If we are inlining a call to the C++ operator new, we don't want
4676 to use type based alias analysis on the return value. Otherwise
4677 we may get confused if the compiler sees that the inlined new
4678 function returns a pointer which was just deleted. See bug
4679 33407. */
4680 if (DECL_IS_OPERATOR_NEW (fn))
4682 return_slot = NULL;
4683 modify_dest = NULL;
4686 /* Declare the return variable for the function. */
4687 use_retvar = declare_return_variable (id, return_slot, modify_dest,
4688 return_bounds, bb);
4690 /* Add local vars in this inlined callee to caller. */
4691 add_local_variables (id->src_cfun, cfun, id);
4693 if (dump_file && (dump_flags & TDF_DETAILS))
4695 fprintf (dump_file, "Inlining ");
4696 print_generic_expr (dump_file, id->src_fn, 0);
4697 fprintf (dump_file, " to ");
4698 print_generic_expr (dump_file, id->dst_fn, 0);
4699 fprintf (dump_file, " with frequency %i\n", cg_edge->frequency);
4702 /* This is it. Duplicate the callee body. Assume callee is
4703 pre-gimplified. Note that we must not alter the caller
4704 function in any way before this point, as this CALL_EXPR may be
4705 a self-referential call; if we're calling ourselves, we need to
4706 duplicate our body before altering anything. */
4707 copy_body (id, cg_edge->callee->count,
4708 GCOV_COMPUTE_SCALE (cg_edge->frequency, CGRAPH_FREQ_BASE),
4709 bb, return_block, NULL);
4711 reset_debug_bindings (id, stmt_gsi);
4713 /* Reset the escaped solution. */
4714 if (cfun->gimple_df)
4715 pt_solution_reset (&cfun->gimple_df->escaped);
4717 /* Clean up. */
4718 if (id->debug_map)
4720 delete id->debug_map;
4721 id->debug_map = dst;
4723 delete id->decl_map;
4724 id->decl_map = st;
4726 /* Unlink the call's virtual operands before replacing it. */
4727 unlink_stmt_vdef (stmt);
4728 if (gimple_vdef (stmt)
4729 && TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
4730 release_ssa_name (gimple_vdef (stmt));
4732 /* If the inlined function returns a result that we care about,
4733 substitute the GIMPLE_CALL with an assignment of the return
4734 variable to the LHS of the call. That is, if STMT was
4735 'a = foo (...)', substitute the call with 'a = USE_RETVAR'. */
4736 if (use_retvar && gimple_call_lhs (stmt))
4738 gimple old_stmt = stmt;
4739 stmt = gimple_build_assign (gimple_call_lhs (stmt), use_retvar);
4740 gsi_replace (&stmt_gsi, stmt, false);
4741 maybe_clean_or_replace_eh_stmt (old_stmt, stmt);
4743 /* Copy bounds if we copy structure with bounds. */
4744 if (chkp_function_instrumented_p (id->dst_fn)
4745 && !BOUNDED_P (use_retvar)
4746 && chkp_type_has_pointer (TREE_TYPE (use_retvar)))
4747 id->assign_stmts.safe_push (stmt);
4749 else
4751 /* Handle the case of inlining a function with no return
4752 statement, which causes the return value to become undefined. */
4753 if (gimple_call_lhs (stmt)
4754 && TREE_CODE (gimple_call_lhs (stmt)) == SSA_NAME)
4756 tree name = gimple_call_lhs (stmt);
4757 tree var = SSA_NAME_VAR (name);
4758 tree def = ssa_default_def (cfun, var);
4760 if (def)
4762 /* If the variable is used undefined, make this name
4763 undefined via a move. */
4764 stmt = gimple_build_assign (gimple_call_lhs (stmt), def);
4765 gsi_replace (&stmt_gsi, stmt, true);
4767 else
4769 /* Otherwise make this variable undefined. */
4770 gsi_remove (&stmt_gsi, true);
4771 set_ssa_default_def (cfun, var, name);
4772 SSA_NAME_DEF_STMT (name) = gimple_build_nop ();
4775 else
4776 gsi_remove (&stmt_gsi, true);
4779 /* Put returned bounds into the correct place if required. */
4780 if (return_bounds)
4782 gimple old_stmt = SSA_NAME_DEF_STMT (return_bounds);
4783 gimple new_stmt = gimple_build_assign (return_bounds, id->retbnd);
4784 gimple_stmt_iterator bnd_gsi = gsi_for_stmt (old_stmt);
4785 unlink_stmt_vdef (old_stmt);
4786 gsi_replace (&bnd_gsi, new_stmt, false);
4787 maybe_clean_or_replace_eh_stmt (old_stmt, new_stmt);
4788 cgraph_update_edges_for_call_stmt (old_stmt,
4789 gimple_call_fndecl (old_stmt),
4790 new_stmt);
4793 if (purge_dead_abnormal_edges)
4795 gimple_purge_dead_eh_edges (return_block);
4796 gimple_purge_dead_abnormal_call_edges (return_block);
4799 /* If the value of the new expression is ignored, that's OK. We
4800 don't warn about this for CALL_EXPRs, so we shouldn't warn about
4801 the equivalent inlined version either. */
4802 if (is_gimple_assign (stmt))
4804 gcc_assert (gimple_assign_single_p (stmt)
4805 || CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt)));
4806 TREE_USED (gimple_assign_rhs1 (stmt)) = 1;
4809 /* Copy bounds for all generated assigns that need it. */
4810 for (i = 0; i < id->assign_stmts.length (); i++)
4811 chkp_copy_bounds_for_assign (id->assign_stmts[i], cg_edge);
4812 id->assign_stmts.release ();
4814 /* Output the inlining info for this abstract function, since it has been
4815 inlined. If we don't do this now, we can lose the information about the
4816 variables in the function when the blocks get blown away as soon as we
4817 remove the cgraph node. */
4818 if (gimple_block (stmt))
4819 (*debug_hooks->outlining_inline_function) (cg_edge->callee->decl);
4821 /* Update callgraph if needed. */
4822 cg_edge->callee->remove ();
4824 id->block = NULL_TREE;
4825 successfully_inlined = TRUE;
4827 egress:
4828 input_location = saved_location;
4829 return successfully_inlined;
4832 /* Expand call statements reachable from STMT_P.
4833 We can only have CALL_EXPRs as the "toplevel" tree code or nested
4834 in a MODIFY_EXPR. */
4836 static bool
4837 gimple_expand_calls_inline (basic_block bb, copy_body_data *id)
4839 gimple_stmt_iterator gsi;
4840 bool inlined = false;
4842 for (gsi = gsi_last_bb (bb); !gsi_end_p (gsi);)
4844 gimple stmt = gsi_stmt (gsi);
4845 gsi_prev (&gsi);
4847 if (is_gimple_call (stmt)
4848 && !gimple_call_internal_p (stmt))
4849 inlined |= expand_call_inline (bb, stmt, id);
4852 return inlined;
4856 /* Walk all basic blocks created after FIRST and try to fold every statement
4857 in the STATEMENTS pointer set. */
4859 static void
4860 fold_marked_statements (int first, hash_set<gimple> *statements)
4862 for (; first < n_basic_blocks_for_fn (cfun); first++)
4863 if (BASIC_BLOCK_FOR_FN (cfun, first))
4865 gimple_stmt_iterator gsi;
4867 for (gsi = gsi_start_bb (BASIC_BLOCK_FOR_FN (cfun, first));
4868 !gsi_end_p (gsi);
4869 gsi_next (&gsi))
4870 if (statements->contains (gsi_stmt (gsi)))
4872 gimple old_stmt = gsi_stmt (gsi);
4873 tree old_decl = is_gimple_call (old_stmt) ? gimple_call_fndecl (old_stmt) : 0;
4875 if (old_decl && DECL_BUILT_IN (old_decl))
4877 /* Folding builtins can create multiple instructions;
4878 we need to look at all of them. */
4879 gimple_stmt_iterator i2 = gsi;
4880 gsi_prev (&i2);
4881 if (fold_stmt (&gsi))
4883 gimple new_stmt;
4884 /* If a builtin at the end of a bb folded into nothing,
4885 the following loop won't work. */
4886 if (gsi_end_p (gsi))
4888 cgraph_update_edges_for_call_stmt (old_stmt,
4889 old_decl, NULL);
4890 break;
4892 if (gsi_end_p (i2))
4893 i2 = gsi_start_bb (BASIC_BLOCK_FOR_FN (cfun, first));
4894 else
4895 gsi_next (&i2);
4896 while (1)
4898 new_stmt = gsi_stmt (i2);
4899 update_stmt (new_stmt);
4900 cgraph_update_edges_for_call_stmt (old_stmt, old_decl,
4901 new_stmt);
4903 if (new_stmt == gsi_stmt (gsi))
4905 /* It is okay to check only the very last
4906 of these statements. If it is a throwing
4907 statement nothing will change. If it isn't,
4908 this can remove EH edges. It could only be
4909 wrong if some intermediate stmts threw
4910 but not the last one. That would mean
4911 we'd have to split the block, which we can't
4912 do here and would lose anyway. And as builtins
4913 probably never throw, this is all
4914 moot anyway. */
4915 if (maybe_clean_or_replace_eh_stmt (old_stmt,
4916 new_stmt))
4917 gimple_purge_dead_eh_edges (
4918 BASIC_BLOCK_FOR_FN (cfun, first));
4919 break;
4921 gsi_next (&i2);
4925 else if (fold_stmt (&gsi))
4927 /* Re-read the statement from GSI as fold_stmt() may
4928 have changed it. */
4929 gimple new_stmt = gsi_stmt (gsi);
4930 update_stmt (new_stmt);
4932 if (is_gimple_call (old_stmt)
4933 || is_gimple_call (new_stmt))
4934 cgraph_update_edges_for_call_stmt (old_stmt, old_decl,
4935 new_stmt);
4937 if (maybe_clean_or_replace_eh_stmt (old_stmt, new_stmt))
4938 gimple_purge_dead_eh_edges (BASIC_BLOCK_FOR_FN (cfun,
4939 first));
4945 /* Expand calls to inline functions in the body of FN. */
4947 unsigned int
4948 optimize_inline_calls (tree fn)
4950 copy_body_data id;
4951 basic_block bb;
4952 int last = n_basic_blocks_for_fn (cfun);
4953 bool inlined_p = false;
4955 /* Clear out ID. */
4956 memset (&id, 0, sizeof (id));
4958 id.src_node = id.dst_node = cgraph_node::get (fn);
4959 gcc_assert (id.dst_node->definition);
4960 id.dst_fn = fn;
4961 /* Or any functions that aren't finished yet. */
4962 if (current_function_decl)
4963 id.dst_fn = current_function_decl;
4965 id.copy_decl = copy_decl_maybe_to_var;
4966 id.transform_call_graph_edges = CB_CGE_DUPLICATE;
4967 id.transform_new_cfg = false;
4968 id.transform_return_to_modify = true;
4969 id.transform_parameter = true;
4970 id.transform_lang_insert_block = NULL;
4971 id.statements_to_fold = new hash_set<gimple>;
4973 push_gimplify_context ();
4975 /* We make no attempts to keep dominance info up-to-date. */
4976 free_dominance_info (CDI_DOMINATORS);
4977 free_dominance_info (CDI_POST_DOMINATORS);
4979 /* Register specific gimple functions. */
4980 gimple_register_cfg_hooks ();
4982 /* Reach the trees by walking over the CFG, and note the
4983 enclosing basic-blocks in the call edges. */
4984 /* We walk the blocks going forward, because inlined function bodies
4985 will split id->current_basic_block, and the new blocks will
4986 follow it; we'll trudge through them, processing their CALL_EXPRs
4987 along the way. */
4988 FOR_EACH_BB_FN (bb, cfun)
4989 inlined_p |= gimple_expand_calls_inline (bb, &id);
4991 pop_gimplify_context (NULL);
4993 #ifdef ENABLE_CHECKING
4995 struct cgraph_edge *e;
4997 id.dst_node->verify ();
4999 /* Double check that we inlined everything we are supposed to inline. */
5000 for (e = id.dst_node->callees; e; e = e->next_callee)
5001 gcc_assert (e->inline_failed);
5003 #endif
5005 /* Fold queued statements. */
5006 fold_marked_statements (last, id.statements_to_fold);
5007 delete id.statements_to_fold;
5009 gcc_assert (!id.debug_stmts.exists ());
5011 /* If we didn't inline into the function there is nothing to do. */
5012 if (!inlined_p)
5013 return 0;
5015 /* Renumber the lexical scoping (non-code) blocks consecutively. */
5016 number_blocks (fn);
5018 delete_unreachable_blocks_update_callgraph (&id);
5019 #ifdef ENABLE_CHECKING
5020 id.dst_node->verify ();
5021 #endif
5023 /* It would be nice to check SSA/CFG/statement consistency here, but it is
5024 not possible yet - the IPA passes might make various functions not
5025 throw and they don't care to proactively update local EH info. This is
5026 done later in the fixup_cfg pass, which also executes the verification. */
5027 return (TODO_update_ssa
5028 | TODO_cleanup_cfg
5029 | (gimple_in_ssa_p (cfun) ? TODO_remove_unused_locals : 0)
5030 | (gimple_in_ssa_p (cfun) ? TODO_update_address_taken : 0)
5031 | (profile_status_for_fn (cfun) != PROFILE_ABSENT
5032 ? TODO_rebuild_frequencies : 0));
5035 /* Passed to walk_tree. Copies the node pointed to, if appropriate. */
5037 tree
5038 copy_tree_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
5040 enum tree_code code = TREE_CODE (*tp);
5041 enum tree_code_class cl = TREE_CODE_CLASS (code);
5043 /* We make copies of most nodes. */
5044 if (IS_EXPR_CODE_CLASS (cl)
5045 || code == TREE_LIST
5046 || code == TREE_VEC
5047 || code == TYPE_DECL
5048 || code == OMP_CLAUSE)
5050 /* Because the chain gets clobbered when we make a copy, we save it
5051 here. */
5052 tree chain = NULL_TREE, new_tree;
5054 if (CODE_CONTAINS_STRUCT (code, TS_COMMON))
5055 chain = TREE_CHAIN (*tp);
5057 /* Copy the node. */
5058 new_tree = copy_node (*tp);
5060 *tp = new_tree;
5062 /* Now, restore the chain, if appropriate. That will cause
5063 walk_tree to walk into the chain as well. */
5064 if (code == PARM_DECL
5065 || code == TREE_LIST
5066 || code == OMP_CLAUSE)
5067 TREE_CHAIN (*tp) = chain;
5069 /* For now, we don't update BLOCKs when we make copies. So, we
5070 have to nullify all BIND_EXPRs. */
5071 if (TREE_CODE (*tp) == BIND_EXPR)
5072 BIND_EXPR_BLOCK (*tp) = NULL_TREE;
5074 else if (code == CONSTRUCTOR)
5076 /* CONSTRUCTOR nodes need special handling because
5077 we need to duplicate the vector of elements. */
5078 tree new_tree;
5080 new_tree = copy_node (*tp);
5081 CONSTRUCTOR_ELTS (new_tree) = vec_safe_copy (CONSTRUCTOR_ELTS (*tp));
5082 *tp = new_tree;
5084 else if (code == STATEMENT_LIST)
5085 /* We used to just abort on STATEMENT_LIST, but we can run into them
5086 with statement-expressions (c++/40975). */
5087 copy_statement_list (tp);
5088 else if (TREE_CODE_CLASS (code) == tcc_type)
5089 *walk_subtrees = 0;
5090 else if (TREE_CODE_CLASS (code) == tcc_declaration)
5091 *walk_subtrees = 0;
5092 else if (TREE_CODE_CLASS (code) == tcc_constant)
5093 *walk_subtrees = 0;
5094 return NULL_TREE;
5097 /* The SAVE_EXPR pointed to by TP is being copied. If ST contains
5098 information indicating to what new SAVE_EXPR this one should be mapped,
5099 use that one. Otherwise, create a new node and enter it in ST. FN is
5100 the function into which the copy will be placed. */
5102 static void
5103 remap_save_expr (tree *tp, hash_map<tree, tree> *st, int *walk_subtrees)
5105 tree *n;
5106 tree t;
5108 /* See if we already encountered this SAVE_EXPR. */
5109 n = st->get (*tp);
5111 /* If we didn't already remap this SAVE_EXPR, do so now. */
5112 if (!n)
5114 t = copy_node (*tp);
5116 /* Remember this SAVE_EXPR. */
5117 st->put (*tp, t);
5118 /* Make sure we don't remap an already-remapped SAVE_EXPR. */
5119 st->put (t, t);
5121 else
5123 /* We've already walked into this SAVE_EXPR; don't do it again. */
5124 *walk_subtrees = 0;
5125 t = *n;
5128 /* Replace this SAVE_EXPR with the copy. */
5129 *tp = t;
5132 /* Called via walk_gimple_seq. If *GSIP points to a GIMPLE_LABEL for a local
5133 label, copies the declaration and enters it in the decl map in DATA (which
5134 is really a 'copy_body_data *'). */
5136 static tree
5137 mark_local_labels_stmt (gimple_stmt_iterator *gsip,
5138 bool *handled_ops_p ATTRIBUTE_UNUSED,
5139 struct walk_stmt_info *wi)
5141 copy_body_data *id = (copy_body_data *) wi->info;
5142 glabel *stmt = dyn_cast <glabel *> (gsi_stmt (*gsip));
5144 if (stmt)
5146 tree decl = gimple_label_label (stmt);
5148 /* Copy the decl and remember the copy. */
5149 insert_decl_map (id, decl, id->copy_decl (decl, id));
5152 return NULL_TREE;
5156 /* Called via walk_gimple_seq by copy_gimple_seq_and_replace_local.
5157 Using the splay_tree pointed to by ST (which is really a `splay_tree'),
5158 remaps all local declarations to appropriate replacements in gimple
5159 operands. */
5161 static tree
5162 replace_locals_op (tree *tp, int *walk_subtrees, void *data)
5164 struct walk_stmt_info *wi = (struct walk_stmt_info*) data;
5165 copy_body_data *id = (copy_body_data *) wi->info;
5166 hash_map<tree, tree> *st = id->decl_map;
5167 tree *n;
5168 tree expr = *tp;
5170 /* Only a local declaration (variable or label). */
5171 if ((TREE_CODE (expr) == VAR_DECL
5172 && !TREE_STATIC (expr))
5173 || TREE_CODE (expr) == LABEL_DECL)
5175 /* Lookup the declaration. */
5176 n = st->get (expr);
5178 /* If it's there, remap it. */
5179 if (n)
5180 *tp = *n;
5181 *walk_subtrees = 0;
5183 else if (TREE_CODE (expr) == STATEMENT_LIST
5184 || TREE_CODE (expr) == BIND_EXPR
5185 || TREE_CODE (expr) == SAVE_EXPR)
5186 gcc_unreachable ();
5187 else if (TREE_CODE (expr) == TARGET_EXPR)
5189 /* Don't mess with a TARGET_EXPR that hasn't been expanded.
5190 It's OK for this to happen if it was part of a subtree that
5191 isn't immediately expanded, such as operand 2 of another
5192 TARGET_EXPR. */
5193 if (!TREE_OPERAND (expr, 1))
5195 TREE_OPERAND (expr, 1) = TREE_OPERAND (expr, 3);
5196 TREE_OPERAND (expr, 3) = NULL_TREE;
5200 /* Keep iterating. */
5201 return NULL_TREE;
5205 /* Called via walk_gimple_seq by copy_gimple_seq_and_replace_local.
5206 Using the splay_tree pointed to by ST (which is really a `splay_tree'),
5207 remaps all local declarations to appropriate replacements in gimple
5208 statements. */
5210 static tree
5211 replace_locals_stmt (gimple_stmt_iterator *gsip,
5212 bool *handled_ops_p ATTRIBUTE_UNUSED,
5213 struct walk_stmt_info *wi)
5215 copy_body_data *id = (copy_body_data *) wi->info;
5216 gimple gs = gsi_stmt (*gsip);
5218 if (gbind *stmt = dyn_cast <gbind *> (gs))
5220 tree block = gimple_bind_block (stmt);
5222 if (block)
5224 remap_block (&block, id);
5225 gimple_bind_set_block (stmt, block);
5228 /* This will remap a lot of the same decls again, but this should be
5229 harmless. */
5230 if (gimple_bind_vars (stmt))
5231 gimple_bind_set_vars (stmt, remap_decls (gimple_bind_vars (stmt),
5232 NULL, id));
5235 /* Keep iterating. */
5236 return NULL_TREE;
5240 /* Copies everything in SEQ and replaces variables and labels local to
5241 current_function_decl. */
5243 gimple_seq
5244 copy_gimple_seq_and_replace_locals (gimple_seq seq)
5246 copy_body_data id;
5247 struct walk_stmt_info wi;
5248 gimple_seq copy;
5250 /* There's nothing to do for NULL_TREE. */
5251 if (seq == NULL)
5252 return seq;
5254 /* Set up ID. */
5255 memset (&id, 0, sizeof (id));
5256 id.src_fn = current_function_decl;
5257 id.dst_fn = current_function_decl;
5258 id.decl_map = new hash_map<tree, tree>;
5259 id.debug_map = NULL;
5261 id.copy_decl = copy_decl_no_change;
5262 id.transform_call_graph_edges = CB_CGE_DUPLICATE;
5263 id.transform_new_cfg = false;
5264 id.transform_return_to_modify = false;
5265 id.transform_parameter = false;
5266 id.transform_lang_insert_block = NULL;
5268 /* Walk the tree once to find local labels. */
5269 memset (&wi, 0, sizeof (wi));
5270 hash_set<tree> visited;
5271 wi.info = &id;
5272 wi.pset = &visited;
5273 walk_gimple_seq (seq, mark_local_labels_stmt, NULL, &wi);
5275 copy = gimple_seq_copy (seq);
5277 /* Walk the copy, remapping decls. */
5278 memset (&wi, 0, sizeof (wi));
5279 wi.info = &id;
5280 walk_gimple_seq (copy, replace_locals_stmt, replace_locals_op, &wi);
5282 /* Clean up. */
5283 delete id.decl_map;
5284 if (id.debug_map)
5285 delete id.debug_map;
5286 if (id.dependence_map)
5288 delete id.dependence_map;
5289 id.dependence_map = NULL;
5292 return copy;
5296 /* Allow someone to determine if SEARCH is a child of TOP from gdb. */
5298 static tree
5299 debug_find_tree_1 (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED, void *data)
5301 if (*tp == data)
5302 return (tree) data;
5303 else
5304 return NULL;
5307 DEBUG_FUNCTION bool
5308 debug_find_tree (tree top, tree search)
5310 return walk_tree_without_duplicates (&top, debug_find_tree_1, search) != 0;
5314 /* Declare the variables created by the inliner. Add all the variables in
5315 VARS to BLOCK and register them as local decls of the function. */
5317 static void
5318 declare_inline_vars (tree block, tree vars)
5320 tree t;
5321 for (t = vars; t; t = DECL_CHAIN (t))
5323 DECL_SEEN_IN_BIND_EXPR_P (t) = 1;
5324 gcc_assert (!TREE_STATIC (t) && !TREE_ASM_WRITTEN (t));
5325 add_local_decl (cfun, t);
5328 if (block)
5329 BLOCK_VARS (block) = chainon (BLOCK_VARS (block), vars);
5332 /* Finish setting up COPY, a duplicate of DECL. The DECL originally lived
5333 in the source function (ID->src_fn), but COPY will now live in the
5334 destination function (ID->dst_fn). */
5336 static tree
5337 copy_decl_for_dup_finish (copy_body_data *id, tree decl, tree copy)
5339 /* Don't generate debug information for the copy if we wouldn't have
5340 generated it for the original either. */
5341 DECL_ARTIFICIAL (copy) = DECL_ARTIFICIAL (decl);
5342 DECL_IGNORED_P (copy) = DECL_IGNORED_P (decl);
5344 /* Set the DECL_ABSTRACT_ORIGIN so the debugging routines know what
5345 declaration inspired this copy. */
5346 DECL_ABSTRACT_ORIGIN (copy) = DECL_ORIGIN (decl);
5348 /* The new variable/label has no RTL, yet. */
5349 if (CODE_CONTAINS_STRUCT (TREE_CODE (copy), TS_DECL_WRTL)
5350 && !TREE_STATIC (copy) && !DECL_EXTERNAL (copy))
5351 SET_DECL_RTL (copy, 0);
5353 /* These args would always appear unused, if not for this. */
5354 TREE_USED (copy) = 1;
5356 /* Set the context for the new declaration. */
5357 if (!DECL_CONTEXT (decl))
5358 /* Globals stay global. */
5360 else if (DECL_CONTEXT (decl) != id->src_fn)
5361 /* Things that weren't in the scope of the function we're inlining
5362 from aren't in the scope we're inlining to, either. */
5364 else if (TREE_STATIC (decl))
5365 /* Function-scoped static variables should stay in the original
5366 function. */
5368 else
5369 /* Ordinary automatic local variables are now in the scope of the
5370 new function. */
5371 DECL_CONTEXT (copy) = id->dst_fn;
5373 return copy;
5376 static tree
5377 copy_decl_to_var (tree decl, copy_body_data *id)
5379 tree copy, type;
5381 gcc_assert (TREE_CODE (decl) == PARM_DECL
5382 || TREE_CODE (decl) == RESULT_DECL);
5384 type = TREE_TYPE (decl);
5386 copy = build_decl (DECL_SOURCE_LOCATION (id->dst_fn),
5387 VAR_DECL, DECL_NAME (decl), type);
5388 if (DECL_PT_UID_SET_P (decl))
5389 SET_DECL_PT_UID (copy, DECL_PT_UID (decl));
5390 TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (decl);
5391 TREE_READONLY (copy) = TREE_READONLY (decl);
5392 TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (decl);
5393 DECL_GIMPLE_REG_P (copy) = DECL_GIMPLE_REG_P (decl);
5395 return copy_decl_for_dup_finish (id, decl, copy);
5398 /* Like copy_decl_to_var, but create a return slot object instead of a
5399 pointer variable for return by invisible reference. */
5401 static tree
5402 copy_result_decl_to_var (tree decl, copy_body_data *id)
5404 tree copy, type;
5406 gcc_assert (TREE_CODE (decl) == PARM_DECL
5407 || TREE_CODE (decl) == RESULT_DECL);
5409 type = TREE_TYPE (decl);
5410 if (DECL_BY_REFERENCE (decl))
5411 type = TREE_TYPE (type);
5413 copy = build_decl (DECL_SOURCE_LOCATION (id->dst_fn),
5414 VAR_DECL, DECL_NAME (decl), type);
5415 if (DECL_PT_UID_SET_P (decl))
5416 SET_DECL_PT_UID (copy, DECL_PT_UID (decl));
5417 TREE_READONLY (copy) = TREE_READONLY (decl);
5418 TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (decl);
5419 if (!DECL_BY_REFERENCE (decl))
5421 TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (decl);
5422 DECL_GIMPLE_REG_P (copy) = DECL_GIMPLE_REG_P (decl);
5425 return copy_decl_for_dup_finish (id, decl, copy);
5428 tree
5429 copy_decl_no_change (tree decl, copy_body_data *id)
5431 tree copy;
5433 copy = copy_node (decl);
5435 /* The COPY is not abstract; it will be generated in DST_FN. */
5436 DECL_ABSTRACT_P (copy) = false;
5437 lang_hooks.dup_lang_specific_decl (copy);
5439 /* TREE_ADDRESSABLE isn't used to indicate that a label's address has
5440 been taken; it's for internal bookkeeping in expand_goto_internal. */
5441 if (TREE_CODE (copy) == LABEL_DECL)
5443 TREE_ADDRESSABLE (copy) = 0;
5444 LABEL_DECL_UID (copy) = -1;
5447 return copy_decl_for_dup_finish (id, decl, copy);
5450 static tree
5451 copy_decl_maybe_to_var (tree decl, copy_body_data *id)
5453 if (TREE_CODE (decl) == PARM_DECL || TREE_CODE (decl) == RESULT_DECL)
5454 return copy_decl_to_var (decl, id);
5455 else
5456 return copy_decl_no_change (decl, id);
5459 /* Return a copy of the function's argument tree. */
5460 static tree
5461 copy_arguments_for_versioning (tree orig_parm, copy_body_data * id,
5462 bitmap args_to_skip, tree *vars)
5464 tree arg, *parg;
5465 tree new_parm = NULL;
5466 int i = 0;
5468 parg = &new_parm;
5470 for (arg = orig_parm; arg; arg = DECL_CHAIN (arg), i++)
5471 if (!args_to_skip || !bitmap_bit_p (args_to_skip, i))
5473 tree new_tree = remap_decl (arg, id);
5474 if (TREE_CODE (new_tree) != PARM_DECL)
5475 new_tree = id->copy_decl (arg, id);
5476 lang_hooks.dup_lang_specific_decl (new_tree);
5477 *parg = new_tree;
5478 parg = &DECL_CHAIN (new_tree);
5480 else if (!id->decl_map->get (arg))
5482 /* Make an equivalent VAR_DECL. If the argument was used
5483 as a temporary variable later in the function, the uses will be
5484 replaced by the local variable. */
5485 tree var = copy_decl_to_var (arg, id);
5486 insert_decl_map (id, arg, var);
5487 /* Declare this new variable. */
5488 DECL_CHAIN (var) = *vars;
5489 *vars = var;
5491 return new_parm;
5494 /* Return a copy of the function's static chain. */
5495 static tree
5496 copy_static_chain (tree static_chain, copy_body_data * id)
5498 tree *chain_copy, *pvar;
5500 chain_copy = &static_chain;
5501 for (pvar = chain_copy; *pvar; pvar = &DECL_CHAIN (*pvar))
5503 tree new_tree = remap_decl (*pvar, id);
5504 lang_hooks.dup_lang_specific_decl (new_tree);
5505 DECL_CHAIN (new_tree) = DECL_CHAIN (*pvar);
5506 *pvar = new_tree;
5508 return static_chain;
5511 /* Return true if the function is allowed to be versioned.
5512 This is a guard for the versioning functionality. */
5514 bool
5515 tree_versionable_function_p (tree fndecl)
5517 return (!lookup_attribute ("noclone", DECL_ATTRIBUTES (fndecl))
5518 && copy_forbidden (DECL_STRUCT_FUNCTION (fndecl), fndecl) == NULL);
5521 /* Delete all unreachable basic blocks and update callgraph.
5522 Doing so is somewhat nontrivial because we need to update all clones and
5523 remove inline functions that become unreachable. */
5525 static bool
5526 delete_unreachable_blocks_update_callgraph (copy_body_data *id)
5528 bool changed = false;
5529 basic_block b, next_bb;
5531 find_unreachable_blocks ();
5533 /* Delete all unreachable basic blocks. */
5535 for (b = ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb; b
5536 != EXIT_BLOCK_PTR_FOR_FN (cfun); b = next_bb)
5538 next_bb = b->next_bb;
5540 if (!(b->flags & BB_REACHABLE))
5542 gimple_stmt_iterator bsi;
5544 for (bsi = gsi_start_bb (b); !gsi_end_p (bsi); gsi_next (&bsi))
5546 struct cgraph_edge *e;
5547 struct cgraph_node *node;
5549 id->dst_node->remove_stmt_references (gsi_stmt (bsi));
5551 if (gimple_code (gsi_stmt (bsi)) == GIMPLE_CALL
5552 &&(e = id->dst_node->get_edge (gsi_stmt (bsi))) != NULL)
5554 if (!e->inline_failed)
5555 e->callee->remove_symbol_and_inline_clones (id->dst_node);
5556 else
5557 e->remove ();
5559 if (id->transform_call_graph_edges == CB_CGE_MOVE_CLONES
5560 && id->dst_node->clones)
5561 for (node = id->dst_node->clones; node != id->dst_node;)
5563 node->remove_stmt_references (gsi_stmt (bsi));
5564 if (gimple_code (gsi_stmt (bsi)) == GIMPLE_CALL
5565 && (e = node->get_edge (gsi_stmt (bsi))) != NULL)
5567 if (!e->inline_failed)
5568 e->callee->remove_symbol_and_inline_clones (id->dst_node);
5569 else
5570 e->remove ();
5573 if (node->clones)
5574 node = node->clones;
5575 else if (node->next_sibling_clone)
5576 node = node->next_sibling_clone;
5577 else
5579 while (node != id->dst_node && !node->next_sibling_clone)
5580 node = node->clone_of;
5581 if (node != id->dst_node)
5582 node = node->next_sibling_clone;
5586 delete_basic_block (b);
5587 changed = true;
5591 return changed;
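
/* Illustrative sketch (editorial addition, not part of the original GCC
   source): both the function above and update_clone_info below walk the
   clone tree of ID->dst_node iteratively in pre-order -- descend into
   NODE->clones first, then move to NODE->next_sibling_clone, and climb back
   up via NODE->clone_of when a subtree is exhausted.  The hypothetical
   helper below isolates that traversal step; it returns ROOT when the walk
   is complete.  */

static struct cgraph_node *
example_next_clone (struct cgraph_node *node, struct cgraph_node *root)
{
  if (node->clones)
    return node->clones;
  if (node->next_sibling_clone)
    return node->next_sibling_clone;
  while (node != root && !node->next_sibling_clone)
    node = node->clone_of;
  return node != root ? node->next_sibling_clone : root;
}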
/* Update clone info after duplication.  */

static void
update_clone_info (copy_body_data * id)
{
  struct cgraph_node *node;
  if (!id->dst_node->clones)
    return;
  for (node = id->dst_node->clones; node != id->dst_node;)
    {
      /* First update replace maps to match the new body.  */
      if (node->clone.tree_map)
        {
          unsigned int i;
          for (i = 0; i < vec_safe_length (node->clone.tree_map); i++)
            {
              struct ipa_replace_map *replace_info;
              replace_info = (*node->clone.tree_map)[i];
              walk_tree (&replace_info->old_tree, copy_tree_body_r, id, NULL);
              walk_tree (&replace_info->new_tree, copy_tree_body_r, id, NULL);
            }
        }
      if (node->clones)
        node = node->clones;
      else if (node->next_sibling_clone)
        node = node->next_sibling_clone;
      else
        {
          while (node != id->dst_node && !node->next_sibling_clone)
            node = node->clone_of;
          if (node != id->dst_node)
            node = node->next_sibling_clone;
        }
    }
}
/* Create a copy of a function's tree.
   OLD_DECL and NEW_DECL are FUNCTION_DECL tree nodes
   of the original function and the new copied function
   respectively.  In case we want to replace a DECL
   tree with another tree while duplicating the function's
   body, TREE_MAP represents the mapping between these
   trees.  If UPDATE_CLONES is set, the call_stmt fields
   of edges of clones of the function will be updated.

   If non-NULL, ARGS_TO_SKIP determines which function parameters to remove
   from the new version.
   If SKIP_RETURN is true, the new version will return void.
   If non-NULL, BLOCKS_TO_COPY determines which basic blocks to copy.
   If non-NULL, NEW_ENTRY determines the new entry BB of the clone.  */

void
tree_function_versioning (tree old_decl, tree new_decl,
                          vec<ipa_replace_map *, va_gc> *tree_map,
                          bool update_clones, bitmap args_to_skip,
                          bool skip_return, bitmap blocks_to_copy,
                          basic_block new_entry)
{
  struct cgraph_node *old_version_node;
  struct cgraph_node *new_version_node;
  copy_body_data id;
  tree p;
  unsigned i;
  struct ipa_replace_map *replace_info;
  basic_block old_entry_block, bb;
  auto_vec<gimple, 10> init_stmts;
  tree vars = NULL_TREE;

  gcc_assert (TREE_CODE (old_decl) == FUNCTION_DECL
              && TREE_CODE (new_decl) == FUNCTION_DECL);
  DECL_POSSIBLY_INLINED (old_decl) = 1;

  old_version_node = cgraph_node::get (old_decl);
  gcc_checking_assert (old_version_node);
  new_version_node = cgraph_node::get (new_decl);
  gcc_checking_assert (new_version_node);

  /* Copy over debug args.  */
  if (DECL_HAS_DEBUG_ARGS_P (old_decl))
    {
      vec<tree, va_gc> **new_debug_args, **old_debug_args;
      gcc_checking_assert (decl_debug_args_lookup (new_decl) == NULL);
      DECL_HAS_DEBUG_ARGS_P (new_decl) = 0;
      old_debug_args = decl_debug_args_lookup (old_decl);
      if (old_debug_args)
        {
          new_debug_args = decl_debug_args_insert (new_decl);
          *new_debug_args = vec_safe_copy (*old_debug_args);
        }
    }

  /* Output the inlining info for this abstract function, since it has been
     inlined.  If we don't do this now, we can lose the information about the
     variables in the function when the blocks get blown away as soon as we
     remove the cgraph node.  */
  (*debug_hooks->outlining_inline_function) (old_decl);

  DECL_ARTIFICIAL (new_decl) = 1;
  DECL_ABSTRACT_ORIGIN (new_decl) = DECL_ORIGIN (old_decl);
  if (DECL_ORIGIN (old_decl) == old_decl)
    old_version_node->used_as_abstract_origin = true;
  DECL_FUNCTION_PERSONALITY (new_decl) = DECL_FUNCTION_PERSONALITY (old_decl);

  /* Prepare the data structures for the tree copy.  */
  memset (&id, 0, sizeof (id));

  /* Generate a new name for the new version.  */
  id.statements_to_fold = new hash_set<gimple>;

  id.decl_map = new hash_map<tree, tree>;
  id.debug_map = NULL;
  id.src_fn = old_decl;
  id.dst_fn = new_decl;
  id.src_node = old_version_node;
  id.dst_node = new_version_node;
  id.src_cfun = DECL_STRUCT_FUNCTION (old_decl);
  id.blocks_to_copy = blocks_to_copy;

  id.copy_decl = copy_decl_no_change;
  id.transform_call_graph_edges
    = update_clones ? CB_CGE_MOVE_CLONES : CB_CGE_MOVE;
  id.transform_new_cfg = true;
  id.transform_return_to_modify = false;
  id.transform_parameter = false;
  id.transform_lang_insert_block = NULL;

  old_entry_block = ENTRY_BLOCK_PTR_FOR_FN
    (DECL_STRUCT_FUNCTION (old_decl));
  DECL_RESULT (new_decl) = DECL_RESULT (old_decl);
  DECL_ARGUMENTS (new_decl) = DECL_ARGUMENTS (old_decl);
  initialize_cfun (new_decl, old_decl,
                   old_entry_block->count);
  if (DECL_STRUCT_FUNCTION (new_decl)->gimple_df)
    DECL_STRUCT_FUNCTION (new_decl)->gimple_df->ipa_pta
      = id.src_cfun->gimple_df->ipa_pta;

  /* Copy the function's static chain.  */
  p = DECL_STRUCT_FUNCTION (old_decl)->static_chain_decl;
  if (p)
    DECL_STRUCT_FUNCTION (new_decl)->static_chain_decl =
      copy_static_chain (DECL_STRUCT_FUNCTION (old_decl)->static_chain_decl,
                         &id);

  /* If there's a tree_map, prepare for substitution.  */
  if (tree_map)
    for (i = 0; i < tree_map->length (); i++)
      {
        gimple init;
        replace_info = (*tree_map)[i];
        if (replace_info->replace_p)
          {
            if (!replace_info->old_tree)
              {
                int i = replace_info->parm_num;
                tree parm;
                tree req_type;

                for (parm = DECL_ARGUMENTS (old_decl); i; parm = DECL_CHAIN (parm))
                  i --;
                replace_info->old_tree = parm;
                req_type = TREE_TYPE (parm);
                if (!useless_type_conversion_p (req_type, TREE_TYPE (replace_info->new_tree)))
                  {
                    if (fold_convertible_p (req_type, replace_info->new_tree))
                      replace_info->new_tree = fold_build1 (NOP_EXPR, req_type, replace_info->new_tree);
                    else if (TYPE_SIZE (req_type) == TYPE_SIZE (TREE_TYPE (replace_info->new_tree)))
                      replace_info->new_tree = fold_build1 (VIEW_CONVERT_EXPR, req_type, replace_info->new_tree);
                    else
                      {
                        if (dump_file)
                          {
                            fprintf (dump_file, " const ");
                            print_generic_expr (dump_file, replace_info->new_tree, 0);
                            fprintf (dump_file, " can't be converted to param ");
                            print_generic_expr (dump_file, parm, 0);
                            fprintf (dump_file, "\n");
                          }
                        replace_info->old_tree = NULL;
                      }
                  }
              }
            else
              gcc_assert (TREE_CODE (replace_info->old_tree) == PARM_DECL);
            if (replace_info->old_tree)
              {
                init = setup_one_parameter (&id, replace_info->old_tree,
                                            replace_info->new_tree, id.src_fn,
                                            NULL,
                                            &vars);
                if (init)
                  init_stmts.safe_push (init);
              }
          }
      }
  /* Copy the function's arguments.  */
  if (DECL_ARGUMENTS (old_decl) != NULL_TREE)
    DECL_ARGUMENTS (new_decl) =
      copy_arguments_for_versioning (DECL_ARGUMENTS (old_decl), &id,
                                     args_to_skip, &vars);

  DECL_INITIAL (new_decl) = remap_blocks (DECL_INITIAL (id.src_fn), &id);
  BLOCK_SUPERCONTEXT (DECL_INITIAL (new_decl)) = new_decl;

  declare_inline_vars (DECL_INITIAL (new_decl), vars);

  if (!vec_safe_is_empty (DECL_STRUCT_FUNCTION (old_decl)->local_decls))
    /* Add local vars.  */
    add_local_variables (DECL_STRUCT_FUNCTION (old_decl), cfun, &id);

  if (DECL_RESULT (old_decl) == NULL_TREE)
    ;
  else if (skip_return && !VOID_TYPE_P (TREE_TYPE (DECL_RESULT (old_decl))))
    {
      DECL_RESULT (new_decl)
        = build_decl (DECL_SOURCE_LOCATION (DECL_RESULT (old_decl)),
                      RESULT_DECL, NULL_TREE, void_type_node);
      DECL_CONTEXT (DECL_RESULT (new_decl)) = new_decl;
      cfun->returns_struct = 0;
      cfun->returns_pcc_struct = 0;
    }
  else
    {
      tree old_name;
      DECL_RESULT (new_decl) = remap_decl (DECL_RESULT (old_decl), &id);
      lang_hooks.dup_lang_specific_decl (DECL_RESULT (new_decl));
      if (gimple_in_ssa_p (id.src_cfun)
          && DECL_BY_REFERENCE (DECL_RESULT (old_decl))
          && (old_name = ssa_default_def (id.src_cfun, DECL_RESULT (old_decl))))
        {
          tree new_name = make_ssa_name (DECL_RESULT (new_decl));
          insert_decl_map (&id, old_name, new_name);
          SSA_NAME_DEF_STMT (new_name) = gimple_build_nop ();
          set_ssa_default_def (cfun, DECL_RESULT (new_decl), new_name);
        }
    }

  /* Set up the destination function's loop tree.  */
  if (loops_for_fn (DECL_STRUCT_FUNCTION (old_decl)) != NULL)
    {
      cfun->curr_properties &= ~PROP_loops;
      loop_optimizer_init (AVOID_CFG_MODIFICATIONS);
      cfun->curr_properties |= PROP_loops;
    }

  /* Copy the function's body.  */
  copy_body (&id, old_entry_block->count, REG_BR_PROB_BASE,
             ENTRY_BLOCK_PTR_FOR_FN (cfun), EXIT_BLOCK_PTR_FOR_FN (cfun),
             new_entry);

  /* Renumber the lexical scoping (non-code) blocks consecutively.  */
  number_blocks (new_decl);

  /* We want to create the BB unconditionally, so that the addition of
     debug stmts doesn't affect BB count, which may in the end cause
     codegen differences.  */
  bb = split_edge (single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
  while (init_stmts.length ())
    insert_init_stmt (&id, bb, init_stmts.pop ());
  update_clone_info (&id);

  /* Remap the nonlocal_goto_save_area, if any.  */
  if (cfun->nonlocal_goto_save_area)
    {
      struct walk_stmt_info wi;

      memset (&wi, 0, sizeof (wi));
      wi.info = &id;
      walk_tree (&cfun->nonlocal_goto_save_area, remap_gimple_op_r, &wi, NULL);
    }

  /* Clean up.  */
  delete id.decl_map;
  if (id.debug_map)
    delete id.debug_map;
  free_dominance_info (CDI_DOMINATORS);
  free_dominance_info (CDI_POST_DOMINATORS);

  fold_marked_statements (0, id.statements_to_fold);
  delete id.statements_to_fold;
  fold_cond_expr_cond ();
  delete_unreachable_blocks_update_callgraph (&id);
  if (id.dst_node->definition)
    cgraph_edge::rebuild_references ();
  if (loops_state_satisfies_p (LOOPS_NEED_FIXUP))
    {
      calculate_dominance_info (CDI_DOMINATORS);
      fix_loop_structure (NULL);
    }
  update_ssa (TODO_update_ssa);

  /* After partial cloning we need to rescale frequencies, so they are
     within proper range in the cloned function.  */
  if (new_entry)
    {
      struct cgraph_edge *e;
      rebuild_frequencies ();

      new_version_node->count = ENTRY_BLOCK_PTR_FOR_FN (cfun)->count;
      for (e = new_version_node->callees; e; e = e->next_callee)
        {
          basic_block bb = gimple_bb (e->call_stmt);
          e->frequency = compute_call_stmt_bb_frequency (current_function_decl,
                                                         bb);
          e->count = bb->count;
        }
      for (e = new_version_node->indirect_calls; e; e = e->next_callee)
        {
          basic_block bb = gimple_bb (e->call_stmt);
          e->frequency = compute_call_stmt_bb_frequency (current_function_decl,
                                                         bb);
          e->count = bb->count;
        }
    }

  free_dominance_info (CDI_DOMINATORS);
  free_dominance_info (CDI_POST_DOMINATORS);

  gcc_assert (!id.debug_stmts.exists ());
  pop_cfun ();
  return;
}
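
/* Illustrative sketch (editorial addition, not part of the original GCC
   source): a typical caller creates the new FUNCTION_DECL and its cgraph
   node first and then lets tree_function_versioning duplicate the body.
   The helper below is hypothetical; the NULL/false arguments mean "no
   parameter substitutions, do not update clones, keep all parameters, keep
   the return value, copy every block, keep the original entry block".  */

static void
example_version_function (tree old_decl, tree new_decl)
{
  /* OLD_DECL and NEW_DECL are assumed to already have cgraph nodes.  */
  gcc_assert (tree_versionable_function_p (old_decl));
  tree_function_versioning (old_decl, new_decl,
                            NULL /* tree_map */,
                            false /* update_clones */,
                            NULL /* args_to_skip */,
                            false /* skip_return */,
                            NULL /* blocks_to_copy */,
                            NULL /* new_entry */);
}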

/* EXP is a CALL_EXPR present in a GENERIC expression tree.  Try to integrate
   the callee and return the inlined body on success.  */

tree
maybe_inline_call_in_expr (tree exp)
{
  tree fn = get_callee_fndecl (exp);

  /* We can only try to inline "const" functions.  */
  if (fn && TREE_READONLY (fn) && DECL_SAVED_TREE (fn))
    {
      call_expr_arg_iterator iter;
      copy_body_data id;
      tree param, arg, t;
      hash_map<tree, tree> decl_map;

      /* Remap the parameters.  */
      for (param = DECL_ARGUMENTS (fn), arg = first_call_expr_arg (exp, &iter);
           param;
           param = DECL_CHAIN (param), arg = next_call_expr_arg (&iter))
        decl_map.put (param, arg);

      memset (&id, 0, sizeof (id));
      id.src_fn = fn;
      id.dst_fn = current_function_decl;
      id.src_cfun = DECL_STRUCT_FUNCTION (fn);
      id.decl_map = &decl_map;

      id.copy_decl = copy_decl_no_change;
      id.transform_call_graph_edges = CB_CGE_DUPLICATE;
      id.transform_new_cfg = false;
      id.transform_return_to_modify = true;
      id.transform_parameter = true;
      id.transform_lang_insert_block = NULL;

      /* Make sure not to unshare trees behind the front-end's back
         since front-end specific mechanisms may rely on sharing.  */
      id.regimplify = false;
      id.do_not_unshare = true;

      /* We're not inside any EH region.  */
      id.eh_lp_nr = 0;

      t = copy_tree_body (&id);

      /* We can only return something suitable for use in a GENERIC
         expression tree.  */
      if (TREE_CODE (t) == MODIFY_EXPR)
        return TREE_OPERAND (t, 1);
    }

  return NULL_TREE;
}
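
/* Illustrative sketch (editorial addition, not part of the original GCC
   source): folding-style callers can try to replace a GENERIC CALL_EXPR to
   a "const" function with its inlined body and fall back to the original
   expression when inlining is not possible.  The helper below is
   hypothetical and only demonstrates the calling convention.  */

static tree
example_fold_const_call (tree call_expr)
{
  tree inlined = maybe_inline_call_in_expr (call_expr);
  return inlined ? inlined : call_expr;
}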

/* Duplicate a type, fields and all.  */

tree
build_duplicate_type (tree type)
{
  struct copy_body_data id;

  memset (&id, 0, sizeof (id));
  id.src_fn = current_function_decl;
  id.dst_fn = current_function_decl;
  id.src_cfun = cfun;
  id.decl_map = new hash_map<tree, tree>;
  id.debug_map = NULL;
  id.copy_decl = copy_decl_no_change;

  type = remap_type_1 (type, &id);

  delete id.decl_map;
  if (id.debug_map)
    delete id.debug_map;

  TYPE_CANONICAL (type) = type;

  return type;
}
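
/* Illustrative sketch (editorial addition, not part of the original GCC
   source): a hypothetical caller that needs a private variant of TYPE it
   can modify without affecting other users would take a deep copy first.
   The helper below exists only to show the call.  */

static tree
example_private_copy_of_type (tree type)
{
  /* The duplicate is made its own canonical type above, so later edits to
     it do not alias the original TYPE.  */
  return build_duplicate_type (type);
}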

/* Unshare the entire DECL_SAVED_TREE of FN and return the remapped
   parameters and RESULT_DECL in PARMS and RESULT.  Used by C++ constexpr
   evaluation.  */

tree
copy_fn (tree fn, tree& parms, tree& result)
{
  copy_body_data id;
  tree param;
  hash_map<tree, tree> decl_map;

  tree *p = &parms;
  *p = NULL_TREE;

  memset (&id, 0, sizeof (id));
  id.src_fn = fn;
  id.dst_fn = current_function_decl;
  id.src_cfun = DECL_STRUCT_FUNCTION (fn);
  id.decl_map = &decl_map;

  id.copy_decl = copy_decl_no_change;
  id.transform_call_graph_edges = CB_CGE_DUPLICATE;
  id.transform_new_cfg = false;
  id.transform_return_to_modify = false;
  id.transform_parameter = true;
  id.transform_lang_insert_block = NULL;

  /* Make sure not to unshare trees behind the front-end's back
     since front-end specific mechanisms may rely on sharing.  */
  id.regimplify = false;
  id.do_not_unshare = true;

  /* We're not inside any EH region.  */
  id.eh_lp_nr = 0;

  /* Remap the parameters and result and return them to the caller.  */
  for (param = DECL_ARGUMENTS (fn);
       param;
       param = DECL_CHAIN (param))
    {
      *p = remap_decl (param, &id);
      p = &DECL_CHAIN (*p);
    }

  if (DECL_RESULT (fn))
    result = remap_decl (DECL_RESULT (fn), &id);
  else
    result = NULL_TREE;

  return copy_tree_body (&id);
}
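
/* Illustrative sketch (editorial addition, not part of the original GCC
   source): the C++ front end's constexpr evaluator uses copy_fn to obtain a
   private, unshared body of FN together with fresh parameter and result
   decls that it can bind to argument values.  The hypothetical helper below
   only demonstrates the out-parameter calling convention.  */

static tree
example_unshared_body (tree fn)
{
  tree parms = NULL_TREE;
  tree result = NULL_TREE;
  tree body = copy_fn (fn, parms, result);

  /* PARMS now chains the remapped PARM_DECLs, RESULT holds the remapped
     RESULT_DECL (or NULL_TREE), and BODY is the unshared DECL_SAVED_TREE.  */
  return body;
}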