gcc/tree-inline.c
1 /* Tree inlining.
2 Copyright (C) 2001-2015 Free Software Foundation, Inc.
3 Contributed by Alexandre Oliva <aoliva@redhat.com>
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
10 any later version.
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "tm.h"
25 #include "diagnostic-core.h"
26 #include "alias.h"
27 #include "symtab.h"
28 #include "tree.h"
29 #include "fold-const.h"
30 #include "stor-layout.h"
31 #include "calls.h"
32 #include "tree-inline.h"
33 #include "flags.h"
34 #include "params.h"
35 #include "insn-config.h"
36 #include "langhooks.h"
37 #include "predict.h"
38 #include "hard-reg-set.h"
39 #include "function.h"
40 #include "dominance.h"
41 #include "cfg.h"
42 #include "cfganal.h"
43 #include "basic-block.h"
44 #include "tree-iterator.h"
45 #include "intl.h"
46 #include "tree-ssa-alias.h"
47 #include "internal-fn.h"
48 #include "gimple-fold.h"
49 #include "tree-eh.h"
50 #include "gimple-expr.h"
51 #include "gimple.h"
52 #include "gimplify.h"
53 #include "gimple-iterator.h"
54 #include "gimplify-me.h"
55 #include "gimple-walk.h"
56 #include "gimple-ssa.h"
57 #include "tree-cfg.h"
58 #include "tree-phinodes.h"
59 #include "ssa-iterators.h"
60 #include "stringpool.h"
61 #include "tree-ssanames.h"
62 #include "tree-into-ssa.h"
63 #include "rtl.h"
64 #include "expmed.h"
65 #include "dojump.h"
66 #include "explow.h"
67 #include "emit-rtl.h"
68 #include "varasm.h"
69 #include "stmt.h"
70 #include "expr.h"
71 #include "tree-dfa.h"
72 #include "tree-ssa.h"
73 #include "tree-pretty-print.h"
74 #include "except.h"
75 #include "debug.h"
76 #include "cgraph.h"
77 #include "alloc-pool.h"
78 #include "symbol-summary.h"
79 #include "ipa-prop.h"
80 #include "value-prof.h"
81 #include "tree-pass.h"
82 #include "target.h"
83 #include "cfgloop.h"
84 #include "builtins.h"
85 #include "tree-chkp.h"
87 #include "rtl.h" /* FIXME: For asm_str_count. */
89 /* I'm not really happy about this, but we need to handle gimple and
90 non-gimple trees. */
92 /* Inlining, Cloning, Versioning, Parallelization
94 Inlining: a function body is duplicated, but the PARM_DECLs are
95 remapped into VAR_DECLs, and non-void RETURN_EXPRs become
96 MODIFY_EXPRs that store to a dedicated returned-value variable.
97 The duplicated eh_region info of the copy will later be appended
98 to the info for the caller; the eh_region info in copied throwing
99 statements and RESX statements are adjusted accordingly.
101 Cloning: (only in C++) We have one body for a con/de/structor, and
102 multiple function decls, each with a unique parameter list.
103 Duplicate the body, using the given splay tree; some parameters
104 will become constants (like 0 or 1).
106    Versioning: a function body is duplicated and the result is a new
107    function, rather than being copied into the blocks of an existing
108    function as with inlining.  Some parameters will become constants.
110 Parallelization: a region of a function is duplicated resulting in
111 a new function. Variables may be replaced with complex expressions
112 to enable shared variable semantics.
114    All of these will simultaneously look up any callgraph edges.  If
115    we're going to inline the duplicated function body, and the given
116    function has some cloned callgraph nodes (one for each place this
117    function will be inlined), those callgraph edges will be duplicated.
118 If we're cloning the body, those callgraph edges will be
119 updated to point into the new body. (Note that the original
120 callgraph node and edge list will not be altered.)
122 See the CALL_EXPR handling case in copy_tree_body_r (). */
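/* For illustration (added commentary; the callee and identifiers below are
   hypothetical): inlining a call such as

     int callee (int p) { return p + 1; }
     ... use = callee (x); ...

   conceptually rewrites the duplicated body so that the PARM_DECL 'p'
   becomes a local VAR_DECL initialized from 'x', and the RETURN_EXPR
   becomes a MODIFY_EXPR into a dedicated return variable whose value then
   flows to the use site.  */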
124 /* To Do:
126 o In order to make inlining-on-trees work, we pessimized
127 function-local static constants. In particular, they are now
128 always output, even when not addressed. Fix this by treating
129 function-local static constants just like global static
130 constants; the back-end already knows not to output them if they
131 are not needed.
133 o Provide heuristics to clamp inlining of recursive template
134 calls? */
137 /* Weights that estimate_num_insns uses to estimate the size of the
138 produced code. */
140 eni_weights eni_size_weights;
142 /* Weights that estimate_num_insns uses to estimate the time necessary
143 to execute the produced code. */
145 eni_weights eni_time_weights;
147 /* Prototypes. */
149 static tree declare_return_variable (copy_body_data *, tree, tree, tree,
150 basic_block);
151 static void remap_block (tree *, copy_body_data *);
152 static void copy_bind_expr (tree *, int *, copy_body_data *);
153 static void declare_inline_vars (tree, tree);
154 static void remap_save_expr (tree *, hash_map<tree, tree> *, int *);
155 static void prepend_lexical_block (tree current_block, tree new_block);
156 static tree copy_decl_to_var (tree, copy_body_data *);
157 static tree copy_result_decl_to_var (tree, copy_body_data *);
158 static tree copy_decl_maybe_to_var (tree, copy_body_data *);
159 static gimple_seq remap_gimple_stmt (gimple, copy_body_data *);
160 static bool delete_unreachable_blocks_update_callgraph (copy_body_data *id);
161 static void insert_init_stmt (copy_body_data *, basic_block, gimple);
163 /* Insert a tree->tree mapping for ID.  Although the name suggests
164    that the trees should be variables, it is used for more than that.  */
166 void
167 insert_decl_map (copy_body_data *id, tree key, tree value)
169 id->decl_map->put (key, value);
171 /* Always insert an identity map as well. If we see this same new
172 node again, we won't want to duplicate it a second time. */
173 if (key != value)
174 id->decl_map->put (value, value);
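/* For illustration (added commentary; OLD_DECL and NEW_DECL are
   hypothetical): the identity entry inserted above means a remapped node
   maps to itself, so a sequence like

     insert_decl_map (id, old_decl, new_decl);
     ...
     tree *slot = id->decl_map->get (new_decl);   -- slot holds new_decl

   will not try to duplicate NEW_DECL a second time if it is encountered
   again later during the walk.  */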
177 /* Insert a tree->tree mapping for ID. This is only used for
178 variables. */
180 static void
181 insert_debug_decl_map (copy_body_data *id, tree key, tree value)
183 if (!gimple_in_ssa_p (id->src_cfun))
184 return;
186 if (!opt_for_fn (id->dst_fn, flag_var_tracking_assignments))
187 return;
189 if (!target_for_debug_bind (key))
190 return;
192 gcc_assert (TREE_CODE (key) == PARM_DECL);
193 gcc_assert (TREE_CODE (value) == VAR_DECL);
195 if (!id->debug_map)
196 id->debug_map = new hash_map<tree, tree>;
198 id->debug_map->put (key, value);
201 /* If nonzero, we're remapping the contents of inlined debug
202 statements. If negative, an error has occurred, such as a
203 reference to a variable that isn't available in the inlined
204 context. */
205 static int processing_debug_stmt = 0;
207 /* Construct new SSA name for old NAME. ID is the inline context. */
209 static tree
210 remap_ssa_name (tree name, copy_body_data *id)
212 tree new_tree, var;
213 tree *n;
215 gcc_assert (TREE_CODE (name) == SSA_NAME);
217 n = id->decl_map->get (name);
218 if (n)
219 return unshare_expr (*n);
221 if (processing_debug_stmt)
223 if (SSA_NAME_IS_DEFAULT_DEF (name)
224 && TREE_CODE (SSA_NAME_VAR (name)) == PARM_DECL
225 && id->entry_bb == NULL
226 && single_succ_p (ENTRY_BLOCK_PTR_FOR_FN (cfun)))
228 tree vexpr = make_node (DEBUG_EXPR_DECL);
229 gimple def_temp;
230 gimple_stmt_iterator gsi;
231 tree val = SSA_NAME_VAR (name);
233 n = id->decl_map->get (val);
234 if (n != NULL)
235 val = *n;
236 if (TREE_CODE (val) != PARM_DECL)
238 processing_debug_stmt = -1;
239 return name;
241 def_temp = gimple_build_debug_source_bind (vexpr, val, NULL);
242 DECL_ARTIFICIAL (vexpr) = 1;
243 TREE_TYPE (vexpr) = TREE_TYPE (name);
244 DECL_MODE (vexpr) = DECL_MODE (SSA_NAME_VAR (name));
245 gsi = gsi_after_labels (single_succ (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
246 gsi_insert_before (&gsi, def_temp, GSI_SAME_STMT);
247 return vexpr;
250 processing_debug_stmt = -1;
251 return name;
254 /* Remap anonymous SSA names or SSA names of anonymous decls. */
255 var = SSA_NAME_VAR (name);
256 if (!var
257 || (!SSA_NAME_IS_DEFAULT_DEF (name)
258 && TREE_CODE (var) == VAR_DECL
259 && !VAR_DECL_IS_VIRTUAL_OPERAND (var)
260 && DECL_ARTIFICIAL (var)
261 && DECL_IGNORED_P (var)
262 && !DECL_NAME (var)))
264 struct ptr_info_def *pi;
265 new_tree = make_ssa_name (remap_type (TREE_TYPE (name), id));
266 if (!var && SSA_NAME_IDENTIFIER (name))
267 SET_SSA_NAME_VAR_OR_IDENTIFIER (new_tree, SSA_NAME_IDENTIFIER (name));
268 insert_decl_map (id, name, new_tree);
269 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (new_tree)
270 = SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name);
271 /* At least IPA points-to info can be directly transferred. */
272 if (id->src_cfun->gimple_df
273 && id->src_cfun->gimple_df->ipa_pta
274 && (pi = SSA_NAME_PTR_INFO (name))
275 && !pi->pt.anything)
277 struct ptr_info_def *new_pi = get_ptr_info (new_tree);
278 new_pi->pt = pi->pt;
280 return new_tree;
283   /* Do not set DEF_STMT yet, as the statement is not copied yet.  We do
284      that in copy_bb.  */
285 new_tree = remap_decl (var, id);
287   /* We might have substituted a constant or another SSA_NAME for
288      the variable.
290      Replace the SSA name representing the RESULT_DECL by the variable
291      during inlining: this saves us from needing to introduce a PHI node
292      in the case where the return value is only partly initialized.  */
293 if ((TREE_CODE (new_tree) == VAR_DECL || TREE_CODE (new_tree) == PARM_DECL)
294 && (!SSA_NAME_VAR (name)
295 || TREE_CODE (SSA_NAME_VAR (name)) != RESULT_DECL
296 || !id->transform_return_to_modify))
298 struct ptr_info_def *pi;
299 new_tree = make_ssa_name (new_tree);
300 insert_decl_map (id, name, new_tree);
301 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (new_tree)
302 = SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name);
303 /* At least IPA points-to info can be directly transferred. */
304 if (id->src_cfun->gimple_df
305 && id->src_cfun->gimple_df->ipa_pta
306 && (pi = SSA_NAME_PTR_INFO (name))
307 && !pi->pt.anything)
309 struct ptr_info_def *new_pi = get_ptr_info (new_tree);
310 new_pi->pt = pi->pt;
312 if (SSA_NAME_IS_DEFAULT_DEF (name))
314	  /* By inlining a function having an uninitialized variable, we might
315	     extend its lifetime (the variable might get reused).  This causes
316	     an ICE if we end up extending the lifetime of an SSA name across
317	     an abnormal edge, and it also increases register pressure.
319	     We simply initialize all uninitialized vars to 0, except when we
320	     are inlining into the very first BB.  We could avoid this for all
321	     BBs that are not inside strongly connected regions of the CFG,
322	     but this is expensive to test.  */
323 if (id->entry_bb
324 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name)
325 && (!SSA_NAME_VAR (name)
326 || TREE_CODE (SSA_NAME_VAR (name)) != PARM_DECL)
327 && (id->entry_bb != EDGE_SUCC (ENTRY_BLOCK_PTR_FOR_FN (cfun),
328 0)->dest
329 || EDGE_COUNT (id->entry_bb->preds) != 1))
331 gimple_stmt_iterator gsi = gsi_last_bb (id->entry_bb);
332 gimple init_stmt;
333 tree zero = build_zero_cst (TREE_TYPE (new_tree));
335 init_stmt = gimple_build_assign (new_tree, zero);
336 gsi_insert_after (&gsi, init_stmt, GSI_NEW_STMT);
337 SSA_NAME_IS_DEFAULT_DEF (new_tree) = 0;
339 else
341 SSA_NAME_DEF_STMT (new_tree) = gimple_build_nop ();
342 set_ssa_default_def (cfun, SSA_NAME_VAR (new_tree), new_tree);
346 else
347 insert_decl_map (id, name, new_tree);
348 return new_tree;
351 /* Remap DECL during the copying of the BLOCK tree for the function. */
353 tree
354 remap_decl (tree decl, copy_body_data *id)
356 tree *n;
358 /* We only remap local variables in the current function. */
360 /* See if we have remapped this declaration. */
362 n = id->decl_map->get (decl);
364 if (!n && processing_debug_stmt)
366 processing_debug_stmt = -1;
367 return decl;
370 /* If we didn't already have an equivalent for this declaration,
371 create one now. */
372 if (!n)
374 /* Make a copy of the variable or label. */
375 tree t = id->copy_decl (decl, id);
377 /* Remember it, so that if we encounter this local entity again
378 we can reuse this copy. Do this early because remap_type may
379 need this decl for TYPE_STUB_DECL. */
380 insert_decl_map (id, decl, t);
382 if (!DECL_P (t))
383 return t;
385 /* Remap types, if necessary. */
386 TREE_TYPE (t) = remap_type (TREE_TYPE (t), id);
387 if (TREE_CODE (t) == TYPE_DECL)
388 DECL_ORIGINAL_TYPE (t) = remap_type (DECL_ORIGINAL_TYPE (t), id);
390 /* Remap sizes as necessary. */
391 walk_tree (&DECL_SIZE (t), copy_tree_body_r, id, NULL);
392 walk_tree (&DECL_SIZE_UNIT (t), copy_tree_body_r, id, NULL);
394 /* If fields, do likewise for offset and qualifier. */
395 if (TREE_CODE (t) == FIELD_DECL)
397 walk_tree (&DECL_FIELD_OFFSET (t), copy_tree_body_r, id, NULL);
398 if (TREE_CODE (DECL_CONTEXT (t)) == QUAL_UNION_TYPE)
399 walk_tree (&DECL_QUALIFIER (t), copy_tree_body_r, id, NULL);
402 return t;
405 if (id->do_not_unshare)
406 return *n;
407 else
408 return unshare_expr (*n);
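/* For illustration (added commentary; VAR is hypothetical): remap_decl
   memoizes its result -- the first call for a given DECL creates the copy
   via id->copy_decl and records it with insert_decl_map, so

     tree c1 = remap_decl (var, id);
     tree c2 = remap_decl (var, id);

   both return the registered copy; the second call merely looks it up
   (unshared via unshare_expr unless id->do_not_unshare is set).  */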
411 static tree
412 remap_type_1 (tree type, copy_body_data *id)
414 tree new_tree, t;
416   /* We do need a copy.  Build and register it now.  If this is a pointer or
417 reference type, remap the designated type and make a new pointer or
418 reference type. */
419 if (TREE_CODE (type) == POINTER_TYPE)
421 new_tree = build_pointer_type_for_mode (remap_type (TREE_TYPE (type), id),
422 TYPE_MODE (type),
423 TYPE_REF_CAN_ALIAS_ALL (type));
424 if (TYPE_ATTRIBUTES (type) || TYPE_QUALS (type))
425 new_tree = build_type_attribute_qual_variant (new_tree,
426 TYPE_ATTRIBUTES (type),
427 TYPE_QUALS (type));
428 insert_decl_map (id, type, new_tree);
429 return new_tree;
431 else if (TREE_CODE (type) == REFERENCE_TYPE)
433 new_tree = build_reference_type_for_mode (remap_type (TREE_TYPE (type), id),
434 TYPE_MODE (type),
435 TYPE_REF_CAN_ALIAS_ALL (type));
436 if (TYPE_ATTRIBUTES (type) || TYPE_QUALS (type))
437 new_tree = build_type_attribute_qual_variant (new_tree,
438 TYPE_ATTRIBUTES (type),
439 TYPE_QUALS (type));
440 insert_decl_map (id, type, new_tree);
441 return new_tree;
443 else
444 new_tree = copy_node (type);
446 insert_decl_map (id, type, new_tree);
448 /* This is a new type, not a copy of an old type. Need to reassociate
449 variants. We can handle everything except the main variant lazily. */
450 t = TYPE_MAIN_VARIANT (type);
451 if (type != t)
453 t = remap_type (t, id);
454 TYPE_MAIN_VARIANT (new_tree) = t;
455 TYPE_NEXT_VARIANT (new_tree) = TYPE_NEXT_VARIANT (t);
456 TYPE_NEXT_VARIANT (t) = new_tree;
458 else
460 TYPE_MAIN_VARIANT (new_tree) = new_tree;
461 TYPE_NEXT_VARIANT (new_tree) = NULL;
464 if (TYPE_STUB_DECL (type))
465 TYPE_STUB_DECL (new_tree) = remap_decl (TYPE_STUB_DECL (type), id);
467 /* Lazily create pointer and reference types. */
468 TYPE_POINTER_TO (new_tree) = NULL;
469 TYPE_REFERENCE_TO (new_tree) = NULL;
471   /* Copy all types that may contain references to local variables; be sure
472      to preserve sharing between the type and its main variant when possible.  */
473 switch (TREE_CODE (new_tree))
475 case INTEGER_TYPE:
476 case REAL_TYPE:
477 case FIXED_POINT_TYPE:
478 case ENUMERAL_TYPE:
479 case BOOLEAN_TYPE:
480 if (TYPE_MAIN_VARIANT (new_tree) != new_tree)
482 gcc_checking_assert (TYPE_MIN_VALUE (type) == TYPE_MIN_VALUE (TYPE_MAIN_VARIANT (type)));
483 gcc_checking_assert (TYPE_MAX_VALUE (type) == TYPE_MAX_VALUE (TYPE_MAIN_VARIANT (type)));
485 TYPE_MIN_VALUE (new_tree) = TYPE_MIN_VALUE (TYPE_MAIN_VARIANT (new_tree));
486 TYPE_MAX_VALUE (new_tree) = TYPE_MAX_VALUE (TYPE_MAIN_VARIANT (new_tree));
488 else
490 t = TYPE_MIN_VALUE (new_tree);
491 if (t && TREE_CODE (t) != INTEGER_CST)
492 walk_tree (&TYPE_MIN_VALUE (new_tree), copy_tree_body_r, id, NULL);
494 t = TYPE_MAX_VALUE (new_tree);
495 if (t && TREE_CODE (t) != INTEGER_CST)
496 walk_tree (&TYPE_MAX_VALUE (new_tree), copy_tree_body_r, id, NULL);
498 return new_tree;
500 case FUNCTION_TYPE:
501 if (TYPE_MAIN_VARIANT (new_tree) != new_tree
502 && TREE_TYPE (type) == TREE_TYPE (TYPE_MAIN_VARIANT (type)))
503 TREE_TYPE (new_tree) = TREE_TYPE (TYPE_MAIN_VARIANT (new_tree));
504 else
505 TREE_TYPE (new_tree) = remap_type (TREE_TYPE (new_tree), id);
506 if (TYPE_MAIN_VARIANT (new_tree) != new_tree
507 && TYPE_ARG_TYPES (type) == TYPE_ARG_TYPES (TYPE_MAIN_VARIANT (type)))
508 TYPE_ARG_TYPES (new_tree) = TYPE_ARG_TYPES (TYPE_MAIN_VARIANT (new_tree));
509 else
510 walk_tree (&TYPE_ARG_TYPES (new_tree), copy_tree_body_r, id, NULL);
511 return new_tree;
513 case ARRAY_TYPE:
514 if (TYPE_MAIN_VARIANT (new_tree) != new_tree
515 && TREE_TYPE (type) == TREE_TYPE (TYPE_MAIN_VARIANT (type)))
516 TREE_TYPE (new_tree) = TREE_TYPE (TYPE_MAIN_VARIANT (new_tree));
517 else
518 TREE_TYPE (new_tree) = remap_type (TREE_TYPE (new_tree), id);
520 if (TYPE_MAIN_VARIANT (new_tree) != new_tree)
522 gcc_checking_assert (TYPE_DOMAIN (type) == TYPE_DOMAIN (TYPE_MAIN_VARIANT (type)));
523 TYPE_DOMAIN (new_tree) = TYPE_DOMAIN (TYPE_MAIN_VARIANT (new_tree));
525 else
526 TYPE_DOMAIN (new_tree) = remap_type (TYPE_DOMAIN (new_tree), id);
527 break;
529 case RECORD_TYPE:
530 case UNION_TYPE:
531 case QUAL_UNION_TYPE:
532 if (TYPE_MAIN_VARIANT (type) != type
533 && TYPE_FIELDS (type) == TYPE_FIELDS (TYPE_MAIN_VARIANT (type)))
534 TYPE_FIELDS (new_tree) = TYPE_FIELDS (TYPE_MAIN_VARIANT (new_tree));
535 else
537 tree f, nf = NULL;
539 for (f = TYPE_FIELDS (new_tree); f ; f = DECL_CHAIN (f))
541 t = remap_decl (f, id);
542 DECL_CONTEXT (t) = new_tree;
543 DECL_CHAIN (t) = nf;
544 nf = t;
546 TYPE_FIELDS (new_tree) = nreverse (nf);
548 break;
550 case OFFSET_TYPE:
551 default:
552 /* Shouldn't have been thought variable sized. */
553 gcc_unreachable ();
556   /* All variants of the type share the same size, so use the already remapped data.  */
557 if (TYPE_MAIN_VARIANT (new_tree) != new_tree)
559 gcc_checking_assert (TYPE_SIZE (type) == TYPE_SIZE (TYPE_MAIN_VARIANT (type)));
560 gcc_checking_assert (TYPE_SIZE_UNIT (type) == TYPE_SIZE_UNIT (TYPE_MAIN_VARIANT (type)));
562 TYPE_SIZE (new_tree) = TYPE_SIZE (TYPE_MAIN_VARIANT (new_tree));
563 TYPE_SIZE_UNIT (new_tree) = TYPE_SIZE_UNIT (TYPE_MAIN_VARIANT (new_tree));
565 else
567 walk_tree (&TYPE_SIZE (new_tree), copy_tree_body_r, id, NULL);
568 walk_tree (&TYPE_SIZE_UNIT (new_tree), copy_tree_body_r, id, NULL);
571 return new_tree;
574 tree
575 remap_type (tree type, copy_body_data *id)
577 tree *node;
578 tree tmp;
580 if (type == NULL)
581 return type;
583 /* See if we have remapped this type. */
584 node = id->decl_map->get (type);
585 if (node)
586 return *node;
588 /* The type only needs remapping if it's variably modified. */
589 if (! variably_modified_type_p (type, id->src_fn))
591 insert_decl_map (id, type, type);
592 return type;
595 id->remapping_type_depth++;
596 tmp = remap_type_1 (type, id);
597 id->remapping_type_depth--;
599 return tmp;
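/* For illustration (added commentary): only variably modified types are
   really rebuilt.  For a hypothetical variable-length array such as

     void f (int n) { int a[n]; ... }

   the array type's size refers to the local 'n', so remap_type_1 must
   reconstruct the type against the remapped 'n'; an ordinary type like
   'int' is simply entered into the map as itself and returned unchanged.  */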
602 /* Decide if DECL can be put into BLOCK_NONLOCAL_VARs. */
604 static bool
605 can_be_nonlocal (tree decl, copy_body_data *id)
607   /* We cannot duplicate function decls.  */
608 if (TREE_CODE (decl) == FUNCTION_DECL)
609 return true;
611 /* Local static vars must be non-local or we get multiple declaration
612 problems. */
613 if (TREE_CODE (decl) == VAR_DECL
614 && !auto_var_in_fn_p (decl, id->src_fn))
615 return true;
617 return false;
620 static tree
621 remap_decls (tree decls, vec<tree, va_gc> **nonlocalized_list,
622 copy_body_data *id)
624 tree old_var;
625 tree new_decls = NULL_TREE;
627 /* Remap its variables. */
628 for (old_var = decls; old_var; old_var = DECL_CHAIN (old_var))
630 tree new_var;
632 if (can_be_nonlocal (old_var, id))
634 /* We need to add this variable to the local decls as otherwise
635 nothing else will do so. */
636 if (TREE_CODE (old_var) == VAR_DECL
637 && ! DECL_EXTERNAL (old_var))
638 add_local_decl (cfun, old_var);
639 if ((!optimize || debug_info_level > DINFO_LEVEL_TERSE)
640 && !DECL_IGNORED_P (old_var)
641 && nonlocalized_list)
642 vec_safe_push (*nonlocalized_list, old_var);
643 continue;
646 /* Remap the variable. */
647 new_var = remap_decl (old_var, id);
649 /* If we didn't remap this variable, we can't mess with its
650 TREE_CHAIN. If we remapped this variable to the return slot, it's
651 already declared somewhere else, so don't declare it here. */
653 if (new_var == id->retvar)
655 else if (!new_var)
657 if ((!optimize || debug_info_level > DINFO_LEVEL_TERSE)
658 && !DECL_IGNORED_P (old_var)
659 && nonlocalized_list)
660 vec_safe_push (*nonlocalized_list, old_var);
662 else
664 gcc_assert (DECL_P (new_var));
665 DECL_CHAIN (new_var) = new_decls;
666 new_decls = new_var;
668 /* Also copy value-expressions. */
669 if (TREE_CODE (new_var) == VAR_DECL
670 && DECL_HAS_VALUE_EXPR_P (new_var))
672 tree tem = DECL_VALUE_EXPR (new_var);
673 bool old_regimplify = id->regimplify;
674 id->remapping_type_depth++;
675 walk_tree (&tem, copy_tree_body_r, id, NULL);
676 id->remapping_type_depth--;
677 id->regimplify = old_regimplify;
678 SET_DECL_VALUE_EXPR (new_var, tem);
683 return nreverse (new_decls);
686 /* Copy the BLOCK to contain remapped versions of the variables
687 therein. And hook the new block into the block-tree. */
689 static void
690 remap_block (tree *block, copy_body_data *id)
692 tree old_block;
693 tree new_block;
695 /* Make the new block. */
696 old_block = *block;
697 new_block = make_node (BLOCK);
698 TREE_USED (new_block) = TREE_USED (old_block);
699 BLOCK_ABSTRACT_ORIGIN (new_block) = old_block;
700 BLOCK_SOURCE_LOCATION (new_block) = BLOCK_SOURCE_LOCATION (old_block);
701 BLOCK_NONLOCALIZED_VARS (new_block)
702 = vec_safe_copy (BLOCK_NONLOCALIZED_VARS (old_block));
703 *block = new_block;
705 /* Remap its variables. */
706 BLOCK_VARS (new_block) = remap_decls (BLOCK_VARS (old_block),
707 &BLOCK_NONLOCALIZED_VARS (new_block),
708 id);
710 if (id->transform_lang_insert_block)
711 id->transform_lang_insert_block (new_block);
713 /* Remember the remapped block. */
714 insert_decl_map (id, old_block, new_block);
717 /* Copy the whole block tree and root it in id->block. */
718 static tree
719 remap_blocks (tree block, copy_body_data *id)
721 tree t;
722 tree new_tree = block;
724 if (!block)
725 return NULL;
727 remap_block (&new_tree, id);
728 gcc_assert (new_tree != block);
729 for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
730 prepend_lexical_block (new_tree, remap_blocks (t, id));
731 /* Blocks are in arbitrary order, but make things slightly prettier and do
732 not swap order when producing a copy. */
733 BLOCK_SUBBLOCKS (new_tree) = blocks_nreverse (BLOCK_SUBBLOCKS (new_tree));
734 return new_tree;
737 /* Remap the block tree rooted at BLOCK to nothing. */
738 static void
739 remap_blocks_to_null (tree block, copy_body_data *id)
741 tree t;
742 insert_decl_map (id, block, NULL_TREE);
743 for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
744 remap_blocks_to_null (t, id);
747 static void
748 copy_statement_list (tree *tp)
750 tree_stmt_iterator oi, ni;
751 tree new_tree;
753 new_tree = alloc_stmt_list ();
754 ni = tsi_start (new_tree);
755 oi = tsi_start (*tp);
756 TREE_TYPE (new_tree) = TREE_TYPE (*tp);
757 *tp = new_tree;
759 for (; !tsi_end_p (oi); tsi_next (&oi))
761 tree stmt = tsi_stmt (oi);
762 if (TREE_CODE (stmt) == STATEMENT_LIST)
763 /* This copy is not redundant; tsi_link_after will smash this
764 STATEMENT_LIST into the end of the one we're building, and we
765 don't want to do that with the original. */
766 copy_statement_list (&stmt);
767 tsi_link_after (&ni, stmt, TSI_CONTINUE_LINKING);
771 static void
772 copy_bind_expr (tree *tp, int *walk_subtrees, copy_body_data *id)
774 tree block = BIND_EXPR_BLOCK (*tp);
775 /* Copy (and replace) the statement. */
776 copy_tree_r (tp, walk_subtrees, NULL);
777 if (block)
779 remap_block (&block, id);
780 BIND_EXPR_BLOCK (*tp) = block;
783 if (BIND_EXPR_VARS (*tp))
784 /* This will remap a lot of the same decls again, but this should be
785 harmless. */
786 BIND_EXPR_VARS (*tp) = remap_decls (BIND_EXPR_VARS (*tp), NULL, id);
790 /* Create a new gimple_seq by remapping all the statements in BODY
791 using the inlining information in ID. */
793 static gimple_seq
794 remap_gimple_seq (gimple_seq body, copy_body_data *id)
796 gimple_stmt_iterator si;
797 gimple_seq new_body = NULL;
799 for (si = gsi_start (body); !gsi_end_p (si); gsi_next (&si))
801 gimple_seq new_stmts = remap_gimple_stmt (gsi_stmt (si), id);
802 gimple_seq_add_seq (&new_body, new_stmts);
805 return new_body;
809 /* Copy a GIMPLE_BIND statement STMT, remapping all the symbols in its
810 block using the mapping information in ID. */
812 static gimple
813 copy_gimple_bind (gbind *stmt, copy_body_data *id)
815 gimple new_bind;
816 tree new_block, new_vars;
817 gimple_seq body, new_body;
819 /* Copy the statement. Note that we purposely don't use copy_stmt
820 here because we need to remap statements as we copy. */
821 body = gimple_bind_body (stmt);
822 new_body = remap_gimple_seq (body, id);
824 new_block = gimple_bind_block (stmt);
825 if (new_block)
826 remap_block (&new_block, id);
828 /* This will remap a lot of the same decls again, but this should be
829 harmless. */
830 new_vars = gimple_bind_vars (stmt);
831 if (new_vars)
832 new_vars = remap_decls (new_vars, NULL, id);
834 new_bind = gimple_build_bind (new_vars, new_body, new_block);
836 return new_bind;
839 /* Return true if DECL is a parameter or an SSA_NAME for a parameter.  */
841 static bool
842 is_parm (tree decl)
844 if (TREE_CODE (decl) == SSA_NAME)
846 decl = SSA_NAME_VAR (decl);
847 if (!decl)
848 return false;
851 return (TREE_CODE (decl) == PARM_DECL);
854 /* Remap the dependence CLIQUE from the source to the destination function
855 as specified in ID. */
857 static unsigned short
858 remap_dependence_clique (copy_body_data *id, unsigned short clique)
860 if (clique == 0)
861 return 0;
862 if (!id->dependence_map)
863 id->dependence_map = new hash_map<dependence_hash, unsigned short>;
864 bool existed;
865 unsigned short &newc = id->dependence_map->get_or_insert (clique, &existed);
866 if (!existed)
867 newc = ++cfun->last_clique;
868 return newc;
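/* Added commentary (illustrative): dependence cliques are small integers
   that are only meaningful within one function, so copying a body into
   id->dst_fn has to renumber them.  The map above guarantees that two
   MEM_REFs which shared a clique in the source still share one freshly
   allocated clique (taken from cfun->last_clique) in the copy.  */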
871 /* Remap the GIMPLE operand pointed to by *TP. DATA is really a
872 'struct walk_stmt_info *'. DATA->INFO is a 'copy_body_data *'.
873    WALK_SUBTREES is used to indicate to walk_gimple_op whether to keep
874    recursing into the child nodes of *TP.  */
876 static tree
877 remap_gimple_op_r (tree *tp, int *walk_subtrees, void *data)
879 struct walk_stmt_info *wi_p = (struct walk_stmt_info *) data;
880 copy_body_data *id = (copy_body_data *) wi_p->info;
881 tree fn = id->src_fn;
883 if (TREE_CODE (*tp) == SSA_NAME)
885 *tp = remap_ssa_name (*tp, id);
886 *walk_subtrees = 0;
887 return NULL;
889 else if (auto_var_in_fn_p (*tp, fn))
891 /* Local variables and labels need to be replaced by equivalent
892 variables. We don't want to copy static variables; there's
893 only one of those, no matter how many times we inline the
894 containing function. Similarly for globals from an outer
895 function. */
896 tree new_decl;
898 /* Remap the declaration. */
899 new_decl = remap_decl (*tp, id);
900 gcc_assert (new_decl);
901 /* Replace this variable with the copy. */
902 STRIP_TYPE_NOPS (new_decl);
903 /* ??? The C++ frontend uses void * pointer zero to initialize
904 any other type. This confuses the middle-end type verification.
905      As cloned bodies do not go through gimplification again, the fixup
906      there doesn't trigger.  */
907 if (TREE_CODE (new_decl) == INTEGER_CST
908 && !useless_type_conversion_p (TREE_TYPE (*tp), TREE_TYPE (new_decl)))
909 new_decl = fold_convert (TREE_TYPE (*tp), new_decl);
910 *tp = new_decl;
911 *walk_subtrees = 0;
913 else if (TREE_CODE (*tp) == STATEMENT_LIST)
914 gcc_unreachable ();
915 else if (TREE_CODE (*tp) == SAVE_EXPR)
916 gcc_unreachable ();
917 else if (TREE_CODE (*tp) == LABEL_DECL
918 && (!DECL_CONTEXT (*tp)
919 || decl_function_context (*tp) == id->src_fn))
920 /* These may need to be remapped for EH handling. */
921 *tp = remap_decl (*tp, id);
922 else if (TREE_CODE (*tp) == FIELD_DECL)
924 /* If the enclosing record type is variably_modified_type_p, the field
925 has already been remapped. Otherwise, it need not be. */
926 tree *n = id->decl_map->get (*tp);
927 if (n)
928 *tp = *n;
929 *walk_subtrees = 0;
931 else if (TYPE_P (*tp))
932 /* Types may need remapping as well. */
933 *tp = remap_type (*tp, id);
934 else if (CONSTANT_CLASS_P (*tp))
936 /* If this is a constant, we have to copy the node iff the type
937 will be remapped. copy_tree_r will not copy a constant. */
938 tree new_type = remap_type (TREE_TYPE (*tp), id);
940 if (new_type == TREE_TYPE (*tp))
941 *walk_subtrees = 0;
943 else if (TREE_CODE (*tp) == INTEGER_CST)
944 *tp = wide_int_to_tree (new_type, *tp);
945 else
947 *tp = copy_node (*tp);
948 TREE_TYPE (*tp) = new_type;
951 else
953 /* Otherwise, just copy the node. Note that copy_tree_r already
954 knows not to copy VAR_DECLs, etc., so this is safe. */
956 if (TREE_CODE (*tp) == MEM_REF)
958 /* We need to re-canonicalize MEM_REFs from inline substitutions
959 that can happen when a pointer argument is an ADDR_EXPR.
960 Recurse here manually to allow that. */
961 tree ptr = TREE_OPERAND (*tp, 0);
962 tree type = remap_type (TREE_TYPE (*tp), id);
963 tree old = *tp;
964 walk_tree (&ptr, remap_gimple_op_r, data, NULL);
965 *tp = fold_build2 (MEM_REF, type, ptr, TREE_OPERAND (*tp, 1));
966 TREE_THIS_VOLATILE (*tp) = TREE_THIS_VOLATILE (old);
967 TREE_SIDE_EFFECTS (*tp) = TREE_SIDE_EFFECTS (old);
968 TREE_NO_WARNING (*tp) = TREE_NO_WARNING (old);
969 if (MR_DEPENDENCE_CLIQUE (old) != 0)
971 MR_DEPENDENCE_CLIQUE (*tp)
972 = remap_dependence_clique (id, MR_DEPENDENCE_CLIQUE (old));
973 MR_DEPENDENCE_BASE (*tp) = MR_DEPENDENCE_BASE (old);
975 /* We cannot propagate the TREE_THIS_NOTRAP flag if we have
976 remapped a parameter as the property might be valid only
977 for the parameter itself. */
978 if (TREE_THIS_NOTRAP (old)
979 && (!is_parm (TREE_OPERAND (old, 0))
980 || (!id->transform_parameter && is_parm (ptr))))
981 TREE_THIS_NOTRAP (*tp) = 1;
982 *walk_subtrees = 0;
983 return NULL;
986 /* Here is the "usual case". Copy this tree node, and then
987 tweak some special cases. */
988 copy_tree_r (tp, walk_subtrees, NULL);
990 if (TREE_CODE (*tp) != OMP_CLAUSE)
991 TREE_TYPE (*tp) = remap_type (TREE_TYPE (*tp), id);
993 if (TREE_CODE (*tp) == TARGET_EXPR && TREE_OPERAND (*tp, 3))
995 /* The copied TARGET_EXPR has never been expanded, even if the
996 original node was expanded already. */
997 TREE_OPERAND (*tp, 1) = TREE_OPERAND (*tp, 3);
998 TREE_OPERAND (*tp, 3) = NULL_TREE;
1000 else if (TREE_CODE (*tp) == ADDR_EXPR)
1002 /* Variable substitution need not be simple. In particular,
1003 the MEM_REF substitution above. Make sure that
1004 TREE_CONSTANT and friends are up-to-date. */
1005 int invariant = is_gimple_min_invariant (*tp);
1006 walk_tree (&TREE_OPERAND (*tp, 0), remap_gimple_op_r, data, NULL);
1007 recompute_tree_invariant_for_addr_expr (*tp);
1009 /* If this used to be invariant, but is not any longer,
1010 then regimplification is probably needed. */
1011 if (invariant && !is_gimple_min_invariant (*tp))
1012 id->regimplify = true;
1014 *walk_subtrees = 0;
1018 /* Update the TREE_BLOCK for the cloned expr. */
1019 if (EXPR_P (*tp))
1021 tree new_block = id->remapping_type_depth == 0 ? id->block : NULL;
1022 tree old_block = TREE_BLOCK (*tp);
1023 if (old_block)
1025 tree *n;
1026 n = id->decl_map->get (TREE_BLOCK (*tp));
1027 if (n)
1028 new_block = *n;
1030 TREE_SET_BLOCK (*tp, new_block);
1033 /* Keep iterating. */
1034 return NULL_TREE;
1038 /* Called from copy_body_id via walk_tree. DATA is really a
1039 `copy_body_data *'. */
1041 tree
1042 copy_tree_body_r (tree *tp, int *walk_subtrees, void *data)
1044 copy_body_data *id = (copy_body_data *) data;
1045 tree fn = id->src_fn;
1046 tree new_block;
1048 /* Begin by recognizing trees that we'll completely rewrite for the
1049 inlining context. Our output for these trees is completely
1050    different from our input (e.g. RETURN_EXPR is deleted, and morphs
1051 into an edge). Further down, we'll handle trees that get
1052 duplicated and/or tweaked. */
1054 /* When requested, RETURN_EXPRs should be transformed to just the
1055 contained MODIFY_EXPR. The branch semantics of the return will
1056 be handled elsewhere by manipulating the CFG rather than a statement. */
1057 if (TREE_CODE (*tp) == RETURN_EXPR && id->transform_return_to_modify)
1059 tree assignment = TREE_OPERAND (*tp, 0);
1061 /* If we're returning something, just turn that into an
1062 assignment into the equivalent of the original RESULT_DECL.
1063 If the "assignment" is just the result decl, the result
1064 decl has already been set (e.g. a recent "foo (&result_decl,
1065 ...)"); just toss the entire RETURN_EXPR. */
1066 if (assignment && TREE_CODE (assignment) == MODIFY_EXPR)
1068 /* Replace the RETURN_EXPR with (a copy of) the
1069 MODIFY_EXPR hanging underneath. */
1070 *tp = copy_node (assignment);
1072 else /* Else the RETURN_EXPR returns no value. */
1074 *tp = NULL;
1075 return (tree) (void *)1;
1078 else if (TREE_CODE (*tp) == SSA_NAME)
1080 *tp = remap_ssa_name (*tp, id);
1081 *walk_subtrees = 0;
1082 return NULL;
1085 /* Local variables and labels need to be replaced by equivalent
1086 variables. We don't want to copy static variables; there's only
1087 one of those, no matter how many times we inline the containing
1088 function. Similarly for globals from an outer function. */
1089 else if (auto_var_in_fn_p (*tp, fn))
1091 tree new_decl;
1093 /* Remap the declaration. */
1094 new_decl = remap_decl (*tp, id);
1095 gcc_assert (new_decl);
1096 /* Replace this variable with the copy. */
1097 STRIP_TYPE_NOPS (new_decl);
1098 *tp = new_decl;
1099 *walk_subtrees = 0;
1101 else if (TREE_CODE (*tp) == STATEMENT_LIST)
1102 copy_statement_list (tp);
1103 else if (TREE_CODE (*tp) == SAVE_EXPR
1104 || TREE_CODE (*tp) == TARGET_EXPR)
1105 remap_save_expr (tp, id->decl_map, walk_subtrees);
1106 else if (TREE_CODE (*tp) == LABEL_DECL
1107 && (! DECL_CONTEXT (*tp)
1108 || decl_function_context (*tp) == id->src_fn))
1109 /* These may need to be remapped for EH handling. */
1110 *tp = remap_decl (*tp, id);
1111 else if (TREE_CODE (*tp) == BIND_EXPR)
1112 copy_bind_expr (tp, walk_subtrees, id);
1113 /* Types may need remapping as well. */
1114 else if (TYPE_P (*tp))
1115 *tp = remap_type (*tp, id);
1117 /* If this is a constant, we have to copy the node iff the type will be
1118 remapped. copy_tree_r will not copy a constant. */
1119 else if (CONSTANT_CLASS_P (*tp))
1121 tree new_type = remap_type (TREE_TYPE (*tp), id);
1123 if (new_type == TREE_TYPE (*tp))
1124 *walk_subtrees = 0;
1126 else if (TREE_CODE (*tp) == INTEGER_CST)
1127 *tp = wide_int_to_tree (new_type, *tp);
1128 else
1130 *tp = copy_node (*tp);
1131 TREE_TYPE (*tp) = new_type;
1135 /* Otherwise, just copy the node. Note that copy_tree_r already
1136 knows not to copy VAR_DECLs, etc., so this is safe. */
1137 else
1139 /* Here we handle trees that are not completely rewritten.
1140 First we detect some inlining-induced bogosities for
1141 discarding. */
1142 if (TREE_CODE (*tp) == MODIFY_EXPR
1143 && TREE_OPERAND (*tp, 0) == TREE_OPERAND (*tp, 1)
1144 && (auto_var_in_fn_p (TREE_OPERAND (*tp, 0), fn)))
1146 /* Some assignments VAR = VAR; don't generate any rtl code
1147 and thus don't count as variable modification. Avoid
1148 keeping bogosities like 0 = 0. */
1149 tree decl = TREE_OPERAND (*tp, 0), value;
1150 tree *n;
1152 n = id->decl_map->get (decl);
1153 if (n)
1155 value = *n;
1156 STRIP_TYPE_NOPS (value);
1157 if (TREE_CONSTANT (value) || TREE_READONLY (value))
1159 *tp = build_empty_stmt (EXPR_LOCATION (*tp));
1160 return copy_tree_body_r (tp, walk_subtrees, data);
1164 else if (TREE_CODE (*tp) == INDIRECT_REF)
1166 /* Get rid of *& from inline substitutions that can happen when a
1167 pointer argument is an ADDR_EXPR. */
1168 tree decl = TREE_OPERAND (*tp, 0);
1169 tree *n = id->decl_map->get (decl);
1170 if (n)
1172 /* If we happen to get an ADDR_EXPR in n->value, strip
1173 it manually here as we'll eventually get ADDR_EXPRs
1174 which lie about their types pointed to. In this case
1175 build_fold_indirect_ref wouldn't strip the INDIRECT_REF,
1176 but we absolutely rely on that. As fold_indirect_ref
1177 does other useful transformations, try that first, though. */
1178 tree type = TREE_TYPE (*tp);
1179 tree ptr = id->do_not_unshare ? *n : unshare_expr (*n);
1180 tree old = *tp;
1181 *tp = gimple_fold_indirect_ref (ptr);
1182 if (! *tp)
1184 if (TREE_CODE (ptr) == ADDR_EXPR)
1186		      *tp
1187			= fold_indirect_ref_1 (EXPR_LOCATION (ptr), type, ptr);
1188 /* ??? We should either assert here or build
1189 a VIEW_CONVERT_EXPR instead of blindly leaking
1190 incompatible types to our IL. */
1191 if (! *tp)
1192 *tp = TREE_OPERAND (ptr, 0);
1194 else
1196 *tp = build1 (INDIRECT_REF, type, ptr);
1197 TREE_THIS_VOLATILE (*tp) = TREE_THIS_VOLATILE (old);
1198 TREE_SIDE_EFFECTS (*tp) = TREE_SIDE_EFFECTS (old);
1199 TREE_READONLY (*tp) = TREE_READONLY (old);
1200 /* We cannot propagate the TREE_THIS_NOTRAP flag if we
1201 have remapped a parameter as the property might be
1202 valid only for the parameter itself. */
1203 if (TREE_THIS_NOTRAP (old)
1204 && (!is_parm (TREE_OPERAND (old, 0))
1205 || (!id->transform_parameter && is_parm (ptr))))
1206 TREE_THIS_NOTRAP (*tp) = 1;
1209 *walk_subtrees = 0;
1210 return NULL;
1213 else if (TREE_CODE (*tp) == MEM_REF)
1215 /* We need to re-canonicalize MEM_REFs from inline substitutions
1216 that can happen when a pointer argument is an ADDR_EXPR.
1217 Recurse here manually to allow that. */
1218 tree ptr = TREE_OPERAND (*tp, 0);
1219 tree type = remap_type (TREE_TYPE (*tp), id);
1220 tree old = *tp;
1221 walk_tree (&ptr, copy_tree_body_r, data, NULL);
1222 *tp = fold_build2 (MEM_REF, type, ptr, TREE_OPERAND (*tp, 1));
1223 TREE_THIS_VOLATILE (*tp) = TREE_THIS_VOLATILE (old);
1224 TREE_SIDE_EFFECTS (*tp) = TREE_SIDE_EFFECTS (old);
1225 TREE_NO_WARNING (*tp) = TREE_NO_WARNING (old);
1226 if (MR_DEPENDENCE_CLIQUE (old) != 0)
1228 MR_DEPENDENCE_CLIQUE (*tp)
1229 = remap_dependence_clique (id, MR_DEPENDENCE_CLIQUE (old));
1230 MR_DEPENDENCE_BASE (*tp) = MR_DEPENDENCE_BASE (old);
1232 /* We cannot propagate the TREE_THIS_NOTRAP flag if we have
1233 remapped a parameter as the property might be valid only
1234 for the parameter itself. */
1235 if (TREE_THIS_NOTRAP (old)
1236 && (!is_parm (TREE_OPERAND (old, 0))
1237 || (!id->transform_parameter && is_parm (ptr))))
1238 TREE_THIS_NOTRAP (*tp) = 1;
1239 *walk_subtrees = 0;
1240 return NULL;
1243 /* Here is the "usual case". Copy this tree node, and then
1244 tweak some special cases. */
1245 copy_tree_r (tp, walk_subtrees, NULL);
1247       /* If EXPR has a block defined, map it to the newly constructed block.
1248	  When inlining we want EXPRs without a block to appear in the block
1249	  of the function call if we are not remapping a type.  */
1250 if (EXPR_P (*tp))
1252 new_block = id->remapping_type_depth == 0 ? id->block : NULL;
1253 if (TREE_BLOCK (*tp))
1255 tree *n;
1256 n = id->decl_map->get (TREE_BLOCK (*tp));
1257 if (n)
1258 new_block = *n;
1260 TREE_SET_BLOCK (*tp, new_block);
1263 if (TREE_CODE (*tp) != OMP_CLAUSE)
1264 TREE_TYPE (*tp) = remap_type (TREE_TYPE (*tp), id);
1266 /* The copied TARGET_EXPR has never been expanded, even if the
1267 original node was expanded already. */
1268 if (TREE_CODE (*tp) == TARGET_EXPR && TREE_OPERAND (*tp, 3))
1270 TREE_OPERAND (*tp, 1) = TREE_OPERAND (*tp, 3);
1271 TREE_OPERAND (*tp, 3) = NULL_TREE;
1274 /* Variable substitution need not be simple. In particular, the
1275 INDIRECT_REF substitution above. Make sure that TREE_CONSTANT
1276 and friends are up-to-date. */
1277 else if (TREE_CODE (*tp) == ADDR_EXPR)
1279 int invariant = is_gimple_min_invariant (*tp);
1280 walk_tree (&TREE_OPERAND (*tp, 0), copy_tree_body_r, id, NULL);
1282 /* Handle the case where we substituted an INDIRECT_REF
1283 into the operand of the ADDR_EXPR. */
1284 if (TREE_CODE (TREE_OPERAND (*tp, 0)) == INDIRECT_REF)
1285 *tp = TREE_OPERAND (TREE_OPERAND (*tp, 0), 0);
1286 else
1287 recompute_tree_invariant_for_addr_expr (*tp);
1289 /* If this used to be invariant, but is not any longer,
1290 then regimplification is probably needed. */
1291 if (invariant && !is_gimple_min_invariant (*tp))
1292 id->regimplify = true;
1294 *walk_subtrees = 0;
1298 /* Keep iterating. */
1299 return NULL_TREE;
1302 /* Helper for remap_gimple_stmt. Given an EH region number for the
1303 source function, map that to the duplicate EH region number in
1304 the destination function. */
1306 static int
1307 remap_eh_region_nr (int old_nr, copy_body_data *id)
1309 eh_region old_r, new_r;
1311 old_r = get_eh_region_from_number_fn (id->src_cfun, old_nr);
1312 new_r = static_cast<eh_region> (*id->eh_map->get (old_r));
1314 return new_r->index;
1317 /* Similar, but operate on INTEGER_CSTs. */
1319 static tree
1320 remap_eh_region_tree_nr (tree old_t_nr, copy_body_data *id)
1322 int old_nr, new_nr;
1324 old_nr = tree_to_shwi (old_t_nr);
1325 new_nr = remap_eh_region_nr (old_nr, id);
1327 return build_int_cst (integer_type_node, new_nr);
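/* Added commentary (illustrative): the two helpers above translate EH
   region numbers through id->eh_map.  remap_gimple_stmt below uses them
   both for the regions of GIMPLE_RESX and GIMPLE_EH_DISPATCH statements
   (integer form) and for the region arguments of __builtin_eh_pointer,
   __builtin_eh_filter and __builtin_eh_copy_values (INTEGER_CST form).  */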
1330 /* Helper for copy_bb. Remap statement STMT using the inlining
1331 information in ID. Return the new statement copy. */
1333 static gimple_seq
1334 remap_gimple_stmt (gimple stmt, copy_body_data *id)
1336 gimple copy = NULL;
1337 struct walk_stmt_info wi;
1338 bool skip_first = false;
1339 gimple_seq stmts = NULL;
1341 if (is_gimple_debug (stmt)
1342 && !opt_for_fn (id->dst_fn, flag_var_tracking_assignments))
1343 return stmts;
1345 /* Begin by recognizing trees that we'll completely rewrite for the
1346 inlining context. Our output for these trees is completely
1347      different from our input (e.g. RETURN_EXPR is deleted, and morphs
1348 into an edge). Further down, we'll handle trees that get
1349 duplicated and/or tweaked. */
1351 /* When requested, GIMPLE_RETURNs should be transformed to just the
1352 contained GIMPLE_ASSIGN. The branch semantics of the return will
1353 be handled elsewhere by manipulating the CFG rather than the
1354 statement. */
1355 if (gimple_code (stmt) == GIMPLE_RETURN && id->transform_return_to_modify)
1357 tree retval = gimple_return_retval (as_a <greturn *> (stmt));
1358 tree retbnd = gimple_return_retbnd (stmt);
1359 tree bndslot = id->retbnd;
1361 if (retbnd && bndslot)
1363 gimple bndcopy = gimple_build_assign (bndslot, retbnd);
1364 memset (&wi, 0, sizeof (wi));
1365 wi.info = id;
1366 walk_gimple_op (bndcopy, remap_gimple_op_r, &wi);
1367 gimple_seq_add_stmt (&stmts, bndcopy);
1370 /* If we're returning something, just turn that into an
1371 assignment into the equivalent of the original RESULT_DECL.
1372 If RETVAL is just the result decl, the result decl has
1373 already been set (e.g. a recent "foo (&result_decl, ...)");
1374 just toss the entire GIMPLE_RETURN. */
1375 if (retval
1376 && (TREE_CODE (retval) != RESULT_DECL
1377 && (TREE_CODE (retval) != SSA_NAME
1378 || ! SSA_NAME_VAR (retval)
1379 || TREE_CODE (SSA_NAME_VAR (retval)) != RESULT_DECL)))
1381 copy = gimple_build_assign (id->do_not_unshare
1382 ? id->retvar : unshare_expr (id->retvar),
1383 retval);
1384 /* id->retvar is already substituted. Skip it on later remapping. */
1385 skip_first = true;
1387	  /* We need to copy bounds if we return a structure with pointers
1388	     into an instrumented function.  */
1389 if (chkp_function_instrumented_p (id->dst_fn)
1390 && !bndslot
1391 && !BOUNDED_P (id->retvar)
1392 && chkp_type_has_pointer (TREE_TYPE (id->retvar)))
1393 id->assign_stmts.safe_push (copy);
1396 else
1397 return stmts;
1399 else if (gimple_has_substatements (stmt))
1401 gimple_seq s1, s2;
1403 /* When cloning bodies from the C++ front end, we will be handed bodies
1404 in High GIMPLE form. Handle here all the High GIMPLE statements that
1405 have embedded statements. */
1406 switch (gimple_code (stmt))
1408 case GIMPLE_BIND:
1409 copy = copy_gimple_bind (as_a <gbind *> (stmt), id);
1410 break;
1412 case GIMPLE_CATCH:
1414 gcatch *catch_stmt = as_a <gcatch *> (stmt);
1415 s1 = remap_gimple_seq (gimple_catch_handler (catch_stmt), id);
1416 copy = gimple_build_catch (gimple_catch_types (catch_stmt), s1);
1418 break;
1420 case GIMPLE_EH_FILTER:
1421 s1 = remap_gimple_seq (gimple_eh_filter_failure (stmt), id);
1422 copy = gimple_build_eh_filter (gimple_eh_filter_types (stmt), s1);
1423 break;
1425 case GIMPLE_TRY:
1426 s1 = remap_gimple_seq (gimple_try_eval (stmt), id);
1427 s2 = remap_gimple_seq (gimple_try_cleanup (stmt), id);
1428 copy = gimple_build_try (s1, s2, gimple_try_kind (stmt));
1429 break;
1431 case GIMPLE_WITH_CLEANUP_EXPR:
1432 s1 = remap_gimple_seq (gimple_wce_cleanup (stmt), id);
1433 copy = gimple_build_wce (s1);
1434 break;
1436 case GIMPLE_OMP_PARALLEL:
1438 gomp_parallel *omp_par_stmt = as_a <gomp_parallel *> (stmt);
1439 s1 = remap_gimple_seq (gimple_omp_body (omp_par_stmt), id);
1440 copy = gimple_build_omp_parallel
1441 (s1,
1442 gimple_omp_parallel_clauses (omp_par_stmt),
1443 gimple_omp_parallel_child_fn (omp_par_stmt),
1444 gimple_omp_parallel_data_arg (omp_par_stmt));
1446 break;
1448 case GIMPLE_OMP_TASK:
1449 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1450 copy = gimple_build_omp_task
1451 (s1,
1452 gimple_omp_task_clauses (stmt),
1453 gimple_omp_task_child_fn (stmt),
1454 gimple_omp_task_data_arg (stmt),
1455 gimple_omp_task_copy_fn (stmt),
1456 gimple_omp_task_arg_size (stmt),
1457 gimple_omp_task_arg_align (stmt));
1458 break;
1460 case GIMPLE_OMP_FOR:
1461 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1462 s2 = remap_gimple_seq (gimple_omp_for_pre_body (stmt), id);
1463 copy = gimple_build_omp_for (s1, gimple_omp_for_kind (stmt),
1464 gimple_omp_for_clauses (stmt),
1465 gimple_omp_for_collapse (stmt), s2);
1467 size_t i;
1468 for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
1470 gimple_omp_for_set_index (copy, i,
1471 gimple_omp_for_index (stmt, i));
1472 gimple_omp_for_set_initial (copy, i,
1473 gimple_omp_for_initial (stmt, i));
1474 gimple_omp_for_set_final (copy, i,
1475 gimple_omp_for_final (stmt, i));
1476 gimple_omp_for_set_incr (copy, i,
1477 gimple_omp_for_incr (stmt, i));
1478 gimple_omp_for_set_cond (copy, i,
1479 gimple_omp_for_cond (stmt, i));
1482 break;
1484 case GIMPLE_OMP_MASTER:
1485 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1486 copy = gimple_build_omp_master (s1);
1487 break;
1489 case GIMPLE_OMP_TASKGROUP:
1490 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1491 copy = gimple_build_omp_taskgroup (s1);
1492 break;
1494 case GIMPLE_OMP_ORDERED:
1495 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1496 copy = gimple_build_omp_ordered (s1);
1497 break;
1499 case GIMPLE_OMP_SECTION:
1500 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1501 copy = gimple_build_omp_section (s1);
1502 break;
1504 case GIMPLE_OMP_SECTIONS:
1505 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1506 copy = gimple_build_omp_sections
1507 (s1, gimple_omp_sections_clauses (stmt));
1508 break;
1510 case GIMPLE_OMP_SINGLE:
1511 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1512 copy = gimple_build_omp_single
1513 (s1, gimple_omp_single_clauses (stmt));
1514 break;
1516 case GIMPLE_OMP_TARGET:
1517 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1518 copy = gimple_build_omp_target
1519 (s1, gimple_omp_target_kind (stmt),
1520 gimple_omp_target_clauses (stmt));
1521 break;
1523 case GIMPLE_OMP_TEAMS:
1524 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1525 copy = gimple_build_omp_teams
1526 (s1, gimple_omp_teams_clauses (stmt));
1527 break;
1529 case GIMPLE_OMP_CRITICAL:
1530 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1531 copy = gimple_build_omp_critical (s1,
1532 gimple_omp_critical_name (
1533 as_a <gomp_critical *> (stmt)));
1534 break;
1536 case GIMPLE_TRANSACTION:
1538 gtransaction *old_trans_stmt = as_a <gtransaction *> (stmt);
1539 gtransaction *new_trans_stmt;
1540 s1 = remap_gimple_seq (gimple_transaction_body (old_trans_stmt),
1541 id);
1542 copy = new_trans_stmt
1543	  = gimple_build_transaction (
1544	      s1,
1545	      gimple_transaction_label (old_trans_stmt));
1546 gimple_transaction_set_subcode (
1547 new_trans_stmt,
1548 gimple_transaction_subcode (old_trans_stmt));
1550 break;
1552 default:
1553 gcc_unreachable ();
1556 else
1558 if (gimple_assign_copy_p (stmt)
1559 && gimple_assign_lhs (stmt) == gimple_assign_rhs1 (stmt)
1560 && auto_var_in_fn_p (gimple_assign_lhs (stmt), id->src_fn))
1562 /* Here we handle statements that are not completely rewritten.
1563 First we detect some inlining-induced bogosities for
1564 discarding. */
1566 /* Some assignments VAR = VAR; don't generate any rtl code
1567 and thus don't count as variable modification. Avoid
1568 keeping bogosities like 0 = 0. */
1569 tree decl = gimple_assign_lhs (stmt), value;
1570 tree *n;
1572 n = id->decl_map->get (decl);
1573 if (n)
1575 value = *n;
1576 STRIP_TYPE_NOPS (value);
1577 if (TREE_CONSTANT (value) || TREE_READONLY (value))
1578 return NULL;
1582 /* For *ptr_N ={v} {CLOBBER}, if ptr_N is SSA_NAME defined
1583 in a block that we aren't copying during tree_function_versioning,
1584 just drop the clobber stmt. */
1585 if (id->blocks_to_copy && gimple_clobber_p (stmt))
1587 tree lhs = gimple_assign_lhs (stmt);
1588 if (TREE_CODE (lhs) == MEM_REF
1589 && TREE_CODE (TREE_OPERAND (lhs, 0)) == SSA_NAME)
1591 gimple def_stmt = SSA_NAME_DEF_STMT (TREE_OPERAND (lhs, 0));
1592 if (gimple_bb (def_stmt)
1593 && !bitmap_bit_p (id->blocks_to_copy,
1594 gimple_bb (def_stmt)->index))
1595 return NULL;
1599 if (gimple_debug_bind_p (stmt))
1601 gdebug *copy
1602 = gimple_build_debug_bind (gimple_debug_bind_get_var (stmt),
1603 gimple_debug_bind_get_value (stmt),
1604 stmt);
1605 id->debug_stmts.safe_push (copy);
1606 gimple_seq_add_stmt (&stmts, copy);
1607 return stmts;
1609 if (gimple_debug_source_bind_p (stmt))
1611 gdebug *copy = gimple_build_debug_source_bind
1612 (gimple_debug_source_bind_get_var (stmt),
1613 gimple_debug_source_bind_get_value (stmt),
1614 stmt);
1615 id->debug_stmts.safe_push (copy);
1616 gimple_seq_add_stmt (&stmts, copy);
1617 return stmts;
1620 /* Create a new deep copy of the statement. */
1621 copy = gimple_copy (stmt);
1623 /* Clear flags that need revisiting. */
1624 if (gcall *call_stmt = dyn_cast <gcall *> (copy))
1626 if (gimple_call_tail_p (call_stmt))
1627 gimple_call_set_tail (call_stmt, false);
1628 if (gimple_call_from_thunk_p (call_stmt))
1629 gimple_call_set_from_thunk (call_stmt, false);
1632 /* Remap the region numbers for __builtin_eh_{pointer,filter},
1633 RESX and EH_DISPATCH. */
1634 if (id->eh_map)
1635 switch (gimple_code (copy))
1637 case GIMPLE_CALL:
1639 tree r, fndecl = gimple_call_fndecl (copy);
1640 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
1641 switch (DECL_FUNCTION_CODE (fndecl))
1643 case BUILT_IN_EH_COPY_VALUES:
1644 r = gimple_call_arg (copy, 1);
1645 r = remap_eh_region_tree_nr (r, id);
1646 gimple_call_set_arg (copy, 1, r);
1647 /* FALLTHRU */
1649 case BUILT_IN_EH_POINTER:
1650 case BUILT_IN_EH_FILTER:
1651 r = gimple_call_arg (copy, 0);
1652 r = remap_eh_region_tree_nr (r, id);
1653 gimple_call_set_arg (copy, 0, r);
1654 break;
1656 default:
1657 break;
1660 /* Reset alias info if we didn't apply measures to
1661 keep it valid over inlining by setting DECL_PT_UID. */
1662 if (!id->src_cfun->gimple_df
1663 || !id->src_cfun->gimple_df->ipa_pta)
1664 gimple_call_reset_alias_info (as_a <gcall *> (copy));
1666 break;
1668 case GIMPLE_RESX:
1670 gresx *resx_stmt = as_a <gresx *> (copy);
1671 int r = gimple_resx_region (resx_stmt);
1672 r = remap_eh_region_nr (r, id);
1673 gimple_resx_set_region (resx_stmt, r);
1675 break;
1677 case GIMPLE_EH_DISPATCH:
1679 geh_dispatch *eh_dispatch = as_a <geh_dispatch *> (copy);
1680 int r = gimple_eh_dispatch_region (eh_dispatch);
1681 r = remap_eh_region_nr (r, id);
1682 gimple_eh_dispatch_set_region (eh_dispatch, r);
1684 break;
1686 default:
1687 break;
1691 /* If STMT has a block defined, map it to the newly constructed
1692 block. */
1693 if (gimple_block (copy))
1695 tree *n;
1696 n = id->decl_map->get (gimple_block (copy));
1697 gcc_assert (n);
1698 gimple_set_block (copy, *n);
1701 if (gimple_debug_bind_p (copy) || gimple_debug_source_bind_p (copy))
1703 gimple_seq_add_stmt (&stmts, copy);
1704 return stmts;
1707 /* Remap all the operands in COPY. */
1708 memset (&wi, 0, sizeof (wi));
1709 wi.info = id;
1710 if (skip_first)
1711 walk_tree (gimple_op_ptr (copy, 1), remap_gimple_op_r, &wi, NULL);
1712 else
1713 walk_gimple_op (copy, remap_gimple_op_r, &wi);
1715 /* Clear the copied virtual operands. We are not remapping them here
1716 but are going to recreate them from scratch. */
1717 if (gimple_has_mem_ops (copy))
1719 gimple_set_vdef (copy, NULL_TREE);
1720 gimple_set_vuse (copy, NULL_TREE);
1723 gimple_seq_add_stmt (&stmts, copy);
1724 return stmts;
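/* Added commentary (illustrative): remap_gimple_stmt returns a sequence
   rather than a single statement because a GIMPLE_RETURN may expand into
   several statements (e.g. a bounds copy followed by the assignment to
   id->retvar) or into nothing at all when the return is dropped.  */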
1728 /* Copy a basic block, scaling the profile accordingly.  Edges will be
1729    taken care of later.  */
1731 static basic_block
1732 copy_bb (copy_body_data *id, basic_block bb, int frequency_scale,
1733 gcov_type count_scale)
1735 gimple_stmt_iterator gsi, copy_gsi, seq_gsi;
1736 basic_block copy_basic_block;
1737 tree decl;
1738 gcov_type freq;
1739 basic_block prev;
1741 /* Search for previous copied basic block. */
1742 prev = bb->prev_bb;
1743 while (!prev->aux)
1744 prev = prev->prev_bb;
1746 /* create_basic_block() will append every new block to
1747 basic_block_info automatically. */
1748 copy_basic_block = create_basic_block (NULL, (basic_block) prev->aux);
1749 copy_basic_block->count = apply_scale (bb->count, count_scale);
1751   /* We are going to rebuild frequencies from scratch.  These values
1752      are only of small importance for driving canonicalize_loop_headers.  */
1753 freq = apply_scale ((gcov_type)bb->frequency, frequency_scale);
1755 /* We recompute frequencies after inlining, so this is quite safe. */
1756 if (freq > BB_FREQ_MAX)
1757 freq = BB_FREQ_MAX;
1758 copy_basic_block->frequency = freq;
1760 copy_gsi = gsi_start_bb (copy_basic_block);
1762 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
1764 gimple_seq stmts;
1765 gimple stmt = gsi_stmt (gsi);
1766 gimple orig_stmt = stmt;
1767 gimple_stmt_iterator stmts_gsi;
1768 bool stmt_added = false;
1770 id->regimplify = false;
1771 stmts = remap_gimple_stmt (stmt, id);
1773 if (gimple_seq_empty_p (stmts))
1774 continue;
1776 seq_gsi = copy_gsi;
1778 for (stmts_gsi = gsi_start (stmts);
1779 !gsi_end_p (stmts_gsi); )
1781 stmt = gsi_stmt (stmts_gsi);
1783 /* Advance iterator now before stmt is moved to seq_gsi. */
1784 gsi_next (&stmts_gsi);
1786 if (gimple_nop_p (stmt))
1787 continue;
1789 gimple_duplicate_stmt_histograms (cfun, stmt, id->src_cfun,
1790 orig_stmt);
1792 /* With return slot optimization we can end up with
1793 non-gimple (foo *)&this->m, fix that here. */
1794 if (is_gimple_assign (stmt)
1795 && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt))
1796 && !is_gimple_val (gimple_assign_rhs1 (stmt)))
1798 tree new_rhs;
1799 new_rhs = force_gimple_operand_gsi (&seq_gsi,
1800 gimple_assign_rhs1 (stmt),
1801 true, NULL, false,
1802 GSI_CONTINUE_LINKING);
1803 gimple_assign_set_rhs1 (stmt, new_rhs);
1804 id->regimplify = false;
1807 gsi_insert_after (&seq_gsi, stmt, GSI_NEW_STMT);
1809 if (id->regimplify)
1810 gimple_regimplify_operands (stmt, &seq_gsi);
1812 stmt_added = true;
1815 if (!stmt_added)
1816 continue;
1818 /* If copy_basic_block has been empty at the start of this iteration,
1819 call gsi_start_bb again to get at the newly added statements. */
1820 if (gsi_end_p (copy_gsi))
1821 copy_gsi = gsi_start_bb (copy_basic_block);
1822 else
1823 gsi_next (&copy_gsi);
1825 /* Process the new statement. The call to gimple_regimplify_operands
1826 possibly turned the statement into multiple statements, we
1827 need to process all of them. */
1830 tree fn;
1831 gcall *call_stmt;
1833 stmt = gsi_stmt (copy_gsi);
1834 call_stmt = dyn_cast <gcall *> (stmt);
1835 if (call_stmt
1836 && gimple_call_va_arg_pack_p (call_stmt)
1837 && id->call_stmt)
1839 /* __builtin_va_arg_pack () should be replaced by
1840 all arguments corresponding to ... in the caller. */
1841 tree p;
1842 gcall *new_call;
1843 vec<tree> argarray;
1844 size_t nargs = gimple_call_num_args (id->call_stmt);
1845 size_t n, i, nargs_to_copy;
1846 bool remove_bounds = false;
1848 for (p = DECL_ARGUMENTS (id->src_fn); p; p = DECL_CHAIN (p))
1849 nargs--;
1851	      /* Bounds should be removed from the arg pack in case we
1852		 handle a non-instrumented call in an instrumented
1853		 function.  */
1854 nargs_to_copy = nargs;
1855 if (gimple_call_with_bounds_p (id->call_stmt)
1856 && !gimple_call_with_bounds_p (stmt))
1858 for (i = gimple_call_num_args (id->call_stmt) - nargs;
1859 i < gimple_call_num_args (id->call_stmt);
1860 i++)
1861 if (POINTER_BOUNDS_P (gimple_call_arg (id->call_stmt, i)))
1862 nargs_to_copy--;
1863 remove_bounds = true;
1866 /* Create the new array of arguments. */
1867 n = nargs_to_copy + gimple_call_num_args (call_stmt);
1868 argarray.create (n);
1869 argarray.safe_grow_cleared (n);
1871 /* Copy all the arguments before '...' */
1872 memcpy (argarray.address (),
1873 gimple_call_arg_ptr (call_stmt, 0),
1874 gimple_call_num_args (call_stmt) * sizeof (tree));
1876 if (remove_bounds)
1879 /* Append the rest of the arguments, removing bounds. */
1879 unsigned cur = gimple_call_num_args (call_stmt);
1880 i = gimple_call_num_args (id->call_stmt) - nargs;
1881 for (i = gimple_call_num_args (id->call_stmt) - nargs;
1882 i < gimple_call_num_args (id->call_stmt);
1883 i++)
1884 if (!POINTER_BOUNDS_P (gimple_call_arg (id->call_stmt, i)))
1885 argarray[cur++] = gimple_call_arg (id->call_stmt, i);
1886 gcc_assert (cur == n);
1888 else
1890 /* Append the arguments passed in '...' */
1891 memcpy (argarray.address () + gimple_call_num_args (call_stmt),
1892 gimple_call_arg_ptr (id->call_stmt, 0)
1893 + (gimple_call_num_args (id->call_stmt) - nargs),
1894 nargs * sizeof (tree));
1897 new_call = gimple_build_call_vec (gimple_call_fn (call_stmt),
1898 argarray);
1900 argarray.release ();
1902 /* Copy all GIMPLE_CALL flags, location and block, except
1903 GF_CALL_VA_ARG_PACK. */
1904 gimple_call_copy_flags (new_call, call_stmt);
1905 gimple_call_set_va_arg_pack (new_call, false);
1906 gimple_set_location (new_call, gimple_location (stmt));
1907 gimple_set_block (new_call, gimple_block (stmt));
1908 gimple_call_set_lhs (new_call, gimple_call_lhs (call_stmt));
1910 gsi_replace (&copy_gsi, new_call, false);
1911 stmt = new_call;
1913 else if (call_stmt
1914 && id->call_stmt
1915 && (decl = gimple_call_fndecl (stmt))
1916 && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
1917 && DECL_FUNCTION_CODE (decl) == BUILT_IN_VA_ARG_PACK_LEN)
1919 /* __builtin_va_arg_pack_len () should be replaced by
1920 the number of anonymous arguments. */
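	    /* E.g. when f (int x, ...) is inlined for the call f (1, 2, 3),
	       __builtin_va_arg_pack_len () is replaced by the constant 2.  */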
1921 size_t nargs = gimple_call_num_args (id->call_stmt), i;
1922 tree count, p;
1923 gimple new_stmt;
1925 for (p = DECL_ARGUMENTS (id->src_fn); p; p = DECL_CHAIN (p))
1926 nargs--;
1928 /* For instrumented calls we should ignore bounds. */
1929 for (i = gimple_call_num_args (id->call_stmt) - nargs;
1930 i < gimple_call_num_args (id->call_stmt);
1931 i++)
1932 if (POINTER_BOUNDS_P (gimple_call_arg (id->call_stmt, i)))
1933 nargs--;
1935 count = build_int_cst (integer_type_node, nargs);
1936 new_stmt = gimple_build_assign (gimple_call_lhs (stmt), count);
1937 gsi_replace (&copy_gsi, new_stmt, false);
1938 stmt = new_stmt;
1940 else if (call_stmt
1941 && id->call_stmt
1942 && gimple_call_internal_p (stmt)
1943 && gimple_call_internal_fn (stmt) == IFN_TSAN_FUNC_EXIT)
1945 /* Drop TSAN_FUNC_EXIT () internal calls during inlining. */
1946 gsi_remove (&copy_gsi, false);
1947 continue;
1950 /* Statements produced by inlining can be unfolded, especially
1951 when we constant propagated some operands. We can't fold
1952 them right now for two reasons:
1953 1) folding requires SSA_NAME_DEF_STMTs to be correct
1954 2) we can't change function calls to builtins.
1955 So we just mark the statement for later folding. We mark
1956 all new statements, instead of just the statements that have
1957 changed by some nontrivial substitution, so even statements made
1958 foldable indirectly are updated. If this turns out to be
1959 expensive, copy_body can be told to watch for nontrivial
1960 changes. */
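	  /* The marked statements are folded later by fold_marked_statements,
	     once the whole body has been copied.  */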
1961 if (id->statements_to_fold)
1962 id->statements_to_fold->add (stmt);
1964 /* We're duplicating a CALL_EXPR. Find any corresponding
1965 callgraph edges and update or duplicate them. */
1966 if (gcall *call_stmt = dyn_cast <gcall *> (stmt))
1968 struct cgraph_edge *edge;
1970 switch (id->transform_call_graph_edges)
1972 case CB_CGE_DUPLICATE:
1973 edge = id->src_node->get_edge (orig_stmt);
1974 if (edge)
1976 int edge_freq = edge->frequency;
1977 int new_freq;
1978 struct cgraph_edge *old_edge = edge;
1979 edge = edge->clone (id->dst_node, call_stmt,
1980 gimple_uid (stmt),
1981 REG_BR_PROB_BASE, CGRAPH_FREQ_BASE,
1982 true);
1983 /* We could also just rescale the frequency, but
1984 doing so would introduce roundoff errors and make
1985 the verifier unhappy. */
1986 new_freq = compute_call_stmt_bb_frequency (id->dst_node->decl,
1987 copy_basic_block);
1989 /* Speculative calls consist of two edges - direct and indirect.
1990 Duplicate the whole thing and distribute frequencies accordingly. */
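	      /* The new block frequency is split between the direct and the
		 indirect clone in proportion to their original frequencies,
		 capped at CGRAPH_FREQ_MAX.  */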
1991 if (edge->speculative)
1993 struct cgraph_edge *direct, *indirect;
1994 struct ipa_ref *ref;
1996 gcc_assert (!edge->indirect_unknown_callee);
1997 old_edge->speculative_call_info (direct, indirect, ref);
1998 indirect = indirect->clone (id->dst_node, call_stmt,
1999 gimple_uid (stmt),
2000 REG_BR_PROB_BASE, CGRAPH_FREQ_BASE,
2001 true);
2002 if (old_edge->frequency + indirect->frequency)
2004 edge->frequency = MIN (RDIV ((gcov_type)new_freq * old_edge->frequency,
2005 (old_edge->frequency + indirect->frequency)),
2006 CGRAPH_FREQ_MAX);
2007 indirect->frequency = MIN (RDIV ((gcov_type)new_freq * indirect->frequency,
2008 (old_edge->frequency + indirect->frequency)),
2009 CGRAPH_FREQ_MAX);
2011 id->dst_node->clone_reference (ref, stmt);
2013 else
2015 edge->frequency = new_freq;
2016 if (dump_file
2017 && profile_status_for_fn (cfun) != PROFILE_ABSENT
2018 && (edge_freq > edge->frequency + 10
2019 || edge_freq < edge->frequency - 10))
2021 fprintf (dump_file, "Edge frequency estimated by "
2022 "cgraph %i diverge from inliner's estimate %i\n",
2023 edge_freq,
2024 edge->frequency);
2025 fprintf (dump_file,
2026 "Orig bb: %i, orig bb freq %i, new bb freq %i\n",
2027 bb->index,
2028 bb->frequency,
2029 copy_basic_block->frequency);
2033 break;
2035 case CB_CGE_MOVE_CLONES:
2036 id->dst_node->set_call_stmt_including_clones (orig_stmt,
2037 call_stmt);
2038 edge = id->dst_node->get_edge (stmt);
2039 break;
2041 case CB_CGE_MOVE:
2042 edge = id->dst_node->get_edge (orig_stmt);
2043 if (edge)
2044 edge->set_call_stmt (call_stmt);
2045 break;
2047 default:
2048 gcc_unreachable ();
2051 /* Constant propagation on arguments done during inlining
2052 may create a new direct call. Produce an edge for it. */
2053 if ((!edge
2054 || (edge->indirect_inlining_edge
2055 && id->transform_call_graph_edges == CB_CGE_MOVE_CLONES))
2056 && id->dst_node->definition
2057 && (fn = gimple_call_fndecl (stmt)) != NULL)
2059 struct cgraph_node *dest = cgraph_node::get (fn);
2061 /* We have a missing edge in the callgraph. This can happen
2062 when previous inlining turned an indirect call into a
2063 direct call by constant propagating arguments, or when we are
2064 producing a dead clone (for further cloning). In all
2065 other cases we hit a bug (incorrect node sharing is the
2066 most common reason for missing edges). */
2067 gcc_assert (!dest->definition
2068 || dest->address_taken
2069 || !id->src_node->definition
2070 || !id->dst_node->definition);
2071 if (id->transform_call_graph_edges == CB_CGE_MOVE_CLONES)
2072 id->dst_node->create_edge_including_clones
2073 (dest, orig_stmt, call_stmt, bb->count,
2074 compute_call_stmt_bb_frequency (id->dst_node->decl,
2075 copy_basic_block),
2076 CIF_ORIGINALLY_INDIRECT_CALL);
2077 else
2078 id->dst_node->create_edge (dest, call_stmt,
2079 bb->count,
2080 compute_call_stmt_bb_frequency
2081 (id->dst_node->decl,
2082 copy_basic_block))->inline_failed
2083 = CIF_ORIGINALLY_INDIRECT_CALL;
2084 if (dump_file)
2086 fprintf (dump_file, "Created new direct edge to %s\n",
2087 dest->name ());
2091 notice_special_calls (as_a <gcall *> (stmt));
2094 maybe_duplicate_eh_stmt_fn (cfun, stmt, id->src_cfun, orig_stmt,
2095 id->eh_map, id->eh_lp_nr);
2097 if (gimple_in_ssa_p (cfun) && !is_gimple_debug (stmt))
2099 ssa_op_iter i;
2100 tree def;
2102 FOR_EACH_SSA_TREE_OPERAND (def, stmt, i, SSA_OP_DEF)
2103 if (TREE_CODE (def) == SSA_NAME)
2104 SSA_NAME_DEF_STMT (def) = stmt;
2107 gsi_next (&copy_gsi);
2109 while (!gsi_end_p (copy_gsi));
2111 copy_gsi = gsi_last_bb (copy_basic_block);
2114 return copy_basic_block;
2117 /* Inserting a Single Entry Multiple Exit region in SSA form into code in SSA
2118 form is quite easy, since the dominator relationship for the old basic
2119 blocks does not change.
2121 There is however an exception: inlining might change the dominator relation
2122 across EH edges from basic blocks within the inlined function to
2123 landing pads in the function we inline into.
2125 The function fills in PHI_RESULTs of such PHI nodes if they refer
2126 to gimple regs. Otherwise, the function marks the PHI_RESULT of such
2127 PHI nodes for renaming. For non-gimple regs, renaming is safe: the
2128 EH edges are abnormal and SSA_NAME_OCCURS_IN_ABNORMAL_PHI must be
2129 set, and this means that there will be no overlapping live ranges
2130 for the underlying symbol.
2132 This might change in the future if we allow redirecting of EH edges, and
2133 we might then want to change the way we build the CFG pre-inlining to
2134 include all the possible edges. */
2135 static void
2136 update_ssa_across_abnormal_edges (basic_block bb, basic_block ret_bb,
2137 bool can_throw, bool nonlocal_goto)
2139 edge e;
2140 edge_iterator ei;
2142 FOR_EACH_EDGE (e, ei, bb->succs)
2143 if (!e->dest->aux
2144 || ((basic_block)e->dest->aux)->index == ENTRY_BLOCK)
2146 gphi *phi;
2147 gphi_iterator si;
2149 if (!nonlocal_goto)
2150 gcc_assert (e->flags & EDGE_EH);
2152 if (!can_throw)
2153 gcc_assert (!(e->flags & EDGE_EH));
2155 for (si = gsi_start_phis (e->dest); !gsi_end_p (si); gsi_next (&si))
2157 edge re;
2159 phi = si.phi ();
2161 /* For abnormal goto/call edges the receiver can be the
2162 ENTRY_BLOCK. Do not assert this cannot happen. */
2164 gcc_assert ((e->flags & EDGE_EH)
2165 || SSA_NAME_OCCURS_IN_ABNORMAL_PHI (PHI_RESULT (phi)));
2167 re = find_edge (ret_bb, e->dest);
2168 gcc_checking_assert (re);
2169 gcc_assert ((re->flags & (EDGE_EH | EDGE_ABNORMAL))
2170 == (e->flags & (EDGE_EH | EDGE_ABNORMAL)));
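	  /* Initialize the PHI argument on the new edge E with the value the
	     PHI already receives on the existing edge RE from RET_BB.  */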
2172 SET_USE (PHI_ARG_DEF_PTR_FROM_EDGE (phi, e),
2173 USE_FROM_PTR (PHI_ARG_DEF_PTR_FROM_EDGE (phi, re)));
2179 /* Copy edges from BB into its copy constructed earlier, scale profile
2180 accordingly. Edges will be taken care of later. Assume the aux
2181 pointers point to the copies of each BB. Return true if any
2182 debug stmts are left after a statement that must end the basic block. */
2184 static bool
2185 copy_edges_for_bb (basic_block bb, gcov_type count_scale, basic_block ret_bb,
2186 basic_block abnormal_goto_dest)
2188 basic_block new_bb = (basic_block) bb->aux;
2189 edge_iterator ei;
2190 edge old_edge;
2191 gimple_stmt_iterator si;
2192 int flags;
2193 bool need_debug_cleanup = false;
2195 /* Use the indices from the original blocks to create edges for the
2196 new ones. */
2197 FOR_EACH_EDGE (old_edge, ei, bb->succs)
2198 if (!(old_edge->flags & EDGE_EH))
2200 edge new_edge;
2202 flags = old_edge->flags;
2204 /* Return edges do get a FALLTHRU flag when they get inlined. */
2205 if (old_edge->dest->index == EXIT_BLOCK
2206 && !(old_edge->flags & (EDGE_TRUE_VALUE|EDGE_FALSE_VALUE|EDGE_FAKE))
2207 && old_edge->dest->aux != EXIT_BLOCK_PTR_FOR_FN (cfun))
2208 flags |= EDGE_FALLTHRU;
2209 new_edge = make_edge (new_bb, (basic_block) old_edge->dest->aux, flags);
2210 new_edge->count = apply_scale (old_edge->count, count_scale);
2211 new_edge->probability = old_edge->probability;
2214 if (bb->index == ENTRY_BLOCK || bb->index == EXIT_BLOCK)
2215 return false;
2217 for (si = gsi_start_bb (new_bb); !gsi_end_p (si);)
2219 gimple copy_stmt;
2220 bool can_throw, nonlocal_goto;
2222 copy_stmt = gsi_stmt (si);
2223 if (!is_gimple_debug (copy_stmt))
2224 update_stmt (copy_stmt);
2226 /* Do this before the possible split_block. */
2227 gsi_next (&si);
2229 /* If this tree could throw an exception, there are two
2230 cases where we need to add abnormal edge(s): the
2231 tree wasn't in a region and there is a "current
2232 region" in the caller; or the original tree had
2233 EH edges. In both cases split the block after the tree,
2234 and add abnormal edge(s) as needed; we need both
2235 those from the callee and the caller.
2236 We check whether the copy can throw, because the const
2237 propagation can change an INDIRECT_REF which throws
2238 into a COMPONENT_REF which doesn't. If the copy
2239 can throw, the original could also throw. */
2240 can_throw = stmt_can_throw_internal (copy_stmt);
2241 nonlocal_goto
2242 = (stmt_can_make_abnormal_goto (copy_stmt)
2243 && !computed_goto_p (copy_stmt));
2245 if (can_throw || nonlocal_goto)
2247 if (!gsi_end_p (si))
2249 while (!gsi_end_p (si) && is_gimple_debug (gsi_stmt (si)))
2250 gsi_next (&si);
2251 if (gsi_end_p (si))
2252 need_debug_cleanup = true;
2254 if (!gsi_end_p (si))
2255 /* Note that bb's predecessor edges aren't necessarily
2256 right at this point; split_block doesn't care. */
2258 edge e = split_block (new_bb, copy_stmt);
2260 new_bb = e->dest;
2261 new_bb->aux = e->src->aux;
2262 si = gsi_start_bb (new_bb);
2266 if (gimple_code (copy_stmt) == GIMPLE_EH_DISPATCH)
2267 make_eh_dispatch_edges (as_a <geh_dispatch *> (copy_stmt));
2268 else if (can_throw)
2269 make_eh_edges (copy_stmt);
2271 /* If the call we inline cannot make an abnormal goto, do not add
2272 additional abnormal edges but only retain those already present
2273 in the original function body. */
2274 if (abnormal_goto_dest == NULL)
2275 nonlocal_goto = false;
2276 if (nonlocal_goto)
2278 basic_block copy_stmt_bb = gimple_bb (copy_stmt);
2280 if (get_abnormal_succ_dispatcher (copy_stmt_bb))
2281 nonlocal_goto = false;
2282 /* ABNORMAL_DISPATCHER (1) is for longjmp/setjmp or nonlocal gotos
2283 in OpenMP regions which aren't allowed to be left abnormally.
2284 So, no need to add abnormal edge in that case. */
2285 else if (is_gimple_call (copy_stmt)
2286 && gimple_call_internal_p (copy_stmt)
2287 && (gimple_call_internal_fn (copy_stmt)
2288 == IFN_ABNORMAL_DISPATCHER)
2289 && gimple_call_arg (copy_stmt, 0) == boolean_true_node)
2290 nonlocal_goto = false;
2291 else
2292 make_edge (copy_stmt_bb, abnormal_goto_dest, EDGE_ABNORMAL);
2295 if ((can_throw || nonlocal_goto)
2296 && gimple_in_ssa_p (cfun))
2297 update_ssa_across_abnormal_edges (gimple_bb (copy_stmt), ret_bb,
2298 can_throw, nonlocal_goto);
2300 return need_debug_cleanup;
2303 /* Copy the PHIs. All blocks and edges are copied; some blocks
2304 were possibly split and new outgoing EH edges inserted.
2305 BB points to the block of the original function and AUX pointers link
2306 the original and newly copied blocks. */
2308 static void
2309 copy_phis_for_bb (basic_block bb, copy_body_data *id)
2311 basic_block const new_bb = (basic_block) bb->aux;
2312 edge_iterator ei;
2313 gphi *phi;
2314 gphi_iterator si;
2315 edge new_edge;
2316 bool inserted = false;
2318 for (si = gsi_start_phis (bb); !gsi_end_p (si); gsi_next (&si))
2320 tree res, new_res;
2321 gphi *new_phi;
2323 phi = si.phi ();
2324 res = PHI_RESULT (phi);
2325 new_res = res;
2326 if (!virtual_operand_p (res))
2328 walk_tree (&new_res, copy_tree_body_r, id, NULL);
2329 new_phi = create_phi_node (new_res, new_bb);
2330 FOR_EACH_EDGE (new_edge, ei, new_bb->preds)
2332 edge old_edge = find_edge ((basic_block) new_edge->src->aux, bb);
2333 tree arg;
2334 tree new_arg;
2335 edge_iterator ei2;
2336 location_t locus;
2338 /* When doing partial cloning, we allow PHIs on the entry block
2339 as long as all the arguments are the same. Find any input
2340 edge to see the argument to copy. */
2341 if (!old_edge)
2342 FOR_EACH_EDGE (old_edge, ei2, bb->preds)
2343 if (!old_edge->src->aux)
2344 break;
2346 arg = PHI_ARG_DEF_FROM_EDGE (phi, old_edge);
2347 new_arg = arg;
2348 walk_tree (&new_arg, copy_tree_body_r, id, NULL);
2349 gcc_assert (new_arg);
2350 /* With return slot optimization we can end up with
2351 non-gimple (foo *)&this->m, fix that here. */
2352 if (TREE_CODE (new_arg) != SSA_NAME
2353 && TREE_CODE (new_arg) != FUNCTION_DECL
2354 && !is_gimple_val (new_arg))
2356 gimple_seq stmts = NULL;
2357 new_arg = force_gimple_operand (new_arg, &stmts, true, NULL);
2358 gsi_insert_seq_on_edge (new_edge, stmts);
2359 inserted = true;
2361 locus = gimple_phi_arg_location_from_edge (phi, old_edge);
2362 if (LOCATION_BLOCK (locus))
2364 tree *n;
2365 n = id->decl_map->get (LOCATION_BLOCK (locus));
2366 gcc_assert (n);
2367 if (*n)
2368 locus = COMBINE_LOCATION_DATA (line_table, locus, *n);
2369 else
2370 locus = LOCATION_LOCUS (locus);
2372 else
2373 locus = LOCATION_LOCUS (locus);
2375 add_phi_arg (new_phi, new_arg, new_edge, locus);
2380 /* Commit the delayed edge insertions. */
2381 if (inserted)
2382 FOR_EACH_EDGE (new_edge, ei, new_bb->preds)
2383 gsi_commit_one_edge_insert (new_edge, NULL);
2387 /* Wrapper for remap_decl so it can be used as a callback. */
2389 static tree
2390 remap_decl_1 (tree decl, void *data)
2392 return remap_decl (decl, (copy_body_data *) data);
2395 /* Build the struct function and associated data structures for the new clone
2396 NEW_FNDECL to be built. CALLEE_FNDECL is the original. The function changes
2397 cfun to the function of new_fndecl (and current_function_decl too). */
2399 static void
2400 initialize_cfun (tree new_fndecl, tree callee_fndecl, gcov_type count)
2402 struct function *src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
2403 gcov_type count_scale;
2405 if (!DECL_ARGUMENTS (new_fndecl))
2406 DECL_ARGUMENTS (new_fndecl) = DECL_ARGUMENTS (callee_fndecl);
2407 if (!DECL_RESULT (new_fndecl))
2408 DECL_RESULT (new_fndecl) = DECL_RESULT (callee_fndecl);
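  /* COUNT_SCALE is the ratio of the requested entry count to the source
     function's entry count, expressed relative to REG_BR_PROB_BASE.  */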
2410 if (ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count)
2411 count_scale
2412 = GCOV_COMPUTE_SCALE (count,
2413 ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count);
2414 else
2415 count_scale = REG_BR_PROB_BASE;
2417 /* Register specific tree functions. */
2418 gimple_register_cfg_hooks ();
2420 /* Get clean struct function. */
2421 push_struct_function (new_fndecl);
2423 /* We will rebuild these, so just sanity check that they are empty. */
2424 gcc_assert (VALUE_HISTOGRAMS (cfun) == NULL);
2425 gcc_assert (cfun->local_decls == NULL);
2426 gcc_assert (cfun->cfg == NULL);
2427 gcc_assert (cfun->decl == new_fndecl);
2429 /* Copy items we preserve during cloning. */
2430 cfun->static_chain_decl = src_cfun->static_chain_decl;
2431 cfun->nonlocal_goto_save_area = src_cfun->nonlocal_goto_save_area;
2432 cfun->function_end_locus = src_cfun->function_end_locus;
2433 cfun->curr_properties = src_cfun->curr_properties;
2434 cfun->last_verified = src_cfun->last_verified;
2435 cfun->va_list_gpr_size = src_cfun->va_list_gpr_size;
2436 cfun->va_list_fpr_size = src_cfun->va_list_fpr_size;
2437 cfun->has_nonlocal_label = src_cfun->has_nonlocal_label;
2438 cfun->stdarg = src_cfun->stdarg;
2439 cfun->after_inlining = src_cfun->after_inlining;
2440 cfun->can_throw_non_call_exceptions
2441 = src_cfun->can_throw_non_call_exceptions;
2442 cfun->can_delete_dead_exceptions = src_cfun->can_delete_dead_exceptions;
2443 cfun->returns_struct = src_cfun->returns_struct;
2444 cfun->returns_pcc_struct = src_cfun->returns_pcc_struct;
2446 init_empty_tree_cfg ();
2448 profile_status_for_fn (cfun) = profile_status_for_fn (src_cfun);
2449 ENTRY_BLOCK_PTR_FOR_FN (cfun)->count =
2450 (ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count * count_scale /
2451 REG_BR_PROB_BASE);
2452 ENTRY_BLOCK_PTR_FOR_FN (cfun)->frequency
2453 = ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->frequency;
2454 EXIT_BLOCK_PTR_FOR_FN (cfun)->count =
2455 (EXIT_BLOCK_PTR_FOR_FN (src_cfun)->count * count_scale /
2456 REG_BR_PROB_BASE);
2457 EXIT_BLOCK_PTR_FOR_FN (cfun)->frequency =
2458 EXIT_BLOCK_PTR_FOR_FN (src_cfun)->frequency;
2459 if (src_cfun->eh)
2460 init_eh_for_function ();
2462 if (src_cfun->gimple_df)
2464 init_tree_ssa (cfun);
2465 cfun->gimple_df->in_ssa_p = true;
2466 init_ssa_operands (cfun);
2470 /* Helper function for copy_cfg_body. Move debug stmts from the end
2471 of NEW_BB to the beginning of successor basic blocks when needed. If the
2472 successor has multiple predecessors, reset the values of the moved debug
2473 stmts, otherwise keep them. */
2475 static void
2476 maybe_move_debug_stmts_to_successors (copy_body_data *id, basic_block new_bb)
2478 edge e;
2479 edge_iterator ei;
2480 gimple_stmt_iterator si = gsi_last_nondebug_bb (new_bb);
2482 if (gsi_end_p (si)
2483 || gsi_one_before_end_p (si)
2484 || !(stmt_can_throw_internal (gsi_stmt (si))
2485 || stmt_can_make_abnormal_goto (gsi_stmt (si))))
2486 return;
2488 FOR_EACH_EDGE (e, ei, new_bb->succs)
2490 gimple_stmt_iterator ssi = gsi_last_bb (new_bb);
2491 gimple_stmt_iterator dsi = gsi_after_labels (e->dest);
2492 while (is_gimple_debug (gsi_stmt (ssi)))
2494 gimple stmt = gsi_stmt (ssi);
2495 gdebug *new_stmt;
2496 tree var;
2497 tree value;
2499 /* For the last edge move the debug stmts instead of copying
2500 them. */
2501 if (ei_one_before_end_p (ei))
2503 si = ssi;
2504 gsi_prev (&ssi);
2505 if (!single_pred_p (e->dest) && gimple_debug_bind_p (stmt))
2506 gimple_debug_bind_reset_value (stmt);
2507 gsi_remove (&si, false);
2508 gsi_insert_before (&dsi, stmt, GSI_SAME_STMT);
2509 continue;
2512 if (gimple_debug_bind_p (stmt))
2514 var = gimple_debug_bind_get_var (stmt);
2515 if (single_pred_p (e->dest))
2517 value = gimple_debug_bind_get_value (stmt);
2518 value = unshare_expr (value);
2520 else
2521 value = NULL_TREE;
2522 new_stmt = gimple_build_debug_bind (var, value, stmt);
2524 else if (gimple_debug_source_bind_p (stmt))
2526 var = gimple_debug_source_bind_get_var (stmt);
2527 value = gimple_debug_source_bind_get_value (stmt);
2528 new_stmt = gimple_build_debug_source_bind (var, value, stmt);
2530 else
2531 gcc_unreachable ();
2532 gsi_insert_before (&dsi, new_stmt, GSI_SAME_STMT);
2533 id->debug_stmts.safe_push (new_stmt);
2534 gsi_prev (&ssi);
2539 /* Make a copy of the sub-loops of SRC_PARENT and place them
2540 as children of DEST_PARENT. */
2542 static void
2543 copy_loops (copy_body_data *id,
2544 struct loop *dest_parent, struct loop *src_parent)
2546 struct loop *src_loop = src_parent->inner;
2547 while (src_loop)
2549 if (!id->blocks_to_copy
2550 || bitmap_bit_p (id->blocks_to_copy, src_loop->header->index))
2552 struct loop *dest_loop = alloc_loop ();
2554 /* Assign the new loop its header and latch and associate
2555 those with the new loop. */
2556 dest_loop->header = (basic_block)src_loop->header->aux;
2557 dest_loop->header->loop_father = dest_loop;
2558 if (src_loop->latch != NULL)
2560 dest_loop->latch = (basic_block)src_loop->latch->aux;
2561 dest_loop->latch->loop_father = dest_loop;
2564 /* Copy loop meta-data. */
2565 copy_loop_info (src_loop, dest_loop);
2567 /* Finally place it into the loop array and the loop tree. */
2568 place_new_loop (cfun, dest_loop);
2569 flow_loop_tree_node_add (dest_parent, dest_loop);
2571 dest_loop->safelen = src_loop->safelen;
2572 dest_loop->dont_vectorize = src_loop->dont_vectorize;
2573 if (src_loop->force_vectorize)
2575 dest_loop->force_vectorize = true;
2576 cfun->has_force_vectorize_loops = true;
2578 if (src_loop->simduid)
2580 dest_loop->simduid = remap_decl (src_loop->simduid, id);
2581 cfun->has_simduid_loops = true;
2584 /* Recurse. */
2585 copy_loops (id, dest_loop, src_loop);
2587 src_loop = src_loop->next;
2591 /* Call cgraph_redirect_edge_call_stmt_to_callee on all calls in BB */
2593 void
2594 redirect_all_calls (copy_body_data * id, basic_block bb)
2596 gimple_stmt_iterator si;
2597 gimple last = last_stmt (bb);
2598 for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
2600 gimple stmt = gsi_stmt (si);
2601 if (is_gimple_call (stmt))
2603 struct cgraph_edge *edge = id->dst_node->get_edge (stmt);
2604 if (edge)
2606 edge->redirect_call_stmt_to_callee ();
2607 if (stmt == last && id->call_stmt && maybe_clean_eh_stmt (stmt))
2608 gimple_purge_dead_eh_edges (bb);
2614 /* Convert estimated frequencies into counts for NODE, scaling COUNT
2615 with each bb's frequency. Used when NODE has a 0-weight entry
2616 but we are about to inline it into a non-zero count call bb.
2617 See the comments for handle_missing_profiles() in predict.c for
2618 when this can happen for COMDATs. */
2620 void
2621 freqs_to_counts (struct cgraph_node *node, gcov_type count)
2623 basic_block bb;
2624 edge_iterator ei;
2625 edge e;
2626 struct function *fn = DECL_STRUCT_FUNCTION (node->decl);
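  /* Each block gets roughly count * frequency / BB_FREQ_MAX; edge counts
     are then derived from the edge probabilities.  */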
2628 FOR_ALL_BB_FN (bb, fn)
2630 bb->count = apply_scale (count,
2631 GCOV_COMPUTE_SCALE (bb->frequency, BB_FREQ_MAX));
2632 FOR_EACH_EDGE (e, ei, bb->succs)
2633 e->count = apply_probability (e->src->count, e->probability);
2637 /* Make a copy of the body of FN so that it can be inserted inline in
2638 another function. Walks FN via CFG, returns new fndecl. */
2640 static tree
2641 copy_cfg_body (copy_body_data * id, gcov_type count, int frequency_scale,
2642 basic_block entry_block_map, basic_block exit_block_map,
2643 basic_block new_entry)
2645 tree callee_fndecl = id->src_fn;
2646 /* Original cfun for the callee, doesn't change. */
2647 struct function *src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
2648 struct function *cfun_to_copy;
2649 basic_block bb;
2650 tree new_fndecl = NULL;
2651 bool need_debug_cleanup = false;
2652 gcov_type count_scale;
2653 int last;
2654 int incoming_frequency = 0;
2655 gcov_type incoming_count = 0;
2657 /* This can happen for COMDAT routines that end up with 0 counts
2658 despite being called (see the comments for handle_missing_profiles()
2659 in predict.c as to why). Apply counts to the blocks in the callee
2660 before inlining, using the guessed edge frequencies, so that we don't
2661 end up with a 0-count inline body which can confuse downstream
2662 optimizations such as function splitting. */
2663 if (!ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count && count)
2665 /* Apply the larger of the call bb count and the total incoming
2666 call edge count to the callee. */
2667 gcov_type in_count = 0;
2668 struct cgraph_edge *in_edge;
2669 for (in_edge = id->src_node->callers; in_edge;
2670 in_edge = in_edge->next_caller)
2671 in_count += in_edge->count;
2672 freqs_to_counts (id->src_node, count > in_count ? count : in_count);
2675 if (ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count)
2676 count_scale
2677 = GCOV_COMPUTE_SCALE (count,
2678 ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count);
2679 else
2680 count_scale = REG_BR_PROB_BASE;
2682 /* Register specific tree functions. */
2683 gimple_register_cfg_hooks ();
2685 /* If we are inlining just a region of the function, make sure to connect
2686 the new entry to ENTRY_BLOCK_PTR_FOR_FN (cfun). Since the new entry can be
2687 part of a loop, we must compute the frequency and probability of
2688 ENTRY_BLOCK_PTR_FOR_FN (cfun) based on the frequencies and
2689 probabilities of edges incoming from the nonduplicated region. */
2690 if (new_entry)
2692 edge e;
2693 edge_iterator ei;
2695 FOR_EACH_EDGE (e, ei, new_entry->preds)
2696 if (!e->src->aux)
2698 incoming_frequency += EDGE_FREQUENCY (e);
2699 incoming_count += e->count;
2701 incoming_count = apply_scale (incoming_count, count_scale);
2702 incoming_frequency
2703 = apply_scale ((gcov_type)incoming_frequency, frequency_scale);
2704 ENTRY_BLOCK_PTR_FOR_FN (cfun)->count = incoming_count;
2705 ENTRY_BLOCK_PTR_FOR_FN (cfun)->frequency = incoming_frequency;
2708 /* Must have a CFG here at this point. */
2709 gcc_assert (ENTRY_BLOCK_PTR_FOR_FN
2710 (DECL_STRUCT_FUNCTION (callee_fndecl)));
2712 cfun_to_copy = id->src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
2714 ENTRY_BLOCK_PTR_FOR_FN (cfun_to_copy)->aux = entry_block_map;
2715 EXIT_BLOCK_PTR_FOR_FN (cfun_to_copy)->aux = exit_block_map;
2716 entry_block_map->aux = ENTRY_BLOCK_PTR_FOR_FN (cfun_to_copy);
2717 exit_block_map->aux = EXIT_BLOCK_PTR_FOR_FN (cfun_to_copy);
2719 /* Duplicate any exception-handling regions. */
2720 if (cfun->eh)
2721 id->eh_map = duplicate_eh_regions (cfun_to_copy, NULL, id->eh_lp_nr,
2722 remap_decl_1, id);
2724 /* Use aux pointers to map the original blocks to their copies. */
2725 FOR_EACH_BB_FN (bb, cfun_to_copy)
2726 if (!id->blocks_to_copy || bitmap_bit_p (id->blocks_to_copy, bb->index))
2728 basic_block new_bb = copy_bb (id, bb, frequency_scale, count_scale);
2729 bb->aux = new_bb;
2730 new_bb->aux = bb;
2731 new_bb->loop_father = entry_block_map->loop_father;
2734 last = last_basic_block_for_fn (cfun);
2736 /* Now that we've duplicated the blocks, duplicate their edges. */
2737 basic_block abnormal_goto_dest = NULL;
2738 if (id->call_stmt
2739 && stmt_can_make_abnormal_goto (id->call_stmt))
2741 gimple_stmt_iterator gsi = gsi_for_stmt (id->call_stmt);
2743 bb = gimple_bb (id->call_stmt);
2744 gsi_next (&gsi);
2745 if (gsi_end_p (gsi))
2746 abnormal_goto_dest = get_abnormal_succ_dispatcher (bb);
2748 FOR_ALL_BB_FN (bb, cfun_to_copy)
2749 if (!id->blocks_to_copy
2750 || (bb->index > 0 && bitmap_bit_p (id->blocks_to_copy, bb->index)))
2751 need_debug_cleanup |= copy_edges_for_bb (bb, count_scale, exit_block_map,
2752 abnormal_goto_dest);
2754 if (new_entry)
2756 edge e = make_edge (entry_block_map, (basic_block)new_entry->aux, EDGE_FALLTHRU);
2757 e->probability = REG_BR_PROB_BASE;
2758 e->count = incoming_count;
2761 /* Duplicate the loop tree, if available and wanted. */
2762 if (loops_for_fn (src_cfun) != NULL
2763 && current_loops != NULL)
2765 copy_loops (id, entry_block_map->loop_father,
2766 get_loop (src_cfun, 0));
2767 /* Defer to cfgcleanup to update loop-father fields of basic-blocks. */
2768 loops_state_set (LOOPS_NEED_FIXUP);
2771 /* If the loop tree in the source function needed fixup, mark the
2772 destination loop tree for fixup, too. */
2773 if (loops_for_fn (src_cfun)->state & LOOPS_NEED_FIXUP)
2774 loops_state_set (LOOPS_NEED_FIXUP);
2776 if (gimple_in_ssa_p (cfun))
2777 FOR_ALL_BB_FN (bb, cfun_to_copy)
2778 if (!id->blocks_to_copy
2779 || (bb->index > 0 && bitmap_bit_p (id->blocks_to_copy, bb->index)))
2780 copy_phis_for_bb (bb, id);
2782 FOR_ALL_BB_FN (bb, cfun_to_copy)
2783 if (bb->aux)
2785 if (need_debug_cleanup
2786 && bb->index != ENTRY_BLOCK
2787 && bb->index != EXIT_BLOCK)
2788 maybe_move_debug_stmts_to_successors (id, (basic_block) bb->aux);
2789 /* Update call edge destinations. This cannot be done before loop
2790 info is updated, because we may split basic blocks. */
2791 if (id->transform_call_graph_edges == CB_CGE_DUPLICATE
2792 && bb->index != ENTRY_BLOCK
2793 && bb->index != EXIT_BLOCK)
2794 redirect_all_calls (id, (basic_block)bb->aux);
2795 ((basic_block)bb->aux)->aux = NULL;
2796 bb->aux = NULL;
2799 /* Zero out AUX fields of newly created block during EH edge
2800 insertion. */
2801 for (; last < last_basic_block_for_fn (cfun); last++)
2803 if (need_debug_cleanup)
2804 maybe_move_debug_stmts_to_successors (id,
2805 BASIC_BLOCK_FOR_FN (cfun, last));
2806 BASIC_BLOCK_FOR_FN (cfun, last)->aux = NULL;
2807 /* Update call edge destinations. This cannot be done before loop
2808 info is updated, because we may split basic blocks. */
2809 if (id->transform_call_graph_edges == CB_CGE_DUPLICATE)
2810 redirect_all_calls (id, BASIC_BLOCK_FOR_FN (cfun, last));
2812 entry_block_map->aux = NULL;
2813 exit_block_map->aux = NULL;
2815 if (id->eh_map)
2817 delete id->eh_map;
2818 id->eh_map = NULL;
2820 if (id->dependence_map)
2822 delete id->dependence_map;
2823 id->dependence_map = NULL;
2826 return new_fndecl;
2829 /* Copy the debug STMT using ID. We deal with these statements in a
2830 special way: if any variable in their VALUE expression wasn't
2831 remapped yet, we won't remap it, because that would get decl uids
2832 out of sync, causing codegen differences between -g and -g0. If
2833 this arises, we drop the VALUE expression altogether. */
2835 static void
2836 copy_debug_stmt (gdebug *stmt, copy_body_data *id)
2838 tree t, *n;
2839 struct walk_stmt_info wi;
2841 if (gimple_block (stmt))
2843 n = id->decl_map->get (gimple_block (stmt));
2844 gimple_set_block (stmt, n ? *n : id->block);
2847 /* Remap all the operands in COPY. */
2848 memset (&wi, 0, sizeof (wi));
2849 wi.info = id;
2851 processing_debug_stmt = 1;
2853 if (gimple_debug_source_bind_p (stmt))
2854 t = gimple_debug_source_bind_get_var (stmt);
2855 else
2856 t = gimple_debug_bind_get_var (stmt);
2858 if (TREE_CODE (t) == PARM_DECL && id->debug_map
2859 && (n = id->debug_map->get (t)))
2861 gcc_assert (TREE_CODE (*n) == VAR_DECL);
2862 t = *n;
2864 else if (TREE_CODE (t) == VAR_DECL
2865 && !is_global_var (t)
2866 && !id->decl_map->get (t))
2867 /* T is a non-localized variable. */;
2868 else
2869 walk_tree (&t, remap_gimple_op_r, &wi, NULL);
2871 if (gimple_debug_bind_p (stmt))
2873 gimple_debug_bind_set_var (stmt, t);
2875 if (gimple_debug_bind_has_value_p (stmt))
2876 walk_tree (gimple_debug_bind_get_value_ptr (stmt),
2877 remap_gimple_op_r, &wi, NULL);
2879 /* Punt if any decl couldn't be remapped. */
2880 if (processing_debug_stmt < 0)
2881 gimple_debug_bind_reset_value (stmt);
2883 else if (gimple_debug_source_bind_p (stmt))
2885 gimple_debug_source_bind_set_var (stmt, t);
2886 walk_tree (gimple_debug_source_bind_get_value_ptr (stmt),
2887 remap_gimple_op_r, &wi, NULL);
2888 /* When inlining, if the source bind refers to one of the optimized-
2889 away parameters, change the source bind into a normal debug bind
2890 referring to the corresponding DEBUG_EXPR_DECL that should have
2891 been bound before the call stmt. */
2892 t = gimple_debug_source_bind_get_value (stmt);
2893 if (t != NULL_TREE
2894 && TREE_CODE (t) == PARM_DECL
2895 && id->call_stmt)
2897 vec<tree, va_gc> **debug_args = decl_debug_args_lookup (id->src_fn);
2898 unsigned int i;
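	  /* The debug args vector holds pairs of (DECL_ORIGIN of a parameter,
	     its DEBUG_EXPR_DECL), hence the loop below steps by two.  */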
2899 if (debug_args != NULL)
2901 for (i = 0; i < vec_safe_length (*debug_args); i += 2)
2902 if ((**debug_args)[i] == DECL_ORIGIN (t)
2903 && TREE_CODE ((**debug_args)[i + 1]) == DEBUG_EXPR_DECL)
2905 t = (**debug_args)[i + 1];
2906 stmt->subcode = GIMPLE_DEBUG_BIND;
2907 gimple_debug_bind_set_value (stmt, t);
2908 break;
2914 processing_debug_stmt = 0;
2916 update_stmt (stmt);
2919 /* Process deferred debug stmts. In order to give values better odds
2920 of being successfully remapped, we delay the processing of debug
2921 stmts until all other stmts that might require remapping are
2922 processed. */
2924 static void
2925 copy_debug_stmts (copy_body_data *id)
2927 size_t i;
2928 gdebug *stmt;
2930 if (!id->debug_stmts.exists ())
2931 return;
2933 FOR_EACH_VEC_ELT (id->debug_stmts, i, stmt)
2934 copy_debug_stmt (stmt, id);
2936 id->debug_stmts.release ();
2939 /* Make a copy of the body of SRC_FN so that it can be inserted inline in
2940 another function. */
2942 static tree
2943 copy_tree_body (copy_body_data *id)
2945 tree fndecl = id->src_fn;
2946 tree body = DECL_SAVED_TREE (fndecl);
2948 walk_tree (&body, copy_tree_body_r, id, NULL);
2950 return body;
2953 /* Make a copy of the body of FN so that it can be inserted inline in
2954 another function. */
2956 static tree
2957 copy_body (copy_body_data *id, gcov_type count, int frequency_scale,
2958 basic_block entry_block_map, basic_block exit_block_map,
2959 basic_block new_entry)
2961 tree fndecl = id->src_fn;
2962 tree body;
2964 /* If this body has a CFG, walk CFG and copy. */
2965 gcc_assert (ENTRY_BLOCK_PTR_FOR_FN (DECL_STRUCT_FUNCTION (fndecl)));
2966 body = copy_cfg_body (id, count, frequency_scale, entry_block_map, exit_block_map,
2967 new_entry);
2968 copy_debug_stmts (id);
2970 return body;
2973 /* Return true if VALUE is an ADDR_EXPR of an automatic variable
2974 defined in function FN, or of a data member thereof. */
2976 static bool
2977 self_inlining_addr_expr (tree value, tree fn)
2979 tree var;
2981 if (TREE_CODE (value) != ADDR_EXPR)
2982 return false;
2984 var = get_base_address (TREE_OPERAND (value, 0));
2986 return var && auto_var_in_fn_p (var, fn);
2989 /* Append to BB a debug annotation that binds VAR to VALUE, inheriting
2990 lexical block and line number information from base_stmt, if given,
2991 or from the last stmt of the block otherwise. */
2993 static gimple
2994 insert_init_debug_bind (copy_body_data *id,
2995 basic_block bb, tree var, tree value,
2996 gimple base_stmt)
2998 gimple note;
2999 gimple_stmt_iterator gsi;
3000 tree tracked_var;
3002 if (!gimple_in_ssa_p (id->src_cfun))
3003 return NULL;
3005 if (!opt_for_fn (id->dst_fn, flag_var_tracking_assignments))
3006 return NULL;
3008 tracked_var = target_for_debug_bind (var);
3009 if (!tracked_var)
3010 return NULL;
3012 if (bb)
3014 gsi = gsi_last_bb (bb);
3015 if (!base_stmt && !gsi_end_p (gsi))
3016 base_stmt = gsi_stmt (gsi);
3019 note = gimple_build_debug_bind (tracked_var, unshare_expr (value), base_stmt);
3021 if (bb)
3023 if (!gsi_end_p (gsi))
3024 gsi_insert_after (&gsi, note, GSI_SAME_STMT);
3025 else
3026 gsi_insert_before (&gsi, note, GSI_SAME_STMT);
3029 return note;
3032 static void
3033 insert_init_stmt (copy_body_data *id, basic_block bb, gimple init_stmt)
3035 /* If VAR represents a zero-sized variable, it's possible that the
3036 assignment statement may result in no gimple statements. */
3037 if (init_stmt)
3039 gimple_stmt_iterator si = gsi_last_bb (bb);
3041 /* We can end up with init statements that store to a non-register
3042 from a rhs with a conversion. Handle that here by forcing the
3043 rhs into a temporary. gimple_regimplify_operands is not
3044 prepared to do this for us. */
3045 if (!is_gimple_debug (init_stmt)
3046 && !is_gimple_reg (gimple_assign_lhs (init_stmt))
3047 && is_gimple_reg_type (TREE_TYPE (gimple_assign_lhs (init_stmt)))
3048 && gimple_assign_rhs_class (init_stmt) == GIMPLE_UNARY_RHS)
3050 tree rhs = build1 (gimple_assign_rhs_code (init_stmt),
3051 gimple_expr_type (init_stmt),
3052 gimple_assign_rhs1 (init_stmt));
3053 rhs = force_gimple_operand_gsi (&si, rhs, true, NULL_TREE, false,
3054 GSI_NEW_STMT);
3055 gimple_assign_set_rhs_code (init_stmt, TREE_CODE (rhs));
3056 gimple_assign_set_rhs1 (init_stmt, rhs);
3058 gsi_insert_after (&si, init_stmt, GSI_NEW_STMT);
3059 gimple_regimplify_operands (init_stmt, &si);
3061 if (!is_gimple_debug (init_stmt))
3063 tree def = gimple_assign_lhs (init_stmt);
3064 insert_init_debug_bind (id, bb, def, def, init_stmt);
3069 /* Initialize parameter P with VALUE. If needed, produce an init statement
3070 at the end of BB. When BB is NULL, we return the init statement to be
3071 output later. */
3072 static gimple
3073 setup_one_parameter (copy_body_data *id, tree p, tree value, tree fn,
3074 basic_block bb, tree *vars)
3076 gimple init_stmt = NULL;
3077 tree var;
3078 tree rhs = value;
3079 tree def = (gimple_in_ssa_p (cfun)
3080 ? ssa_default_def (id->src_cfun, p) : NULL);
3082 if (value
3083 && value != error_mark_node
3084 && !useless_type_conversion_p (TREE_TYPE (p), TREE_TYPE (value)))
3086 /* If we can match up types by promotion/demotion do so. */
3087 if (fold_convertible_p (TREE_TYPE (p), value))
3088 rhs = fold_convert (TREE_TYPE (p), value);
3089 else
3091 /* ??? For valid programs we should not end up here.
3092 Still if we end up with truly mismatched types here, fall back
3093 to using a VIEW_CONVERT_EXPR or a literal zero to not leak invalid
3094 GIMPLE to the following passes. */
3095 if (!is_gimple_reg_type (TREE_TYPE (value))
3096 || TYPE_SIZE (TREE_TYPE (p)) == TYPE_SIZE (TREE_TYPE (value)))
3097 rhs = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (p), value);
3098 else
3099 rhs = build_zero_cst (TREE_TYPE (p));
3103 /* Make an equivalent VAR_DECL. Note that we must NOT remap the type
3104 here since the type of this decl must be visible to the calling
3105 function. */
3106 var = copy_decl_to_var (p, id);
3108 /* Declare this new variable. */
3109 DECL_CHAIN (var) = *vars;
3110 *vars = var;
3112 /* Make gimplifier happy about this variable. */
3113 DECL_SEEN_IN_BIND_EXPR_P (var) = 1;
3115 /* If the parameter is never assigned to and has no SSA_NAMEs created,
3116 we would not need to create a new variable here at all, if it
3117 weren't for debug info. Still, we can just use the argument
3118 value. */
3119 if (TREE_READONLY (p)
3120 && !TREE_ADDRESSABLE (p)
3121 && value && !TREE_SIDE_EFFECTS (value)
3122 && !def)
3124 /* We may produce non-gimple trees by adding NOPs or introduce
3125 invalid sharing when operand is not really constant.
3126 It is not big deal to prohibit constant propagation here as
3127 we will constant propagate in DOM1 pass anyway. */
3128 if (is_gimple_min_invariant (value)
3129 && useless_type_conversion_p (TREE_TYPE (p),
3130 TREE_TYPE (value))
3131 /* We have to be very careful about ADDR_EXPR. Make sure
3132 the base variable isn't a local variable of the inlined
3133 function, e.g., when doing recursive inlining, direct or
3134 mutually-recursive or whatever, which is why we don't
3135 just test whether fn == current_function_decl. */
3136 && ! self_inlining_addr_expr (value, fn))
3138 insert_decl_map (id, p, value);
3139 insert_debug_decl_map (id, p, var);
3140 return insert_init_debug_bind (id, bb, var, value, NULL);
3144 /* Register the VAR_DECL as the equivalent for the PARM_DECL;
3145 that way, when the PARM_DECL is encountered, it will be
3146 automatically replaced by the VAR_DECL. */
3147 insert_decl_map (id, p, var);
3149 /* Even if P was TREE_READONLY, the new VAR should not be.
3150 In the original code, we would have constructed a
3151 temporary, and then the function body would have never
3152 changed the value of P. However, now, we will be
3153 constructing VAR directly. The constructor body may
3154 change its value multiple times as it is being
3155 constructed. Therefore, it must not be TREE_READONLY;
3156 the back-end assumes that a TREE_READONLY variable is
3157 assigned to only once. */
3158 if (TYPE_NEEDS_CONSTRUCTING (TREE_TYPE (p)))
3159 TREE_READONLY (var) = 0;
3161 /* If there is no setup required and we are in SSA, take the easy route
3162 replacing all SSA names representing the function parameter by the
3163 SSA name passed to the function.
3165 We need to construct a map for the variable anyway, as it might be used
3166 in different SSA names when the parameter is set in the function.
3168 Do the replacement at -O0 for const arguments replaced by a constant.
3169 This is important for builtin_constant_p and other constructs requiring
3170 a constant argument to be visible in the inlined function body. */
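  /* For instance, an always_inline function that tests __builtin_constant_p
     on a const parameter still sees the constant for a call like f (5), even
     at -O0, because the parameter's default definition is mapped directly to
     the constant argument.  */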
3171 if (gimple_in_ssa_p (cfun) && rhs && def && is_gimple_reg (p)
3172 && (optimize
3173 || (TREE_READONLY (p)
3174 && is_gimple_min_invariant (rhs)))
3175 && (TREE_CODE (rhs) == SSA_NAME
3176 || is_gimple_min_invariant (rhs))
3177 && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (def))
3179 insert_decl_map (id, def, rhs);
3180 return insert_init_debug_bind (id, bb, var, rhs, NULL);
3183 /* If the value of the argument is never used, don't bother initializing
3184 it. */
3185 if (optimize && gimple_in_ssa_p (cfun) && !def && is_gimple_reg (p))
3187 gcc_assert (!value || !TREE_SIDE_EFFECTS (value));
3188 return insert_init_debug_bind (id, bb, var, rhs, NULL);
3191 /* Initialize this VAR_DECL from the equivalent argument. Convert
3192 the argument to the proper type in case it was promoted. */
3193 if (value)
3195 if (rhs == error_mark_node)
3197 insert_decl_map (id, p, var);
3198 return insert_init_debug_bind (id, bb, var, rhs, NULL);
3201 STRIP_USELESS_TYPE_CONVERSION (rhs);
3203 /* If we are in SSA form properly remap the default definition
3204 or assign to a dummy SSA name if the parameter is unused and
3205 we are not optimizing. */
3206 if (gimple_in_ssa_p (cfun) && is_gimple_reg (p))
3208 if (def)
3210 def = remap_ssa_name (def, id);
3211 init_stmt = gimple_build_assign (def, rhs);
3212 SSA_NAME_IS_DEFAULT_DEF (def) = 0;
3213 set_ssa_default_def (cfun, var, NULL);
3215 else if (!optimize)
3217 def = make_ssa_name (var);
3218 init_stmt = gimple_build_assign (def, rhs);
3221 else
3222 init_stmt = gimple_build_assign (var, rhs);
3224 if (bb && init_stmt)
3225 insert_init_stmt (id, bb, init_stmt);
3227 return init_stmt;
3230 /* Generate code to initialize the parameters of the function at the
3231 top of the stack in ID from the GIMPLE_CALL STMT. */
3233 static void
3234 initialize_inlined_parameters (copy_body_data *id, gimple stmt,
3235 tree fn, basic_block bb)
3237 tree parms;
3238 size_t i;
3239 tree p;
3240 tree vars = NULL_TREE;
3241 tree static_chain = gimple_call_chain (stmt);
3243 /* Figure out what the parameters are. */
3244 parms = DECL_ARGUMENTS (fn);
3246 /* Loop through the parameter declarations, replacing each with an
3247 equivalent VAR_DECL, appropriately initialized. */
3248 for (p = parms, i = 0; p; p = DECL_CHAIN (p), i++)
3250 tree val;
3251 val = i < gimple_call_num_args (stmt) ? gimple_call_arg (stmt, i) : NULL;
3252 setup_one_parameter (id, p, val, fn, bb, &vars);
3254 /* After remapping parameters remap their types. This has to be done
3255 in a second loop over all parameters to appropriately remap
3256 variable sized arrays when the size is specified in a
3257 parameter following the array. */
3258 for (p = parms, i = 0; p; p = DECL_CHAIN (p), i++)
3260 tree *varp = id->decl_map->get (p);
3261 if (varp
3262 && TREE_CODE (*varp) == VAR_DECL)
3264 tree def = (gimple_in_ssa_p (cfun) && is_gimple_reg (p)
3265 ? ssa_default_def (id->src_cfun, p) : NULL);
3266 tree var = *varp;
3267 TREE_TYPE (var) = remap_type (TREE_TYPE (var), id);
3268 /* Also remap the default definition if it was remapped
3269 to the default definition of the parameter replacement
3270 by the parameter setup. */
3271 if (def)
3273 tree *defp = id->decl_map->get (def);
3274 if (defp
3275 && TREE_CODE (*defp) == SSA_NAME
3276 && SSA_NAME_VAR (*defp) == var)
3277 TREE_TYPE (*defp) = TREE_TYPE (var);
3282 /* Initialize the static chain. */
3283 p = DECL_STRUCT_FUNCTION (fn)->static_chain_decl;
3284 gcc_assert (fn != current_function_decl);
3285 if (p)
3287 /* No static chain? Seems like a bug in tree-nested.c. */
3288 gcc_assert (static_chain);
3290 setup_one_parameter (id, p, static_chain, fn, bb, &vars);
3293 declare_inline_vars (id->block, vars);
3297 /* Declare a return variable to replace the RESULT_DECL for the
3298 function we are calling. An appropriate DECL_STMT is returned.
3299 The USE_STMT is filled to contain a use of the declaration to
3300 indicate the return value of the function.
3302 RETURN_SLOT, if non-null, is the place where to store the result. It
3303 is set only for CALL_EXPR_RETURN_SLOT_OPT. MODIFY_DEST, if non-null,
3304 was the LHS of the MODIFY_EXPR to which this call is the RHS.
3306 RETURN_BOUNDS holds a destination for returned bounds.
3308 The return value is a (possibly null) value that holds the result
3309 as seen by the caller. */
3311 static tree
3312 declare_return_variable (copy_body_data *id, tree return_slot, tree modify_dest,
3313 tree return_bounds, basic_block entry_bb)
3315 tree callee = id->src_fn;
3316 tree result = DECL_RESULT (callee);
3317 tree callee_type = TREE_TYPE (result);
3318 tree caller_type;
3319 tree var, use;
3321 /* Handle type-mismatches in the function declaration return type
3322 vs. the call expression. */
3323 if (modify_dest)
3324 caller_type = TREE_TYPE (modify_dest);
3325 else
3326 caller_type = TREE_TYPE (TREE_TYPE (callee));
3328 /* We don't need to do anything for functions that don't return anything. */
3329 if (VOID_TYPE_P (callee_type))
3330 return NULL_TREE;
3332 /* If there was a return slot, then the return value is the
3333 dereferenced address of that object. */
3334 if (return_slot)
3336 /* The front end shouldn't have used both return_slot and
3337 a modify expression. */
3338 gcc_assert (!modify_dest);
3339 if (DECL_BY_REFERENCE (result))
3341 tree return_slot_addr = build_fold_addr_expr (return_slot);
3342 STRIP_USELESS_TYPE_CONVERSION (return_slot_addr);
3344 /* We are going to construct *&return_slot and we can't do that
3345 for variables believed to be not addressable.
3347 FIXME: This check can possibly trigger, because values returned
3348 via return slot optimization are not believed to have their address
3349 taken by alias analysis. */
3350 gcc_assert (TREE_CODE (return_slot) != SSA_NAME);
3351 var = return_slot_addr;
3353 else
3355 var = return_slot;
3356 gcc_assert (TREE_CODE (var) != SSA_NAME);
3357 if (TREE_ADDRESSABLE (result))
3358 mark_addressable (var);
3360 if ((TREE_CODE (TREE_TYPE (result)) == COMPLEX_TYPE
3361 || TREE_CODE (TREE_TYPE (result)) == VECTOR_TYPE)
3362 && !DECL_GIMPLE_REG_P (result)
3363 && DECL_P (var))
3364 DECL_GIMPLE_REG_P (var) = 0;
3365 use = NULL;
3366 goto done;
3369 /* All types requiring non-trivial constructors should have been handled. */
3370 gcc_assert (!TREE_ADDRESSABLE (callee_type));
3372 /* Attempt to avoid creating a new temporary variable. */
3373 if (modify_dest
3374 && TREE_CODE (modify_dest) != SSA_NAME)
3376 bool use_it = false;
3378 /* We can't use MODIFY_DEST if there's type promotion involved. */
3379 if (!useless_type_conversion_p (callee_type, caller_type))
3380 use_it = false;
3382 /* ??? If we're assigning to a variable sized type, then we must
3383 reuse the destination variable, because we've no good way to
3384 create variable sized temporaries at this point. */
3385 else if (TREE_CODE (TYPE_SIZE_UNIT (caller_type)) != INTEGER_CST)
3386 use_it = true;
3388 /* If the callee cannot possibly modify MODIFY_DEST, then we can
3389 reuse it as the result of the call directly. Don't do this if
3390 it would promote MODIFY_DEST to addressable. */
3391 else if (TREE_ADDRESSABLE (result))
3392 use_it = false;
3393 else
3395 tree base_m = get_base_address (modify_dest);
3397 /* If the base isn't a decl, then it's a pointer, and we don't
3398 know where that's going to go. */
3399 if (!DECL_P (base_m))
3400 use_it = false;
3401 else if (is_global_var (base_m))
3402 use_it = false;
3403 else if ((TREE_CODE (TREE_TYPE (result)) == COMPLEX_TYPE
3404 || TREE_CODE (TREE_TYPE (result)) == VECTOR_TYPE)
3405 && !DECL_GIMPLE_REG_P (result)
3406 && DECL_GIMPLE_REG_P (base_m))
3407 use_it = false;
3408 else if (!TREE_ADDRESSABLE (base_m))
3409 use_it = true;
3412 if (use_it)
3414 var = modify_dest;
3415 use = NULL;
3416 goto done;
3420 gcc_assert (TREE_CODE (TYPE_SIZE_UNIT (callee_type)) == INTEGER_CST);
3422 var = copy_result_decl_to_var (result, id);
3423 DECL_SEEN_IN_BIND_EXPR_P (var) = 1;
3425 /* Do not have the rest of GCC warn about this variable as it should
3426 not be visible to the user. */
3427 TREE_NO_WARNING (var) = 1;
3429 declare_inline_vars (id->block, var);
3431 /* Build the use expr. If the return type of the function was
3432 promoted, convert it back to the expected type. */
3433 use = var;
3434 if (!useless_type_conversion_p (caller_type, TREE_TYPE (var)))
3436 /* If we can match up types by promotion/demotion do so. */
3437 if (fold_convertible_p (caller_type, var))
3438 use = fold_convert (caller_type, var);
3439 else
3441 /* ??? For valid programs we should not end up here.
3442 Still if we end up with truly mismatched types here, fall back
3443 to using a MEM_REF to not leak invalid GIMPLE to the following
3444 passes. */
3445 /* Prevent var from being written into SSA form. */
3446 if (TREE_CODE (TREE_TYPE (var)) == VECTOR_TYPE
3447 || TREE_CODE (TREE_TYPE (var)) == COMPLEX_TYPE)
3448 DECL_GIMPLE_REG_P (var) = false;
3449 else if (is_gimple_reg_type (TREE_TYPE (var)))
3450 TREE_ADDRESSABLE (var) = true;
3451 use = fold_build2 (MEM_REF, caller_type,
3452 build_fold_addr_expr (var),
3453 build_int_cst (ptr_type_node, 0));
3457 STRIP_USELESS_TYPE_CONVERSION (use);
3459 if (DECL_BY_REFERENCE (result))
3461 TREE_ADDRESSABLE (var) = 1;
3462 var = build_fold_addr_expr (var);
3465 done:
3466 /* Register the VAR_DECL as the equivalent for the RESULT_DECL; that
3467 way, when the RESULT_DECL is encountered, it will be
3468 automatically replaced by the VAR_DECL.
3470 When returning by reference, ensure that RESULT_DECL remaps to
3471 gimple_val. */
3472 if (DECL_BY_REFERENCE (result)
3473 && !is_gimple_val (var))
3475 tree temp = create_tmp_var (TREE_TYPE (result), "retvalptr");
3476 insert_decl_map (id, result, temp);
3477 /* When RESULT_DECL is in SSA form, we need to remap and initialize
3478 its default_def SSA_NAME. */
3479 if (gimple_in_ssa_p (id->src_cfun)
3480 && is_gimple_reg (result))
3482 temp = make_ssa_name (temp);
3483 insert_decl_map (id, ssa_default_def (id->src_cfun, result), temp);
3485 insert_init_stmt (id, entry_bb, gimple_build_assign (temp, var));
3487 else
3488 insert_decl_map (id, result, var);
3490 /* Remember this so we can ignore it in remap_decls. */
3491 id->retvar = var;
3493 /* If returned bounds are used, then make a var for them. */
3494 if (return_bounds)
3496 tree bndtemp = create_tmp_var (pointer_bounds_type_node, "retbnd");
3497 DECL_SEEN_IN_BIND_EXPR_P (bndtemp) = 1;
3498 TREE_NO_WARNING (bndtemp) = 1;
3499 declare_inline_vars (id->block, bndtemp);
3501 id->retbnd = bndtemp;
3502 insert_init_stmt (id, entry_bb,
3503 gimple_build_assign (bndtemp, chkp_get_zero_bounds_var ()));
3506 return use;
3509 /* Callback through walk_tree. Determine if a DECL_INITIAL makes reference
3510 to a local label. */
3512 static tree
3513 has_label_address_in_static_1 (tree *nodep, int *walk_subtrees, void *fnp)
3515 tree node = *nodep;
3516 tree fn = (tree) fnp;
3518 if (TREE_CODE (node) == LABEL_DECL && DECL_CONTEXT (node) == fn)
3519 return node;
3521 if (TYPE_P (node))
3522 *walk_subtrees = 0;
3524 return NULL_TREE;
3527 /* Determine if the function can be copied. If so, return NULL. If
3528 not, return a string describing the reason for failure. */
3530 const char *
3531 copy_forbidden (struct function *fun, tree fndecl)
3533 const char *reason = fun->cannot_be_copied_reason;
3534 tree decl;
3535 unsigned ix;
3537 /* Only examine the function once. */
3538 if (fun->cannot_be_copied_set)
3539 return reason;
3541 /* We cannot copy a function that receives a non-local goto
3542 because we cannot remap the destination label used in the
3543 function that is performing the non-local goto. */
3544 /* ??? Actually, this should be possible, if we work at it.
3545 No doubt there's just a handful of places that simply
3546 assume it doesn't happen and don't substitute properly. */
3547 if (fun->has_nonlocal_label)
3549 reason = G_("function %q+F can never be copied "
3550 "because it receives a non-local goto");
3551 goto fail;
3554 FOR_EACH_LOCAL_DECL (fun, ix, decl)
3555 if (TREE_CODE (decl) == VAR_DECL
3556 && TREE_STATIC (decl)
3557 && !DECL_EXTERNAL (decl)
3558 && DECL_INITIAL (decl)
3559 && walk_tree_without_duplicates (&DECL_INITIAL (decl),
3560 has_label_address_in_static_1,
3561 fndecl))
3563 reason = G_("function %q+F can never be copied because it saves "
3564 "address of local label in a static variable");
3565 goto fail;
3568 fail:
3569 fun->cannot_be_copied_reason = reason;
3570 fun->cannot_be_copied_set = true;
3571 return reason;
3575 static const char *inline_forbidden_reason;
3577 /* A callback for walk_gimple_seq to handle statements. Returns non-null
3578 iff a function cannot be inlined. Also sets the reason why. */
3580 static tree
3581 inline_forbidden_p_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
3582 struct walk_stmt_info *wip)
3584 tree fn = (tree) wip->info;
3585 tree t;
3586 gimple stmt = gsi_stmt (*gsi);
3588 switch (gimple_code (stmt))
3590 case GIMPLE_CALL:
3591 /* Refuse to inline an alloca call unless the user explicitly forced it, as
3592 this may change the program's memory overhead drastically when the
3593 function using alloca is called in a loop. In the GCC present in
3594 SPEC2000, inlining into schedule_block caused it to require 2GB of
3595 RAM instead of 256MB. Don't do so for alloca calls emitted for
3596 VLA objects, as those can't cause unbounded growth (they're always
3597 wrapped inside stack_save/stack_restore regions). */
3598 if (gimple_alloca_call_p (stmt)
3599 && !gimple_call_alloca_for_var_p (as_a <gcall *> (stmt))
3600 && !lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn)))
3602 inline_forbidden_reason
3603 = G_("function %q+F can never be inlined because it uses "
3604 "alloca (override using the always_inline attribute)");
3605 *handled_ops_p = true;
3606 return fn;
3609 t = gimple_call_fndecl (stmt);
3610 if (t == NULL_TREE)
3611 break;
3613 /* We cannot inline functions that call setjmp. */
3614 if (setjmp_call_p (t))
3616 inline_forbidden_reason
3617 = G_("function %q+F can never be inlined because it uses setjmp");
3618 *handled_ops_p = true;
3619 return t;
3622 if (DECL_BUILT_IN_CLASS (t) == BUILT_IN_NORMAL)
3623 switch (DECL_FUNCTION_CODE (t))
3625 /* We cannot inline functions that take a variable number of
3626 arguments. */
3627 case BUILT_IN_VA_START:
3628 case BUILT_IN_NEXT_ARG:
3629 case BUILT_IN_VA_END:
3630 inline_forbidden_reason
3631 = G_("function %q+F can never be inlined because it "
3632 "uses variable argument lists");
3633 *handled_ops_p = true;
3634 return t;
3636 case BUILT_IN_LONGJMP:
3637 /* We can't inline functions that call __builtin_longjmp at
3638 all. The non-local goto machinery really requires the
3639 destination be in a different function. If we allow the
3640 function calling __builtin_longjmp to be inlined into the
3641 function calling __builtin_setjmp, Things will Go Awry. */
3642 inline_forbidden_reason
3643 = G_("function %q+F can never be inlined because "
3644 "it uses setjmp-longjmp exception handling");
3645 *handled_ops_p = true;
3646 return t;
3648 case BUILT_IN_NONLOCAL_GOTO:
3649 /* Similarly. */
3650 inline_forbidden_reason
3651 = G_("function %q+F can never be inlined because "
3652 "it uses non-local goto");
3653 *handled_ops_p = true;
3654 return t;
3656 case BUILT_IN_RETURN:
3657 case BUILT_IN_APPLY_ARGS:
3658 /* If a __builtin_apply_args caller would be inlined,
3659 it would be saving arguments of the function it has
3660 been inlined into. Similarly __builtin_return would
3661 return from the function into which the inlined body was placed. */
3662 inline_forbidden_reason
3663 = G_("function %q+F can never be inlined because "
3664 "it uses __builtin_return or __builtin_apply_args");
3665 *handled_ops_p = true;
3666 return t;
3668 default:
3669 break;
3671 break;
3673 case GIMPLE_GOTO:
3674 t = gimple_goto_dest (stmt);
3676 /* We will not inline a function which uses computed goto. The
3677 addresses of its local labels, which may be tucked into
3678 global storage, are of course not constant across
3679 instantiations, which causes unexpected behavior. */
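/* E.g., a hypothetical sketch using the GNU label-value extension:
     static void *resume;
     ...
     resume = &&after_init;
     ...
     goto *resume;
   The stored label address is only meaningful within the original
   instantiation of the function. */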
3680 if (TREE_CODE (t) != LABEL_DECL)
3682 inline_forbidden_reason
3683 = G_("function %q+F can never be inlined "
3684 "because it contains a computed goto");
3685 *handled_ops_p = true;
3686 return t;
3688 break;
3690 default:
3691 break;
3694 *handled_ops_p = false;
3695 return NULL_TREE;
3698 /* Return true if FNDECL is a function that cannot be inlined into
3699 another one. */
3701 static bool
3702 inline_forbidden_p (tree fndecl)
3704 struct function *fun = DECL_STRUCT_FUNCTION (fndecl);
3705 struct walk_stmt_info wi;
3706 basic_block bb;
3707 bool forbidden_p = false;
3709 /* First check for shared reasons not to copy the code. */
3710 inline_forbidden_reason = copy_forbidden (fun, fndecl);
3711 if (inline_forbidden_reason != NULL)
3712 return true;
3714 /* Next, walk the statements of the function looking for
3715 constructs we can't handle, or that are non-optimal for inlining. */
3716 hash_set<tree> visited_nodes;
3717 memset (&wi, 0, sizeof (wi));
3718 wi.info = (void *) fndecl;
3719 wi.pset = &visited_nodes;
3721 FOR_EACH_BB_FN (bb, fun)
3723 gimple ret;
3724 gimple_seq seq = bb_seq (bb);
3725 ret = walk_gimple_seq (seq, inline_forbidden_p_stmt, NULL, &wi);
3726 forbidden_p = (ret != NULL);
3727 if (forbidden_p)
3728 break;
3731 return forbidden_p;
3734 /* Return false if the function FNDECL cannot be inlined on account of its
3735 attributes, true otherwise. */
3736 static bool
3737 function_attribute_inlinable_p (const_tree fndecl)
3739 if (targetm.attribute_table)
3741 const_tree a;
3743 for (a = DECL_ATTRIBUTES (fndecl); a; a = TREE_CHAIN (a))
3745 const_tree name = TREE_PURPOSE (a);
3746 int i;
3748 for (i = 0; targetm.attribute_table[i].name != NULL; i++)
3749 if (is_attribute_p (targetm.attribute_table[i].name, name))
3750 return targetm.function_attribute_inlinable_p (fndecl);
3754 return true;
3757 /* Returns nonzero if FN is a function that does not have any
3758 fundamental inline blocking properties. */
3760 bool
3761 tree_inlinable_function_p (tree fn)
3763 bool inlinable = true;
3764 bool do_warning;
3765 tree always_inline;
3767 /* If we've already decided this function shouldn't be inlined,
3768 there's no need to check again. */
3769 if (DECL_UNINLINABLE (fn))
3770 return false;
3772 /* We only warn for functions declared `inline' by the user. */
3773 do_warning = (warn_inline
3774 && DECL_DECLARED_INLINE_P (fn)
3775 && !DECL_NO_INLINE_WARNING_P (fn)
3776 && !DECL_IN_SYSTEM_HEADER (fn));
3778 always_inline = lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn));
3780 if (flag_no_inline
3781 && always_inline == NULL)
3783 if (do_warning)
3784 warning (OPT_Winline, "function %q+F can never be inlined because it "
3785 "is suppressed using -fno-inline", fn);
3786 inlinable = false;
3789 else if (!function_attribute_inlinable_p (fn))
3791 if (do_warning)
3792 warning (OPT_Winline, "function %q+F can never be inlined because it "
3793 "uses attributes conflicting with inlining", fn);
3794 inlinable = false;
3797 else if (inline_forbidden_p (fn))
3799 /* See if we should warn about uninlinable functions. Previously,
3800 some of these warnings would be issued while trying to expand
3801 the function inline, but that would cause multiple warnings
3802 about functions that would for example call alloca. But since
3803 this is a property of the function, just one warning is enough.
3804 As a bonus we can now give more details about the reason why a
3805 function is not inlinable. */
3806 if (always_inline)
3807 error (inline_forbidden_reason, fn);
3808 else if (do_warning)
3809 warning (OPT_Winline, inline_forbidden_reason, fn);
3811 inlinable = false;
3814 /* Squirrel away the result so that we don't have to check again. */
3815 DECL_UNINLINABLE (fn) = !inlinable;
3817 return inlinable;
3820 /* Estimate the cost of a memory move of type TYPE. Use the machine-dependent
3821 word size, take a possible memcpy call into account, and return the cost
3822 based on whether we are optimizing for size or speed, according to SPEED_P. */
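/* A worked example under assumed target parameters: with MOVE_MAX_PIECES
   equal to 8 and MOVE_RATIO (speed_p) equal to 4, a 24-byte aggregate is
   charged (24 + 8 - 1) / 8 == 3, while a 100-byte aggregate exceeds the
   8 * 4 == 32 byte threshold and is charged 4, the assumed cost of a
   memcpy call (three arguments plus the call itself). */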
3825 estimate_move_cost (tree type, bool ARG_UNUSED (speed_p))
3827 HOST_WIDE_INT size;
3829 gcc_assert (!VOID_TYPE_P (type));
3831 if (TREE_CODE (type) == VECTOR_TYPE)
3833 machine_mode inner = TYPE_MODE (TREE_TYPE (type));
3834 machine_mode simd
3835 = targetm.vectorize.preferred_simd_mode (inner);
3836 int simd_mode_size = GET_MODE_SIZE (simd);
3837 return ((GET_MODE_SIZE (TYPE_MODE (type)) + simd_mode_size - 1)
3838 / simd_mode_size);
3841 size = int_size_in_bytes (type);
3843 if (size < 0 || size > MOVE_MAX_PIECES * MOVE_RATIO (speed_p))
3844 /* Cost of a memcpy call, 3 arguments and the call. */
3845 return 4;
3846 else
3847 return ((size + MOVE_MAX_PIECES - 1) / MOVE_MAX_PIECES);
3850 /* Returns cost of operation CODE, according to WEIGHTS */
3852 static int
3853 estimate_operator_cost (enum tree_code code, eni_weights *weights,
3854 tree op1 ATTRIBUTE_UNUSED, tree op2)
3856 switch (code)
3858 /* These are "free" conversions, or their presumed cost
3859 is folded into other operations. */
3860 case RANGE_EXPR:
3861 CASE_CONVERT:
3862 case COMPLEX_EXPR:
3863 case PAREN_EXPR:
3864 case VIEW_CONVERT_EXPR:
3865 return 0;
3867 /* Assign cost of 1 to usual operations.
3868 ??? We may consider mapping RTL costs to this. */
3869 case COND_EXPR:
3870 case VEC_COND_EXPR:
3871 case VEC_PERM_EXPR:
3873 case PLUS_EXPR:
3874 case POINTER_PLUS_EXPR:
3875 case MINUS_EXPR:
3876 case MULT_EXPR:
3877 case MULT_HIGHPART_EXPR:
3878 case FMA_EXPR:
3880 case ADDR_SPACE_CONVERT_EXPR:
3881 case FIXED_CONVERT_EXPR:
3882 case FIX_TRUNC_EXPR:
3884 case NEGATE_EXPR:
3885 case FLOAT_EXPR:
3886 case MIN_EXPR:
3887 case MAX_EXPR:
3888 case ABS_EXPR:
3890 case LSHIFT_EXPR:
3891 case RSHIFT_EXPR:
3892 case LROTATE_EXPR:
3893 case RROTATE_EXPR:
3895 case BIT_IOR_EXPR:
3896 case BIT_XOR_EXPR:
3897 case BIT_AND_EXPR:
3898 case BIT_NOT_EXPR:
3900 case TRUTH_ANDIF_EXPR:
3901 case TRUTH_ORIF_EXPR:
3902 case TRUTH_AND_EXPR:
3903 case TRUTH_OR_EXPR:
3904 case TRUTH_XOR_EXPR:
3905 case TRUTH_NOT_EXPR:
3907 case LT_EXPR:
3908 case LE_EXPR:
3909 case GT_EXPR:
3910 case GE_EXPR:
3911 case EQ_EXPR:
3912 case NE_EXPR:
3913 case ORDERED_EXPR:
3914 case UNORDERED_EXPR:
3916 case UNLT_EXPR:
3917 case UNLE_EXPR:
3918 case UNGT_EXPR:
3919 case UNGE_EXPR:
3920 case UNEQ_EXPR:
3921 case LTGT_EXPR:
3923 case CONJ_EXPR:
3925 case PREDECREMENT_EXPR:
3926 case PREINCREMENT_EXPR:
3927 case POSTDECREMENT_EXPR:
3928 case POSTINCREMENT_EXPR:
3930 case REALIGN_LOAD_EXPR:
3932 case REDUC_MAX_EXPR:
3933 case REDUC_MIN_EXPR:
3934 case REDUC_PLUS_EXPR:
3935 case WIDEN_SUM_EXPR:
3936 case WIDEN_MULT_EXPR:
3937 case DOT_PROD_EXPR:
3938 case SAD_EXPR:
3939 case WIDEN_MULT_PLUS_EXPR:
3940 case WIDEN_MULT_MINUS_EXPR:
3941 case WIDEN_LSHIFT_EXPR:
3943 case VEC_WIDEN_MULT_HI_EXPR:
3944 case VEC_WIDEN_MULT_LO_EXPR:
3945 case VEC_WIDEN_MULT_EVEN_EXPR:
3946 case VEC_WIDEN_MULT_ODD_EXPR:
3947 case VEC_UNPACK_HI_EXPR:
3948 case VEC_UNPACK_LO_EXPR:
3949 case VEC_UNPACK_FLOAT_HI_EXPR:
3950 case VEC_UNPACK_FLOAT_LO_EXPR:
3951 case VEC_PACK_TRUNC_EXPR:
3952 case VEC_PACK_SAT_EXPR:
3953 case VEC_PACK_FIX_TRUNC_EXPR:
3954 case VEC_WIDEN_LSHIFT_HI_EXPR:
3955 case VEC_WIDEN_LSHIFT_LO_EXPR:
3957 return 1;
3959 /* A few special cases of expensive operations. This is useful
3960 to avoid inlining functions that contain too many of these. */
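/* E.g., a division whose divisor is not a compile-time constant is charged
   weights->div_mod_cost (10 under the time weights set up in
   init_inline_once below), while division by an INTEGER_CST is charged
   only 1, as it is normally strength-reduced to cheaper operations. */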
3961 case TRUNC_DIV_EXPR:
3962 case CEIL_DIV_EXPR:
3963 case FLOOR_DIV_EXPR:
3964 case ROUND_DIV_EXPR:
3965 case EXACT_DIV_EXPR:
3966 case TRUNC_MOD_EXPR:
3967 case CEIL_MOD_EXPR:
3968 case FLOOR_MOD_EXPR:
3969 case ROUND_MOD_EXPR:
3970 case RDIV_EXPR:
3971 if (TREE_CODE (op2) != INTEGER_CST)
3972 return weights->div_mod_cost;
3973 return 1;
3975 default:
3976 /* We expect a copy assignment with no operator. */
3977 gcc_assert (get_gimple_rhs_class (code) == GIMPLE_SINGLE_RHS);
3978 return 0;
3983 /* Estimate number of instructions that will be created by expanding
3984 the statements in the statement sequence STMTS.
3985 WEIGHTS contains weights attributed to various constructs. */
3987 static
3988 int estimate_num_insns_seq (gimple_seq stmts, eni_weights *weights)
3990 int cost;
3991 gimple_stmt_iterator gsi;
3993 cost = 0;
3994 for (gsi = gsi_start (stmts); !gsi_end_p (gsi); gsi_next (&gsi))
3995 cost += estimate_num_insns (gsi_stmt (gsi), weights);
3997 return cost;
4001 /* Estimate number of instructions that will be created by expanding STMT.
4002 WEIGHTS contains weights attributed to various constructs. */
4005 estimate_num_insns (gimple stmt, eni_weights *weights)
4007 unsigned cost, i;
4008 enum gimple_code code = gimple_code (stmt);
4009 tree lhs;
4010 tree rhs;
4012 switch (code)
4014 case GIMPLE_ASSIGN:
4015 /* Try to estimate the cost of assignments. We have two cases to
4016 deal with:
4017 1) Simple assignments to registers;
4018 2) Stores to things that must live in memory. This includes
4019 "normal" stores to scalars, but also assignments of large
4020 structures, or constructors of big arrays;
4022 Let us look at these two cases, assuming we have "a = b + C":
4023 <GIMPLE_ASSIGN <var_decl "a">
4024 <plus_expr <var_decl "b"> <constant C>>
4025 If "a" is a GIMPLE register, the assignment to it is free on almost
4026 any target, because "a" usually ends up in a real register. Hence
4027 the only cost of this expression comes from the PLUS_EXPR, and we
4028 can ignore the GIMPLE_ASSIGN.
4029 If "a" is not a GIMPLE register, the assignment to "a" will most
4030 likely be a real store, so the cost of the GIMPLE_ASSIGN is the cost
4031 of moving something into "a", which we compute using the function
4032 estimate_move_cost. */
4033 if (gimple_clobber_p (stmt))
4034 return 0; /* ={v} {CLOBBER} stmt expands to nothing. */
4036 lhs = gimple_assign_lhs (stmt);
4037 rhs = gimple_assign_rhs1 (stmt);
4039 cost = 0;
4041 /* Account for the cost of moving to / from memory. */
4042 if (gimple_store_p (stmt))
4043 cost += estimate_move_cost (TREE_TYPE (lhs), weights->time_based);
4044 if (gimple_assign_load_p (stmt))
4045 cost += estimate_move_cost (TREE_TYPE (rhs), weights->time_based);
4047 cost += estimate_operator_cost (gimple_assign_rhs_code (stmt), weights,
4048 gimple_assign_rhs1 (stmt),
4049 get_gimple_rhs_class (gimple_assign_rhs_code (stmt))
4050 == GIMPLE_BINARY_RHS
4051 ? gimple_assign_rhs2 (stmt) : NULL);
4052 break;
4054 case GIMPLE_COND:
4055 cost = 1 + estimate_operator_cost (gimple_cond_code (stmt), weights,
4056 gimple_op (stmt, 0),
4057 gimple_op (stmt, 1));
4058 break;
4060 case GIMPLE_SWITCH:
4062 gswitch *switch_stmt = as_a <gswitch *> (stmt);
4063 /* Take into account the cost of the switch + guess 2 conditional jumps for
4064 each case label.
4066 TODO: once the switch expansion logic is sufficiently separated, we can
4067 do a better job of estimating the cost of the switch. */
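/* E.g., a switch with 16 case labels is estimated at 16 * 2 == 32 when
   optimizing for size, but only floor_log2 (16) * 2 == 8 for time,
   reflecting the balanced decision tree the expander is assumed to emit. */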
4068 if (weights->time_based)
4069 cost = floor_log2 (gimple_switch_num_labels (switch_stmt)) * 2;
4070 else
4071 cost = gimple_switch_num_labels (switch_stmt) * 2;
4073 break;
4075 case GIMPLE_CALL:
4077 tree decl;
4079 if (gimple_call_internal_p (stmt))
4080 return 0;
4081 else if ((decl = gimple_call_fndecl (stmt))
4082 && DECL_BUILT_IN (decl))
4084 /* Do not special-case builtins where we see the body.
4085 This just confuses the inliner. */
4086 struct cgraph_node *node;
4087 if (!(node = cgraph_node::get (decl))
4088 || node->definition)
4090 /* For builtins that are likely expanded to nothing or
4091 inlined, do not account operand costs. */
4092 else if (is_simple_builtin (decl))
4093 return 0;
4094 else if (is_inexpensive_builtin (decl))
4095 return weights->target_builtin_call_cost;
4096 else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
4098 /* We canonicalize x * x to pow (x, 2.0) with -ffast-math, so
4099 specialize the cheap expansion we do here.
4100 ??? This asks for a more general solution. */
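/* E.g., under the time weights a pow call would otherwise be charged
   weights->call_cost plus argument-move costs, whereas the x * x it may
   have originated from costs 1; the POW cases below keep the two forms
   comparable by costing pow (x, 2.0) as a single multiplication. */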
4101 switch (DECL_FUNCTION_CODE (decl))
4103 case BUILT_IN_POW:
4104 case BUILT_IN_POWF:
4105 case BUILT_IN_POWL:
4106 if (TREE_CODE (gimple_call_arg (stmt, 1)) == REAL_CST
4107 && REAL_VALUES_EQUAL
4108 (TREE_REAL_CST (gimple_call_arg (stmt, 1)), dconst2))
4109 return estimate_operator_cost
4110 (MULT_EXPR, weights, gimple_call_arg (stmt, 0),
4111 gimple_call_arg (stmt, 0));
4112 break;
4114 default:
4115 break;
4120 cost = decl ? weights->call_cost : weights->indirect_call_cost;
4121 if (gimple_call_lhs (stmt))
4122 cost += estimate_move_cost (TREE_TYPE (gimple_call_lhs (stmt)),
4123 weights->time_based);
4124 for (i = 0; i < gimple_call_num_args (stmt); i++)
4126 tree arg = gimple_call_arg (stmt, i);
4127 cost += estimate_move_cost (TREE_TYPE (arg),
4128 weights->time_based);
4130 break;
4133 case GIMPLE_RETURN:
4134 return weights->return_cost;
4136 case GIMPLE_GOTO:
4137 case GIMPLE_LABEL:
4138 case GIMPLE_NOP:
4139 case GIMPLE_PHI:
4140 case GIMPLE_PREDICT:
4141 case GIMPLE_DEBUG:
4142 return 0;
4144 case GIMPLE_ASM:
4146 int count = asm_str_count (gimple_asm_string (as_a <gasm *> (stmt)));
4147 /* 1000 means infinity. This avoids overflows later
4148 with very long asm statements. */
4149 if (count > 1000)
4150 count = 1000;
4151 return count;
4154 case GIMPLE_RESX:
4155 /* This is either going to be an external function call with one
4156 argument, or two register copy statements plus a goto. */
4157 return 2;
4159 case GIMPLE_EH_DISPATCH:
4160 /* ??? This is going to turn into a switch statement. Ideally
4161 we'd have a look at the eh region and estimate the number of
4162 edges involved. */
4163 return 10;
4165 case GIMPLE_BIND:
4166 return estimate_num_insns_seq (
4167 gimple_bind_body (as_a <gbind *> (stmt)),
4168 weights);
4170 case GIMPLE_EH_FILTER:
4171 return estimate_num_insns_seq (gimple_eh_filter_failure (stmt), weights);
4173 case GIMPLE_CATCH:
4174 return estimate_num_insns_seq (gimple_catch_handler (
4175 as_a <gcatch *> (stmt)),
4176 weights);
4178 case GIMPLE_TRY:
4179 return (estimate_num_insns_seq (gimple_try_eval (stmt), weights)
4180 + estimate_num_insns_seq (gimple_try_cleanup (stmt), weights));
4182 /* OMP directives are generally very expensive. */
4184 case GIMPLE_OMP_RETURN:
4185 case GIMPLE_OMP_SECTIONS_SWITCH:
4186 case GIMPLE_OMP_ATOMIC_STORE:
4187 case GIMPLE_OMP_CONTINUE:
4188 /* ...except these, which are cheap. */
4189 return 0;
4191 case GIMPLE_OMP_ATOMIC_LOAD:
4192 return weights->omp_cost;
4194 case GIMPLE_OMP_FOR:
4195 return (weights->omp_cost
4196 + estimate_num_insns_seq (gimple_omp_body (stmt), weights)
4197 + estimate_num_insns_seq (gimple_omp_for_pre_body (stmt), weights));
4199 case GIMPLE_OMP_PARALLEL:
4200 case GIMPLE_OMP_TASK:
4201 case GIMPLE_OMP_CRITICAL:
4202 case GIMPLE_OMP_MASTER:
4203 case GIMPLE_OMP_TASKGROUP:
4204 case GIMPLE_OMP_ORDERED:
4205 case GIMPLE_OMP_SECTION:
4206 case GIMPLE_OMP_SECTIONS:
4207 case GIMPLE_OMP_SINGLE:
4208 case GIMPLE_OMP_TARGET:
4209 case GIMPLE_OMP_TEAMS:
4210 return (weights->omp_cost
4211 + estimate_num_insns_seq (gimple_omp_body (stmt), weights));
4213 case GIMPLE_TRANSACTION:
4214 return (weights->tm_cost
4215 + estimate_num_insns_seq (gimple_transaction_body (
4216 as_a <gtransaction *> (stmt)),
4217 weights));
4219 default:
4220 gcc_unreachable ();
4223 return cost;
4226 /* Estimate number of instructions that will be created by expanding
4227 function FNDECL. WEIGHTS contains weights attributed to various
4228 constructs. */
4231 estimate_num_insns_fn (tree fndecl, eni_weights *weights)
4233 struct function *my_function = DECL_STRUCT_FUNCTION (fndecl);
4234 gimple_stmt_iterator bsi;
4235 basic_block bb;
4236 int n = 0;
4238 gcc_assert (my_function && my_function->cfg);
4239 FOR_EACH_BB_FN (bb, my_function)
4241 for (bsi = gsi_start_bb (bb); !gsi_end_p (bsi); gsi_next (&bsi))
4242 n += estimate_num_insns (gsi_stmt (bsi), weights);
4245 return n;
4249 /* Initializes weights used by estimate_num_insns. */
4251 void
4252 init_inline_once (void)
4254 eni_size_weights.call_cost = 1;
4255 eni_size_weights.indirect_call_cost = 3;
4256 eni_size_weights.target_builtin_call_cost = 1;
4257 eni_size_weights.div_mod_cost = 1;
4258 eni_size_weights.omp_cost = 40;
4259 eni_size_weights.tm_cost = 10;
4260 eni_size_weights.time_based = false;
4261 eni_size_weights.return_cost = 1;
4263 /* Estimating the time for a call is difficult, since we have no idea what the
4264 called function does. In the current uses of eni_time_weights,
4265 underestimating the cost does less harm than overestimating it, so
4266 we choose a rather small value here. */
4267 eni_time_weights.call_cost = 10;
4268 eni_time_weights.indirect_call_cost = 15;
4269 eni_time_weights.target_builtin_call_cost = 1;
4270 eni_time_weights.div_mod_cost = 10;
4271 eni_time_weights.omp_cost = 40;
4272 eni_time_weights.tm_cost = 40;
4273 eni_time_weights.time_based = true;
4274 eni_time_weights.return_cost = 2;
4277 /* Estimate the number of instructions in a gimple_seq. */
4280 count_insns_seq (gimple_seq seq, eni_weights *weights)
4282 gimple_stmt_iterator gsi;
4283 int n = 0;
4284 for (gsi = gsi_start (seq); !gsi_end_p (gsi); gsi_next (&gsi))
4285 n += estimate_num_insns (gsi_stmt (gsi), weights);
4287 return n;
4291 /* Install new lexical TREE_BLOCK underneath 'current_block'. */
4293 static void
4294 prepend_lexical_block (tree current_block, tree new_block)
4296 BLOCK_CHAIN (new_block) = BLOCK_SUBBLOCKS (current_block);
4297 BLOCK_SUBBLOCKS (current_block) = new_block;
4298 BLOCK_SUPERCONTEXT (new_block) = current_block;
4301 /* Add local variables from CALLEE to CALLER. */
4303 static inline void
4304 add_local_variables (struct function *callee, struct function *caller,
4305 copy_body_data *id)
4307 tree var;
4308 unsigned ix;
4310 FOR_EACH_LOCAL_DECL (callee, ix, var)
4311 if (!can_be_nonlocal (var, id))
4313 tree new_var = remap_decl (var, id);
4315 /* Remap debug-expressions. */
4316 if (TREE_CODE (new_var) == VAR_DECL
4317 && DECL_HAS_DEBUG_EXPR_P (var)
4318 && new_var != var)
4320 tree tem = DECL_DEBUG_EXPR (var);
4321 bool old_regimplify = id->regimplify;
4322 id->remapping_type_depth++;
4323 walk_tree (&tem, copy_tree_body_r, id, NULL);
4324 id->remapping_type_depth--;
4325 id->regimplify = old_regimplify;
4326 SET_DECL_DEBUG_EXPR (new_var, tem);
4327 DECL_HAS_DEBUG_EXPR_P (new_var) = 1;
4329 add_local_decl (caller, new_var);
4333 /* Add to BINDINGS a debug stmt resetting SRCVAR if inlining might
4334 have brought in or introduced any debug stmts for SRCVAR. */
4336 static inline void
4337 reset_debug_binding (copy_body_data *id, tree srcvar, gimple_seq *bindings)
4339 tree *remappedvarp = id->decl_map->get (srcvar);
4341 if (!remappedvarp)
4342 return;
4344 if (TREE_CODE (*remappedvarp) != VAR_DECL)
4345 return;
4347 if (*remappedvarp == id->retvar || *remappedvarp == id->retbnd)
4348 return;
4350 tree tvar = target_for_debug_bind (*remappedvarp);
4351 if (!tvar)
4352 return;
4354 gdebug *stmt = gimple_build_debug_bind (tvar, NULL_TREE,
4355 id->call_stmt);
4356 gimple_seq_add_stmt (bindings, stmt);
4359 /* For each inlined variable for which we may have debug bind stmts,
4360 add before GSI a final debug stmt resetting it, marking the end of
4361 its life, so that var-tracking knows it doesn't have to compute
4362 further locations for it. */
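/* A sketch of the effect: for an inlined variable A the reset shows up in
   GIMPLE dumps as a value-less bind, roughly "# DEBUG a => NULL", placed
   right after the statements copied from the inlined body. */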
4364 static inline void
4365 reset_debug_bindings (copy_body_data *id, gimple_stmt_iterator gsi)
4367 tree var;
4368 unsigned ix;
4369 gimple_seq bindings = NULL;
4371 if (!gimple_in_ssa_p (id->src_cfun))
4372 return;
4374 if (!opt_for_fn (id->dst_fn, flag_var_tracking_assignments))
4375 return;
4377 for (var = DECL_ARGUMENTS (id->src_fn);
4378 var; var = DECL_CHAIN (var))
4379 reset_debug_binding (id, var, &bindings);
4381 FOR_EACH_LOCAL_DECL (id->src_cfun, ix, var)
4382 reset_debug_binding (id, var, &bindings);
4384 gsi_insert_seq_before_without_update (&gsi, bindings, GSI_SAME_STMT);
4387 /* If STMT is a GIMPLE_CALL, replace it with its inline expansion. */
4389 static bool
4390 expand_call_inline (basic_block bb, gimple stmt, copy_body_data *id)
4392 tree use_retvar;
4393 tree fn;
4394 hash_map<tree, tree> *dst;
4395 hash_map<tree, tree> *st = NULL;
4396 tree return_slot;
4397 tree modify_dest;
4398 tree return_bounds = NULL;
4399 location_t saved_location;
4400 struct cgraph_edge *cg_edge;
4401 cgraph_inline_failed_t reason;
4402 basic_block return_block;
4403 edge e;
4404 gimple_stmt_iterator gsi, stmt_gsi;
4405 bool successfully_inlined = FALSE;
4406 bool purge_dead_abnormal_edges;
4407 gcall *call_stmt;
4408 unsigned int i;
4410 /* Set input_location here so we get the right instantiation context
4411 if we call instantiate_decl from inlinable_function_p. */
4412 /* FIXME: instantiate_decl isn't called by inlinable_function_p. */
4413 saved_location = input_location;
4414 input_location = gimple_location (stmt);
4416 /* From here on, we're only interested in CALL_EXPRs. */
4417 call_stmt = dyn_cast <gcall *> (stmt);
4418 if (!call_stmt)
4419 goto egress;
4421 cg_edge = id->dst_node->get_edge (stmt);
4422 gcc_checking_assert (cg_edge);
4423 /* First, see if we can figure out what function is being called.
4424 If we cannot, then there is no hope of inlining the function. */
4425 if (cg_edge->indirect_unknown_callee)
4426 goto egress;
4427 fn = cg_edge->callee->decl;
4428 gcc_checking_assert (fn);
4430 /* If FN is a declaration of a function in a nested scope that was
4431 globally declared inline, we don't set its DECL_INITIAL.
4432 However, we can't blindly follow DECL_ABSTRACT_ORIGIN because the
4433 C++ front-end uses it for cdtors to refer to their internal
4434 declarations, which are not real functions. Fortunately those
4435 don't have trees to be saved, so we can tell by checking their
4436 gimple_body. */
4437 if (!DECL_INITIAL (fn)
4438 && DECL_ABSTRACT_ORIGIN (fn)
4439 && gimple_has_body_p (DECL_ABSTRACT_ORIGIN (fn)))
4440 fn = DECL_ABSTRACT_ORIGIN (fn);
4442 /* Don't try to inline functions that are not well-suited to inlining. */
4443 if (cg_edge->inline_failed)
4445 reason = cg_edge->inline_failed;
4446 /* If this call was originally indirect, we do not want to emit any
4447 inlining related warnings or sorry messages because there are no
4448 guarantees regarding those. */
4449 if (cg_edge->indirect_inlining_edge)
4450 goto egress;
4452 if (lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn))
4453 /* For extern inline functions that get redefined we have always
4454 silently ignored the always_inline flag. Better behaviour would
4455 be to be able to keep both bodies and use extern inline body
4456 for inlining, but we can't do that because frontends overwrite
4457 the body. */
4458 && !cg_edge->callee->local.redefined_extern_inline
4459 /* During early inline pass, report only when optimization is
4460 not turned on. */
4461 && (symtab->global_info_ready
4462 || !optimize
4463 || cgraph_inline_failed_type (reason) == CIF_FINAL_ERROR)
4464 /* PR 20090218-1_0.c. Body can be provided by another module. */
4465 && (reason != CIF_BODY_NOT_AVAILABLE || !flag_generate_lto))
4467 error ("inlining failed in call to always_inline %q+F: %s", fn,
4468 cgraph_inline_failed_string (reason));
4469 error ("called from here");
4471 else if (warn_inline
4472 && DECL_DECLARED_INLINE_P (fn)
4473 && !DECL_NO_INLINE_WARNING_P (fn)
4474 && !DECL_IN_SYSTEM_HEADER (fn)
4475 && reason != CIF_UNSPECIFIED
4476 && !lookup_attribute ("noinline", DECL_ATTRIBUTES (fn))
4477 /* Do not warn about not inlined recursive calls. */
4478 && !cg_edge->recursive_p ()
4479 /* Avoid warnings during early inline pass. */
4480 && symtab->global_info_ready)
4482 warning (OPT_Winline, "inlining failed in call to %q+F: %s",
4483 fn, _(cgraph_inline_failed_string (reason)));
4484 warning (OPT_Winline, "called from here");
4486 goto egress;
4488 fn = cg_edge->callee->decl;
4489 cg_edge->callee->get_untransformed_body ();
4491 #ifdef ENABLE_CHECKING
4492 if (cg_edge->callee->decl != id->dst_node->decl)
4493 cg_edge->callee->verify ();
4494 #endif
4496 /* We will be inlining this callee. */
4497 id->eh_lp_nr = lookup_stmt_eh_lp (stmt);
4498 id->assign_stmts.create (0);
4500 /* Update the callers EH personality. */
4501 if (DECL_FUNCTION_PERSONALITY (cg_edge->callee->decl))
4502 DECL_FUNCTION_PERSONALITY (cg_edge->caller->decl)
4503 = DECL_FUNCTION_PERSONALITY (cg_edge->callee->decl);
4505 /* Split the block holding the GIMPLE_CALL. */
4506 e = split_block (bb, stmt);
4507 bb = e->src;
4508 return_block = e->dest;
4509 remove_edge (e);
4511 /* split_block splits after the statement; work around this by
4512 moving the call into the second block manually. Not pretty,
4513 but seems easier than doing the CFG manipulation by hand
4514 when the GIMPLE_CALL is in the last statement of BB. */
4515 stmt_gsi = gsi_last_bb (bb);
4516 gsi_remove (&stmt_gsi, false);
4518 /* If the GIMPLE_CALL was the last statement of BB, it may have
4519 been the source of abnormal edges. In this case, schedule
4520 the removal of dead abnormal edges. */
4521 gsi = gsi_start_bb (return_block);
4522 if (gsi_end_p (gsi))
4524 gsi_insert_after (&gsi, stmt, GSI_NEW_STMT);
4525 purge_dead_abnormal_edges = true;
4527 else
4529 gsi_insert_before (&gsi, stmt, GSI_NEW_STMT);
4530 purge_dead_abnormal_edges = false;
4533 stmt_gsi = gsi_start_bb (return_block);
4535 /* Build a block containing code to initialize the arguments, the
4536 actual inline expansion of the body, and a label for the return
4537 statements within the function to jump to. The type of the
4538 statement expression is the return type of the function call.
4539 ??? If the call does not have an associated block then we will
4540 remap all callee blocks to NULL, effectively dropping most of
4541 its debug information. This should only happen for calls to
4542 artificial decls inserted by the compiler itself. We need to
4543 either link the inlined blocks into the caller block tree or
4544 not refer to them in any way to not break GC for locations. */
4545 if (gimple_block (stmt))
4547 id->block = make_node (BLOCK);
4548 BLOCK_ABSTRACT_ORIGIN (id->block) = fn;
4549 BLOCK_SOURCE_LOCATION (id->block) = LOCATION_LOCUS (input_location);
4550 prepend_lexical_block (gimple_block (stmt), id->block);
4553 /* Local declarations will be replaced by their equivalents in this
4554 map. */
4555 st = id->decl_map;
4556 id->decl_map = new hash_map<tree, tree>;
4557 dst = id->debug_map;
4558 id->debug_map = NULL;
4560 /* Record the function we are about to inline. */
4561 id->src_fn = fn;
4562 id->src_node = cg_edge->callee;
4563 id->src_cfun = DECL_STRUCT_FUNCTION (fn);
4564 id->call_stmt = stmt;
4566 /* If the src function contains an IFN_VA_ARG, then so will the dst
4567 function after inlining. */
4568 if ((id->src_cfun->curr_properties & PROP_gimple_lva) == 0)
4570 struct function *dst_cfun = DECL_STRUCT_FUNCTION (id->dst_fn);
4571 dst_cfun->curr_properties &= ~PROP_gimple_lva;
4574 gcc_assert (!id->src_cfun->after_inlining);
4576 id->entry_bb = bb;
4577 if (lookup_attribute ("cold", DECL_ATTRIBUTES (fn)))
4579 gimple_stmt_iterator si = gsi_last_bb (bb);
4580 gsi_insert_after (&si, gimple_build_predict (PRED_COLD_FUNCTION,
4581 NOT_TAKEN),
4582 GSI_NEW_STMT);
4584 initialize_inlined_parameters (id, stmt, fn, bb);
4586 if (DECL_INITIAL (fn))
4588 if (gimple_block (stmt))
4590 tree *var;
4592 prepend_lexical_block (id->block,
4593 remap_blocks (DECL_INITIAL (fn), id));
4594 gcc_checking_assert (BLOCK_SUBBLOCKS (id->block)
4595 && (BLOCK_CHAIN (BLOCK_SUBBLOCKS (id->block))
4596 == NULL_TREE));
4597 /* Move vars for PARM_DECLs from DECL_INITIAL block to id->block,
4598 otherwise for DWARF DW_TAG_formal_parameter will not be children of
4599 DW_TAG_inlined_subroutine, but of a DW_TAG_lexical_block
4600 under it. The parameters can be then evaluated in the debugger,
4601 but don't show in backtraces. */
4602 for (var = &BLOCK_VARS (BLOCK_SUBBLOCKS (id->block)); *var; )
4603 if (TREE_CODE (DECL_ORIGIN (*var)) == PARM_DECL)
4605 tree v = *var;
4606 *var = TREE_CHAIN (v);
4607 TREE_CHAIN (v) = BLOCK_VARS (id->block);
4608 BLOCK_VARS (id->block) = v;
4610 else
4611 var = &TREE_CHAIN (*var);
4613 else
4614 remap_blocks_to_null (DECL_INITIAL (fn), id);
4617 /* Return statements in the function body will be replaced by jumps
4618 to the RET_LABEL. */
4619 gcc_assert (DECL_INITIAL (fn));
4620 gcc_assert (TREE_CODE (DECL_INITIAL (fn)) == BLOCK);
4622 /* Find the LHS to which the result of this call is assigned. */
4623 return_slot = NULL;
4624 if (gimple_call_lhs (stmt))
4626 modify_dest = gimple_call_lhs (stmt);
4628 /* Remember where to copy returned bounds. */
4629 if (gimple_call_with_bounds_p (stmt)
4630 && TREE_CODE (modify_dest) == SSA_NAME)
4632 gcall *retbnd = chkp_retbnd_call_by_val (modify_dest);
4633 if (retbnd)
4635 return_bounds = gimple_call_lhs (retbnd);
4636 /* If returned bounds are not used then just
4637 remove unused call. */
4638 if (!return_bounds)
4640 gimple_stmt_iterator iter = gsi_for_stmt (retbnd);
4641 gsi_remove (&iter, true);
4646 /* The function which we are inlining might not return a value,
4647 in which case we should issue a warning that the function
4648 does not return a value. In that case the optimizers will
4649 see that the variable to which the value is assigned was not
4650 initialized. We do not want to issue a warning about that
4651 uninitialized variable. */
4652 if (DECL_P (modify_dest))
4653 TREE_NO_WARNING (modify_dest) = 1;
4655 if (gimple_call_return_slot_opt_p (call_stmt))
4657 return_slot = modify_dest;
4658 modify_dest = NULL;
4661 else
4662 modify_dest = NULL;
4664 /* If we are inlining a call to the C++ operator new, we don't want
4665 to use type based alias analysis on the return value. Otherwise
4666 we may get confused if the compiler sees that the inlined new
4667 function returns a pointer which was just deleted. See bug
4668 33407. */
4669 if (DECL_IS_OPERATOR_NEW (fn))
4671 return_slot = NULL;
4672 modify_dest = NULL;
4675 /* Declare the return variable for the function. */
4676 use_retvar = declare_return_variable (id, return_slot, modify_dest,
4677 return_bounds, bb);
4679 /* Add local vars in this inlined callee to caller. */
4680 add_local_variables (id->src_cfun, cfun, id);
4682 if (dump_file && (dump_flags & TDF_DETAILS))
4684 fprintf (dump_file, "Inlining ");
4685 print_generic_expr (dump_file, id->src_fn, 0);
4686 fprintf (dump_file, " to ");
4687 print_generic_expr (dump_file, id->dst_fn, 0);
4688 fprintf (dump_file, " with frequency %i\n", cg_edge->frequency);
4691 /* This is it. Duplicate the callee body. Assume callee is
4692 pre-gimplified. Note that we must not alter the caller
4693 function in any way before this point, as this CALL_EXPR may be
4694 a self-referential call; if we're calling ourselves, we need to
4695 duplicate our body before altering anything. */
4696 copy_body (id, cg_edge->callee->count,
4697 GCOV_COMPUTE_SCALE (cg_edge->frequency, CGRAPH_FREQ_BASE),
4698 bb, return_block, NULL);
4700 reset_debug_bindings (id, stmt_gsi);
4702 /* Reset the escaped solution. */
4703 if (cfun->gimple_df)
4704 pt_solution_reset (&cfun->gimple_df->escaped);
4706 /* Clean up. */
4707 if (id->debug_map)
4709 delete id->debug_map;
4710 id->debug_map = dst;
4712 delete id->decl_map;
4713 id->decl_map = st;
4715 /* Unlink the call's virtual operands before replacing it. */
4716 unlink_stmt_vdef (stmt);
4717 if (gimple_vdef (stmt)
4718 && TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
4719 release_ssa_name (gimple_vdef (stmt));
4721 /* If the inlined function returns a result that we care about,
4722 substitute the GIMPLE_CALL with an assignment of the return
4723 variable to the LHS of the call. That is, if STMT was
4724 'a = foo (...)', substitute the call with 'a = USE_RETVAR'. */
4725 if (use_retvar && gimple_call_lhs (stmt))
4727 gimple old_stmt = stmt;
4728 stmt = gimple_build_assign (gimple_call_lhs (stmt), use_retvar);
4729 gsi_replace (&stmt_gsi, stmt, false);
4730 maybe_clean_or_replace_eh_stmt (old_stmt, stmt);
4732 /* Copy bounds if we copy structure with bounds. */
4733 if (chkp_function_instrumented_p (id->dst_fn)
4734 && !BOUNDED_P (use_retvar)
4735 && chkp_type_has_pointer (TREE_TYPE (use_retvar)))
4736 id->assign_stmts.safe_push (stmt);
4738 else
4740 /* Handle the case of inlining a function with no return
4741 statement, which causes the return value to become undefined. */
4742 if (gimple_call_lhs (stmt)
4743 && TREE_CODE (gimple_call_lhs (stmt)) == SSA_NAME)
4745 tree name = gimple_call_lhs (stmt);
4746 tree var = SSA_NAME_VAR (name);
4747 tree def = ssa_default_def (cfun, var);
4749 if (def)
4751 /* If the variable is used undefined, make this name
4752 undefined via a move. */
4753 stmt = gimple_build_assign (gimple_call_lhs (stmt), def);
4754 gsi_replace (&stmt_gsi, stmt, true);
4756 else
4758 /* Otherwise make this variable undefined. */
4759 gsi_remove (&stmt_gsi, true);
4760 set_ssa_default_def (cfun, var, name);
4761 SSA_NAME_DEF_STMT (name) = gimple_build_nop ();
4764 else
4765 gsi_remove (&stmt_gsi, true);
4768 /* Put returned bounds into the correct place if required. */
4769 if (return_bounds)
4771 gimple old_stmt = SSA_NAME_DEF_STMT (return_bounds);
4772 gimple new_stmt = gimple_build_assign (return_bounds, id->retbnd);
4773 gimple_stmt_iterator bnd_gsi = gsi_for_stmt (old_stmt);
4774 unlink_stmt_vdef (old_stmt);
4775 gsi_replace (&bnd_gsi, new_stmt, false);
4776 maybe_clean_or_replace_eh_stmt (old_stmt, new_stmt);
4777 cgraph_update_edges_for_call_stmt (old_stmt,
4778 gimple_call_fndecl (old_stmt),
4779 new_stmt);
4782 if (purge_dead_abnormal_edges)
4784 gimple_purge_dead_eh_edges (return_block);
4785 gimple_purge_dead_abnormal_call_edges (return_block);
4788 /* If the value of the new expression is ignored, that's OK. We
4789 don't warn about this for CALL_EXPRs, so we shouldn't warn about
4790 the equivalent inlined version either. */
4791 if (is_gimple_assign (stmt))
4793 gcc_assert (gimple_assign_single_p (stmt)
4794 || CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt)));
4795 TREE_USED (gimple_assign_rhs1 (stmt)) = 1;
4798 /* Copy bounds for all generated assigns that need it. */
4799 for (i = 0; i < id->assign_stmts.length (); i++)
4800 chkp_copy_bounds_for_assign (id->assign_stmts[i], cg_edge);
4801 id->assign_stmts.release ();
4803 /* Output the inlining info for this abstract function, since it has been
4804 inlined. If we don't do this now, we can lose the information about the
4805 variables in the function when the blocks get blown away as soon as we
4806 remove the cgraph node. */
4807 if (gimple_block (stmt))
4808 (*debug_hooks->outlining_inline_function) (cg_edge->callee->decl);
4810 /* Update callgraph if needed. */
4811 cg_edge->callee->remove ();
4813 id->block = NULL_TREE;
4814 successfully_inlined = TRUE;
4816 egress:
4817 input_location = saved_location;
4818 return successfully_inlined;
4821 /* Expand call statements reachable from STMT_P.
4822 We can only have CALL_EXPRs as the "toplevel" tree code or nested
4823 in a MODIFY_EXPR. */
4825 static bool
4826 gimple_expand_calls_inline (basic_block bb, copy_body_data *id)
4828 gimple_stmt_iterator gsi;
4829 bool inlined = false;
4831 for (gsi = gsi_last_bb (bb); !gsi_end_p (gsi);)
4833 gimple stmt = gsi_stmt (gsi);
4834 gsi_prev (&gsi);
4836 if (is_gimple_call (stmt)
4837 && !gimple_call_internal_p (stmt))
4838 inlined |= expand_call_inline (bb, stmt, id);
4841 return inlined;
4845 /* Walk all basic blocks created after FIRST and try to fold every statement
4846 in the STATEMENTS pointer set. */
4848 static void
4849 fold_marked_statements (int first, hash_set<gimple> *statements)
4851 for (; first < n_basic_blocks_for_fn (cfun); first++)
4852 if (BASIC_BLOCK_FOR_FN (cfun, first))
4854 gimple_stmt_iterator gsi;
4856 for (gsi = gsi_start_bb (BASIC_BLOCK_FOR_FN (cfun, first));
4857 !gsi_end_p (gsi);
4858 gsi_next (&gsi))
4859 if (statements->contains (gsi_stmt (gsi)))
4861 gimple old_stmt = gsi_stmt (gsi);
4862 tree old_decl = is_gimple_call (old_stmt) ? gimple_call_fndecl (old_stmt) : 0;
4864 if (old_decl && DECL_BUILT_IN (old_decl))
4866 /* Folding builtins can create multiple instructions,
4867 we need to look at all of them. */
4868 gimple_stmt_iterator i2 = gsi;
4869 gsi_prev (&i2);
4870 if (fold_stmt (&gsi))
4872 gimple new_stmt;
4873 /* If a builtin at the end of a bb folded into nothing,
4874 the following loop won't work. */
4875 if (gsi_end_p (gsi))
4877 cgraph_update_edges_for_call_stmt (old_stmt,
4878 old_decl, NULL);
4879 break;
4881 if (gsi_end_p (i2))
4882 i2 = gsi_start_bb (BASIC_BLOCK_FOR_FN (cfun, first));
4883 else
4884 gsi_next (&i2);
4885 while (1)
4887 new_stmt = gsi_stmt (i2);
4888 update_stmt (new_stmt);
4889 cgraph_update_edges_for_call_stmt (old_stmt, old_decl,
4890 new_stmt);
4892 if (new_stmt == gsi_stmt (gsi))
4894 /* It is okay to check only the very last
4895 of these statements. If it is a throwing
4896 statement nothing will change. If it isn't,
4897 this can remove EH edges. The only way that
4898 could be incorrect is if some intermediate stmts
4899 throw but the last one doesn't; that would mean
4900 we'd have to split the block, which we can't
4901 do here and we'd lose anyway. And as builtins
4902 probably never throw, this all
4903 is moot anyway. */
4904 if (maybe_clean_or_replace_eh_stmt (old_stmt,
4905 new_stmt))
4906 gimple_purge_dead_eh_edges (
4907 BASIC_BLOCK_FOR_FN (cfun, first));
4908 break;
4910 gsi_next (&i2);
4914 else if (fold_stmt (&gsi))
4916 /* Re-read the statement from GSI as fold_stmt() may
4917 have changed it. */
4918 gimple new_stmt = gsi_stmt (gsi);
4919 update_stmt (new_stmt);
4921 if (is_gimple_call (old_stmt)
4922 || is_gimple_call (new_stmt))
4923 cgraph_update_edges_for_call_stmt (old_stmt, old_decl,
4924 new_stmt);
4926 if (maybe_clean_or_replace_eh_stmt (old_stmt, new_stmt))
4927 gimple_purge_dead_eh_edges (BASIC_BLOCK_FOR_FN (cfun,
4928 first));
4934 /* Expand calls to inline functions in the body of FN. */
4936 unsigned int
4937 optimize_inline_calls (tree fn)
4939 copy_body_data id;
4940 basic_block bb;
4941 int last = n_basic_blocks_for_fn (cfun);
4942 bool inlined_p = false;
4944 /* Clear out ID. */
4945 memset (&id, 0, sizeof (id));
4947 id.src_node = id.dst_node = cgraph_node::get (fn);
4948 gcc_assert (id.dst_node->definition);
4949 id.dst_fn = fn;
4950 /* Or any functions that aren't finished yet. */
4951 if (current_function_decl)
4952 id.dst_fn = current_function_decl;
4954 id.copy_decl = copy_decl_maybe_to_var;
4955 id.transform_call_graph_edges = CB_CGE_DUPLICATE;
4956 id.transform_new_cfg = false;
4957 id.transform_return_to_modify = true;
4958 id.transform_parameter = true;
4959 id.transform_lang_insert_block = NULL;
4960 id.statements_to_fold = new hash_set<gimple>;
4962 push_gimplify_context ();
4964 /* We make no attempts to keep dominance info up-to-date. */
4965 free_dominance_info (CDI_DOMINATORS);
4966 free_dominance_info (CDI_POST_DOMINATORS);
4968 /* Register specific gimple functions. */
4969 gimple_register_cfg_hooks ();
4971 /* Reach the trees by walking over the CFG, and note the
4972 enclosing basic-blocks in the call edges. */
4973 /* We walk the blocks going forward, because inlined function bodies
4974 will split id->current_basic_block, and the new blocks will
4975 follow it; we'll trudge through them, processing their CALL_EXPRs
4976 along the way. */
4977 FOR_EACH_BB_FN (bb, cfun)
4978 inlined_p |= gimple_expand_calls_inline (bb, &id);
4980 pop_gimplify_context (NULL);
4982 #ifdef ENABLE_CHECKING
4984 struct cgraph_edge *e;
4986 id.dst_node->verify ();
4988 /* Double check that we inlined everything we are supposed to inline. */
4989 for (e = id.dst_node->callees; e; e = e->next_callee)
4990 gcc_assert (e->inline_failed);
4992 #endif
4994 /* Fold queued statements. */
4995 fold_marked_statements (last, id.statements_to_fold);
4996 delete id.statements_to_fold;
4998 gcc_assert (!id.debug_stmts.exists ());
5000 /* If we didn't inline into the function there is nothing to do. */
5001 if (!inlined_p)
5002 return 0;
5004 /* Renumber the lexical scoping (non-code) blocks consecutively. */
5005 number_blocks (fn);
5007 delete_unreachable_blocks_update_callgraph (&id);
5008 #ifdef ENABLE_CHECKING
5009 id.dst_node->verify ();
5010 #endif
5012 /* It would be nice to check SSA/CFG/statement consistency here, but it is
5013 not possible yet - the IPA passes might make various functions not
5014 throw and they don't care to proactively update local EH info. This is
5015 done later in the fixup_cfg pass, which also executes the verification. */
5016 return (TODO_update_ssa
5017 | TODO_cleanup_cfg
5018 | (gimple_in_ssa_p (cfun) ? TODO_remove_unused_locals : 0)
5019 | (gimple_in_ssa_p (cfun) ? TODO_update_address_taken : 0)
5020 | (profile_status_for_fn (cfun) != PROFILE_ABSENT
5021 ? TODO_rebuild_frequencies : 0));
5024 /* Passed to walk_tree. Copies the node pointed to, if appropriate. */
5026 tree
5027 copy_tree_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
5029 enum tree_code code = TREE_CODE (*tp);
5030 enum tree_code_class cl = TREE_CODE_CLASS (code);
5032 /* We make copies of most nodes. */
5033 if (IS_EXPR_CODE_CLASS (cl)
5034 || code == TREE_LIST
5035 || code == TREE_VEC
5036 || code == TYPE_DECL
5037 || code == OMP_CLAUSE)
5039 /* Because the chain gets clobbered when we make a copy, we save it
5040 here. */
5041 tree chain = NULL_TREE, new_tree;
5043 if (CODE_CONTAINS_STRUCT (code, TS_COMMON))
5044 chain = TREE_CHAIN (*tp);
5046 /* Copy the node. */
5047 new_tree = copy_node (*tp);
5049 *tp = new_tree;
5051 /* Now, restore the chain, if appropriate. That will cause
5052 walk_tree to walk into the chain as well. */
5053 if (code == PARM_DECL
5054 || code == TREE_LIST
5055 || code == OMP_CLAUSE)
5056 TREE_CHAIN (*tp) = chain;
5058 /* For now, we don't update BLOCKs when we make copies. So, we
5059 have to nullify all BIND_EXPRs. */
5060 if (TREE_CODE (*tp) == BIND_EXPR)
5061 BIND_EXPR_BLOCK (*tp) = NULL_TREE;
5063 else if (code == CONSTRUCTOR)
5065 /* CONSTRUCTOR nodes need special handling because
5066 we need to duplicate the vector of elements. */
5067 tree new_tree;
5069 new_tree = copy_node (*tp);
5070 CONSTRUCTOR_ELTS (new_tree) = vec_safe_copy (CONSTRUCTOR_ELTS (*tp));
5071 *tp = new_tree;
5073 else if (code == STATEMENT_LIST)
5074 /* We used to just abort on STATEMENT_LIST, but we can run into them
5075 with statement-expressions (c++/40975). */
5076 copy_statement_list (tp);
5077 else if (TREE_CODE_CLASS (code) == tcc_type)
5078 *walk_subtrees = 0;
5079 else if (TREE_CODE_CLASS (code) == tcc_declaration)
5080 *walk_subtrees = 0;
5081 else if (TREE_CODE_CLASS (code) == tcc_constant)
5082 *walk_subtrees = 0;
5083 return NULL_TREE;
5086 /* The SAVE_EXPR pointed to by TP is being copied. If ST contains
5087 information indicating to what new SAVE_EXPR this one should be mapped,
5088 use that one. Otherwise, create a new node and enter it in ST, so that
5089 later copies of the same SAVE_EXPR reuse the new node. */
5091 static void
5092 remap_save_expr (tree *tp, hash_map<tree, tree> *st, int *walk_subtrees)
5094 tree *n;
5095 tree t;
5097 /* See if we already encountered this SAVE_EXPR. */
5098 n = st->get (*tp);
5100 /* If we didn't already remap this SAVE_EXPR, do so now. */
5101 if (!n)
5103 t = copy_node (*tp);
5105 /* Remember this SAVE_EXPR. */
5106 st->put (*tp, t);
5107 /* Make sure we don't remap an already-remapped SAVE_EXPR. */
5108 st->put (t, t);
5110 else
5112 /* We've already walked into this SAVE_EXPR; don't do it again. */
5113 *walk_subtrees = 0;
5114 t = *n;
5117 /* Replace this SAVE_EXPR with the copy. */
5118 *tp = t;
5121 /* Called via walk_gimple_seq. If *GSIP points to a GIMPLE_LABEL for a local
5122 label, copies the declaration and enters it in the decl map in DATA (which
5123 is really a 'copy_body_data *'). */
5125 static tree
5126 mark_local_labels_stmt (gimple_stmt_iterator *gsip,
5127 bool *handled_ops_p ATTRIBUTE_UNUSED,
5128 struct walk_stmt_info *wi)
5130 copy_body_data *id = (copy_body_data *) wi->info;
5131 glabel *stmt = dyn_cast <glabel *> (gsi_stmt (*gsip));
5133 if (stmt)
5135 tree decl = gimple_label_label (stmt);
5137 /* Copy the decl and remember the copy. */
5138 insert_decl_map (id, decl, id->copy_decl (decl, id));
5141 return NULL_TREE;
5145 /* Called via walk_gimple_seq by copy_gimple_seq_and_replace_locals.
5146 Using the decl map ST (really ID->decl_map, a hash_map),
5147 remaps all local declarations to appropriate replacements in gimple
5148 operands. */
5150 static tree
5151 replace_locals_op (tree *tp, int *walk_subtrees, void *data)
5153 struct walk_stmt_info *wi = (struct walk_stmt_info*) data;
5154 copy_body_data *id = (copy_body_data *) wi->info;
5155 hash_map<tree, tree> *st = id->decl_map;
5156 tree *n;
5157 tree expr = *tp;
5159 /* Only a local declaration (variable or label). */
5160 if ((TREE_CODE (expr) == VAR_DECL
5161 && !TREE_STATIC (expr))
5162 || TREE_CODE (expr) == LABEL_DECL)
5164 /* Lookup the declaration. */
5165 n = st->get (expr);
5167 /* If it's there, remap it. */
5168 if (n)
5169 *tp = *n;
5170 *walk_subtrees = 0;
5172 else if (TREE_CODE (expr) == STATEMENT_LIST
5173 || TREE_CODE (expr) == BIND_EXPR
5174 || TREE_CODE (expr) == SAVE_EXPR)
5175 gcc_unreachable ();
5176 else if (TREE_CODE (expr) == TARGET_EXPR)
5178 /* Don't mess with a TARGET_EXPR that hasn't been expanded.
5179 It's OK for this to happen if it was part of a subtree that
5180 isn't immediately expanded, such as operand 2 of another
5181 TARGET_EXPR. */
5182 if (!TREE_OPERAND (expr, 1))
5184 TREE_OPERAND (expr, 1) = TREE_OPERAND (expr, 3);
5185 TREE_OPERAND (expr, 3) = NULL_TREE;
5189 /* Keep iterating. */
5190 return NULL_TREE;
5194 /* Called via walk_gimple_seq by copy_gimple_seq_and_replace_locals.
5195 Using the decl map in ID (really ID->decl_map, a hash_map),
5196 remaps all local declarations to appropriate replacements in gimple
5197 statements. */
5199 static tree
5200 replace_locals_stmt (gimple_stmt_iterator *gsip,
5201 bool *handled_ops_p ATTRIBUTE_UNUSED,
5202 struct walk_stmt_info *wi)
5204 copy_body_data *id = (copy_body_data *) wi->info;
5205 gimple gs = gsi_stmt (*gsip);
5207 if (gbind *stmt = dyn_cast <gbind *> (gs))
5209 tree block = gimple_bind_block (stmt);
5211 if (block)
5213 remap_block (&block, id);
5214 gimple_bind_set_block (stmt, block);
5217 /* This will remap a lot of the same decls again, but this should be
5218 harmless. */
5219 if (gimple_bind_vars (stmt))
5220 gimple_bind_set_vars (stmt, remap_decls (gimple_bind_vars (stmt),
5221 NULL, id));
5224 /* Keep iterating. */
5225 return NULL_TREE;
5229 /* Copies everything in SEQ and replaces variables and labels local to
5230 current_function_decl. */
5232 gimple_seq
5233 copy_gimple_seq_and_replace_locals (gimple_seq seq)
5235 copy_body_data id;
5236 struct walk_stmt_info wi;
5237 gimple_seq copy;
5239 /* There's nothing to do for NULL_TREE. */
5240 if (seq == NULL)
5241 return seq;
5243 /* Set up ID. */
5244 memset (&id, 0, sizeof (id));
5245 id.src_fn = current_function_decl;
5246 id.dst_fn = current_function_decl;
5247 id.decl_map = new hash_map<tree, tree>;
5248 id.debug_map = NULL;
5250 id.copy_decl = copy_decl_no_change;
5251 id.transform_call_graph_edges = CB_CGE_DUPLICATE;
5252 id.transform_new_cfg = false;
5253 id.transform_return_to_modify = false;
5254 id.transform_parameter = false;
5255 id.transform_lang_insert_block = NULL;
5257 /* Walk the tree once to find local labels. */
5258 memset (&wi, 0, sizeof (wi));
5259 hash_set<tree> visited;
5260 wi.info = &id;
5261 wi.pset = &visited;
5262 walk_gimple_seq (seq, mark_local_labels_stmt, NULL, &wi);
5264 copy = gimple_seq_copy (seq);
5266 /* Walk the copy, remapping decls. */
5267 memset (&wi, 0, sizeof (wi));
5268 wi.info = &id;
5269 walk_gimple_seq (copy, replace_locals_stmt, replace_locals_op, &wi);
5271 /* Clean up. */
5272 delete id.decl_map;
5273 if (id.debug_map)
5274 delete id.debug_map;
5275 if (id.dependence_map)
5277 delete id.dependence_map;
5278 id.dependence_map = NULL;
5281 return copy;
5285 /* Allow someone to determine if SEARCH is a child of TOP from gdb. */
5287 static tree
5288 debug_find_tree_1 (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED, void *data)
5290 if (*tp == data)
5291 return (tree) data;
5292 else
5293 return NULL;
5296 DEBUG_FUNCTION bool
5297 debug_find_tree (tree top, tree search)
5299 return walk_tree_without_duplicates (&top, debug_find_tree_1, search) != 0;
5303 /* Declare the variables created by the inliner. Add all the variables in
5304 VARS to BIND_EXPR. */
5306 static void
5307 declare_inline_vars (tree block, tree vars)
5309 tree t;
5310 for (t = vars; t; t = DECL_CHAIN (t))
5312 DECL_SEEN_IN_BIND_EXPR_P (t) = 1;
5313 gcc_assert (!TREE_STATIC (t) && !TREE_ASM_WRITTEN (t));
5314 add_local_decl (cfun, t);
5317 if (block)
5318 BLOCK_VARS (block) = chainon (BLOCK_VARS (block), vars);
5321 /* Copy NODE (which must be a DECL). The DECL originally was in the FROM_FN,
5322 but now it will be in the TO_FN. PARM_TO_VAR means enable PARM_DECL to
5323 VAR_DECL translation. */
5325 static tree
5326 copy_decl_for_dup_finish (copy_body_data *id, tree decl, tree copy)
5328 /* Don't generate debug information for the copy if we wouldn't have
5329 generated it for the original either. */
5330 DECL_ARTIFICIAL (copy) = DECL_ARTIFICIAL (decl);
5331 DECL_IGNORED_P (copy) = DECL_IGNORED_P (decl);
5333 /* Set the DECL_ABSTRACT_ORIGIN so the debugging routines know what
5334 declaration inspired this copy. */
5335 DECL_ABSTRACT_ORIGIN (copy) = DECL_ORIGIN (decl);
5337 /* The new variable/label has no RTL, yet. */
5338 if (CODE_CONTAINS_STRUCT (TREE_CODE (copy), TS_DECL_WRTL)
5339 && !TREE_STATIC (copy) && !DECL_EXTERNAL (copy))
5340 SET_DECL_RTL (copy, 0);
5342 /* These args would always appear unused, if not for this. */
5343 TREE_USED (copy) = 1;
5345 /* Set the context for the new declaration. */
5346 if (!DECL_CONTEXT (decl))
5347 /* Globals stay global. */
5349 else if (DECL_CONTEXT (decl) != id->src_fn)
5350 /* Things that weren't in the scope of the function we're inlining
5351 from aren't in the scope we're inlining to, either. */
5353 else if (TREE_STATIC (decl))
5354 /* Function-scoped static variables should stay in the original
5355 function. */
5357 else
5358 /* Ordinary automatic local variables are now in the scope of the
5359 new function. */
5360 DECL_CONTEXT (copy) = id->dst_fn;
5362 return copy;
5365 static tree
5366 copy_decl_to_var (tree decl, copy_body_data *id)
5368 tree copy, type;
5370 gcc_assert (TREE_CODE (decl) == PARM_DECL
5371 || TREE_CODE (decl) == RESULT_DECL);
5373 type = TREE_TYPE (decl);
5375 copy = build_decl (DECL_SOURCE_LOCATION (id->dst_fn),
5376 VAR_DECL, DECL_NAME (decl), type);
5377 if (DECL_PT_UID_SET_P (decl))
5378 SET_DECL_PT_UID (copy, DECL_PT_UID (decl));
5379 TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (decl);
5380 TREE_READONLY (copy) = TREE_READONLY (decl);
5381 TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (decl);
5382 DECL_GIMPLE_REG_P (copy) = DECL_GIMPLE_REG_P (decl);
5384 return copy_decl_for_dup_finish (id, decl, copy);
5387 /* Like copy_decl_to_var, but create a return slot object instead of a
5388 pointer variable for return by invisible reference. */
5390 static tree
5391 copy_result_decl_to_var (tree decl, copy_body_data *id)
5393 tree copy, type;
5395 gcc_assert (TREE_CODE (decl) == PARM_DECL
5396 || TREE_CODE (decl) == RESULT_DECL);
5398 type = TREE_TYPE (decl);
5399 if (DECL_BY_REFERENCE (decl))
5400 type = TREE_TYPE (type);
5402 copy = build_decl (DECL_SOURCE_LOCATION (id->dst_fn),
5403 VAR_DECL, DECL_NAME (decl), type);
5404 if (DECL_PT_UID_SET_P (decl))
5405 SET_DECL_PT_UID (copy, DECL_PT_UID (decl));
5406 TREE_READONLY (copy) = TREE_READONLY (decl);
5407 TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (decl);
5408 if (!DECL_BY_REFERENCE (decl))
5410 TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (decl);
5411 DECL_GIMPLE_REG_P (copy) = DECL_GIMPLE_REG_P (decl);
5414 return copy_decl_for_dup_finish (id, decl, copy);
5417 tree
5418 copy_decl_no_change (tree decl, copy_body_data *id)
5420 tree copy;
5422 copy = copy_node (decl);
5424 /* The COPY is not abstract; it will be generated in DST_FN. */
5425 DECL_ABSTRACT_P (copy) = false;
5426 lang_hooks.dup_lang_specific_decl (copy);
5428 /* TREE_ADDRESSABLE isn't used to indicate that a label's address has
5429 been taken; it's for internal bookkeeping in expand_goto_internal. */
5430 if (TREE_CODE (copy) == LABEL_DECL)
5432 TREE_ADDRESSABLE (copy) = 0;
5433 LABEL_DECL_UID (copy) = -1;
5436 return copy_decl_for_dup_finish (id, decl, copy);
5439 static tree
5440 copy_decl_maybe_to_var (tree decl, copy_body_data *id)
5442 if (TREE_CODE (decl) == PARM_DECL || TREE_CODE (decl) == RESULT_DECL)
5443 return copy_decl_to_var (decl, id);
5444 else
5445 return copy_decl_no_change (decl, id);
5448 /* Return a copy of the function's argument tree. */
5449 static tree
5450 copy_arguments_for_versioning (tree orig_parm, copy_body_data * id,
5451 bitmap args_to_skip, tree *vars)
5453 tree arg, *parg;
5454 tree new_parm = NULL;
5455 int i = 0;
5457 parg = &new_parm;
5459 for (arg = orig_parm; arg; arg = DECL_CHAIN (arg), i++)
5460 if (!args_to_skip || !bitmap_bit_p (args_to_skip, i))
5462 tree new_tree = remap_decl (arg, id);
5463 if (TREE_CODE (new_tree) != PARM_DECL)
5464 new_tree = id->copy_decl (arg, id);
5465 lang_hooks.dup_lang_specific_decl (new_tree);
5466 *parg = new_tree;
5467 parg = &DECL_CHAIN (new_tree);
5469 else if (!id->decl_map->get (arg))
5471 /* Make an equivalent VAR_DECL. If the argument was used
5472 as a temporary variable later in the function, the uses will be
5473 replaced by this local variable. */
5474 tree var = copy_decl_to_var (arg, id);
5475 insert_decl_map (id, arg, var);
5476 /* Declare this new variable. */
5477 DECL_CHAIN (var) = *vars;
5478 *vars = var;
5480 return new_parm;
5483 /* Return a copy of the function's static chain. */
5484 static tree
5485 copy_static_chain (tree static_chain, copy_body_data * id)
5487 tree *chain_copy, *pvar;
5489 chain_copy = &static_chain;
5490 for (pvar = chain_copy; *pvar; pvar = &DECL_CHAIN (*pvar))
5492 tree new_tree = remap_decl (*pvar, id);
5493 lang_hooks.dup_lang_specific_decl (new_tree);
5494 DECL_CHAIN (new_tree) = DECL_CHAIN (*pvar);
5495 *pvar = new_tree;
5497 return static_chain;
5500 /* Return true if the function is allowed to be versioned.
5501 This is a guard for the versioning functionality. */
5503 bool
5504 tree_versionable_function_p (tree fndecl)
5506 return (!lookup_attribute ("noclone", DECL_ATTRIBUTES (fndecl))
5507 && copy_forbidden (DECL_STRUCT_FUNCTION (fndecl), fndecl) == NULL);
5510 /* Delete all unreachable basic blocks and update callgraph.
5511 Doing so is somewhat nontrivial because we need to update all clones and
5512 remove inline functions that become unreachable. */
5514 static bool
5515 delete_unreachable_blocks_update_callgraph (copy_body_data *id)
5517 bool changed = false;
5518 basic_block b, next_bb;
5520 find_unreachable_blocks ();
5522 /* Delete all unreachable basic blocks. */
5524 for (b = ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb; b
5525 != EXIT_BLOCK_PTR_FOR_FN (cfun); b = next_bb)
5527 next_bb = b->next_bb;
5529 if (!(b->flags & BB_REACHABLE))
5531 gimple_stmt_iterator bsi;
5533 for (bsi = gsi_start_bb (b); !gsi_end_p (bsi); gsi_next (&bsi))
5535 struct cgraph_edge *e;
5536 struct cgraph_node *node;
5538 id->dst_node->remove_stmt_references (gsi_stmt (bsi));
5540 if (gimple_code (gsi_stmt (bsi)) == GIMPLE_CALL
5541 &&(e = id->dst_node->get_edge (gsi_stmt (bsi))) != NULL)
5543 if (!e->inline_failed)
5544 e->callee->remove_symbol_and_inline_clones (id->dst_node);
5545 else
5546 e->remove ();
5548 if (id->transform_call_graph_edges == CB_CGE_MOVE_CLONES
5549 && id->dst_node->clones)
5550 for (node = id->dst_node->clones; node != id->dst_node;)
5552 node->remove_stmt_references (gsi_stmt (bsi));
5553 if (gimple_code (gsi_stmt (bsi)) == GIMPLE_CALL
5554 && (e = node->get_edge (gsi_stmt (bsi))) != NULL)
5556 if (!e->inline_failed)
5557 e->callee->remove_symbol_and_inline_clones (id->dst_node);
5558 else
5559 e->remove ();
5562 if (node->clones)
5563 node = node->clones;
5564 else if (node->next_sibling_clone)
5565 node = node->next_sibling_clone;
5566 else
5568 while (node != id->dst_node && !node->next_sibling_clone)
5569 node = node->clone_of;
5570 if (node != id->dst_node)
5571 node = node->next_sibling_clone;
5575 delete_basic_block (b);
5576 changed = true;
5580 return changed;
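/* Editorial note (not part of the original source).  The loop above, and
   the similar one in update_clone_info below, walk the clone tree rooted
   at id->dst_node without recursion, using the same pattern:

     descend into node->clones when possible,
     otherwise step to node->next_sibling_clone,
     otherwise climb node->clone_of until a sibling exists
     or the root is reached again.

   This visits every transitive clone exactly once using only the
   child/sibling/parent links stored in each cgraph_node.  */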
5583 /* Update clone info after duplication. */
5585 static void
5586 update_clone_info (copy_body_data * id)
5588 struct cgraph_node *node;
5589 if (!id->dst_node->clones)
5590 return;
5591 for (node = id->dst_node->clones; node != id->dst_node;)
5593 /* First update replace maps to match the new body. */
5594 if (node->clone.tree_map)
5596 unsigned int i;
5597 for (i = 0; i < vec_safe_length (node->clone.tree_map); i++)
5599 struct ipa_replace_map *replace_info;
5600 replace_info = (*node->clone.tree_map)[i];
5601 walk_tree (&replace_info->old_tree, copy_tree_body_r, id, NULL);
5602 walk_tree (&replace_info->new_tree, copy_tree_body_r, id, NULL);
5605 if (node->clones)
5606 node = node->clones;
5607 else if (node->next_sibling_clone)
5608 node = node->next_sibling_clone;
5609 else
5611 while (node != id->dst_node && !node->next_sibling_clone)
5612 node = node->clone_of;
5613 if (node != id->dst_node)
5614 node = node->next_sibling_clone;
5619 /* Create a copy of a function's tree.
5620 OLD_DECL and NEW_DECL are FUNCTION_DECL tree nodes
5621 of the original function and the new copied function
5622 respectively. In case we want to replace a DECL
5623 tree with another tree while duplicating the function's
5624 body, TREE_MAP represents the mapping between these
5625 trees. If UPDATE_CLONES is set, the call_stmt fields
5626 of edges of clones of the function will be updated.
5628 If non-NULL, ARGS_TO_SKIP determines which function parameters to remove
5629 from the new version.
5630 If SKIP_RETURN is true, the new version will return void.
5631 If non-NULL, BLOCKS_TO_COPY determines which basic blocks to copy.
5632 If non-NULL, NEW_ENTRY determines the new entry BB of the clone.
5634 void
5635 tree_function_versioning (tree old_decl, tree new_decl,
5636 vec<ipa_replace_map *, va_gc> *tree_map,
5637 bool update_clones, bitmap args_to_skip,
5638 bool skip_return, bitmap blocks_to_copy,
5639 basic_block new_entry)
5641 struct cgraph_node *old_version_node;
5642 struct cgraph_node *new_version_node;
5643 copy_body_data id;
5644 tree p;
5645 unsigned i;
5646 struct ipa_replace_map *replace_info;
5647 basic_block old_entry_block, bb;
5648 auto_vec<gimple, 10> init_stmts;
5649 tree vars = NULL_TREE;
5651 gcc_assert (TREE_CODE (old_decl) == FUNCTION_DECL
5652 && TREE_CODE (new_decl) == FUNCTION_DECL);
5653 DECL_POSSIBLY_INLINED (old_decl) = 1;
5655 old_version_node = cgraph_node::get (old_decl);
5656 gcc_checking_assert (old_version_node);
5657 new_version_node = cgraph_node::get (new_decl);
5658 gcc_checking_assert (new_version_node);
5660 /* Copy over debug args. */
5661 if (DECL_HAS_DEBUG_ARGS_P (old_decl))
5663 vec<tree, va_gc> **new_debug_args, **old_debug_args;
5664 gcc_checking_assert (decl_debug_args_lookup (new_decl) == NULL);
5665 DECL_HAS_DEBUG_ARGS_P (new_decl) = 0;
5666 old_debug_args = decl_debug_args_lookup (old_decl);
5667 if (old_debug_args)
5669 new_debug_args = decl_debug_args_insert (new_decl);
5670 *new_debug_args = vec_safe_copy (*old_debug_args);
5674 /* Output the inlining info for this abstract function, since it has been
5675 inlined. If we don't do this now, we can lose the information about the
5676 variables in the function when the blocks get blown away as soon as we
5677 remove the cgraph node. */
5678 (*debug_hooks->outlining_inline_function) (old_decl);
5680 DECL_ARTIFICIAL (new_decl) = 1;
5681 DECL_ABSTRACT_ORIGIN (new_decl) = DECL_ORIGIN (old_decl);
5682 if (DECL_ORIGIN (old_decl) == old_decl)
5683 old_version_node->used_as_abstract_origin = true;
5684 DECL_FUNCTION_PERSONALITY (new_decl) = DECL_FUNCTION_PERSONALITY (old_decl);
5686 /* Prepare the data structures for the tree copy. */
5687 memset (&id, 0, sizeof (id));
5689 /* Record statements that will need to be folded after copying. */
5690 id.statements_to_fold = new hash_set<gimple>;
5692 id.decl_map = new hash_map<tree, tree>;
5693 id.debug_map = NULL;
5694 id.src_fn = old_decl;
5695 id.dst_fn = new_decl;
5696 id.src_node = old_version_node;
5697 id.dst_node = new_version_node;
5698 id.src_cfun = DECL_STRUCT_FUNCTION (old_decl);
5699 id.blocks_to_copy = blocks_to_copy;
5701 id.copy_decl = copy_decl_no_change;
5702 id.transform_call_graph_edges
5703 = update_clones ? CB_CGE_MOVE_CLONES : CB_CGE_MOVE;
5704 id.transform_new_cfg = true;
5705 id.transform_return_to_modify = false;
5706 id.transform_parameter = false;
5707 id.transform_lang_insert_block = NULL;
5709 old_entry_block = ENTRY_BLOCK_PTR_FOR_FN
5710 (DECL_STRUCT_FUNCTION (old_decl));
5711 DECL_RESULT (new_decl) = DECL_RESULT (old_decl);
5712 DECL_ARGUMENTS (new_decl) = DECL_ARGUMENTS (old_decl);
5713 initialize_cfun (new_decl, old_decl,
5714 old_entry_block->count);
5715 if (DECL_STRUCT_FUNCTION (new_decl)->gimple_df)
5716 DECL_STRUCT_FUNCTION (new_decl)->gimple_df->ipa_pta
5717 = id.src_cfun->gimple_df->ipa_pta;
5719 /* Copy the function's static chain. */
5720 p = DECL_STRUCT_FUNCTION (old_decl)->static_chain_decl;
5721 if (p)
5722 DECL_STRUCT_FUNCTION (new_decl)->static_chain_decl =
5723 copy_static_chain (DECL_STRUCT_FUNCTION (old_decl)->static_chain_decl,
5724 &id);
5726 /* If there's a tree_map, prepare for substitution. */
5727 if (tree_map)
5728 for (i = 0; i < tree_map->length (); i++)
5730 gimple init;
5731 replace_info = (*tree_map)[i];
5732 if (replace_info->replace_p)
5734 if (!replace_info->old_tree)
5736 int i = replace_info->parm_num;
5737 tree parm;
5738 tree req_type;
5740 for (parm = DECL_ARGUMENTS (old_decl); i; parm = DECL_CHAIN (parm))
5741 i--;
5742 replace_info->old_tree = parm;
5743 req_type = TREE_TYPE (parm);
5744 if (!useless_type_conversion_p (req_type, TREE_TYPE (replace_info->new_tree)))
5746 if (fold_convertible_p (req_type, replace_info->new_tree))
5747 replace_info->new_tree = fold_build1 (NOP_EXPR, req_type, replace_info->new_tree);
5748 else if (TYPE_SIZE (req_type) == TYPE_SIZE (TREE_TYPE (replace_info->new_tree)))
5749 replace_info->new_tree = fold_build1 (VIEW_CONVERT_EXPR, req_type, replace_info->new_tree);
5750 else
5752 if (dump_file)
5754 fprintf (dump_file, " const ");
5755 print_generic_expr (dump_file, replace_info->new_tree, 0);
5756 fprintf (dump_file, " can't be converted to param ");
5757 print_generic_expr (dump_file, parm, 0);
5758 fprintf (dump_file, "\n");
5760 replace_info->old_tree = NULL;
5764 else
5765 gcc_assert (TREE_CODE (replace_info->old_tree) == PARM_DECL);
5766 if (replace_info->old_tree)
5768 init = setup_one_parameter (&id, replace_info->old_tree,
5769 replace_info->new_tree, id.src_fn,
5770 NULL,
5771 &vars);
5772 if (init)
5773 init_stmts.safe_push (init);
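/* Editorial example (not part of the original source; the concrete types
   and values are hypothetical).  A sketch of the conversion logic above:
   if a replace map substitutes the constant 7 (an 'int') for a parameter
   of type 'long', fold_convertible_p succeeds and a NOP_EXPR conversion
   to 'long' is built; if the replacement instead has a different but
   same-sized type (say a 32-bit 'float' for a 32-bit 'int' parameter),
   a VIEW_CONVERT_EXPR reinterprets the bits; otherwise the substitution
   is dropped and only noted in the dump file.  */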
5777 /* Copy the function's arguments. */
5778 if (DECL_ARGUMENTS (old_decl) != NULL_TREE)
5779 DECL_ARGUMENTS (new_decl) =
5780 copy_arguments_for_versioning (DECL_ARGUMENTS (old_decl), &id,
5781 args_to_skip, &vars);
5783 DECL_INITIAL (new_decl) = remap_blocks (DECL_INITIAL (id.src_fn), &id);
5784 BLOCK_SUPERCONTEXT (DECL_INITIAL (new_decl)) = new_decl;
5786 declare_inline_vars (DECL_INITIAL (new_decl), vars);
5788 if (!vec_safe_is_empty (DECL_STRUCT_FUNCTION (old_decl)->local_decls))
5789 /* Add local vars. */
5790 add_local_variables (DECL_STRUCT_FUNCTION (old_decl), cfun, &id);
5792 if (DECL_RESULT (old_decl) == NULL_TREE)
5794 else if (skip_return && !VOID_TYPE_P (TREE_TYPE (DECL_RESULT (old_decl))))
5796 DECL_RESULT (new_decl)
5797 = build_decl (DECL_SOURCE_LOCATION (DECL_RESULT (old_decl)),
5798 RESULT_DECL, NULL_TREE, void_type_node);
5799 DECL_CONTEXT (DECL_RESULT (new_decl)) = new_decl;
5800 cfun->returns_struct = 0;
5801 cfun->returns_pcc_struct = 0;
5803 else
5805 tree old_name;
5806 DECL_RESULT (new_decl) = remap_decl (DECL_RESULT (old_decl), &id);
5807 lang_hooks.dup_lang_specific_decl (DECL_RESULT (new_decl));
5808 if (gimple_in_ssa_p (id.src_cfun)
5809 && DECL_BY_REFERENCE (DECL_RESULT (old_decl))
5810 && (old_name = ssa_default_def (id.src_cfun, DECL_RESULT (old_decl))))
5812 tree new_name = make_ssa_name (DECL_RESULT (new_decl));
5813 insert_decl_map (&id, old_name, new_name);
5814 SSA_NAME_DEF_STMT (new_name) = gimple_build_nop ();
5815 set_ssa_default_def (cfun, DECL_RESULT (new_decl), new_name);
5819 /* Set up the destination function's loop tree. */
5820 if (loops_for_fn (DECL_STRUCT_FUNCTION (old_decl)) != NULL)
5822 cfun->curr_properties &= ~PROP_loops;
5823 loop_optimizer_init (AVOID_CFG_MODIFICATIONS);
5824 cfun->curr_properties |= PROP_loops;
5827 /* Copy the function's body. */
5828 copy_body (&id, old_entry_block->count, REG_BR_PROB_BASE,
5829 ENTRY_BLOCK_PTR_FOR_FN (cfun), EXIT_BLOCK_PTR_FOR_FN (cfun),
5830 new_entry);
5832 /* Renumber the lexical scoping (non-code) blocks consecutively. */
5833 number_blocks (new_decl);
5835 /* We want to create the BB unconditionally, so that the addition of
5836 debug stmts doesn't affect BB count, which may in the end cause
5837 codegen differences. */
5838 bb = split_edge (single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
5839 while (init_stmts.length ())
5840 insert_init_stmt (&id, bb, init_stmts.pop ());
5841 update_clone_info (&id);
5843 /* Remap the nonlocal_goto_save_area, if any. */
5844 if (cfun->nonlocal_goto_save_area)
5846 struct walk_stmt_info wi;
5848 memset (&wi, 0, sizeof (wi));
5849 wi.info = &id;
5850 walk_tree (&cfun->nonlocal_goto_save_area, remap_gimple_op_r, &wi, NULL);
5853 /* Clean up. */
5854 delete id.decl_map;
5855 if (id.debug_map)
5856 delete id.debug_map;
5857 free_dominance_info (CDI_DOMINATORS);
5858 free_dominance_info (CDI_POST_DOMINATORS);
5860 fold_marked_statements (0, id.statements_to_fold);
5861 delete id.statements_to_fold;
5862 fold_cond_expr_cond ();
5863 delete_unreachable_blocks_update_callgraph (&id);
5864 if (id.dst_node->definition)
5865 cgraph_edge::rebuild_references ();
5866 if (loops_state_satisfies_p (LOOPS_NEED_FIXUP))
5868 calculate_dominance_info (CDI_DOMINATORS);
5869 fix_loop_structure (NULL);
5871 update_ssa (TODO_update_ssa);
5873 /* After partial cloning we need to rescale frequencies so that they stay
5874 within the proper range in the cloned function. */
5875 if (new_entry)
5877 struct cgraph_edge *e;
5878 rebuild_frequencies ();
5880 new_version_node->count = ENTRY_BLOCK_PTR_FOR_FN (cfun)->count;
5881 for (e = new_version_node->callees; e; e = e->next_callee)
5883 basic_block bb = gimple_bb (e->call_stmt);
5884 e->frequency = compute_call_stmt_bb_frequency (current_function_decl,
5885 bb);
5886 e->count = bb->count;
5888 for (e = new_version_node->indirect_calls; e; e = e->next_callee)
5890 basic_block bb = gimple_bb (e->call_stmt);
5891 e->frequency = compute_call_stmt_bb_frequency (current_function_decl,
5892 bb);
5893 e->count = bb->count;
5897 free_dominance_info (CDI_DOMINATORS);
5898 free_dominance_info (CDI_POST_DOMINATORS);
5900 gcc_assert (!id.debug_stmts.exists ());
5901 pop_cfun ();
5902 return;
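/* Editorial note (not part of the original source).  Roughly speaking,
   tree_function_versioning is the workhorse behind cgraph-based cloning:
   a caller first creates the new FUNCTION_DECL and cgraph node, checks
   tree_versionable_function_p, and then invokes something like

     tree_function_versioning (old_decl, new_decl, tree_map,
                               /*update_clones=*/false, args_to_skip,
                               /*skip_return=*/false, NULL, NULL);

   Materializing existing clones instead passes update_clones=true so the
   call_stmt fields of the clones' edges are updated in place.  */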
5905 /* EXP is a CALL_EXPR present in a GENERIC expression tree. Try to integrate
5906 the callee and return the inlined body on success. */
5908 tree
5909 maybe_inline_call_in_expr (tree exp)
5911 tree fn = get_callee_fndecl (exp);
5913 /* We can only try to inline "const" functions. */
5914 if (fn && TREE_READONLY (fn) && DECL_SAVED_TREE (fn))
5916 call_expr_arg_iterator iter;
5917 copy_body_data id;
5918 tree param, arg, t;
5919 hash_map<tree, tree> decl_map;
5921 /* Remap the parameters. */
5922 for (param = DECL_ARGUMENTS (fn), arg = first_call_expr_arg (exp, &iter);
5923 param;
5924 param = DECL_CHAIN (param), arg = next_call_expr_arg (&iter))
5925 decl_map.put (param, arg);
5927 memset (&id, 0, sizeof (id));
5928 id.src_fn = fn;
5929 id.dst_fn = current_function_decl;
5930 id.src_cfun = DECL_STRUCT_FUNCTION (fn);
5931 id.decl_map = &decl_map;
5933 id.copy_decl = copy_decl_no_change;
5934 id.transform_call_graph_edges = CB_CGE_DUPLICATE;
5935 id.transform_new_cfg = false;
5936 id.transform_return_to_modify = true;
5937 id.transform_parameter = true;
5938 id.transform_lang_insert_block = NULL;
5940 /* Make sure not to unshare trees behind the front-end's back
5941 since front-end specific mechanisms may rely on sharing. */
5942 id.regimplify = false;
5943 id.do_not_unshare = true;
5945 /* We're not inside any EH region. */
5946 id.eh_lp_nr = 0;
5948 t = copy_tree_body (&id);
5950 /* We can only return something suitable for use in a GENERIC
5951 expression tree. */
5952 if (TREE_CODE (t) == MODIFY_EXPR)
5953 return TREE_OPERAND (t, 1);
5956 return NULL_TREE;
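/* Editorial example (not part of the original source; 'square' is
   hypothetical).  A case the function above can handle:

     static int square (int x) __attribute__ ((const));
     static int square (int x) { return x * x; }

   A CALL_EXPR 'square (a)' appearing in a GENERIC tree can be replaced by
   the value computed from the copied body, here 'a * a', because the
   callee is readonly ("const") and its DECL_SAVED_TREE is still available.
   The result is usable only when the copied body reduces to a single
   MODIFY_EXPR of the return value.  */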
5959 /* Duplicate a type, fields and all. */
5961 tree
5962 build_duplicate_type (tree type)
5964 struct copy_body_data id;
5966 memset (&id, 0, sizeof (id));
5967 id.src_fn = current_function_decl;
5968 id.dst_fn = current_function_decl;
5969 id.src_cfun = cfun;
5970 id.decl_map = new hash_map<tree, tree>;
5971 id.debug_map = NULL;
5972 id.copy_decl = copy_decl_no_change;
5974 type = remap_type_1 (type, &id);
5976 delete id.decl_map;
5977 if (id.debug_map)
5978 delete id.debug_map;
5980 TYPE_CANONICAL (type) = type;
5982 return type;
5985 /* Unshare the entire DECL_SAVED_TREE of FN and return it, storing the
5986 remapped parameters and RESULT_DECL in PARMS and RESULT. Used by C++
5987 constexpr evaluation. */
5989 tree
5990 copy_fn (tree fn, tree& parms, tree& result)
5992 copy_body_data id;
5993 tree param;
5994 hash_map<tree, tree> decl_map;
5996 tree *p = &parms;
5997 *p = NULL_TREE;
5999 memset (&id, 0, sizeof (id));
6000 id.src_fn = fn;
6001 id.dst_fn = current_function_decl;
6002 id.src_cfun = DECL_STRUCT_FUNCTION (fn);
6003 id.decl_map = &decl_map;
6005 id.copy_decl = copy_decl_no_change;
6006 id.transform_call_graph_edges = CB_CGE_DUPLICATE;
6007 id.transform_new_cfg = false;
6008 id.transform_return_to_modify = false;
6009 id.transform_parameter = true;
6010 id.transform_lang_insert_block = NULL;
6012 /* Make sure not to unshare trees behind the front-end's back
6013 since front-end specific mechanisms may rely on sharing. */
6014 id.regimplify = false;
6015 id.do_not_unshare = true;
6017 /* We're not inside any EH region. */
6018 id.eh_lp_nr = 0;
6020 /* Remap the parameters and result and return them to the caller. */
6021 for (param = DECL_ARGUMENTS (fn);
6022 param;
6023 param = DECL_CHAIN (param))
6025 *p = remap_decl (param, &id);
6026 p = &DECL_CHAIN (*p);
6029 if (DECL_RESULT (fn))
6030 result = remap_decl (DECL_RESULT (fn), &id);
6031 else
6032 result = NULL_TREE;
6034 return copy_tree_body (&id);
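/* Editorial example (not part of the original source; 'fib' is
   hypothetical).  As the comment above notes, copy_fn supports the C++
   constexpr evaluator: for something like

     constexpr int fib (int n)
     { return n < 2 ? n : fib (n - 1) + fib (n - 2); }

   roughly, each active compile-time call of 'fib' works on its own
   unshared copy of the body obtained through copy_fn, together with the
   remapped PARM_DECLs and RESULT_DECL, so bindings from one evaluation
   cannot leak into another through shared trees.  */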