[official-gcc.git] / gcc / tree-inline.c
1 /* Tree inlining.
2 Copyright (C) 2001-2014 Free Software Foundation, Inc.
3 Contributed by Alexandre Oliva <aoliva@redhat.com>
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
10 any later version.
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "tm.h"
25 #include "diagnostic-core.h"
26 #include "tree.h"
27 #include "stor-layout.h"
28 #include "calls.h"
29 #include "tree-inline.h"
30 #include "flags.h"
31 #include "params.h"
32 #include "input.h"
33 #include "insn-config.h"
34 #include "hashtab.h"
35 #include "langhooks.h"
36 #include "predict.h"
37 #include "vec.h"
38 #include "hash-set.h"
39 #include "machmode.h"
40 #include "hard-reg-set.h"
41 #include "function.h"
42 #include "dominance.h"
43 #include "cfg.h"
44 #include "cfganal.h"
45 #include "basic-block.h"
46 #include "tree-iterator.h"
47 #include "intl.h"
48 #include "tree-ssa-alias.h"
49 #include "internal-fn.h"
50 #include "gimple-fold.h"
51 #include "tree-eh.h"
52 #include "gimple-expr.h"
53 #include "is-a.h"
54 #include "gimple.h"
55 #include "gimplify.h"
56 #include "gimple-iterator.h"
57 #include "gimplify-me.h"
58 #include "gimple-walk.h"
59 #include "gimple-ssa.h"
60 #include "tree-cfg.h"
61 #include "tree-phinodes.h"
62 #include "ssa-iterators.h"
63 #include "stringpool.h"
64 #include "tree-ssanames.h"
65 #include "tree-into-ssa.h"
66 #include "expr.h"
67 #include "tree-dfa.h"
68 #include "tree-ssa.h"
69 #include "tree-pretty-print.h"
70 #include "except.h"
71 #include "debug.h"
72 #include "hash-map.h"
73 #include "plugin-api.h"
74 #include "ipa-ref.h"
75 #include "cgraph.h"
76 #include "alloc-pool.h"
77 #include "ipa-prop.h"
78 #include "value-prof.h"
79 #include "tree-pass.h"
80 #include "target.h"
81 #include "cfgloop.h"
82 #include "builtins.h"
83 #include "tree-chkp.h"
85 #include "rtl.h" /* FIXME: For asm_str_count. */
87 /* I'm not really happy about this, but we need to handle gimple and
88 non-gimple trees. */
90 /* Inlining, Cloning, Versioning, Parallelization
92 Inlining: a function body is duplicated, but the PARM_DECLs are
93 remapped into VAR_DECLs, and non-void RETURN_EXPRs become
94 MODIFY_EXPRs that store to a dedicated returned-value variable.
95 The duplicated eh_region info of the copy will later be appended
96 to the info for the caller; the eh_region info in copied throwing
97 statements and RESX statements are adjusted accordingly.
99 Cloning: (only in C++) We have one body for a con/de/structor, and
100 multiple function decls, each with a unique parameter list.
101 Duplicate the body, using the given splay tree; some parameters
102 will become constants (like 0 or 1).
104 Versioning: a function body is duplicated, and the result is a new
105 function rather than being copied into blocks of an existing function
106 as with inlining. Some parameters will become constants.
108 Parallelization: a region of a function is duplicated resulting in
109 a new function. Variables may be replaced with complex expressions
110 to enable shared variable semantics.
112 All of these will simultaneously look up any callgraph edges. If
113 we're going to inline the duplicated function body, and the given
114 function has some cloned callgraph nodes (one for each place this
115 function will be inlined), those callgraph edges will be duplicated.
116 If we're cloning the body, those callgraph edges will be
117 updated to point into the new body. (Note that the original
118 callgraph node and edge list will not be altered.)
120 See the CALL_EXPR handling case in copy_tree_body_r (). */
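/* A minimal illustrative sketch (hypothetical example functions, not GCC
   internals) of the source-level effect of inlining described above: the
   callee's PARM_DECL becomes a local VAR_DECL in the caller and the
   non-void RETURN_EXPR becomes an assignment to a return-value variable.  */

static int
example_callee (int x)
{
  return x + 1;
}

static int
example_caller_before (int y)
{
  return example_callee (y);
}

static int
example_caller_after_inlining (int y)
{
  int retval;
  {
    int x = y;       /* PARM_DECL remapped to a VAR_DECL.  */
    retval = x + 1;  /* RETURN_EXPR becomes a MODIFY_EXPR into RETVAL.  */
  }
  return retval;
}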
122 /* To Do:
124 o In order to make inlining-on-trees work, we pessimized
125 function-local static constants. In particular, they are now
126 always output, even when not addressed. Fix this by treating
127 function-local static constants just like global static
128 constants; the back-end already knows not to output them if they
129 are not needed.
131 o Provide heuristics to clamp inlining of recursive template
132 calls? */
135 /* Weights that estimate_num_insns uses to estimate the size of the
136 produced code. */
138 eni_weights eni_size_weights;
140 /* Weights that estimate_num_insns uses to estimate the time necessary
141 to execute the produced code. */
143 eni_weights eni_time_weights;
145 /* Prototypes. */
147 static tree declare_return_variable (copy_body_data *, tree, tree, tree,
148 basic_block);
149 static void remap_block (tree *, copy_body_data *);
150 static void copy_bind_expr (tree *, int *, copy_body_data *);
151 static void declare_inline_vars (tree, tree);
152 static void remap_save_expr (tree *, hash_map<tree, tree> *, int *);
153 static void prepend_lexical_block (tree current_block, tree new_block);
154 static tree copy_decl_to_var (tree, copy_body_data *);
155 static tree copy_result_decl_to_var (tree, copy_body_data *);
156 static tree copy_decl_maybe_to_var (tree, copy_body_data *);
157 static gimple_seq remap_gimple_stmt (gimple, copy_body_data *);
158 static bool delete_unreachable_blocks_update_callgraph (copy_body_data *id);
159 static void insert_init_stmt (copy_body_data *, basic_block, gimple);
161 /* Insert a tree->tree mapping for ID. Although the name suggests
162 that the trees should be variables, it is used for more than that. */
164 void
165 insert_decl_map (copy_body_data *id, tree key, tree value)
167 id->decl_map->put (key, value);
169 /* Always insert an identity map as well. If we see this same new
170 node again, we won't want to duplicate it a second time. */
171 if (key != value)
172 id->decl_map->put (value, value);
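/* A standalone sketch (hypothetical tiny_map helpers, not the GCC hash_map
   API) of the identity-map idea above: once ORIG is mapped to COPY, COPY is
   also mapped to itself, so seeing the new node again does not create a
   second copy.  Capacity checks are omitted; this is illustration only.  */

struct tiny_map
{
  void *keys[64];
  void *vals[64];
  int n;
};

static void
tiny_map_put (struct tiny_map *m, void *key, void *val)
{
  m->keys[m->n] = key;
  m->vals[m->n] = val;
  m->n++;
}

static void
example_record_mapping (struct tiny_map *m, void *orig, void *copy)
{
  tiny_map_put (m, orig, copy);
  if (orig != copy)
    tiny_map_put (m, copy, copy);  /* Identity entry, as in insert_decl_map.  */
}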
175 /* Insert a tree->tree mapping for ID. This is only used for
176 variables. */
178 static void
179 insert_debug_decl_map (copy_body_data *id, tree key, tree value)
181 if (!gimple_in_ssa_p (id->src_cfun))
182 return;
184 if (!MAY_HAVE_DEBUG_STMTS)
185 return;
187 if (!target_for_debug_bind (key))
188 return;
190 gcc_assert (TREE_CODE (key) == PARM_DECL);
191 gcc_assert (TREE_CODE (value) == VAR_DECL);
193 if (!id->debug_map)
194 id->debug_map = new hash_map<tree, tree>;
196 id->debug_map->put (key, value);
199 /* If nonzero, we're remapping the contents of inlined debug
200 statements. If negative, an error has occurred, such as a
201 reference to a variable that isn't available in the inlined
202 context. */
203 static int processing_debug_stmt = 0;
205 /* Construct new SSA name for old NAME. ID is the inline context. */
207 static tree
208 remap_ssa_name (tree name, copy_body_data *id)
210 tree new_tree, var;
211 tree *n;
213 gcc_assert (TREE_CODE (name) == SSA_NAME);
215 n = id->decl_map->get (name);
216 if (n)
217 return unshare_expr (*n);
219 if (processing_debug_stmt)
221 if (SSA_NAME_IS_DEFAULT_DEF (name)
222 && TREE_CODE (SSA_NAME_VAR (name)) == PARM_DECL
223 && id->entry_bb == NULL
224 && single_succ_p (ENTRY_BLOCK_PTR_FOR_FN (cfun)))
226 tree vexpr = make_node (DEBUG_EXPR_DECL);
227 gimple def_temp;
228 gimple_stmt_iterator gsi;
229 tree val = SSA_NAME_VAR (name);
231 n = id->decl_map->get (val);
232 if (n != NULL)
233 val = *n;
234 if (TREE_CODE (val) != PARM_DECL)
236 processing_debug_stmt = -1;
237 return name;
239 def_temp = gimple_build_debug_source_bind (vexpr, val, NULL);
240 DECL_ARTIFICIAL (vexpr) = 1;
241 TREE_TYPE (vexpr) = TREE_TYPE (name);
242 DECL_MODE (vexpr) = DECL_MODE (SSA_NAME_VAR (name));
243 gsi = gsi_after_labels (single_succ (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
244 gsi_insert_before (&gsi, def_temp, GSI_SAME_STMT);
245 return vexpr;
248 processing_debug_stmt = -1;
249 return name;
252 /* Remap anonymous SSA names or SSA names of anonymous decls. */
253 var = SSA_NAME_VAR (name);
254 if (!var
255 || (!SSA_NAME_IS_DEFAULT_DEF (name)
256 && TREE_CODE (var) == VAR_DECL
257 && !VAR_DECL_IS_VIRTUAL_OPERAND (var)
258 && DECL_ARTIFICIAL (var)
259 && DECL_IGNORED_P (var)
260 && !DECL_NAME (var)))
262 struct ptr_info_def *pi;
263 new_tree = make_ssa_name (remap_type (TREE_TYPE (name), id), NULL);
264 if (!var && SSA_NAME_IDENTIFIER (name))
265 SET_SSA_NAME_VAR_OR_IDENTIFIER (new_tree, SSA_NAME_IDENTIFIER (name));
266 insert_decl_map (id, name, new_tree);
267 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (new_tree)
268 = SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name);
269 /* At least IPA points-to info can be directly transferred. */
270 if (id->src_cfun->gimple_df
271 && id->src_cfun->gimple_df->ipa_pta
272 && (pi = SSA_NAME_PTR_INFO (name))
273 && !pi->pt.anything)
275 struct ptr_info_def *new_pi = get_ptr_info (new_tree);
276 new_pi->pt = pi->pt;
278 return new_tree;
281 /* Do not set DEF_STMT yet as statement is not copied yet. We do that
282 in copy_bb. */
283 new_tree = remap_decl (var, id);
285 /* We might've substituted a constant or another SSA_NAME for
286 the variable.
288 Replace the SSA name representing RESULT_DECL with the variable during
289 inlining: this saves us from the need to introduce a PHI node in case
290 the return value is only partly initialized. */
291 if ((TREE_CODE (new_tree) == VAR_DECL || TREE_CODE (new_tree) == PARM_DECL)
292 && (!SSA_NAME_VAR (name)
293 || TREE_CODE (SSA_NAME_VAR (name)) != RESULT_DECL
294 || !id->transform_return_to_modify))
296 struct ptr_info_def *pi;
297 new_tree = make_ssa_name (new_tree, NULL);
298 insert_decl_map (id, name, new_tree);
299 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (new_tree)
300 = SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name);
301 /* At least IPA points-to info can be directly transferred. */
302 if (id->src_cfun->gimple_df
303 && id->src_cfun->gimple_df->ipa_pta
304 && (pi = SSA_NAME_PTR_INFO (name))
305 && !pi->pt.anything)
307 struct ptr_info_def *new_pi = get_ptr_info (new_tree);
308 new_pi->pt = pi->pt;
310 if (SSA_NAME_IS_DEFAULT_DEF (name))
312 /* By inlining a function having an uninitialized variable, we might
313 extend its lifetime (the variable might get reused). This causes an
314 ICE in the case we end up extending the lifetime of an SSA name across
315 an abnormal edge, and it also increases register pressure.
317 We simply initialize all uninitialized vars by 0, except
318 for the case where we are inlining into the very first BB. We could avoid
319 this for all BBs that are not inside strongly connected
320 regions of the CFG, but this is expensive to test. */
321 if (id->entry_bb
322 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name)
323 && (!SSA_NAME_VAR (name)
324 || TREE_CODE (SSA_NAME_VAR (name)) != PARM_DECL)
325 && (id->entry_bb != EDGE_SUCC (ENTRY_BLOCK_PTR_FOR_FN (cfun),
326 0)->dest
327 || EDGE_COUNT (id->entry_bb->preds) != 1))
329 gimple_stmt_iterator gsi = gsi_last_bb (id->entry_bb);
330 gimple init_stmt;
331 tree zero = build_zero_cst (TREE_TYPE (new_tree));
333 init_stmt = gimple_build_assign (new_tree, zero);
334 gsi_insert_after (&gsi, init_stmt, GSI_NEW_STMT);
335 SSA_NAME_IS_DEFAULT_DEF (new_tree) = 0;
337 else
339 SSA_NAME_DEF_STMT (new_tree) = gimple_build_nop ();
340 set_ssa_default_def (cfun, SSA_NAME_VAR (new_tree), new_tree);
344 else
345 insert_decl_map (id, name, new_tree);
346 return new_tree;
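/* A hypothetical source-level example (plain C, not GCC internals) of the
   situation the zero-initialization above guards against: TMP has an
   uninitialized default definition in the callee, and inlining would
   otherwise extend that undefined lifetime into the caller.  */

static int
example_maybe_uninit (int flag)
{
  int tmp;        /* Becomes a default-def SSA name in SSA form.  */
  if (flag)
    tmp = 42;
  /* When inlined into a non-entry block, the copied default definition
     is seeded with 0 instead of being left undefined.  */
  return flag ? tmp : 0;
}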
349 /* Remap DECL during the copying of the BLOCK tree for the function. */
351 tree
352 remap_decl (tree decl, copy_body_data *id)
354 tree *n;
356 /* We only remap local variables in the current function. */
358 /* See if we have remapped this declaration. */
360 n = id->decl_map->get (decl);
362 if (!n && processing_debug_stmt)
364 processing_debug_stmt = -1;
365 return decl;
368 /* If we didn't already have an equivalent for this declaration,
369 create one now. */
370 if (!n)
372 /* Make a copy of the variable or label. */
373 tree t = id->copy_decl (decl, id);
375 /* Remember it, so that if we encounter this local entity again
376 we can reuse this copy. Do this early because remap_type may
377 need this decl for TYPE_STUB_DECL. */
378 insert_decl_map (id, decl, t);
380 if (!DECL_P (t))
381 return t;
383 /* Remap types, if necessary. */
384 TREE_TYPE (t) = remap_type (TREE_TYPE (t), id);
385 if (TREE_CODE (t) == TYPE_DECL)
386 DECL_ORIGINAL_TYPE (t) = remap_type (DECL_ORIGINAL_TYPE (t), id);
388 /* Remap sizes as necessary. */
389 walk_tree (&DECL_SIZE (t), copy_tree_body_r, id, NULL);
390 walk_tree (&DECL_SIZE_UNIT (t), copy_tree_body_r, id, NULL);
392 /* If fields, do likewise for offset and qualifier. */
393 if (TREE_CODE (t) == FIELD_DECL)
395 walk_tree (&DECL_FIELD_OFFSET (t), copy_tree_body_r, id, NULL);
396 if (TREE_CODE (DECL_CONTEXT (t)) == QUAL_UNION_TYPE)
397 walk_tree (&DECL_QUALIFIER (t), copy_tree_body_r, id, NULL);
400 return t;
403 if (id->do_not_unshare)
404 return *n;
405 else
406 return unshare_expr (*n);
409 static tree
410 remap_type_1 (tree type, copy_body_data *id)
412 tree new_tree, t;
414 /* We do need a copy. Build and register it now. If this is a pointer or
415 reference type, remap the designated type and make a new pointer or
416 reference type. */
417 if (TREE_CODE (type) == POINTER_TYPE)
419 new_tree = build_pointer_type_for_mode (remap_type (TREE_TYPE (type), id),
420 TYPE_MODE (type),
421 TYPE_REF_CAN_ALIAS_ALL (type));
422 if (TYPE_ATTRIBUTES (type) || TYPE_QUALS (type))
423 new_tree = build_type_attribute_qual_variant (new_tree,
424 TYPE_ATTRIBUTES (type),
425 TYPE_QUALS (type));
426 insert_decl_map (id, type, new_tree);
427 return new_tree;
429 else if (TREE_CODE (type) == REFERENCE_TYPE)
431 new_tree = build_reference_type_for_mode (remap_type (TREE_TYPE (type), id),
432 TYPE_MODE (type),
433 TYPE_REF_CAN_ALIAS_ALL (type));
434 if (TYPE_ATTRIBUTES (type) || TYPE_QUALS (type))
435 new_tree = build_type_attribute_qual_variant (new_tree,
436 TYPE_ATTRIBUTES (type),
437 TYPE_QUALS (type));
438 insert_decl_map (id, type, new_tree);
439 return new_tree;
441 else
442 new_tree = copy_node (type);
444 insert_decl_map (id, type, new_tree);
446 /* This is a new type, not a copy of an old type. Need to reassociate
447 variants. We can handle everything except the main variant lazily. */
448 t = TYPE_MAIN_VARIANT (type);
449 if (type != t)
451 t = remap_type (t, id);
452 TYPE_MAIN_VARIANT (new_tree) = t;
453 TYPE_NEXT_VARIANT (new_tree) = TYPE_NEXT_VARIANT (t);
454 TYPE_NEXT_VARIANT (t) = new_tree;
456 else
458 TYPE_MAIN_VARIANT (new_tree) = new_tree;
459 TYPE_NEXT_VARIANT (new_tree) = NULL;
462 if (TYPE_STUB_DECL (type))
463 TYPE_STUB_DECL (new_tree) = remap_decl (TYPE_STUB_DECL (type), id);
465 /* Lazily create pointer and reference types. */
466 TYPE_POINTER_TO (new_tree) = NULL;
467 TYPE_REFERENCE_TO (new_tree) = NULL;
469 /* Copy all types that may contain references to local variables; be sure to
470 preserve sharing in between type and its main variant when possible. */
471 switch (TREE_CODE (new_tree))
473 case INTEGER_TYPE:
474 case REAL_TYPE:
475 case FIXED_POINT_TYPE:
476 case ENUMERAL_TYPE:
477 case BOOLEAN_TYPE:
478 if (TYPE_MAIN_VARIANT (new_tree) != new_tree)
480 gcc_checking_assert (TYPE_MIN_VALUE (type) == TYPE_MIN_VALUE (TYPE_MAIN_VARIANT (type)));
481 gcc_checking_assert (TYPE_MAX_VALUE (type) == TYPE_MAX_VALUE (TYPE_MAIN_VARIANT (type)));
483 TYPE_MIN_VALUE (new_tree) = TYPE_MIN_VALUE (TYPE_MAIN_VARIANT (new_tree));
484 TYPE_MAX_VALUE (new_tree) = TYPE_MAX_VALUE (TYPE_MAIN_VARIANT (new_tree));
486 else
488 t = TYPE_MIN_VALUE (new_tree);
489 if (t && TREE_CODE (t) != INTEGER_CST)
490 walk_tree (&TYPE_MIN_VALUE (new_tree), copy_tree_body_r, id, NULL);
492 t = TYPE_MAX_VALUE (new_tree);
493 if (t && TREE_CODE (t) != INTEGER_CST)
494 walk_tree (&TYPE_MAX_VALUE (new_tree), copy_tree_body_r, id, NULL);
496 return new_tree;
498 case FUNCTION_TYPE:
499 if (TYPE_MAIN_VARIANT (new_tree) != new_tree
500 && TREE_TYPE (type) == TREE_TYPE (TYPE_MAIN_VARIANT (type)))
501 TREE_TYPE (new_tree) = TREE_TYPE (TYPE_MAIN_VARIANT (new_tree));
502 else
503 TREE_TYPE (new_tree) = remap_type (TREE_TYPE (new_tree), id);
504 if (TYPE_MAIN_VARIANT (new_tree) != new_tree
505 && TYPE_ARG_TYPES (type) == TYPE_ARG_TYPES (TYPE_MAIN_VARIANT (type)))
506 TYPE_ARG_TYPES (new_tree) = TYPE_ARG_TYPES (TYPE_MAIN_VARIANT (new_tree));
507 else
508 walk_tree (&TYPE_ARG_TYPES (new_tree), copy_tree_body_r, id, NULL);
509 return new_tree;
511 case ARRAY_TYPE:
512 if (TYPE_MAIN_VARIANT (new_tree) != new_tree
513 && TREE_TYPE (type) == TREE_TYPE (TYPE_MAIN_VARIANT (type)))
514 TREE_TYPE (new_tree) = TREE_TYPE (TYPE_MAIN_VARIANT (new_tree));
515 else
516 TREE_TYPE (new_tree) = remap_type (TREE_TYPE (new_tree), id);
518 if (TYPE_MAIN_VARIANT (new_tree) != new_tree)
520 gcc_checking_assert (TYPE_DOMAIN (type) == TYPE_DOMAIN (TYPE_MAIN_VARIANT (type)));
521 TYPE_DOMAIN (new_tree) = TYPE_DOMAIN (TYPE_MAIN_VARIANT (new_tree));
523 else
524 TYPE_DOMAIN (new_tree) = remap_type (TYPE_DOMAIN (new_tree), id);
525 break;
527 case RECORD_TYPE:
528 case UNION_TYPE:
529 case QUAL_UNION_TYPE:
530 if (TYPE_MAIN_VARIANT (type) != type
531 && TYPE_FIELDS (type) == TYPE_FIELDS (TYPE_MAIN_VARIANT (type)))
532 TYPE_FIELDS (new_tree) = TYPE_FIELDS (TYPE_MAIN_VARIANT (new_tree));
533 else
535 tree f, nf = NULL;
537 for (f = TYPE_FIELDS (new_tree); f ; f = DECL_CHAIN (f))
539 t = remap_decl (f, id);
540 DECL_CONTEXT (t) = new_tree;
541 DECL_CHAIN (t) = nf;
542 nf = t;
544 TYPE_FIELDS (new_tree) = nreverse (nf);
546 break;
548 case OFFSET_TYPE:
549 default:
550 /* Shouldn't have been thought variable sized. */
551 gcc_unreachable ();
554 /* All variants of the type share the same size, so use the already remapped data. */
555 if (TYPE_MAIN_VARIANT (new_tree) != new_tree)
557 gcc_checking_assert (TYPE_SIZE (type) == TYPE_SIZE (TYPE_MAIN_VARIANT (type)));
558 gcc_checking_assert (TYPE_SIZE_UNIT (type) == TYPE_SIZE_UNIT (TYPE_MAIN_VARIANT (type)));
560 TYPE_SIZE (new_tree) = TYPE_SIZE (TYPE_MAIN_VARIANT (new_tree));
561 TYPE_SIZE_UNIT (new_tree) = TYPE_SIZE_UNIT (TYPE_MAIN_VARIANT (new_tree));
563 else
565 walk_tree (&TYPE_SIZE (new_tree), copy_tree_body_r, id, NULL);
566 walk_tree (&TYPE_SIZE_UNIT (new_tree), copy_tree_body_r, id, NULL);
569 return new_tree;
572 tree
573 remap_type (tree type, copy_body_data *id)
575 tree *node;
576 tree tmp;
578 if (type == NULL)
579 return type;
581 /* See if we have remapped this type. */
582 node = id->decl_map->get (type);
583 if (node)
584 return *node;
586 /* The type only needs remapping if it's variably modified. */
587 if (! variably_modified_type_p (type, id->src_fn))
589 insert_decl_map (id, type, type);
590 return type;
593 id->remapping_type_depth++;
594 tmp = remap_type_1 (type, id);
595 id->remapping_type_depth--;
597 return tmp;
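/* A hypothetical example (plain C, not GCC internals) of a variably
   modified type: the array type below depends on the parameter N, so the
   type itself must be remapped when the function is inlined, whereas an
   ordinary type such as 'int' simply maps to itself.  */

static int
example_vla_sum (int n)
{
  int buf[n];     /* Type 'int[n]' is variably modified.  */
  int i, total = 0;
  for (i = 0; i < n; i++)
    {
      buf[i] = i;
      total += buf[i];
    }
  return total;
}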
600 /* Decide if DECL can be put into BLOCK_NONLOCAL_VARs. */
602 static bool
603 can_be_nonlocal (tree decl, copy_body_data *id)
605 /* We cannot duplicate function decls. */
606 if (TREE_CODE (decl) == FUNCTION_DECL)
607 return true;
609 /* Local static vars must be non-local or we get multiple declaration
610 problems. */
611 if (TREE_CODE (decl) == VAR_DECL
612 && !auto_var_in_fn_p (decl, id->src_fn))
613 return true;
615 return false;
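/* A hypothetical source-level example (not GCC internals) of why local
   statics are treated as nonlocal above: CALLS must remain a single object
   no matter how many times the function is inlined, so the decl is shared
   rather than duplicated per copy.  */

static int
example_counter (void)
{
  static int calls = 0;  /* One object shared by every inlined copy.  */
  return ++calls;
}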
618 static tree
619 remap_decls (tree decls, vec<tree, va_gc> **nonlocalized_list,
620 copy_body_data *id)
622 tree old_var;
623 tree new_decls = NULL_TREE;
625 /* Remap its variables. */
626 for (old_var = decls; old_var; old_var = DECL_CHAIN (old_var))
628 tree new_var;
630 if (can_be_nonlocal (old_var, id))
632 /* We need to add this variable to the local decls as otherwise
633 nothing else will do so. */
634 if (TREE_CODE (old_var) == VAR_DECL
635 && ! DECL_EXTERNAL (old_var))
636 add_local_decl (cfun, old_var);
637 if ((!optimize || debug_info_level > DINFO_LEVEL_TERSE)
638 && !DECL_IGNORED_P (old_var)
639 && nonlocalized_list)
640 vec_safe_push (*nonlocalized_list, old_var);
641 continue;
644 /* Remap the variable. */
645 new_var = remap_decl (old_var, id);
647 /* If we didn't remap this variable, we can't mess with its
648 TREE_CHAIN. If we remapped this variable to the return slot, it's
649 already declared somewhere else, so don't declare it here. */
651 if (new_var == id->retvar)
653 else if (!new_var)
655 if ((!optimize || debug_info_level > DINFO_LEVEL_TERSE)
656 && !DECL_IGNORED_P (old_var)
657 && nonlocalized_list)
658 vec_safe_push (*nonlocalized_list, old_var);
660 else
662 gcc_assert (DECL_P (new_var));
663 DECL_CHAIN (new_var) = new_decls;
664 new_decls = new_var;
666 /* Also copy value-expressions. */
667 if (TREE_CODE (new_var) == VAR_DECL
668 && DECL_HAS_VALUE_EXPR_P (new_var))
670 tree tem = DECL_VALUE_EXPR (new_var);
671 bool old_regimplify = id->regimplify;
672 id->remapping_type_depth++;
673 walk_tree (&tem, copy_tree_body_r, id, NULL);
674 id->remapping_type_depth--;
675 id->regimplify = old_regimplify;
676 SET_DECL_VALUE_EXPR (new_var, tem);
681 return nreverse (new_decls);
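/* A standalone sketch (hypothetical node type, not GCC trees) of the
   prepend-then-reverse idiom used above: new decls are chained onto the
   front of the list as they are created, and a single reversing pass at
   the end (nreverse) restores the original order.  */

struct example_node
{
  struct example_node *next;
};

static struct example_node *
example_nreverse (struct example_node *head)
{
  struct example_node *prev = 0;
  while (head)
    {
      struct example_node *next = head->next;
      head->next = prev;
      prev = head;
      head = next;
    }
  return prev;
}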
684 /* Copy the BLOCK to contain remapped versions of the variables
685 therein. And hook the new block into the block-tree. */
687 static void
688 remap_block (tree *block, copy_body_data *id)
690 tree old_block;
691 tree new_block;
693 /* Make the new block. */
694 old_block = *block;
695 new_block = make_node (BLOCK);
696 TREE_USED (new_block) = TREE_USED (old_block);
697 BLOCK_ABSTRACT_ORIGIN (new_block) = old_block;
698 BLOCK_SOURCE_LOCATION (new_block) = BLOCK_SOURCE_LOCATION (old_block);
699 BLOCK_NONLOCALIZED_VARS (new_block)
700 = vec_safe_copy (BLOCK_NONLOCALIZED_VARS (old_block));
701 *block = new_block;
703 /* Remap its variables. */
704 BLOCK_VARS (new_block) = remap_decls (BLOCK_VARS (old_block),
705 &BLOCK_NONLOCALIZED_VARS (new_block),
706 id);
708 if (id->transform_lang_insert_block)
709 id->transform_lang_insert_block (new_block);
711 /* Remember the remapped block. */
712 insert_decl_map (id, old_block, new_block);
715 /* Copy the whole block tree and root it in id->block. */
716 static tree
717 remap_blocks (tree block, copy_body_data *id)
719 tree t;
720 tree new_tree = block;
722 if (!block)
723 return NULL;
725 remap_block (&new_tree, id);
726 gcc_assert (new_tree != block);
727 for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
728 prepend_lexical_block (new_tree, remap_blocks (t, id));
729 /* Blocks are in arbitrary order, but make things slightly prettier and do
730 not swap order when producing a copy. */
731 BLOCK_SUBBLOCKS (new_tree) = blocks_nreverse (BLOCK_SUBBLOCKS (new_tree));
732 return new_tree;
735 /* Remap the block tree rooted at BLOCK to nothing. */
736 static void
737 remap_blocks_to_null (tree block, copy_body_data *id)
739 tree t;
740 insert_decl_map (id, block, NULL_TREE);
741 for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
742 remap_blocks_to_null (t, id);
745 static void
746 copy_statement_list (tree *tp)
748 tree_stmt_iterator oi, ni;
749 tree new_tree;
751 new_tree = alloc_stmt_list ();
752 ni = tsi_start (new_tree);
753 oi = tsi_start (*tp);
754 TREE_TYPE (new_tree) = TREE_TYPE (*tp);
755 *tp = new_tree;
757 for (; !tsi_end_p (oi); tsi_next (&oi))
759 tree stmt = tsi_stmt (oi);
760 if (TREE_CODE (stmt) == STATEMENT_LIST)
761 /* This copy is not redundant; tsi_link_after will smash this
762 STATEMENT_LIST into the end of the one we're building, and we
763 don't want to do that with the original. */
764 copy_statement_list (&stmt);
765 tsi_link_after (&ni, stmt, TSI_CONTINUE_LINKING);
769 static void
770 copy_bind_expr (tree *tp, int *walk_subtrees, copy_body_data *id)
772 tree block = BIND_EXPR_BLOCK (*tp);
773 /* Copy (and replace) the statement. */
774 copy_tree_r (tp, walk_subtrees, NULL);
775 if (block)
777 remap_block (&block, id);
778 BIND_EXPR_BLOCK (*tp) = block;
781 if (BIND_EXPR_VARS (*tp))
782 /* This will remap a lot of the same decls again, but this should be
783 harmless. */
784 BIND_EXPR_VARS (*tp) = remap_decls (BIND_EXPR_VARS (*tp), NULL, id);
788 /* Create a new gimple_seq by remapping all the statements in BODY
789 using the inlining information in ID. */
791 static gimple_seq
792 remap_gimple_seq (gimple_seq body, copy_body_data *id)
794 gimple_stmt_iterator si;
795 gimple_seq new_body = NULL;
797 for (si = gsi_start (body); !gsi_end_p (si); gsi_next (&si))
799 gimple_seq new_stmts = remap_gimple_stmt (gsi_stmt (si), id);
800 gimple_seq_add_seq (&new_body, new_stmts);
803 return new_body;
807 /* Copy a GIMPLE_BIND statement STMT, remapping all the symbols in its
808 block using the mapping information in ID. */
810 static gimple
811 copy_gimple_bind (gbind *stmt, copy_body_data *id)
813 gimple new_bind;
814 tree new_block, new_vars;
815 gimple_seq body, new_body;
817 /* Copy the statement. Note that we purposely don't use copy_stmt
818 here because we need to remap statements as we copy. */
819 body = gimple_bind_body (stmt);
820 new_body = remap_gimple_seq (body, id);
822 new_block = gimple_bind_block (stmt);
823 if (new_block)
824 remap_block (&new_block, id);
826 /* This will remap a lot of the same decls again, but this should be
827 harmless. */
828 new_vars = gimple_bind_vars (stmt);
829 if (new_vars)
830 new_vars = remap_decls (new_vars, NULL, id);
832 new_bind = gimple_build_bind (new_vars, new_body, new_block);
834 return new_bind;
837 /* Return true if DECL is a parameter or a SSA_NAME for a parameter. */
839 static bool
840 is_parm (tree decl)
842 if (TREE_CODE (decl) == SSA_NAME)
844 decl = SSA_NAME_VAR (decl);
845 if (!decl)
846 return false;
849 return (TREE_CODE (decl) == PARM_DECL);
852 /* Remap the GIMPLE operand pointed to by *TP. DATA is really a
853 'struct walk_stmt_info *'. DATA->INFO is a 'copy_body_data *'.
854 WALK_SUBTREES is used to indicate to walk_gimple_op whether to keep
855 recursing into the child nodes of *TP. */
857 static tree
858 remap_gimple_op_r (tree *tp, int *walk_subtrees, void *data)
860 struct walk_stmt_info *wi_p = (struct walk_stmt_info *) data;
861 copy_body_data *id = (copy_body_data *) wi_p->info;
862 tree fn = id->src_fn;
864 if (TREE_CODE (*tp) == SSA_NAME)
866 *tp = remap_ssa_name (*tp, id);
867 *walk_subtrees = 0;
868 return NULL;
870 else if (auto_var_in_fn_p (*tp, fn))
872 /* Local variables and labels need to be replaced by equivalent
873 variables. We don't want to copy static variables; there's
874 only one of those, no matter how many times we inline the
875 containing function. Similarly for globals from an outer
876 function. */
877 tree new_decl;
879 /* Remap the declaration. */
880 new_decl = remap_decl (*tp, id);
881 gcc_assert (new_decl);
882 /* Replace this variable with the copy. */
883 STRIP_TYPE_NOPS (new_decl);
884 /* ??? The C++ frontend uses void * pointer zero to initialize
885 any other type. This confuses the middle-end type verification.
886 As cloned bodies do not go through gimplification again the fixup
887 there doesn't trigger. */
888 if (TREE_CODE (new_decl) == INTEGER_CST
889 && !useless_type_conversion_p (TREE_TYPE (*tp), TREE_TYPE (new_decl)))
890 new_decl = fold_convert (TREE_TYPE (*tp), new_decl);
891 *tp = new_decl;
892 *walk_subtrees = 0;
894 else if (TREE_CODE (*tp) == STATEMENT_LIST)
895 gcc_unreachable ();
896 else if (TREE_CODE (*tp) == SAVE_EXPR)
897 gcc_unreachable ();
898 else if (TREE_CODE (*tp) == LABEL_DECL
899 && (!DECL_CONTEXT (*tp)
900 || decl_function_context (*tp) == id->src_fn))
901 /* These may need to be remapped for EH handling. */
902 *tp = remap_decl (*tp, id);
903 else if (TREE_CODE (*tp) == FIELD_DECL)
905 /* If the enclosing record type is variably_modified_type_p, the field
906 has already been remapped. Otherwise, it need not be. */
907 tree *n = id->decl_map->get (*tp);
908 if (n)
909 *tp = *n;
910 *walk_subtrees = 0;
912 else if (TYPE_P (*tp))
913 /* Types may need remapping as well. */
914 *tp = remap_type (*tp, id);
915 else if (CONSTANT_CLASS_P (*tp))
917 /* If this is a constant, we have to copy the node iff the type
918 will be remapped. copy_tree_r will not copy a constant. */
919 tree new_type = remap_type (TREE_TYPE (*tp), id);
921 if (new_type == TREE_TYPE (*tp))
922 *walk_subtrees = 0;
924 else if (TREE_CODE (*tp) == INTEGER_CST)
925 *tp = wide_int_to_tree (new_type, *tp);
926 else
928 *tp = copy_node (*tp);
929 TREE_TYPE (*tp) = new_type;
932 else
934 /* Otherwise, just copy the node. Note that copy_tree_r already
935 knows not to copy VAR_DECLs, etc., so this is safe. */
937 if (TREE_CODE (*tp) == MEM_REF)
939 /* We need to re-canonicalize MEM_REFs from inline substitutions
940 that can happen when a pointer argument is an ADDR_EXPR.
941 Recurse here manually to allow that. */
942 tree ptr = TREE_OPERAND (*tp, 0);
943 tree type = remap_type (TREE_TYPE (*tp), id);
944 tree old = *tp;
945 walk_tree (&ptr, remap_gimple_op_r, data, NULL);
946 *tp = fold_build2 (MEM_REF, type, ptr, TREE_OPERAND (*tp, 1));
947 TREE_THIS_VOLATILE (*tp) = TREE_THIS_VOLATILE (old);
948 TREE_SIDE_EFFECTS (*tp) = TREE_SIDE_EFFECTS (old);
949 TREE_NO_WARNING (*tp) = TREE_NO_WARNING (old);
950 /* We cannot propagate the TREE_THIS_NOTRAP flag if we have
951 remapped a parameter as the property might be valid only
952 for the parameter itself. */
953 if (TREE_THIS_NOTRAP (old)
954 && (!is_parm (TREE_OPERAND (old, 0))
955 || (!id->transform_parameter && is_parm (ptr))))
956 TREE_THIS_NOTRAP (*tp) = 1;
957 *walk_subtrees = 0;
958 return NULL;
961 /* Here is the "usual case". Copy this tree node, and then
962 tweak some special cases. */
963 copy_tree_r (tp, walk_subtrees, NULL);
965 if (TREE_CODE (*tp) != OMP_CLAUSE)
966 TREE_TYPE (*tp) = remap_type (TREE_TYPE (*tp), id);
968 if (TREE_CODE (*tp) == TARGET_EXPR && TREE_OPERAND (*tp, 3))
970 /* The copied TARGET_EXPR has never been expanded, even if the
971 original node was expanded already. */
972 TREE_OPERAND (*tp, 1) = TREE_OPERAND (*tp, 3);
973 TREE_OPERAND (*tp, 3) = NULL_TREE;
975 else if (TREE_CODE (*tp) == ADDR_EXPR)
977 /* Variable substitution need not be simple. In particular,
978 the MEM_REF substitution above. Make sure that
979 TREE_CONSTANT and friends are up-to-date. */
980 int invariant = is_gimple_min_invariant (*tp);
981 walk_tree (&TREE_OPERAND (*tp, 0), remap_gimple_op_r, data, NULL);
982 recompute_tree_invariant_for_addr_expr (*tp);
984 /* If this used to be invariant, but is not any longer,
985 then regimplification is probably needed. */
986 if (invariant && !is_gimple_min_invariant (*tp))
987 id->regimplify = true;
989 *walk_subtrees = 0;
993 /* Update the TREE_BLOCK for the cloned expr. */
994 if (EXPR_P (*tp))
996 tree new_block = id->remapping_type_depth == 0 ? id->block : NULL;
997 tree old_block = TREE_BLOCK (*tp);
998 if (old_block)
1000 tree *n;
1001 n = id->decl_map->get (TREE_BLOCK (*tp));
1002 if (n)
1003 new_block = *n;
1005 TREE_SET_BLOCK (*tp, new_block);
1008 /* Keep iterating. */
1009 return NULL_TREE;
1013 /* Called from copy_body_id via walk_tree. DATA is really a
1014 `copy_body_data *'. */
1016 tree
1017 copy_tree_body_r (tree *tp, int *walk_subtrees, void *data)
1019 copy_body_data *id = (copy_body_data *) data;
1020 tree fn = id->src_fn;
1021 tree new_block;
1023 /* Begin by recognizing trees that we'll completely rewrite for the
1024 inlining context. Our output for these trees is completely
1025 different from our input (e.g. RETURN_EXPR is deleted, and morphs
1026 into an edge). Further down, we'll handle trees that get
1027 duplicated and/or tweaked. */
1029 /* When requested, RETURN_EXPRs should be transformed to just the
1030 contained MODIFY_EXPR. The branch semantics of the return will
1031 be handled elsewhere by manipulating the CFG rather than a statement. */
1032 if (TREE_CODE (*tp) == RETURN_EXPR && id->transform_return_to_modify)
1034 tree assignment = TREE_OPERAND (*tp, 0);
1036 /* If we're returning something, just turn that into an
1037 assignment into the equivalent of the original RESULT_DECL.
1038 If the "assignment" is just the result decl, the result
1039 decl has already been set (e.g. a recent "foo (&result_decl,
1040 ...)"); just toss the entire RETURN_EXPR. */
1041 if (assignment && TREE_CODE (assignment) == MODIFY_EXPR)
1043 /* Replace the RETURN_EXPR with (a copy of) the
1044 MODIFY_EXPR hanging underneath. */
1045 *tp = copy_node (assignment);
1047 else /* Else the RETURN_EXPR returns no value. */
1049 *tp = NULL;
1050 return (tree) (void *)1;
1053 else if (TREE_CODE (*tp) == SSA_NAME)
1055 *tp = remap_ssa_name (*tp, id);
1056 *walk_subtrees = 0;
1057 return NULL;
1060 /* Local variables and labels need to be replaced by equivalent
1061 variables. We don't want to copy static variables; there's only
1062 one of those, no matter how many times we inline the containing
1063 function. Similarly for globals from an outer function. */
1064 else if (auto_var_in_fn_p (*tp, fn))
1066 tree new_decl;
1068 /* Remap the declaration. */
1069 new_decl = remap_decl (*tp, id);
1070 gcc_assert (new_decl);
1071 /* Replace this variable with the copy. */
1072 STRIP_TYPE_NOPS (new_decl);
1073 *tp = new_decl;
1074 *walk_subtrees = 0;
1076 else if (TREE_CODE (*tp) == STATEMENT_LIST)
1077 copy_statement_list (tp);
1078 else if (TREE_CODE (*tp) == SAVE_EXPR
1079 || TREE_CODE (*tp) == TARGET_EXPR)
1080 remap_save_expr (tp, id->decl_map, walk_subtrees);
1081 else if (TREE_CODE (*tp) == LABEL_DECL
1082 && (! DECL_CONTEXT (*tp)
1083 || decl_function_context (*tp) == id->src_fn))
1084 /* These may need to be remapped for EH handling. */
1085 *tp = remap_decl (*tp, id);
1086 else if (TREE_CODE (*tp) == BIND_EXPR)
1087 copy_bind_expr (tp, walk_subtrees, id);
1088 /* Types may need remapping as well. */
1089 else if (TYPE_P (*tp))
1090 *tp = remap_type (*tp, id);
1092 /* If this is a constant, we have to copy the node iff the type will be
1093 remapped. copy_tree_r will not copy a constant. */
1094 else if (CONSTANT_CLASS_P (*tp))
1096 tree new_type = remap_type (TREE_TYPE (*tp), id);
1098 if (new_type == TREE_TYPE (*tp))
1099 *walk_subtrees = 0;
1101 else if (TREE_CODE (*tp) == INTEGER_CST)
1102 *tp = wide_int_to_tree (new_type, *tp);
1103 else
1105 *tp = copy_node (*tp);
1106 TREE_TYPE (*tp) = new_type;
1110 /* Otherwise, just copy the node. Note that copy_tree_r already
1111 knows not to copy VAR_DECLs, etc., so this is safe. */
1112 else
1114 /* Here we handle trees that are not completely rewritten.
1115 First we detect some inlining-induced bogosities for
1116 discarding. */
1117 if (TREE_CODE (*tp) == MODIFY_EXPR
1118 && TREE_OPERAND (*tp, 0) == TREE_OPERAND (*tp, 1)
1119 && (auto_var_in_fn_p (TREE_OPERAND (*tp, 0), fn)))
1121 /* Some assignments VAR = VAR; don't generate any rtl code
1122 and thus don't count as variable modification. Avoid
1123 keeping bogosities like 0 = 0. */
1124 tree decl = TREE_OPERAND (*tp, 0), value;
1125 tree *n;
1127 n = id->decl_map->get (decl);
1128 if (n)
1130 value = *n;
1131 STRIP_TYPE_NOPS (value);
1132 if (TREE_CONSTANT (value) || TREE_READONLY (value))
1134 *tp = build_empty_stmt (EXPR_LOCATION (*tp));
1135 return copy_tree_body_r (tp, walk_subtrees, data);
1139 else if (TREE_CODE (*tp) == INDIRECT_REF)
1141 /* Get rid of *& from inline substitutions that can happen when a
1142 pointer argument is an ADDR_EXPR. */
1143 tree decl = TREE_OPERAND (*tp, 0);
1144 tree *n = id->decl_map->get (decl);
1145 if (n)
1147 /* If we happen to get an ADDR_EXPR in n->value, strip
1148 it manually here as we'll eventually get ADDR_EXPRs
1149 which lie about their types pointed to. In this case
1150 build_fold_indirect_ref wouldn't strip the INDIRECT_REF,
1151 but we absolutely rely on that. As fold_indirect_ref
1152 does other useful transformations, try that first, though. */
1153 tree type = TREE_TYPE (*tp);
1154 tree ptr = id->do_not_unshare ? *n : unshare_expr (*n);
1155 tree old = *tp;
1156 *tp = gimple_fold_indirect_ref (ptr);
1157 if (! *tp)
1159 if (TREE_CODE (ptr) == ADDR_EXPR)
1161 *tp
1162 = fold_indirect_ref_1 (EXPR_LOCATION (ptr), type, ptr);
1163 /* ??? We should either assert here or build
1164 a VIEW_CONVERT_EXPR instead of blindly leaking
1165 incompatible types to our IL. */
1166 if (! *tp)
1167 *tp = TREE_OPERAND (ptr, 0);
1169 else
1171 *tp = build1 (INDIRECT_REF, type, ptr);
1172 TREE_THIS_VOLATILE (*tp) = TREE_THIS_VOLATILE (old);
1173 TREE_SIDE_EFFECTS (*tp) = TREE_SIDE_EFFECTS (old);
1174 TREE_READONLY (*tp) = TREE_READONLY (old);
1175 /* We cannot propagate the TREE_THIS_NOTRAP flag if we
1176 have remapped a parameter as the property might be
1177 valid only for the parameter itself. */
1178 if (TREE_THIS_NOTRAP (old)
1179 && (!is_parm (TREE_OPERAND (old, 0))
1180 || (!id->transform_parameter && is_parm (ptr))))
1181 TREE_THIS_NOTRAP (*tp) = 1;
1184 *walk_subtrees = 0;
1185 return NULL;
1188 else if (TREE_CODE (*tp) == MEM_REF)
1190 /* We need to re-canonicalize MEM_REFs from inline substitutions
1191 that can happen when a pointer argument is an ADDR_EXPR.
1192 Recurse here manually to allow that. */
1193 tree ptr = TREE_OPERAND (*tp, 0);
1194 tree type = remap_type (TREE_TYPE (*tp), id);
1195 tree old = *tp;
1196 walk_tree (&ptr, copy_tree_body_r, data, NULL);
1197 *tp = fold_build2 (MEM_REF, type, ptr, TREE_OPERAND (*tp, 1));
1198 TREE_THIS_VOLATILE (*tp) = TREE_THIS_VOLATILE (old);
1199 TREE_SIDE_EFFECTS (*tp) = TREE_SIDE_EFFECTS (old);
1200 TREE_NO_WARNING (*tp) = TREE_NO_WARNING (old);
1201 /* We cannot propagate the TREE_THIS_NOTRAP flag if we have
1202 remapped a parameter as the property might be valid only
1203 for the parameter itself. */
1204 if (TREE_THIS_NOTRAP (old)
1205 && (!is_parm (TREE_OPERAND (old, 0))
1206 || (!id->transform_parameter && is_parm (ptr))))
1207 TREE_THIS_NOTRAP (*tp) = 1;
1208 *walk_subtrees = 0;
1209 return NULL;
1212 /* Here is the "usual case". Copy this tree node, and then
1213 tweak some special cases. */
1214 copy_tree_r (tp, walk_subtrees, NULL);
1216 /* If EXPR has a block defined, map it to the newly constructed block.
1217 When inlining we want EXPRs without a block to appear in the block
1218 of the function call if we are not remapping a type. */
1219 if (EXPR_P (*tp))
1221 new_block = id->remapping_type_depth == 0 ? id->block : NULL;
1222 if (TREE_BLOCK (*tp))
1224 tree *n;
1225 n = id->decl_map->get (TREE_BLOCK (*tp));
1226 if (n)
1227 new_block = *n;
1229 TREE_SET_BLOCK (*tp, new_block);
1232 if (TREE_CODE (*tp) != OMP_CLAUSE)
1233 TREE_TYPE (*tp) = remap_type (TREE_TYPE (*tp), id);
1235 /* The copied TARGET_EXPR has never been expanded, even if the
1236 original node was expanded already. */
1237 if (TREE_CODE (*tp) == TARGET_EXPR && TREE_OPERAND (*tp, 3))
1239 TREE_OPERAND (*tp, 1) = TREE_OPERAND (*tp, 3);
1240 TREE_OPERAND (*tp, 3) = NULL_TREE;
1243 /* Variable substitution need not be simple. In particular, the
1244 INDIRECT_REF substitution above. Make sure that TREE_CONSTANT
1245 and friends are up-to-date. */
1246 else if (TREE_CODE (*tp) == ADDR_EXPR)
1248 int invariant = is_gimple_min_invariant (*tp);
1249 walk_tree (&TREE_OPERAND (*tp, 0), copy_tree_body_r, id, NULL);
1251 /* Handle the case where we substituted an INDIRECT_REF
1252 into the operand of the ADDR_EXPR. */
1253 if (TREE_CODE (TREE_OPERAND (*tp, 0)) == INDIRECT_REF)
1254 *tp = TREE_OPERAND (TREE_OPERAND (*tp, 0), 0);
1255 else
1256 recompute_tree_invariant_for_addr_expr (*tp);
1258 /* If this used to be invariant, but is not any longer,
1259 then regimplification is probably needed. */
1260 if (invariant && !is_gimple_min_invariant (*tp))
1261 id->regimplify = true;
1263 *walk_subtrees = 0;
1267 /* Keep iterating. */
1268 return NULL_TREE;
1271 /* Helper for remap_gimple_stmt. Given an EH region number for the
1272 source function, map that to the duplicate EH region number in
1273 the destination function. */
1275 static int
1276 remap_eh_region_nr (int old_nr, copy_body_data *id)
1278 eh_region old_r, new_r;
1280 old_r = get_eh_region_from_number_fn (id->src_cfun, old_nr);
1281 new_r = static_cast<eh_region> (*id->eh_map->get (old_r));
1283 return new_r->index;
1286 /* Similar, but operate on INTEGER_CSTs. */
1288 static tree
1289 remap_eh_region_tree_nr (tree old_t_nr, copy_body_data *id)
1291 int old_nr, new_nr;
1293 old_nr = tree_to_shwi (old_t_nr);
1294 new_nr = remap_eh_region_nr (old_nr, id);
1296 return build_int_cst (integer_type_node, new_nr);
1299 /* Helper for copy_bb. Remap statement STMT using the inlining
1300 information in ID. Return the new statement copy. */
1302 static gimple_seq
1303 remap_gimple_stmt (gimple stmt, copy_body_data *id)
1305 gimple copy = NULL;
1306 struct walk_stmt_info wi;
1307 bool skip_first = false;
1308 gimple_seq stmts = NULL;
1310 /* Begin by recognizing trees that we'll completely rewrite for the
1311 inlining context. Our output for these trees is completely
1312 different from our input (e.g. RETURN_EXPR is deleted, and morphs
1313 into an edge). Further down, we'll handle trees that get
1314 duplicated and/or tweaked. */
1316 /* When requested, GIMPLE_RETURNs should be transformed to just the
1317 contained GIMPLE_ASSIGN. The branch semantics of the return will
1318 be handled elsewhere by manipulating the CFG rather than the
1319 statement. */
1320 if (gimple_code (stmt) == GIMPLE_RETURN && id->transform_return_to_modify)
1322 tree retval = gimple_return_retval (as_a <greturn *> (stmt));
1323 tree retbnd = gimple_return_retbnd (stmt);
1324 tree bndslot = id->retbnd;
1326 if (retbnd && bndslot)
1328 gimple bndcopy = gimple_build_assign (bndslot, retbnd);
1329 memset (&wi, 0, sizeof (wi));
1330 wi.info = id;
1331 walk_gimple_op (bndcopy, remap_gimple_op_r, &wi);
1332 gimple_seq_add_stmt (&stmts, bndcopy);
1335 /* If we're returning something, just turn that into an
1336 assignment into the equivalent of the original RESULT_DECL.
1337 If RETVAL is just the result decl, the result decl has
1338 already been set (e.g. a recent "foo (&result_decl, ...)");
1339 just toss the entire GIMPLE_RETURN. */
1340 if (retval
1341 && (TREE_CODE (retval) != RESULT_DECL
1342 && (TREE_CODE (retval) != SSA_NAME
1343 || ! SSA_NAME_VAR (retval)
1344 || TREE_CODE (SSA_NAME_VAR (retval)) != RESULT_DECL)))
1346 copy = gimple_build_assign (id->do_not_unshare
1347 ? id->retvar : unshare_expr (id->retvar),
1348 retval);
1349 /* id->retvar is already substituted. Skip it on later remapping. */
1350 skip_first = true;
1352 /* We need to copy bounds if we return a structure with pointers into an
1353 instrumented function. */
1354 if (chkp_function_instrumented_p (id->dst_fn)
1355 && !bndslot
1356 && !BOUNDED_P (id->retvar)
1357 && chkp_type_has_pointer (TREE_TYPE (id->retvar)))
1358 id->assign_stmts.safe_push (copy);
1361 else
1362 return stmts;
1364 else if (gimple_has_substatements (stmt))
1366 gimple_seq s1, s2;
1368 /* When cloning bodies from the C++ front end, we will be handed bodies
1369 in High GIMPLE form. Handle here all the High GIMPLE statements that
1370 have embedded statements. */
1371 switch (gimple_code (stmt))
1373 case GIMPLE_BIND:
1374 copy = copy_gimple_bind (as_a <gbind *> (stmt), id);
1375 break;
1377 case GIMPLE_CATCH:
1379 gcatch *catch_stmt = as_a <gcatch *> (stmt);
1380 s1 = remap_gimple_seq (gimple_catch_handler (catch_stmt), id);
1381 copy = gimple_build_catch (gimple_catch_types (catch_stmt), s1);
1383 break;
1385 case GIMPLE_EH_FILTER:
1386 s1 = remap_gimple_seq (gimple_eh_filter_failure (stmt), id);
1387 copy = gimple_build_eh_filter (gimple_eh_filter_types (stmt), s1);
1388 break;
1390 case GIMPLE_TRY:
1391 s1 = remap_gimple_seq (gimple_try_eval (stmt), id);
1392 s2 = remap_gimple_seq (gimple_try_cleanup (stmt), id);
1393 copy = gimple_build_try (s1, s2, gimple_try_kind (stmt));
1394 break;
1396 case GIMPLE_WITH_CLEANUP_EXPR:
1397 s1 = remap_gimple_seq (gimple_wce_cleanup (stmt), id);
1398 copy = gimple_build_wce (s1);
1399 break;
1401 case GIMPLE_OMP_PARALLEL:
1403 gomp_parallel *omp_par_stmt = as_a <gomp_parallel *> (stmt);
1404 s1 = remap_gimple_seq (gimple_omp_body (omp_par_stmt), id);
1405 copy = gimple_build_omp_parallel
1406 (s1,
1407 gimple_omp_parallel_clauses (omp_par_stmt),
1408 gimple_omp_parallel_child_fn (omp_par_stmt),
1409 gimple_omp_parallel_data_arg (omp_par_stmt));
1411 break;
1413 case GIMPLE_OMP_TASK:
1414 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1415 copy = gimple_build_omp_task
1416 (s1,
1417 gimple_omp_task_clauses (stmt),
1418 gimple_omp_task_child_fn (stmt),
1419 gimple_omp_task_data_arg (stmt),
1420 gimple_omp_task_copy_fn (stmt),
1421 gimple_omp_task_arg_size (stmt),
1422 gimple_omp_task_arg_align (stmt));
1423 break;
1425 case GIMPLE_OMP_FOR:
1426 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1427 s2 = remap_gimple_seq (gimple_omp_for_pre_body (stmt), id);
1428 copy = gimple_build_omp_for (s1, gimple_omp_for_kind (stmt),
1429 gimple_omp_for_clauses (stmt),
1430 gimple_omp_for_collapse (stmt), s2);
1432 size_t i;
1433 for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
1435 gimple_omp_for_set_index (copy, i,
1436 gimple_omp_for_index (stmt, i));
1437 gimple_omp_for_set_initial (copy, i,
1438 gimple_omp_for_initial (stmt, i));
1439 gimple_omp_for_set_final (copy, i,
1440 gimple_omp_for_final (stmt, i));
1441 gimple_omp_for_set_incr (copy, i,
1442 gimple_omp_for_incr (stmt, i));
1443 gimple_omp_for_set_cond (copy, i,
1444 gimple_omp_for_cond (stmt, i));
1447 break;
1449 case GIMPLE_OMP_MASTER:
1450 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1451 copy = gimple_build_omp_master (s1);
1452 break;
1454 case GIMPLE_OMP_TASKGROUP:
1455 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1456 copy = gimple_build_omp_taskgroup (s1);
1457 break;
1459 case GIMPLE_OMP_ORDERED:
1460 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1461 copy = gimple_build_omp_ordered (s1);
1462 break;
1464 case GIMPLE_OMP_SECTION:
1465 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1466 copy = gimple_build_omp_section (s1);
1467 break;
1469 case GIMPLE_OMP_SECTIONS:
1470 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1471 copy = gimple_build_omp_sections
1472 (s1, gimple_omp_sections_clauses (stmt));
1473 break;
1475 case GIMPLE_OMP_SINGLE:
1476 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1477 copy = gimple_build_omp_single
1478 (s1, gimple_omp_single_clauses (stmt));
1479 break;
1481 case GIMPLE_OMP_TARGET:
1482 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1483 copy = gimple_build_omp_target
1484 (s1, gimple_omp_target_kind (stmt),
1485 gimple_omp_target_clauses (stmt));
1486 break;
1488 case GIMPLE_OMP_TEAMS:
1489 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1490 copy = gimple_build_omp_teams
1491 (s1, gimple_omp_teams_clauses (stmt));
1492 break;
1494 case GIMPLE_OMP_CRITICAL:
1495 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1496 copy = gimple_build_omp_critical (s1,
1497 gimple_omp_critical_name (
1498 as_a <gomp_critical *> (stmt)));
1499 break;
1501 case GIMPLE_TRANSACTION:
1503 gtransaction *old_trans_stmt = as_a <gtransaction *> (stmt);
1504 gtransaction *new_trans_stmt;
1505 s1 = remap_gimple_seq (gimple_transaction_body (old_trans_stmt),
1506 id);
1507 copy = new_trans_stmt
1508 = gimple_build_transaction (
1509 s1,
1510 gimple_transaction_label (old_trans_stmt));
1511 gimple_transaction_set_subcode (
1512 new_trans_stmt,
1513 gimple_transaction_subcode (old_trans_stmt));
1515 break;
1517 default:
1518 gcc_unreachable ();
1521 else
1523 if (gimple_assign_copy_p (stmt)
1524 && gimple_assign_lhs (stmt) == gimple_assign_rhs1 (stmt)
1525 && auto_var_in_fn_p (gimple_assign_lhs (stmt), id->src_fn))
1527 /* Here we handle statements that are not completely rewritten.
1528 First we detect some inlining-induced bogosities for
1529 discarding. */
1531 /* Some assignments VAR = VAR; don't generate any rtl code
1532 and thus don't count as variable modification. Avoid
1533 keeping bogosities like 0 = 0. */
1534 tree decl = gimple_assign_lhs (stmt), value;
1535 tree *n;
1537 n = id->decl_map->get (decl);
1538 if (n)
1540 value = *n;
1541 STRIP_TYPE_NOPS (value);
1542 if (TREE_CONSTANT (value) || TREE_READONLY (value))
1543 return NULL;
1547 /* For *ptr_N ={v} {CLOBBER}, if ptr_N is SSA_NAME defined
1548 in a block that we aren't copying during tree_function_versioning,
1549 just drop the clobber stmt. */
1550 if (id->blocks_to_copy && gimple_clobber_p (stmt))
1552 tree lhs = gimple_assign_lhs (stmt);
1553 if (TREE_CODE (lhs) == MEM_REF
1554 && TREE_CODE (TREE_OPERAND (lhs, 0)) == SSA_NAME)
1556 gimple def_stmt = SSA_NAME_DEF_STMT (TREE_OPERAND (lhs, 0));
1557 if (gimple_bb (def_stmt)
1558 && !bitmap_bit_p (id->blocks_to_copy,
1559 gimple_bb (def_stmt)->index))
1560 return NULL;
1564 if (gimple_debug_bind_p (stmt))
1566 gdebug *copy
1567 = gimple_build_debug_bind (gimple_debug_bind_get_var (stmt),
1568 gimple_debug_bind_get_value (stmt),
1569 stmt);
1570 id->debug_stmts.safe_push (copy);
1571 gimple_seq_add_stmt (&stmts, copy);
1572 return stmts;
1574 if (gimple_debug_source_bind_p (stmt))
1576 gdebug *copy = gimple_build_debug_source_bind
1577 (gimple_debug_source_bind_get_var (stmt),
1578 gimple_debug_source_bind_get_value (stmt),
1579 stmt);
1580 id->debug_stmts.safe_push (copy);
1581 gimple_seq_add_stmt (&stmts, copy);
1582 return stmts;
1585 /* Create a new deep copy of the statement. */
1586 copy = gimple_copy (stmt);
1588 /* Clear flags that need revisiting. */
1589 if (gcall *call_stmt = dyn_cast <gcall *> (copy))
1590 if (gimple_call_tail_p (call_stmt))
1591 gimple_call_set_tail (call_stmt, false);
1593 /* Remap the region numbers for __builtin_eh_{pointer,filter},
1594 RESX and EH_DISPATCH. */
1595 if (id->eh_map)
1596 switch (gimple_code (copy))
1598 case GIMPLE_CALL:
1600 tree r, fndecl = gimple_call_fndecl (copy);
1601 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
1602 switch (DECL_FUNCTION_CODE (fndecl))
1604 case BUILT_IN_EH_COPY_VALUES:
1605 r = gimple_call_arg (copy, 1);
1606 r = remap_eh_region_tree_nr (r, id);
1607 gimple_call_set_arg (copy, 1, r);
1608 /* FALLTHRU */
1610 case BUILT_IN_EH_POINTER:
1611 case BUILT_IN_EH_FILTER:
1612 r = gimple_call_arg (copy, 0);
1613 r = remap_eh_region_tree_nr (r, id);
1614 gimple_call_set_arg (copy, 0, r);
1615 break;
1617 default:
1618 break;
1621 /* Reset alias info if we didn't apply measures to
1622 keep it valid over inlining by setting DECL_PT_UID. */
1623 if (!id->src_cfun->gimple_df
1624 || !id->src_cfun->gimple_df->ipa_pta)
1625 gimple_call_reset_alias_info (as_a <gcall *> (copy));
1627 break;
1629 case GIMPLE_RESX:
1631 gresx *resx_stmt = as_a <gresx *> (copy);
1632 int r = gimple_resx_region (resx_stmt);
1633 r = remap_eh_region_nr (r, id);
1634 gimple_resx_set_region (resx_stmt, r);
1636 break;
1638 case GIMPLE_EH_DISPATCH:
1640 geh_dispatch *eh_dispatch = as_a <geh_dispatch *> (copy);
1641 int r = gimple_eh_dispatch_region (eh_dispatch);
1642 r = remap_eh_region_nr (r, id);
1643 gimple_eh_dispatch_set_region (eh_dispatch, r);
1645 break;
1647 default:
1648 break;
1652 /* If STMT has a block defined, map it to the newly constructed
1653 block. */
1654 if (gimple_block (copy))
1656 tree *n;
1657 n = id->decl_map->get (gimple_block (copy));
1658 gcc_assert (n);
1659 gimple_set_block (copy, *n);
1662 if (gimple_debug_bind_p (copy) || gimple_debug_source_bind_p (copy))
1664 gimple_seq_add_stmt (&stmts, copy);
1665 return stmts;
1668 /* Remap all the operands in COPY. */
1669 memset (&wi, 0, sizeof (wi));
1670 wi.info = id;
1671 if (skip_first)
1672 walk_tree (gimple_op_ptr (copy, 1), remap_gimple_op_r, &wi, NULL);
1673 else
1674 walk_gimple_op (copy, remap_gimple_op_r, &wi);
1676 /* Clear the copied virtual operands. We are not remapping them here
1677 but are going to recreate them from scratch. */
1678 if (gimple_has_mem_ops (copy))
1680 gimple_set_vdef (copy, NULL_TREE);
1681 gimple_set_vuse (copy, NULL_TREE);
1684 gimple_seq_add_stmt (&stmts, copy);
1685 return stmts;
1689 /* Copy a basic block, scaling the profile accordingly. Edges will be taken care of
1690 later. */
1692 static basic_block
1693 copy_bb (copy_body_data *id, basic_block bb, int frequency_scale,
1694 gcov_type count_scale)
1696 gimple_stmt_iterator gsi, copy_gsi, seq_gsi;
1697 basic_block copy_basic_block;
1698 tree decl;
1699 gcov_type freq;
1700 basic_block prev;
1702 /* Search for previous copied basic block. */
1703 prev = bb->prev_bb;
1704 while (!prev->aux)
1705 prev = prev->prev_bb;
1707 /* create_basic_block() will append every new block to
1708 basic_block_info automatically. */
1709 copy_basic_block = create_basic_block (NULL, (void *) 0,
1710 (basic_block) prev->aux);
1711 copy_basic_block->count = apply_scale (bb->count, count_scale);
1713 /* We are going to rebuild frequencies from scratch. These values
1714 have only a small influence in driving canonicalize_loop_headers. */
1715 freq = apply_scale ((gcov_type)bb->frequency, frequency_scale);
1717 /* We recompute frequencies after inlining, so this is quite safe. */
1718 if (freq > BB_FREQ_MAX)
1719 freq = BB_FREQ_MAX;
1720 copy_basic_block->frequency = freq;
1722 copy_gsi = gsi_start_bb (copy_basic_block);
1724 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
1726 gimple_seq stmts;
1727 gimple stmt = gsi_stmt (gsi);
1728 gimple orig_stmt = stmt;
1729 gimple_stmt_iterator stmts_gsi;
1730 bool stmt_added = false;
1732 id->regimplify = false;
1733 stmts = remap_gimple_stmt (stmt, id);
1735 if (gimple_seq_empty_p (stmts))
1736 continue;
1738 seq_gsi = copy_gsi;
1740 for (stmts_gsi = gsi_start (stmts);
1741 !gsi_end_p (stmts_gsi); )
1743 stmt = gsi_stmt (stmts_gsi);
1745 /* Advance iterator now before stmt is moved to seq_gsi. */
1746 gsi_next (&stmts_gsi);
1748 if (gimple_nop_p (stmt))
1749 continue;
1751 gimple_duplicate_stmt_histograms (cfun, stmt, id->src_cfun,
1752 orig_stmt);
1754 /* With return slot optimization we can end up with
1755 non-gimple (foo *)&this->m, fix that here. */
1756 if (is_gimple_assign (stmt)
1757 && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt))
1758 && !is_gimple_val (gimple_assign_rhs1 (stmt)))
1760 tree new_rhs;
1761 new_rhs = force_gimple_operand_gsi (&seq_gsi,
1762 gimple_assign_rhs1 (stmt),
1763 true, NULL, false,
1764 GSI_CONTINUE_LINKING);
1765 gimple_assign_set_rhs1 (stmt, new_rhs);
1766 id->regimplify = false;
1769 gsi_insert_after (&seq_gsi, stmt, GSI_NEW_STMT);
1771 if (id->regimplify)
1772 gimple_regimplify_operands (stmt, &seq_gsi);
1774 stmt_added = true;
1777 if (!stmt_added)
1778 continue;
1780 /* If copy_basic_block has been empty at the start of this iteration,
1781 call gsi_start_bb again to get at the newly added statements. */
1782 if (gsi_end_p (copy_gsi))
1783 copy_gsi = gsi_start_bb (copy_basic_block);
1784 else
1785 gsi_next (&copy_gsi);
1787 /* Process the new statement. The call to gimple_regimplify_operands
1788 possibly turned the statement into multiple statements, we
1789 need to process all of them. */
1792 tree fn;
1793 gcall *call_stmt;
1795 stmt = gsi_stmt (copy_gsi);
1796 call_stmt = dyn_cast <gcall *> (stmt);
1797 if (call_stmt
1798 && gimple_call_va_arg_pack_p (call_stmt)
1799 && id->call_stmt)
1801 /* __builtin_va_arg_pack () should be replaced by
1802 all arguments corresponding to ... in the caller. */
1803 tree p;
1804 gcall *new_call;
1805 vec<tree> argarray;
1806 size_t nargs = gimple_call_num_args (id->call_stmt);
1807 size_t n, i, nargs_to_copy;
1808 bool remove_bounds = false;
1810 for (p = DECL_ARGUMENTS (id->src_fn); p; p = DECL_CHAIN (p))
1811 nargs--;
1813 /* Bounds should be removed from the arg pack in case
1814 we handle a non-instrumented call in an instrumented
1815 function. */
1816 nargs_to_copy = nargs;
1817 if (gimple_call_with_bounds_p (id->call_stmt)
1818 && !gimple_call_with_bounds_p (stmt))
1820 for (i = gimple_call_num_args (id->call_stmt) - nargs;
1821 i < gimple_call_num_args (id->call_stmt);
1822 i++)
1823 if (POINTER_BOUNDS_P (gimple_call_arg (id->call_stmt, i)))
1824 nargs_to_copy--;
1825 remove_bounds = true;
1828 /* Create the new array of arguments. */
1829 n = nargs_to_copy + gimple_call_num_args (call_stmt);
1830 argarray.create (n);
1831 argarray.safe_grow_cleared (n);
1833 /* Copy all the arguments before '...' */
1834 memcpy (argarray.address (),
1835 gimple_call_arg_ptr (call_stmt, 0),
1836 gimple_call_num_args (call_stmt) * sizeof (tree));
1838 if (remove_bounds)
1840 /* Append the rest of arguments removing bounds. */
1841 unsigned cur = gimple_call_num_args (call_stmt);
1842 i = gimple_call_num_args (id->call_stmt) - nargs;
1843 for (i = gimple_call_num_args (id->call_stmt) - nargs;
1844 i < gimple_call_num_args (id->call_stmt);
1845 i++)
1846 if (!POINTER_BOUNDS_P (gimple_call_arg (id->call_stmt, i)))
1847 argarray[cur++] = gimple_call_arg (id->call_stmt, i);
1848 gcc_assert (cur == n);
1850 else
1852 /* Append the arguments passed in '...' */
1853 memcpy (argarray.address () + gimple_call_num_args (call_stmt),
1854 gimple_call_arg_ptr (id->call_stmt, 0)
1855 + (gimple_call_num_args (id->call_stmt) - nargs),
1856 nargs * sizeof (tree));
1859 new_call = gimple_build_call_vec (gimple_call_fn (call_stmt),
1860 argarray);
1862 argarray.release ();
1864 /* Copy all GIMPLE_CALL flags, location and block, except
1865 GF_CALL_VA_ARG_PACK. */
1866 gimple_call_copy_flags (new_call, call_stmt);
1867 gimple_call_set_va_arg_pack (new_call, false);
1868 gimple_set_location (new_call, gimple_location (stmt));
1869 gimple_set_block (new_call, gimple_block (stmt));
1870 gimple_call_set_lhs (new_call, gimple_call_lhs (call_stmt));
1872 gsi_replace (&copy_gsi, new_call, false);
1873 stmt = new_call;
1875 else if (is_gimple_call (stmt)
1876 && id->call_stmt
1877 && (decl = gimple_call_fndecl (stmt))
1878 && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
1879 && DECL_FUNCTION_CODE (decl) == BUILT_IN_VA_ARG_PACK_LEN)
1881 /* __builtin_va_arg_pack_len () should be replaced by
1882 the number of anonymous arguments. */
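	      /* For illustration (hypothetical): if the enclosing always_inline
	         varargs function has FMT as its only named parameter and was
	         called as logit (fmt, a, b, c), the __builtin_va_arg_pack_len ()
	         call is replaced here by the integer constant 3.  */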
1883 size_t nargs = gimple_call_num_args (id->call_stmt), i;
1884 tree count, p;
1885 gimple new_stmt;
1887 for (p = DECL_ARGUMENTS (id->src_fn); p; p = DECL_CHAIN (p))
1888 nargs--;
1890 /* For instrumented calls we should ignore bounds. */
1891 for (i = gimple_call_num_args (id->call_stmt) - nargs;
1892 i < gimple_call_num_args (id->call_stmt);
1893 i++)
1894 if (POINTER_BOUNDS_P (gimple_call_arg (id->call_stmt, i)))
1895 nargs--;
1897 count = build_int_cst (integer_type_node, nargs);
1898 new_stmt = gimple_build_assign (gimple_call_lhs (stmt), count);
1899 gsi_replace (&copy_gsi, new_stmt, false);
1900 stmt = new_stmt;
1903 /* Statements produced by inlining can be unfolded, especially
1904 when we constant propagated some operands. We can't fold
1905 them right now for two reasons:
1906 1) folding requires SSA_NAME_DEF_STMTs to be correct
1907 2) we can't change function calls to builtins.
1908 So we just mark the statement for later folding. We mark
1909 all new statements, instead of just statements that have changed
1910 by some nontrivial substitution, so even statements made
1911 foldable indirectly are updated. If this turns out to be
1912 expensive, copy_body can be told to watch for nontrivial
1913 changes. */
1914 if (id->statements_to_fold)
1915 id->statements_to_fold->add (stmt);
1917 /* We're duplicating a CALL_EXPR. Find any corresponding
1918 callgraph edges and update or duplicate them. */
1919 if (gcall *call_stmt = dyn_cast <gcall *> (stmt))
1921 struct cgraph_edge *edge;
1923 switch (id->transform_call_graph_edges)
1925 case CB_CGE_DUPLICATE:
1926 edge = id->src_node->get_edge (orig_stmt);
1927 if (edge)
1929 int edge_freq = edge->frequency;
1930 int new_freq;
1931 struct cgraph_edge *old_edge = edge;
1932 edge = edge->clone (id->dst_node, call_stmt,
1933 gimple_uid (stmt),
1934 REG_BR_PROB_BASE, CGRAPH_FREQ_BASE,
1935 true);
1936 /* We could also just rescale the frequency, but
1937 doing so would introduce roundoff errors and make
1938 the verifier unhappy. */
1939 new_freq = compute_call_stmt_bb_frequency (id->dst_node->decl,
1940 copy_basic_block);
1942 /* Speculative calls consist of two edges - direct and indirect.
1943 Duplicate the whole thing and distribute frequencies accordingly. */
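		    /* A sketch of the distribution done just below: with D and I
		       the original direct and indirect frequencies (when their sum
		       is nonzero), the cloned edges get approximately
		         direct->frequency   = new_freq * D / (D + I)
		         indirect->frequency = new_freq * I / (D + I)
		       both capped at CGRAPH_FREQ_MAX.  */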
1944 if (edge->speculative)
1946 struct cgraph_edge *direct, *indirect;
1947 struct ipa_ref *ref;
1949 gcc_assert (!edge->indirect_unknown_callee);
1950 old_edge->speculative_call_info (direct, indirect, ref);
1951 indirect = indirect->clone (id->dst_node, call_stmt,
1952 gimple_uid (stmt),
1953 REG_BR_PROB_BASE, CGRAPH_FREQ_BASE,
1954 true);
1955 if (old_edge->frequency + indirect->frequency)
1957 edge->frequency = MIN (RDIV ((gcov_type)new_freq * old_edge->frequency,
1958 (old_edge->frequency + indirect->frequency)),
1959 CGRAPH_FREQ_MAX);
1960 indirect->frequency = MIN (RDIV ((gcov_type)new_freq * indirect->frequency,
1961 (old_edge->frequency + indirect->frequency)),
1962 CGRAPH_FREQ_MAX);
1964 id->dst_node->clone_reference (ref, stmt);
1966 else
1968 edge->frequency = new_freq;
1969 if (dump_file
1970 && profile_status_for_fn (cfun) != PROFILE_ABSENT
1971 && (edge_freq > edge->frequency + 10
1972 || edge_freq < edge->frequency - 10))
1974 fprintf (dump_file, "Edge frequency estimated by "
1975 "cgraph %i diverge from inliner's estimate %i\n",
1976 edge_freq,
1977 edge->frequency);
1978 fprintf (dump_file,
1979 "Orig bb: %i, orig bb freq %i, new bb freq %i\n",
1980 bb->index,
1981 bb->frequency,
1982 copy_basic_block->frequency);
1986 break;
1988 case CB_CGE_MOVE_CLONES:
1989 id->dst_node->set_call_stmt_including_clones (orig_stmt,
1990 call_stmt);
1991 edge = id->dst_node->get_edge (stmt);
1992 break;
1994 case CB_CGE_MOVE:
1995 edge = id->dst_node->get_edge (orig_stmt);
1996 if (edge)
1997 edge->set_call_stmt (call_stmt);
1998 break;
2000 default:
2001 gcc_unreachable ();
2004 /* Constant propagation on arguments done during inlining
2005 may create a new direct call. Produce an edge for it. */
2006 if ((!edge
2007 || (edge->indirect_inlining_edge
2008 && id->transform_call_graph_edges == CB_CGE_MOVE_CLONES))
2009 && id->dst_node->definition
2010 && (fn = gimple_call_fndecl (stmt)) != NULL)
2012 struct cgraph_node *dest = cgraph_node::get (fn);
2014 /* We have a missing edge in the callgraph. This can happen
2015 when previous inlining turned an indirect call into a
2016 direct call by constant propagating arguments or we are
2017 producing a dead clone (for further cloning). In all
2018 other cases we hit a bug (incorrect node sharing is the
2019 most common reason for missing edges). */
2020 gcc_assert (!dest->definition
2021 || dest->address_taken
2022 || !id->src_node->definition
2023 || !id->dst_node->definition);
2024 if (id->transform_call_graph_edges == CB_CGE_MOVE_CLONES)
2025 id->dst_node->create_edge_including_clones
2026 (dest, orig_stmt, call_stmt, bb->count,
2027 compute_call_stmt_bb_frequency (id->dst_node->decl,
2028 copy_basic_block),
2029 CIF_ORIGINALLY_INDIRECT_CALL);
2030 else
2031 id->dst_node->create_edge (dest, call_stmt,
2032 bb->count,
2033 compute_call_stmt_bb_frequency
2034 (id->dst_node->decl,
2035 copy_basic_block))->inline_failed
2036 = CIF_ORIGINALLY_INDIRECT_CALL;
2037 if (dump_file)
2039 fprintf (dump_file, "Created new direct edge to %s\n",
2040 dest->name ());
2044 notice_special_calls (as_a <gcall *> (stmt));
2047 maybe_duplicate_eh_stmt_fn (cfun, stmt, id->src_cfun, orig_stmt,
2048 id->eh_map, id->eh_lp_nr);
2050 if (gimple_in_ssa_p (cfun) && !is_gimple_debug (stmt))
2052 ssa_op_iter i;
2053 tree def;
2055 FOR_EACH_SSA_TREE_OPERAND (def, stmt, i, SSA_OP_DEF)
2056 if (TREE_CODE (def) == SSA_NAME)
2057 SSA_NAME_DEF_STMT (def) = stmt;
2060 gsi_next (&copy_gsi);
2062 while (!gsi_end_p (copy_gsi));
2064 copy_gsi = gsi_last_bb (copy_basic_block);
2067 return copy_basic_block;
2070 /* Inserting a Single Entry Multiple Exit region in SSA form into code in
2071 SSA form is quite easy, since the dominator relationship for the old
2072 basic blocks does not change.
2074 There is however an exception where inlining might change the dominator
2075 relation across EH edges from basic blocks within the inlined function
2076 to landing pads in the function we inline into.
2078 The function fills in PHI_RESULTs of such PHI nodes if they refer
2079 to gimple regs. Otherwise, the function marks the PHI_RESULT of such
2080 PHI nodes for renaming. For non-gimple regs, renaming is safe: the
2081 EH edges are abnormal and SSA_NAME_OCCURS_IN_ABNORMAL_PHI must be
2082 set, and this means that there will be no overlapping live ranges
2083 for the underlying symbol.
2085 This might change in the future if we allow redirecting of EH edges and
2086 then we might want to change the way we build the CFG pre-inlining to
2087 include all the possible edges. */
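/* A minimal sketch of the situation handled here: a statement copied from the
   inlined body can throw to a landing pad L that already exists in the caller,
   so a PHI in L gains a new incoming edge E.  The argument on E is simply
   copied from the argument L already receives on the edge from RET_BB, which
   is safe because both edges are EH/abnormal and the values cannot have
   overlapping live ranges.  */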
2088 static void
2089 update_ssa_across_abnormal_edges (basic_block bb, basic_block ret_bb,
2090 bool can_throw, bool nonlocal_goto)
2092 edge e;
2093 edge_iterator ei;
2095 FOR_EACH_EDGE (e, ei, bb->succs)
2096 if (!e->dest->aux
2097 || ((basic_block)e->dest->aux)->index == ENTRY_BLOCK)
2099 gphi *phi;
2100 gphi_iterator si;
2102 if (!nonlocal_goto)
2103 gcc_assert (e->flags & EDGE_EH);
2105 if (!can_throw)
2106 gcc_assert (!(e->flags & EDGE_EH));
2108 for (si = gsi_start_phis (e->dest); !gsi_end_p (si); gsi_next (&si))
2110 edge re;
2112 phi = si.phi ();
2114 /* For abnormal goto/call edges the receiver can be the
2115 ENTRY_BLOCK. Do not assert this cannot happen. */
2117 gcc_assert ((e->flags & EDGE_EH)
2118 || SSA_NAME_OCCURS_IN_ABNORMAL_PHI (PHI_RESULT (phi)));
2120 re = find_edge (ret_bb, e->dest);
2121 gcc_checking_assert (re);
2122 gcc_assert ((re->flags & (EDGE_EH | EDGE_ABNORMAL))
2123 == (e->flags & (EDGE_EH | EDGE_ABNORMAL)));
2125 SET_USE (PHI_ARG_DEF_PTR_FROM_EDGE (phi, e),
2126 USE_FROM_PTR (PHI_ARG_DEF_PTR_FROM_EDGE (phi, re)));
2132 /* Copy edges from BB into its copy constructed earlier, scaling the profile
2133 accordingly. Edges will be taken care of later. Assume the aux
2134 pointers point to the copies of each BB. Return true if any
2135 debug stmts are left after a statement that must end the basic block. */
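/* For reference, the scaling applied below is, in effect (modulo rounding),
     new_edge->count = old_edge->count * count_scale / REG_BR_PROB_BASE
   via apply_scale, while edge probabilities are copied unchanged.  */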
2137 static bool
2138 copy_edges_for_bb (basic_block bb, gcov_type count_scale, basic_block ret_bb,
2139 basic_block abnormal_goto_dest)
2141 basic_block new_bb = (basic_block) bb->aux;
2142 edge_iterator ei;
2143 edge old_edge;
2144 gimple_stmt_iterator si;
2145 int flags;
2146 bool need_debug_cleanup = false;
2148 /* Use the indices from the original blocks to create edges for the
2149 new ones. */
2150 FOR_EACH_EDGE (old_edge, ei, bb->succs)
2151 if (!(old_edge->flags & EDGE_EH))
2153 edge new_edge;
2155 flags = old_edge->flags;
2157 /* Return edges do get a FALLTHRU flag when they get inlined. */
2158 if (old_edge->dest->index == EXIT_BLOCK
2159 && !(old_edge->flags & (EDGE_TRUE_VALUE|EDGE_FALSE_VALUE|EDGE_FAKE))
2160 && old_edge->dest->aux != EXIT_BLOCK_PTR_FOR_FN (cfun))
2161 flags |= EDGE_FALLTHRU;
2162 new_edge = make_edge (new_bb, (basic_block) old_edge->dest->aux, flags);
2163 new_edge->count = apply_scale (old_edge->count, count_scale);
2164 new_edge->probability = old_edge->probability;
2167 if (bb->index == ENTRY_BLOCK || bb->index == EXIT_BLOCK)
2168 return false;
2170 for (si = gsi_start_bb (new_bb); !gsi_end_p (si);)
2172 gimple copy_stmt;
2173 bool can_throw, nonlocal_goto;
2175 copy_stmt = gsi_stmt (si);
2176 if (!is_gimple_debug (copy_stmt))
2177 update_stmt (copy_stmt);
2179 /* Do this before the possible split_block. */
2180 gsi_next (&si);
2182 /* If this tree could throw an exception, there are two
2183 cases where we need to add abnormal edge(s): the
2184 tree wasn't in a region and there is a "current
2185 region" in the caller; or the original tree had
2186 EH edges. In both cases split the block after the tree,
2187 and add abnormal edge(s) as needed; we need both
2188 those from the callee and the caller.
2189 We check whether the copy can throw, because the const
2190 propagation can change an INDIRECT_REF which throws
2191 into a COMPONENT_REF which doesn't. If the copy
2192 can throw, the original could also throw. */
2193 can_throw = stmt_can_throw_internal (copy_stmt);
2194 nonlocal_goto
2195 = (stmt_can_make_abnormal_goto (copy_stmt)
2196 && !computed_goto_p (copy_stmt));
2198 if (can_throw || nonlocal_goto)
2200 if (!gsi_end_p (si))
2202 while (!gsi_end_p (si) && is_gimple_debug (gsi_stmt (si)))
2203 gsi_next (&si);
2204 if (gsi_end_p (si))
2205 need_debug_cleanup = true;
2207 if (!gsi_end_p (si))
2208 /* Note that bb's predecessor edges aren't necessarily
2209 right at this point; split_block doesn't care. */
2211 edge e = split_block (new_bb, copy_stmt);
2213 new_bb = e->dest;
2214 new_bb->aux = e->src->aux;
2215 si = gsi_start_bb (new_bb);
2219 if (gimple_code (copy_stmt) == GIMPLE_EH_DISPATCH)
2220 make_eh_dispatch_edges (as_a <geh_dispatch *> (copy_stmt));
2221 else if (can_throw)
2222 make_eh_edges (copy_stmt);
2224 /* If the call we inline cannot make an abnormal goto, do not add
2225 additional abnormal edges but only retain those already present
2226 in the original function body. */
2227 if (abnormal_goto_dest == NULL)
2228 nonlocal_goto = false;
2229 if (nonlocal_goto)
2231 basic_block copy_stmt_bb = gimple_bb (copy_stmt);
2233 if (get_abnormal_succ_dispatcher (copy_stmt_bb))
2234 nonlocal_goto = false;
2235 /* ABNORMAL_DISPATCHER (1) is for longjmp/setjmp or nonlocal gotos
2236 in OpenMP regions which aren't allowed to be left abnormally.
2237 So, no need to add abnormal edge in that case. */
2238 else if (is_gimple_call (copy_stmt)
2239 && gimple_call_internal_p (copy_stmt)
2240 && (gimple_call_internal_fn (copy_stmt)
2241 == IFN_ABNORMAL_DISPATCHER)
2242 && gimple_call_arg (copy_stmt, 0) == boolean_true_node)
2243 nonlocal_goto = false;
2244 else
2245 make_edge (copy_stmt_bb, abnormal_goto_dest, EDGE_ABNORMAL);
2248 if ((can_throw || nonlocal_goto)
2249 && gimple_in_ssa_p (cfun))
2250 update_ssa_across_abnormal_edges (gimple_bb (copy_stmt), ret_bb,
2251 can_throw, nonlocal_goto);
2253 return need_debug_cleanup;
2256 /* Copy the PHIs. All blocks and edges are copied, some blocks
2257 were possibly split and new outgoing EH edges inserted.
2258 BB points to the block of the original function and the AUX pointers
2259 link the original and newly copied blocks. */
2261 static void
2262 copy_phis_for_bb (basic_block bb, copy_body_data *id)
2264 basic_block const new_bb = (basic_block) bb->aux;
2265 edge_iterator ei;
2266 gphi *phi;
2267 gphi_iterator si;
2268 edge new_edge;
2269 bool inserted = false;
2271 for (si = gsi_start_phis (bb); !gsi_end_p (si); gsi_next (&si))
2273 tree res, new_res;
2274 gphi *new_phi;
2276 phi = si.phi ();
2277 res = PHI_RESULT (phi);
2278 new_res = res;
2279 if (!virtual_operand_p (res))
2281 walk_tree (&new_res, copy_tree_body_r, id, NULL);
2282 new_phi = create_phi_node (new_res, new_bb);
2283 FOR_EACH_EDGE (new_edge, ei, new_bb->preds)
2285 edge old_edge = find_edge ((basic_block) new_edge->src->aux, bb);
2286 tree arg;
2287 tree new_arg;
2288 edge_iterator ei2;
2289 location_t locus;
2291 /* When doing partial cloning, we allow PHIs on the entry block
2292 as long as all the arguments are the same. Find any input
2293 edge to see the argument to copy. */
2294 if (!old_edge)
2295 FOR_EACH_EDGE (old_edge, ei2, bb->preds)
2296 if (!old_edge->src->aux)
2297 break;
2299 arg = PHI_ARG_DEF_FROM_EDGE (phi, old_edge);
2300 new_arg = arg;
2301 walk_tree (&new_arg, copy_tree_body_r, id, NULL);
2302 gcc_assert (new_arg);
2303 /* With return slot optimization we can end up with
2304 non-gimple (foo *)&this->m, fix that here. */
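	      /* A hypothetical illustration: a remapped argument such as
	         (foo *) &this->m is not a valid gimple value, so it is forced
	         into a fresh temporary and the defining statements are queued
	         for insertion on the incoming edge just below.  */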
2305 if (TREE_CODE (new_arg) != SSA_NAME
2306 && TREE_CODE (new_arg) != FUNCTION_DECL
2307 && !is_gimple_val (new_arg))
2309 gimple_seq stmts = NULL;
2310 new_arg = force_gimple_operand (new_arg, &stmts, true, NULL);
2311 gsi_insert_seq_on_edge (new_edge, stmts);
2312 inserted = true;
2314 locus = gimple_phi_arg_location_from_edge (phi, old_edge);
2315 if (LOCATION_BLOCK (locus))
2317 tree *n;
2318 n = id->decl_map->get (LOCATION_BLOCK (locus));
2319 gcc_assert (n);
2320 if (*n)
2321 locus = COMBINE_LOCATION_DATA (line_table, locus, *n);
2322 else
2323 locus = LOCATION_LOCUS (locus);
2325 else
2326 locus = LOCATION_LOCUS (locus);
2328 add_phi_arg (new_phi, new_arg, new_edge, locus);
2333 /* Commit the delayed edge insertions. */
2334 if (inserted)
2335 FOR_EACH_EDGE (new_edge, ei, new_bb->preds)
2336 gsi_commit_one_edge_insert (new_edge, NULL);
2340 /* Wrapper for remap_decl so it can be used as a callback. */
2342 static tree
2343 remap_decl_1 (tree decl, void *data)
2345 return remap_decl (decl, (copy_body_data *) data);
2348 /* Build the struct function and associated data structures for the new clone
2349 NEW_FNDECL to be built. CALLEE_FNDECL is the original. The function changes
2350 cfun to the function of new_fndecl (and current_function_decl too). */
2352 static void
2353 initialize_cfun (tree new_fndecl, tree callee_fndecl, gcov_type count)
2355 struct function *src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
2356 gcov_type count_scale;
2358 if (!DECL_ARGUMENTS (new_fndecl))
2359 DECL_ARGUMENTS (new_fndecl) = DECL_ARGUMENTS (callee_fndecl);
2360 if (!DECL_RESULT (new_fndecl))
2361 DECL_RESULT (new_fndecl) = DECL_RESULT (callee_fndecl);
2363 if (ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count)
2364 count_scale
2365 = GCOV_COMPUTE_SCALE (count,
2366 ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count);
2367 else
2368 count_scale = REG_BR_PROB_BASE;
2370 /* Register specific tree functions. */
2371 gimple_register_cfg_hooks ();
2373 /* Get clean struct function. */
2374 push_struct_function (new_fndecl);
2376 /* We will rebuild these, so just sanity check that they are empty. */
2377 gcc_assert (VALUE_HISTOGRAMS (cfun) == NULL);
2378 gcc_assert (cfun->local_decls == NULL);
2379 gcc_assert (cfun->cfg == NULL);
2380 gcc_assert (cfun->decl == new_fndecl);
2382 /* Copy items we preserve during cloning. */
2383 cfun->static_chain_decl = src_cfun->static_chain_decl;
2384 cfun->nonlocal_goto_save_area = src_cfun->nonlocal_goto_save_area;
2385 cfun->function_end_locus = src_cfun->function_end_locus;
2386 cfun->curr_properties = src_cfun->curr_properties;
2387 cfun->last_verified = src_cfun->last_verified;
2388 cfun->va_list_gpr_size = src_cfun->va_list_gpr_size;
2389 cfun->va_list_fpr_size = src_cfun->va_list_fpr_size;
2390 cfun->has_nonlocal_label = src_cfun->has_nonlocal_label;
2391 cfun->stdarg = src_cfun->stdarg;
2392 cfun->after_inlining = src_cfun->after_inlining;
2393 cfun->can_throw_non_call_exceptions
2394 = src_cfun->can_throw_non_call_exceptions;
2395 cfun->can_delete_dead_exceptions = src_cfun->can_delete_dead_exceptions;
2396 cfun->returns_struct = src_cfun->returns_struct;
2397 cfun->returns_pcc_struct = src_cfun->returns_pcc_struct;
2399 init_empty_tree_cfg ();
2401 profile_status_for_fn (cfun) = profile_status_for_fn (src_cfun);
2402 ENTRY_BLOCK_PTR_FOR_FN (cfun)->count =
2403 (ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count * count_scale /
2404 REG_BR_PROB_BASE);
2405 ENTRY_BLOCK_PTR_FOR_FN (cfun)->frequency
2406 = ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->frequency;
2407 EXIT_BLOCK_PTR_FOR_FN (cfun)->count =
2408 (EXIT_BLOCK_PTR_FOR_FN (src_cfun)->count * count_scale /
2409 REG_BR_PROB_BASE);
2410 EXIT_BLOCK_PTR_FOR_FN (cfun)->frequency =
2411 EXIT_BLOCK_PTR_FOR_FN (src_cfun)->frequency;
2412 if (src_cfun->eh)
2413 init_eh_for_function ();
2415 if (src_cfun->gimple_df)
2417 init_tree_ssa (cfun);
2418 cfun->gimple_df->in_ssa_p = true;
2419 init_ssa_operands (cfun);
2423 /* Helper function for copy_cfg_body. Move debug stmts from the end
2424 of NEW_BB to the beginning of successor basic blocks when needed. If the
2425 successor has multiple predecessors, reset the debug stmt values,
2426 otherwise keep them. */
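/* For illustration: if NEW_BB ends with a throwing call followed by a bind
     # DEBUG x => x_3
   that bind is moved (for the last successor edge) or copied (for the others)
   to the start of each successor block; when a successor has several
   predecessors the bound value is reset to NULL so that a value valid on only
   one incoming path is not claimed on all of them.  */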
2428 static void
2429 maybe_move_debug_stmts_to_successors (copy_body_data *id, basic_block new_bb)
2431 edge e;
2432 edge_iterator ei;
2433 gimple_stmt_iterator si = gsi_last_nondebug_bb (new_bb);
2435 if (gsi_end_p (si)
2436 || gsi_one_before_end_p (si)
2437 || !(stmt_can_throw_internal (gsi_stmt (si))
2438 || stmt_can_make_abnormal_goto (gsi_stmt (si))))
2439 return;
2441 FOR_EACH_EDGE (e, ei, new_bb->succs)
2443 gimple_stmt_iterator ssi = gsi_last_bb (new_bb);
2444 gimple_stmt_iterator dsi = gsi_after_labels (e->dest);
2445 while (is_gimple_debug (gsi_stmt (ssi)))
2447 gimple stmt = gsi_stmt (ssi);
2448 gdebug *new_stmt;
2449 tree var;
2450 tree value;
2452 /* For the last edge move the debug stmts instead of copying
2453 them. */
2454 if (ei_one_before_end_p (ei))
2456 si = ssi;
2457 gsi_prev (&ssi);
2458 if (!single_pred_p (e->dest) && gimple_debug_bind_p (stmt))
2459 gimple_debug_bind_reset_value (stmt);
2460 gsi_remove (&si, false);
2461 gsi_insert_before (&dsi, stmt, GSI_SAME_STMT);
2462 continue;
2465 if (gimple_debug_bind_p (stmt))
2467 var = gimple_debug_bind_get_var (stmt);
2468 if (single_pred_p (e->dest))
2470 value = gimple_debug_bind_get_value (stmt);
2471 value = unshare_expr (value);
2473 else
2474 value = NULL_TREE;
2475 new_stmt = gimple_build_debug_bind (var, value, stmt);
2477 else if (gimple_debug_source_bind_p (stmt))
2479 var = gimple_debug_source_bind_get_var (stmt);
2480 value = gimple_debug_source_bind_get_value (stmt);
2481 new_stmt = gimple_build_debug_source_bind (var, value, stmt);
2483 else
2484 gcc_unreachable ();
2485 gsi_insert_before (&dsi, new_stmt, GSI_SAME_STMT);
2486 id->debug_stmts.safe_push (new_stmt);
2487 gsi_prev (&ssi);
2492 /* Make a copy of the sub-loops of SRC_PARENT and place them
2493 as children of DEST_PARENT. */
2495 static void
2496 copy_loops (copy_body_data *id,
2497 struct loop *dest_parent, struct loop *src_parent)
2499 struct loop *src_loop = src_parent->inner;
2500 while (src_loop)
2502 if (!id->blocks_to_copy
2503 || bitmap_bit_p (id->blocks_to_copy, src_loop->header->index))
2505 struct loop *dest_loop = alloc_loop ();
2507 /* Assign the new loop its header and latch and associate
2508 those with the new loop. */
2509 dest_loop->header = (basic_block)src_loop->header->aux;
2510 dest_loop->header->loop_father = dest_loop;
2511 if (src_loop->latch != NULL)
2513 dest_loop->latch = (basic_block)src_loop->latch->aux;
2514 dest_loop->latch->loop_father = dest_loop;
2517 /* Copy loop meta-data. */
2518 copy_loop_info (src_loop, dest_loop);
2520 /* Finally place it into the loop array and the loop tree. */
2521 place_new_loop (cfun, dest_loop);
2522 flow_loop_tree_node_add (dest_parent, dest_loop);
2524 dest_loop->safelen = src_loop->safelen;
2525 dest_loop->dont_vectorize = src_loop->dont_vectorize;
2526 if (src_loop->force_vectorize)
2528 dest_loop->force_vectorize = true;
2529 cfun->has_force_vectorize_loops = true;
2531 if (src_loop->simduid)
2533 dest_loop->simduid = remap_decl (src_loop->simduid, id);
2534 cfun->has_simduid_loops = true;
2537 /* Recurse. */
2538 copy_loops (id, dest_loop, src_loop);
2540 src_loop = src_loop->next;
2544 /* Call cgraph_redirect_edge_call_stmt_to_callee on all calls in BB. */
2546 void
2547 redirect_all_calls (copy_body_data * id, basic_block bb)
2549 gimple_stmt_iterator si;
2550 for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
2552 if (is_gimple_call (gsi_stmt (si)))
2554 struct cgraph_edge *edge = id->dst_node->get_edge (gsi_stmt (si));
2555 if (edge)
2556 edge->redirect_call_stmt_to_callee ();
2561 /* Convert estimated frequencies into counts for NODE, scaling COUNT
2562 with each bb's frequency. Used when NODE has a 0-weight entry
2563 but we are about to inline it into a non-zero count call bb.
2564 See the comments for handle_missing_profiles() in predict.c for
2565 when this can happen for COMDATs. */
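/* In effect (modulo rounding), every block gets
     bb->count = count * bb->frequency / BB_FREQ_MAX
   and each outgoing edge gets
     e->count = e->src->count * e->probability / REG_BR_PROB_BASE.  */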
2567 void
2568 freqs_to_counts (struct cgraph_node *node, gcov_type count)
2570 basic_block bb;
2571 edge_iterator ei;
2572 edge e;
2573 struct function *fn = DECL_STRUCT_FUNCTION (node->decl);
2575 FOR_ALL_BB_FN(bb, fn)
2577 bb->count = apply_scale (count,
2578 GCOV_COMPUTE_SCALE (bb->frequency, BB_FREQ_MAX));
2579 FOR_EACH_EDGE (e, ei, bb->succs)
2580 e->count = apply_probability (e->src->count, e->probability);
2584 /* Make a copy of the body of FN so that it can be inserted inline in
2585 another function. Walks FN via CFG, returns new fndecl. */
2587 static tree
2588 copy_cfg_body (copy_body_data * id, gcov_type count, int frequency_scale,
2589 basic_block entry_block_map, basic_block exit_block_map,
2590 basic_block new_entry)
2592 tree callee_fndecl = id->src_fn;
2593 /* Original cfun for the callee, doesn't change. */
2594 struct function *src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
2595 struct function *cfun_to_copy;
2596 basic_block bb;
2597 tree new_fndecl = NULL;
2598 bool need_debug_cleanup = false;
2599 gcov_type count_scale;
2600 int last;
2601 int incoming_frequency = 0;
2602 gcov_type incoming_count = 0;
2604 /* This can happen for COMDAT routines that end up with 0 counts
2605 despite being called (see the comments for handle_missing_profiles()
2606 in predict.c as to why). Apply counts to the blocks in the callee
2607 before inlining, using the guessed edge frequencies, so that we don't
2608 end up with a 0-count inline body which can confuse downstream
2609 optimizations such as function splitting. */
2610 if (!ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count && count)
2612 /* Apply the larger of the call bb count and the total incoming
2613 call edge count to the callee. */
2614 gcov_type in_count = 0;
2615 struct cgraph_edge *in_edge;
2616 for (in_edge = id->src_node->callers; in_edge;
2617 in_edge = in_edge->next_caller)
2618 in_count += in_edge->count;
2619 freqs_to_counts (id->src_node, count > in_count ? count : in_count);
2622 if (ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count)
2623 count_scale
2624 = GCOV_COMPUTE_SCALE (count,
2625 ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count);
2626 else
2627 count_scale = REG_BR_PROB_BASE;
2629 /* Register specific tree functions. */
2630 gimple_register_cfg_hooks ();
2632 /* If we are inlining just a region of the function, make sure to connect
2633 the new entry to ENTRY_BLOCK_PTR_FOR_FN (cfun). Since the new entry can be
2634 part of a loop, we must compute the frequency and probability of
2635 ENTRY_BLOCK_PTR_FOR_FN (cfun) based on the frequencies and
2636 probabilities of edges incoming from the nonduplicated region. */
2637 if (new_entry)
2639 edge e;
2640 edge_iterator ei;
2642 FOR_EACH_EDGE (e, ei, new_entry->preds)
2643 if (!e->src->aux)
2645 incoming_frequency += EDGE_FREQUENCY (e);
2646 incoming_count += e->count;
2648 incoming_count = apply_scale (incoming_count, count_scale);
2649 incoming_frequency
2650 = apply_scale ((gcov_type)incoming_frequency, frequency_scale);
2651 ENTRY_BLOCK_PTR_FOR_FN (cfun)->count = incoming_count;
2652 ENTRY_BLOCK_PTR_FOR_FN (cfun)->frequency = incoming_frequency;
2655 /* Must have a CFG here at this point. */
2656 gcc_assert (ENTRY_BLOCK_PTR_FOR_FN
2657 (DECL_STRUCT_FUNCTION (callee_fndecl)));
2659 cfun_to_copy = id->src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
2661 ENTRY_BLOCK_PTR_FOR_FN (cfun_to_copy)->aux = entry_block_map;
2662 EXIT_BLOCK_PTR_FOR_FN (cfun_to_copy)->aux = exit_block_map;
2663 entry_block_map->aux = ENTRY_BLOCK_PTR_FOR_FN (cfun_to_copy);
2664 exit_block_map->aux = EXIT_BLOCK_PTR_FOR_FN (cfun_to_copy);
2666 /* Duplicate any exception-handling regions. */
2667 if (cfun->eh)
2668 id->eh_map = duplicate_eh_regions (cfun_to_copy, NULL, id->eh_lp_nr,
2669 remap_decl_1, id);
2671 /* Use aux pointers to map the original blocks to their copies. */
2672 FOR_EACH_BB_FN (bb, cfun_to_copy)
2673 if (!id->blocks_to_copy || bitmap_bit_p (id->blocks_to_copy, bb->index))
2675 basic_block new_bb = copy_bb (id, bb, frequency_scale, count_scale);
2676 bb->aux = new_bb;
2677 new_bb->aux = bb;
2678 new_bb->loop_father = entry_block_map->loop_father;
2681 last = last_basic_block_for_fn (cfun);
2683 /* Now that we've duplicated the blocks, duplicate their edges. */
2684 basic_block abnormal_goto_dest = NULL;
2685 if (id->call_stmt
2686 && stmt_can_make_abnormal_goto (id->call_stmt))
2688 gimple_stmt_iterator gsi = gsi_for_stmt (id->call_stmt);
2690 bb = gimple_bb (id->call_stmt);
2691 gsi_next (&gsi);
2692 if (gsi_end_p (gsi))
2693 abnormal_goto_dest = get_abnormal_succ_dispatcher (bb);
2695 FOR_ALL_BB_FN (bb, cfun_to_copy)
2696 if (!id->blocks_to_copy
2697 || (bb->index > 0 && bitmap_bit_p (id->blocks_to_copy, bb->index)))
2698 need_debug_cleanup |= copy_edges_for_bb (bb, count_scale, exit_block_map,
2699 abnormal_goto_dest);
2701 if (new_entry)
2703 edge e = make_edge (entry_block_map, (basic_block)new_entry->aux, EDGE_FALLTHRU);
2704 e->probability = REG_BR_PROB_BASE;
2705 e->count = incoming_count;
2708 /* Duplicate the loop tree, if available and wanted. */
2709 if (loops_for_fn (src_cfun) != NULL
2710 && current_loops != NULL)
2712 copy_loops (id, entry_block_map->loop_father,
2713 get_loop (src_cfun, 0));
2714 /* Defer to cfgcleanup to update loop-father fields of basic-blocks. */
2715 loops_state_set (LOOPS_NEED_FIXUP);
2718 /* If the loop tree in the source function needed fixup, mark the
2719 destination loop tree for fixup, too. */
2720 if (loops_for_fn (src_cfun)->state & LOOPS_NEED_FIXUP)
2721 loops_state_set (LOOPS_NEED_FIXUP);
2723 if (gimple_in_ssa_p (cfun))
2724 FOR_ALL_BB_FN (bb, cfun_to_copy)
2725 if (!id->blocks_to_copy
2726 || (bb->index > 0 && bitmap_bit_p (id->blocks_to_copy, bb->index)))
2727 copy_phis_for_bb (bb, id);
2729 FOR_ALL_BB_FN (bb, cfun_to_copy)
2730 if (bb->aux)
2732 if (need_debug_cleanup
2733 && bb->index != ENTRY_BLOCK
2734 && bb->index != EXIT_BLOCK)
2735 maybe_move_debug_stmts_to_successors (id, (basic_block) bb->aux);
2736 /* Update call edge destinations. This cannot be done before loop
2737 info is updated, because we may split basic blocks. */
2738 if (id->transform_call_graph_edges == CB_CGE_DUPLICATE)
2739 redirect_all_calls (id, (basic_block)bb->aux);
2740 ((basic_block)bb->aux)->aux = NULL;
2741 bb->aux = NULL;
2744 /* Zero out AUX fields of newly created block during EH edge
2745 insertion. */
2746 for (; last < last_basic_block_for_fn (cfun); last++)
2748 if (need_debug_cleanup)
2749 maybe_move_debug_stmts_to_successors (id,
2750 BASIC_BLOCK_FOR_FN (cfun, last));
2751 BASIC_BLOCK_FOR_FN (cfun, last)->aux = NULL;
2752 /* Update call edge destinations. This cannot be done before loop
2753 info is updated, because we may split basic blocks. */
2754 if (id->transform_call_graph_edges == CB_CGE_DUPLICATE)
2755 redirect_all_calls (id, BASIC_BLOCK_FOR_FN (cfun, last));
2757 entry_block_map->aux = NULL;
2758 exit_block_map->aux = NULL;
2760 if (id->eh_map)
2762 delete id->eh_map;
2763 id->eh_map = NULL;
2766 return new_fndecl;
2769 /* Copy the debug STMT using ID. We deal with these statements in a
2770 special way: if any variable in their VALUE expression wasn't
2771 remapped yet, we won't remap it, because that would get decl uids
2772 out of sync, causing codegen differences between -g and -g0. If
2773 this arises, we drop the VALUE expression altogether. */
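/* A hypothetical example: a bind such as
     # DEBUG y => a_local + 1
   where a_local is a variable of the inlined function that was never remapped
   would reference the wrong decl in the copy, so the value is dropped and the
   bind degrades to
     # DEBUG y => NULL  */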
2775 static void
2776 copy_debug_stmt (gdebug *stmt, copy_body_data *id)
2778 tree t, *n;
2779 struct walk_stmt_info wi;
2781 if (gimple_block (stmt))
2783 n = id->decl_map->get (gimple_block (stmt));
2784 gimple_set_block (stmt, n ? *n : id->block);
2787 /* Remap all the operands in COPY. */
2788 memset (&wi, 0, sizeof (wi));
2789 wi.info = id;
2791 processing_debug_stmt = 1;
2793 if (gimple_debug_source_bind_p (stmt))
2794 t = gimple_debug_source_bind_get_var (stmt);
2795 else
2796 t = gimple_debug_bind_get_var (stmt);
2798 if (TREE_CODE (t) == PARM_DECL && id->debug_map
2799 && (n = id->debug_map->get (t)))
2801 gcc_assert (TREE_CODE (*n) == VAR_DECL);
2802 t = *n;
2804 else if (TREE_CODE (t) == VAR_DECL
2805 && !is_global_var (t)
2806 && !id->decl_map->get (t))
2807 /* T is a non-localized variable. */;
2808 else
2809 walk_tree (&t, remap_gimple_op_r, &wi, NULL);
2811 if (gimple_debug_bind_p (stmt))
2813 gimple_debug_bind_set_var (stmt, t);
2815 if (gimple_debug_bind_has_value_p (stmt))
2816 walk_tree (gimple_debug_bind_get_value_ptr (stmt),
2817 remap_gimple_op_r, &wi, NULL);
2819 /* Punt if any decl couldn't be remapped. */
2820 if (processing_debug_stmt < 0)
2821 gimple_debug_bind_reset_value (stmt);
2823 else if (gimple_debug_source_bind_p (stmt))
2825 gimple_debug_source_bind_set_var (stmt, t);
2826 walk_tree (gimple_debug_source_bind_get_value_ptr (stmt),
2827 remap_gimple_op_r, &wi, NULL);
2828 /* When inlining and source bind refers to one of the optimized
2829 away parameters, change the source bind into normal debug bind
2830 referring to the corresponding DEBUG_EXPR_DECL that should have
2831 been bound before the call stmt. */
2832 t = gimple_debug_source_bind_get_value (stmt);
2833 if (t != NULL_TREE
2834 && TREE_CODE (t) == PARM_DECL
2835 && id->call_stmt)
2837 vec<tree, va_gc> **debug_args = decl_debug_args_lookup (id->src_fn);
2838 unsigned int i;
2839 if (debug_args != NULL)
2841 for (i = 0; i < vec_safe_length (*debug_args); i += 2)
2842 if ((**debug_args)[i] == DECL_ORIGIN (t)
2843 && TREE_CODE ((**debug_args)[i + 1]) == DEBUG_EXPR_DECL)
2845 t = (**debug_args)[i + 1];
2846 stmt->subcode = GIMPLE_DEBUG_BIND;
2847 gimple_debug_bind_set_value (stmt, t);
2848 break;
2854 processing_debug_stmt = 0;
2856 update_stmt (stmt);
2859 /* Process deferred debug stmts. In order to give values better odds
2860 of being successfully remapped, we delay the processing of debug
2861 stmts until all other stmts that might require remapping are
2862 processed. */
2864 static void
2865 copy_debug_stmts (copy_body_data *id)
2867 size_t i;
2868 gdebug *stmt;
2870 if (!id->debug_stmts.exists ())
2871 return;
2873 FOR_EACH_VEC_ELT (id->debug_stmts, i, stmt)
2874 copy_debug_stmt (stmt, id);
2876 id->debug_stmts.release ();
2879 /* Make a copy of the body of SRC_FN so that it can be inserted inline in
2880 another function. */
2882 static tree
2883 copy_tree_body (copy_body_data *id)
2885 tree fndecl = id->src_fn;
2886 tree body = DECL_SAVED_TREE (fndecl);
2888 walk_tree (&body, copy_tree_body_r, id, NULL);
2890 return body;
2893 /* Make a copy of the body of FN so that it can be inserted inline in
2894 another function. */
2896 static tree
2897 copy_body (copy_body_data *id, gcov_type count, int frequency_scale,
2898 basic_block entry_block_map, basic_block exit_block_map,
2899 basic_block new_entry)
2901 tree fndecl = id->src_fn;
2902 tree body;
2904 /* If this body has a CFG, walk CFG and copy. */
2905 gcc_assert (ENTRY_BLOCK_PTR_FOR_FN (DECL_STRUCT_FUNCTION (fndecl)));
2906 body = copy_cfg_body (id, count, frequency_scale, entry_block_map, exit_block_map,
2907 new_entry);
2908 copy_debug_stmts (id);
2910 return body;
2913 /* Return true if VALUE is an ADDR_EXPR of an automatic variable
2914 defined in function FN, or of a data member thereof. */
2916 static bool
2917 self_inlining_addr_expr (tree value, tree fn)
2919 tree var;
2921 if (TREE_CODE (value) != ADDR_EXPR)
2922 return false;
2924 var = get_base_address (TREE_OPERAND (value, 0));
2926 return var && auto_var_in_fn_p (var, fn);
2929 /* Append to BB a debug annotation that binds VAR to VALUE, inheriting
2930 lexical block and line number information from base_stmt, if given,
2931 or from the last stmt of the block otherwise. */
2933 static gimple
2934 insert_init_debug_bind (copy_body_data *id,
2935 basic_block bb, tree var, tree value,
2936 gimple base_stmt)
2938 gimple note;
2939 gimple_stmt_iterator gsi;
2940 tree tracked_var;
2942 if (!gimple_in_ssa_p (id->src_cfun))
2943 return NULL;
2945 if (!MAY_HAVE_DEBUG_STMTS)
2946 return NULL;
2948 tracked_var = target_for_debug_bind (var);
2949 if (!tracked_var)
2950 return NULL;
2952 if (bb)
2954 gsi = gsi_last_bb (bb);
2955 if (!base_stmt && !gsi_end_p (gsi))
2956 base_stmt = gsi_stmt (gsi);
2959 note = gimple_build_debug_bind (tracked_var, value, base_stmt);
2961 if (bb)
2963 if (!gsi_end_p (gsi))
2964 gsi_insert_after (&gsi, note, GSI_SAME_STMT);
2965 else
2966 gsi_insert_before (&gsi, note, GSI_SAME_STMT);
2969 return note;
2972 static void
2973 insert_init_stmt (copy_body_data *id, basic_block bb, gimple init_stmt)
2975 /* If VAR represents a zero-sized variable, it's possible that the
2976 assignment statement may result in no gimple statements. */
2977 if (init_stmt)
2979 gimple_stmt_iterator si = gsi_last_bb (bb);
2981 /* We can end up with init statements that store to a non-register
2982 from a rhs with a conversion. Handle that here by forcing the
2983 rhs into a temporary. gimple_regimplify_operands is not
2984 prepared to do this for us. */
2985 if (!is_gimple_debug (init_stmt)
2986 && !is_gimple_reg (gimple_assign_lhs (init_stmt))
2987 && is_gimple_reg_type (TREE_TYPE (gimple_assign_lhs (init_stmt)))
2988 && gimple_assign_rhs_class (init_stmt) == GIMPLE_UNARY_RHS)
2990 tree rhs = build1 (gimple_assign_rhs_code (init_stmt),
2991 gimple_expr_type (init_stmt),
2992 gimple_assign_rhs1 (init_stmt));
2993 rhs = force_gimple_operand_gsi (&si, rhs, true, NULL_TREE, false,
2994 GSI_NEW_STMT);
2995 gimple_assign_set_rhs_code (init_stmt, TREE_CODE (rhs));
2996 gimple_assign_set_rhs1 (init_stmt, rhs);
2998 gsi_insert_after (&si, init_stmt, GSI_NEW_STMT);
2999 gimple_regimplify_operands (init_stmt, &si);
3001 if (!is_gimple_debug (init_stmt) && MAY_HAVE_DEBUG_STMTS)
3003 tree def = gimple_assign_lhs (init_stmt);
3004 insert_init_debug_bind (id, bb, def, def, init_stmt);
3009 /* Initialize parameter P with VALUE. If needed, produce an init statement
3010 at the end of BB. When BB is NULL, we return the init statement to be
3011 output later. */
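/* A rough sketch of the common cases, for a parameter P of a call being
   inlined with argument X_1:
     - when X_1 is an SSA name or an invariant and P's default definition can
       simply be remapped to it, no initialization statement is needed;
     - otherwise an equivalent VAR_DECL p.N is created and an assignment such
       as p.N = X_1 (or p_2 = X_1 in SSA form) is emitted at the end of BB.  */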
3012 static gimple
3013 setup_one_parameter (copy_body_data *id, tree p, tree value, tree fn,
3014 basic_block bb, tree *vars)
3016 gimple init_stmt = NULL;
3017 tree var;
3018 tree rhs = value;
3019 tree def = (gimple_in_ssa_p (cfun)
3020 ? ssa_default_def (id->src_cfun, p) : NULL);
3022 if (value
3023 && value != error_mark_node
3024 && !useless_type_conversion_p (TREE_TYPE (p), TREE_TYPE (value)))
3026 /* If we can match up types by promotion/demotion do so. */
3027 if (fold_convertible_p (TREE_TYPE (p), value))
3028 rhs = fold_convert (TREE_TYPE (p), value);
3029 else
3031 /* ??? For valid programs we should not end up here.
3032 Still if we end up with truly mismatched types here, fall back
3033 to using a VIEW_CONVERT_EXPR or a literal zero to not leak invalid
3034 GIMPLE to the following passes. */
3035 if (!is_gimple_reg_type (TREE_TYPE (value))
3036 || TYPE_SIZE (TREE_TYPE (p)) == TYPE_SIZE (TREE_TYPE (value)))
3037 rhs = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (p), value);
3038 else
3039 rhs = build_zero_cst (TREE_TYPE (p));
3043 /* Make an equivalent VAR_DECL. Note that we must NOT remap the type
3044 here since the type of this decl must be visible to the calling
3045 function. */
3046 var = copy_decl_to_var (p, id);
3048 /* Declare this new variable. */
3049 DECL_CHAIN (var) = *vars;
3050 *vars = var;
3052 /* Make gimplifier happy about this variable. */
3053 DECL_SEEN_IN_BIND_EXPR_P (var) = 1;
3055 /* If the parameter is never assigned to, has no SSA_NAMEs created,
3056 we would not need to create a new variable here at all, if it
3057 weren't for debug info. Still, we can just use the argument
3058 value. */
3059 if (TREE_READONLY (p)
3060 && !TREE_ADDRESSABLE (p)
3061 && value && !TREE_SIDE_EFFECTS (value)
3062 && !def)
3064 /* We may produce non-gimple trees by adding NOPs or introduce
3065 invalid sharing when the operand is not really constant.
3066 It is not a big deal to prohibit constant propagation here as
3067 we will constant propagate in the DOM1 pass anyway. */
3068 if (is_gimple_min_invariant (value)
3069 && useless_type_conversion_p (TREE_TYPE (p),
3070 TREE_TYPE (value))
3071 /* We have to be very careful about ADDR_EXPR. Make sure
3072 the base variable isn't a local variable of the inlined
3073 function, e.g., when doing recursive inlining, direct or
3074 mutually-recursive or whatever, which is why we don't
3075 just test whether fn == current_function_decl. */
3076 && ! self_inlining_addr_expr (value, fn))
3078 insert_decl_map (id, p, value);
3079 insert_debug_decl_map (id, p, var);
3080 return insert_init_debug_bind (id, bb, var, value, NULL);
3084 /* Register the VAR_DECL as the equivalent for the PARM_DECL;
3085 that way, when the PARM_DECL is encountered, it will be
3086 automatically replaced by the VAR_DECL. */
3087 insert_decl_map (id, p, var);
3089 /* Even if P was TREE_READONLY, the new VAR should not be.
3090 In the original code, we would have constructed a
3091 temporary, and then the function body would have never
3092 changed the value of P. However, now, we will be
3093 constructing VAR directly. The constructor body may
3094 change its value multiple times as it is being
3095 constructed. Therefore, it must not be TREE_READONLY;
3096 the back-end assumes that a TREE_READONLY variable is
3097 assigned to only once. */
3098 if (TYPE_NEEDS_CONSTRUCTING (TREE_TYPE (p)))
3099 TREE_READONLY (var) = 0;
3101 /* If there is no setup required and we are in SSA, take the easy route
3102 replacing all SSA names representing the function parameter by the
3103 SSA name passed to the function.
3105 We need to construct a map for the variable anyway as it might be used
3106 in different SSA names when the parameter is set in the function.
3108 Do the replacement at -O0 for const arguments replaced by a constant.
3109 This is important for builtin_constant_p and other constructs requiring
3110 a constant argument to be visible in the inlined function body. */
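  /* A hypothetical example of the -O0 case: inlining
       static inline int pick (const int n)
       { return __builtin_constant_p (n) ? n : fallback (); }
     (say, marked always_inline) at a call pick (4) lets N map directly to the
     constant 4, so __builtin_constant_p still evaluates to 1 in the inlined
     body even without optimization.  */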
3111 if (gimple_in_ssa_p (cfun) && rhs && def && is_gimple_reg (p)
3112 && (optimize
3113 || (TREE_READONLY (p)
3114 && is_gimple_min_invariant (rhs)))
3115 && (TREE_CODE (rhs) == SSA_NAME
3116 || is_gimple_min_invariant (rhs))
3117 && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (def))
3119 insert_decl_map (id, def, rhs);
3120 return insert_init_debug_bind (id, bb, var, rhs, NULL);
3123 /* If the value of the argument is never used, don't bother initializing
3124 it. */
3125 if (optimize && gimple_in_ssa_p (cfun) && !def && is_gimple_reg (p))
3127 gcc_assert (!value || !TREE_SIDE_EFFECTS (value));
3128 return insert_init_debug_bind (id, bb, var, rhs, NULL);
3131 /* Initialize this VAR_DECL from the equivalent argument. Convert
3132 the argument to the proper type in case it was promoted. */
3133 if (value)
3135 if (rhs == error_mark_node)
3137 insert_decl_map (id, p, var);
3138 return insert_init_debug_bind (id, bb, var, rhs, NULL);
3141 STRIP_USELESS_TYPE_CONVERSION (rhs);
3143 /* If we are in SSA form, properly remap the default definition
3144 or assign to a dummy SSA name if the parameter is unused and
3145 we are not optimizing. */
3146 if (gimple_in_ssa_p (cfun) && is_gimple_reg (p))
3148 if (def)
3150 def = remap_ssa_name (def, id);
3151 init_stmt = gimple_build_assign (def, rhs);
3152 SSA_NAME_IS_DEFAULT_DEF (def) = 0;
3153 set_ssa_default_def (cfun, var, NULL);
3155 else if (!optimize)
3157 def = make_ssa_name (var, NULL);
3158 init_stmt = gimple_build_assign (def, rhs);
3161 else
3162 init_stmt = gimple_build_assign (var, rhs);
3164 if (bb && init_stmt)
3165 insert_init_stmt (id, bb, init_stmt);
3167 return init_stmt;
3170 /* Generate code to initialize the parameters of the function at the
3171 top of the stack in ID from the GIMPLE_CALL STMT. */
3173 static void
3174 initialize_inlined_parameters (copy_body_data *id, gimple stmt,
3175 tree fn, basic_block bb)
3177 tree parms;
3178 size_t i;
3179 tree p;
3180 tree vars = NULL_TREE;
3181 tree static_chain = gimple_call_chain (stmt);
3183 /* Figure out what the parameters are. */
3184 parms = DECL_ARGUMENTS (fn);
3186 /* Loop through the parameter declarations, replacing each with an
3187 equivalent VAR_DECL, appropriately initialized. */
3188 for (p = parms, i = 0; p; p = DECL_CHAIN (p), i++)
3190 tree val;
3191 val = i < gimple_call_num_args (stmt) ? gimple_call_arg (stmt, i) : NULL;
3192 setup_one_parameter (id, p, val, fn, bb, &vars);
3194 /* After remapping parameters remap their types. This has to be done
3195 in a second loop over all parameters to appropriately remap
3196 variable sized arrays when the size is specified in a
3197 parameter following the array. */
3198 for (p = parms, i = 0; p; p = DECL_CHAIN (p), i++)
3200 tree *varp = id->decl_map->get (p);
3201 if (varp
3202 && TREE_CODE (*varp) == VAR_DECL)
3204 tree def = (gimple_in_ssa_p (cfun) && is_gimple_reg (p)
3205 ? ssa_default_def (id->src_cfun, p) : NULL);
3206 tree var = *varp;
3207 TREE_TYPE (var) = remap_type (TREE_TYPE (var), id);
3208 /* Also remap the default definition if it was remapped
3209 to the default definition of the parameter replacement
3210 by the parameter setup. */
3211 if (def)
3213 tree *defp = id->decl_map->get (def);
3214 if (defp
3215 && TREE_CODE (*defp) == SSA_NAME
3216 && SSA_NAME_VAR (*defp) == var)
3217 TREE_TYPE (*defp) = TREE_TYPE (var);
3222 /* Initialize the static chain. */
3223 p = DECL_STRUCT_FUNCTION (fn)->static_chain_decl;
3224 gcc_assert (fn != current_function_decl);
3225 if (p)
3227 /* No static chain? Seems like a bug in tree-nested.c. */
3228 gcc_assert (static_chain);
3230 setup_one_parameter (id, p, static_chain, fn, bb, &vars);
3233 declare_inline_vars (id->block, vars);
3237 /* Declare a return variable to replace the RESULT_DECL for the
3238 function we are calling. An appropriate DECL_STMT is returned.
3239 The USE_STMT is filled to contain a use of the declaration to
3240 indicate the return value of the function.
3242 RETURN_SLOT, if non-null, is the place where to store the result. It
3243 is set only for CALL_EXPR_RETURN_SLOT_OPT. MODIFY_DEST, if non-null,
3244 was the LHS of the MODIFY_EXPR to which this call is the RHS.
3246 RETURN_BOUNDS holds a destination for returned bounds.
3248 The return value is a (possibly null) value that holds the result
3249 as seen by the caller. */
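/* A minimal sketch: when inlining d = foo (x), MODIFY_DEST is D.  If D can
   safely stand in for the callee's RESULT_DECL, it is reused directly and no
   extra use expression is needed; otherwise a fresh temporary is declared,
   the inlined body stores the result there, and that temporary (possibly
   converted back to the caller's type) is returned as the value the caller
   reads.  */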
3251 static tree
3252 declare_return_variable (copy_body_data *id, tree return_slot, tree modify_dest,
3253 tree return_bounds, basic_block entry_bb)
3255 tree callee = id->src_fn;
3256 tree result = DECL_RESULT (callee);
3257 tree callee_type = TREE_TYPE (result);
3258 tree caller_type;
3259 tree var, use;
3261 /* Handle type-mismatches in the function declaration return type
3262 vs. the call expression. */
3263 if (modify_dest)
3264 caller_type = TREE_TYPE (modify_dest);
3265 else
3266 caller_type = TREE_TYPE (TREE_TYPE (callee));
3268 /* We don't need to do anything for functions that don't return anything. */
3269 if (VOID_TYPE_P (callee_type))
3270 return NULL_TREE;
3272 /* If there was a return slot, then the return value is the
3273 dereferenced address of that object. */
3274 if (return_slot)
3276 /* The front end shouldn't have used both return_slot and
3277 a modify expression. */
3278 gcc_assert (!modify_dest);
3279 if (DECL_BY_REFERENCE (result))
3281 tree return_slot_addr = build_fold_addr_expr (return_slot);
3282 STRIP_USELESS_TYPE_CONVERSION (return_slot_addr);
3284 /* We are going to construct *&return_slot and we can't do that
3285 for variables believed to be not addressable.
3287 FIXME: This check possibly can match, because values returned
3288 via return slot optimization are not believed to have address
3289 taken by alias analysis. */
3290 gcc_assert (TREE_CODE (return_slot) != SSA_NAME);
3291 var = return_slot_addr;
3293 else
3295 var = return_slot;
3296 gcc_assert (TREE_CODE (var) != SSA_NAME);
3297 if (TREE_ADDRESSABLE (result))
3298 mark_addressable (var);
3300 if ((TREE_CODE (TREE_TYPE (result)) == COMPLEX_TYPE
3301 || TREE_CODE (TREE_TYPE (result)) == VECTOR_TYPE)
3302 && !DECL_GIMPLE_REG_P (result)
3303 && DECL_P (var))
3304 DECL_GIMPLE_REG_P (var) = 0;
3305 use = NULL;
3306 goto done;
3309 /* All types requiring non-trivial constructors should have been handled. */
3310 gcc_assert (!TREE_ADDRESSABLE (callee_type));
3312 /* Attempt to avoid creating a new temporary variable. */
3313 if (modify_dest
3314 && TREE_CODE (modify_dest) != SSA_NAME)
3316 bool use_it = false;
3318 /* We can't use MODIFY_DEST if there's type promotion involved. */
3319 if (!useless_type_conversion_p (callee_type, caller_type))
3320 use_it = false;
3322 /* ??? If we're assigning to a variable sized type, then we must
3323 reuse the destination variable, because we've no good way to
3324 create variable sized temporaries at this point. */
3325 else if (TREE_CODE (TYPE_SIZE_UNIT (caller_type)) != INTEGER_CST)
3326 use_it = true;
3328 /* If the callee cannot possibly modify MODIFY_DEST, then we can
3329 reuse it as the result of the call directly. Don't do this if
3330 it would promote MODIFY_DEST to addressable. */
3331 else if (TREE_ADDRESSABLE (result))
3332 use_it = false;
3333 else
3335 tree base_m = get_base_address (modify_dest);
3337 /* If the base isn't a decl, then it's a pointer, and we don't
3338 know where that's going to go. */
3339 if (!DECL_P (base_m))
3340 use_it = false;
3341 else if (is_global_var (base_m))
3342 use_it = false;
3343 else if ((TREE_CODE (TREE_TYPE (result)) == COMPLEX_TYPE
3344 || TREE_CODE (TREE_TYPE (result)) == VECTOR_TYPE)
3345 && !DECL_GIMPLE_REG_P (result)
3346 && DECL_GIMPLE_REG_P (base_m))
3347 use_it = false;
3348 else if (!TREE_ADDRESSABLE (base_m))
3349 use_it = true;
3352 if (use_it)
3354 var = modify_dest;
3355 use = NULL;
3356 goto done;
3360 gcc_assert (TREE_CODE (TYPE_SIZE_UNIT (callee_type)) == INTEGER_CST);
3362 var = copy_result_decl_to_var (result, id);
3363 DECL_SEEN_IN_BIND_EXPR_P (var) = 1;
3365 /* Do not have the rest of GCC warn about this variable as it should
3366 not be visible to the user. */
3367 TREE_NO_WARNING (var) = 1;
3369 declare_inline_vars (id->block, var);
3371 /* Build the use expr. If the return type of the function was
3372 promoted, convert it back to the expected type. */
3373 use = var;
3374 if (!useless_type_conversion_p (caller_type, TREE_TYPE (var)))
3376 /* If we can match up types by promotion/demotion do so. */
3377 if (fold_convertible_p (caller_type, var))
3378 use = fold_convert (caller_type, var);
3379 else
3381 /* ??? For valid programs we should not end up here.
3382 Still if we end up with truly mismatched types here, fall back
3383 to using a MEM_REF to not leak invalid GIMPLE to the following
3384 passes. */
3385 /* Prevent var from being written into SSA form. */
3386 if (TREE_CODE (TREE_TYPE (var)) == VECTOR_TYPE
3387 || TREE_CODE (TREE_TYPE (var)) == COMPLEX_TYPE)
3388 DECL_GIMPLE_REG_P (var) = false;
3389 else if (is_gimple_reg_type (TREE_TYPE (var)))
3390 TREE_ADDRESSABLE (var) = true;
3391 use = fold_build2 (MEM_REF, caller_type,
3392 build_fold_addr_expr (var),
3393 build_int_cst (ptr_type_node, 0));
3397 STRIP_USELESS_TYPE_CONVERSION (use);
3399 if (DECL_BY_REFERENCE (result))
3401 TREE_ADDRESSABLE (var) = 1;
3402 var = build_fold_addr_expr (var);
3405 done:
3406 /* Register the VAR_DECL as the equivalent for the RESULT_DECL; that
3407 way, when the RESULT_DECL is encountered, it will be
3408 automatically replaced by the VAR_DECL.
3410 When returning by reference, ensure that RESULT_DECL remaps to
3411 gimple_val. */
3412 if (DECL_BY_REFERENCE (result)
3413 && !is_gimple_val (var))
3415 tree temp = create_tmp_var (TREE_TYPE (result), "retvalptr");
3416 insert_decl_map (id, result, temp);
3417 /* When RESULT_DECL is in SSA form, we need to remap and initialize
3418 its default_def SSA_NAME. */
3419 if (gimple_in_ssa_p (id->src_cfun)
3420 && is_gimple_reg (result))
3422 temp = make_ssa_name (temp, NULL);
3423 insert_decl_map (id, ssa_default_def (id->src_cfun, result), temp);
3425 insert_init_stmt (id, entry_bb, gimple_build_assign (temp, var));
3427 else
3428 insert_decl_map (id, result, var);
3430 /* Remember this so we can ignore it in remap_decls. */
3431 id->retvar = var;
3433 /* If returned bounds are used, then make var for them. */
3434 if (return_bounds)
3436 tree bndtemp = create_tmp_var (pointer_bounds_type_node, "retbnd");
3437 DECL_SEEN_IN_BIND_EXPR_P (bndtemp) = 1;
3438 TREE_NO_WARNING (bndtemp) = 1;
3439 declare_inline_vars (id->block, bndtemp);
3441 id->retbnd = bndtemp;
3442 insert_init_stmt (id, entry_bb,
3443 gimple_build_assign (bndtemp, chkp_get_zero_bounds_var ()));
3446 return use;
3449 /* Callback through walk_tree. Determine if a DECL_INITIAL makes reference
3450 to a local label. */
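/* For illustration, this catches initializers such as
     static void *jump_target = &&resume;
   inside the function body: the label address is stored in a static variable
   and therefore blocks copying (see copy_forbidden below).  */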
3452 static tree
3453 has_label_address_in_static_1 (tree *nodep, int *walk_subtrees, void *fnp)
3455 tree node = *nodep;
3456 tree fn = (tree) fnp;
3458 if (TREE_CODE (node) == LABEL_DECL && DECL_CONTEXT (node) == fn)
3459 return node;
3461 if (TYPE_P (node))
3462 *walk_subtrees = 0;
3464 return NULL_TREE;
3467 /* Determine if the function can be copied. If so return NULL. If
3468 not return a string describing the reason for failure. */
3470 static const char *
3471 copy_forbidden (struct function *fun, tree fndecl)
3473 const char *reason = fun->cannot_be_copied_reason;
3474 tree decl;
3475 unsigned ix;
3477 /* Only examine the function once. */
3478 if (fun->cannot_be_copied_set)
3479 return reason;
3481 /* We cannot copy a function that receives a non-local goto
3482 because we cannot remap the destination label used in the
3483 function that is performing the non-local goto. */
3484 /* ??? Actually, this should be possible, if we work at it.
3485 No doubt there's just a handful of places that simply
3486 assume it doesn't happen and don't substitute properly. */
3487 if (fun->has_nonlocal_label)
3489 reason = G_("function %q+F can never be copied "
3490 "because it receives a non-local goto");
3491 goto fail;
3494 FOR_EACH_LOCAL_DECL (fun, ix, decl)
3495 if (TREE_CODE (decl) == VAR_DECL
3496 && TREE_STATIC (decl)
3497 && !DECL_EXTERNAL (decl)
3498 && DECL_INITIAL (decl)
3499 && walk_tree_without_duplicates (&DECL_INITIAL (decl),
3500 has_label_address_in_static_1,
3501 fndecl))
3503 reason = G_("function %q+F can never be copied because it saves "
3504 "address of local label in a static variable");
3505 goto fail;
3508 fail:
3509 fun->cannot_be_copied_reason = reason;
3510 fun->cannot_be_copied_set = true;
3511 return reason;
3515 static const char *inline_forbidden_reason;
3517 /* A callback for walk_gimple_seq to handle statements. Returns non-null
3518 iff a function cannot be inlined. Also sets the reason why. */
3520 static tree
3521 inline_forbidden_p_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
3522 struct walk_stmt_info *wip)
3524 tree fn = (tree) wip->info;
3525 tree t;
3526 gimple stmt = gsi_stmt (*gsi);
3528 switch (gimple_code (stmt))
3530 case GIMPLE_CALL:
3531 /* Refuse to inline an alloca call unless the user explicitly forced it, as
3532 this may change the program's memory overhead drastically when the
3533 function using alloca is called in a loop. In the GCC present in
3534 SPEC2000, inlining into schedule_block caused it to require 2GB of
3535 RAM instead of 256MB. Don't do so for alloca calls emitted for
3536 VLA objects as those can't cause unbounded growth (they're always
3537 wrapped inside stack_save/stack_restore regions). */
3538 if (gimple_alloca_call_p (stmt)
3539 && !gimple_call_alloca_for_var_p (as_a <gcall *> (stmt))
3540 && !lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn)))
3542 inline_forbidden_reason
3543 = G_("function %q+F can never be inlined because it uses "
3544 "alloca (override using the always_inline attribute)");
3545 *handled_ops_p = true;
3546 return fn;
3549 t = gimple_call_fndecl (stmt);
3550 if (t == NULL_TREE)
3551 break;
3553 /* We cannot inline functions that call setjmp. */
3554 if (setjmp_call_p (t))
3556 inline_forbidden_reason
3557 = G_("function %q+F can never be inlined because it uses setjmp");
3558 *handled_ops_p = true;
3559 return t;
3562 if (DECL_BUILT_IN_CLASS (t) == BUILT_IN_NORMAL)
3563 switch (DECL_FUNCTION_CODE (t))
3565 /* We cannot inline functions that take a variable number of
3566 arguments. */
3567 case BUILT_IN_VA_START:
3568 case BUILT_IN_NEXT_ARG:
3569 case BUILT_IN_VA_END:
3570 inline_forbidden_reason
3571 = G_("function %q+F can never be inlined because it "
3572 "uses variable argument lists");
3573 *handled_ops_p = true;
3574 return t;
3576 case BUILT_IN_LONGJMP:
3577 /* We can't inline functions that call __builtin_longjmp at
3578 all. The non-local goto machinery really requires the
3579 destination be in a different function. If we allow the
3580 function calling __builtin_longjmp to be inlined into the
3581 function calling __builtin_setjmp, Things will Go Awry. */
3582 inline_forbidden_reason
3583 = G_("function %q+F can never be inlined because "
3584 "it uses setjmp-longjmp exception handling");
3585 *handled_ops_p = true;
3586 return t;
3588 case BUILT_IN_NONLOCAL_GOTO:
3589 /* Similarly. */
3590 inline_forbidden_reason
3591 = G_("function %q+F can never be inlined because "
3592 "it uses non-local goto");
3593 *handled_ops_p = true;
3594 return t;
3596 case BUILT_IN_RETURN:
3597 case BUILT_IN_APPLY_ARGS:
3598 /* If a __builtin_apply_args caller would be inlined,
3599 it would be saving arguments of the function it has
3600 been inlined into. Similarly __builtin_return would
3601 return from the function the inline has been inlined into. */
3602 inline_forbidden_reason
3603 = G_("function %q+F can never be inlined because "
3604 "it uses __builtin_return or __builtin_apply_args");
3605 *handled_ops_p = true;
3606 return t;
3608 default:
3609 break;
3611 break;
3613 case GIMPLE_GOTO:
3614 t = gimple_goto_dest (stmt);
3616 /* We will not inline a function which uses computed goto. The
3617 addresses of its local labels, which may be tucked into
3618 global storage, are of course not constant across
3619 instantiations, which causes unexpected behavior. */
3620 if (TREE_CODE (t) != LABEL_DECL)
3622 inline_forbidden_reason
3623 = G_("function %q+F can never be inlined "
3624 "because it contains a computed goto");
3625 *handled_ops_p = true;
3626 return t;
3628 break;
3630 default:
3631 break;
3634 *handled_ops_p = false;
3635 return NULL_TREE;
3638 /* Return true if FNDECL is a function that cannot be inlined into
3639 another one. */
3641 static bool
3642 inline_forbidden_p (tree fndecl)
3644 struct function *fun = DECL_STRUCT_FUNCTION (fndecl);
3645 struct walk_stmt_info wi;
3646 basic_block bb;
3647 bool forbidden_p = false;
3649 /* First check for shared reasons not to copy the code. */
3650 inline_forbidden_reason = copy_forbidden (fun, fndecl);
3651 if (inline_forbidden_reason != NULL)
3652 return true;
3654 /* Next, walk the statements of the function looking for
3655 constructs we can't handle, or that are non-optimal for inlining. */
3656 hash_set<tree> visited_nodes;
3657 memset (&wi, 0, sizeof (wi));
3658 wi.info = (void *) fndecl;
3659 wi.pset = &visited_nodes;
3661 FOR_EACH_BB_FN (bb, fun)
3663 gimple ret;
3664 gimple_seq seq = bb_seq (bb);
3665 ret = walk_gimple_seq (seq, inline_forbidden_p_stmt, NULL, &wi);
3666 forbidden_p = (ret != NULL);
3667 if (forbidden_p)
3668 break;
3671 return forbidden_p;
3674 /* Return false if the function FNDECL cannot be inlined on account of its
3675 attributes, true otherwise. */
3676 static bool
3677 function_attribute_inlinable_p (const_tree fndecl)
3679 if (targetm.attribute_table)
3681 const_tree a;
3683 for (a = DECL_ATTRIBUTES (fndecl); a; a = TREE_CHAIN (a))
3685 const_tree name = TREE_PURPOSE (a);
3686 int i;
3688 for (i = 0; targetm.attribute_table[i].name != NULL; i++)
3689 if (is_attribute_p (targetm.attribute_table[i].name, name))
3690 return targetm.function_attribute_inlinable_p (fndecl);
3694 return true;
3697 /* Returns nonzero if FN is a function that does not have any
3698 fundamental inline blocking properties. */
3700 bool
3701 tree_inlinable_function_p (tree fn)
3703 bool inlinable = true;
3704 bool do_warning;
3705 tree always_inline;
3707 /* If we've already decided this function shouldn't be inlined,
3708 there's no need to check again. */
3709 if (DECL_UNINLINABLE (fn))
3710 return false;
3712 /* We only warn for functions declared `inline' by the user. */
3713 do_warning = (warn_inline
3714 && DECL_DECLARED_INLINE_P (fn)
3715 && !DECL_NO_INLINE_WARNING_P (fn)
3716 && !DECL_IN_SYSTEM_HEADER (fn));
3718 always_inline = lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn));
3720 if (flag_no_inline
3721 && always_inline == NULL)
3723 if (do_warning)
3724 warning (OPT_Winline, "function %q+F can never be inlined because it "
3725 "is suppressed using -fno-inline", fn);
3726 inlinable = false;
3729 else if (!function_attribute_inlinable_p (fn))
3731 if (do_warning)
3732 warning (OPT_Winline, "function %q+F can never be inlined because it "
3733 "uses attributes conflicting with inlining", fn);
3734 inlinable = false;
3737 else if (inline_forbidden_p (fn))
3739 /* See if we should warn about uninlinable functions. Previously,
3740 some of these warnings would be issued while trying to expand
3741 the function inline, but that would cause multiple warnings
3742 about functions that would for example call alloca. But since
3743 this is a property of the function, just one warning is enough.
3744 As a bonus we can now give more details about the reason why a
3745 function is not inlinable. */
3746 if (always_inline)
3747 error (inline_forbidden_reason, fn);
3748 else if (do_warning)
3749 warning (OPT_Winline, inline_forbidden_reason, fn);
3751 inlinable = false;
3754 /* Squirrel away the result so that we don't have to check again. */
3755 DECL_UNINLINABLE (fn) = !inlinable;
3757 return inlinable;
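/* A hedged illustration of how the result above surfaces to the user.
   With -Winline, a function the user declared inline that trips one of
   the inline_forbidden_p checks only draws a warning, e.g.

     inline int g (jmp_buf env) { return setjmp (env); }
     // warning: function 'g' can never be inlined because it uses setjmp

   whereas the same function marked __attribute__ ((always_inline)) has
   the stored inline_forbidden_reason reported with error () instead,
   since the requested inlining cannot be honoured.  DECL_UNINLINABLE
   caches the verdict so later queries return immediately.  */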
3760 /* Estimate the cost of a memory move of type TYPE. Use the machine-dependent
3761 word size, take a possible memcpy call into account, and return the
3762 cost based on whether we optimize for size or speed according to SPEED_P. */
3764 static int
3765 estimate_move_cost (tree type, bool ARG_UNUSED (speed_p))
3767 HOST_WIDE_INT size;
3769 gcc_assert (!VOID_TYPE_P (type));
3771 if (TREE_CODE (type) == VECTOR_TYPE)
3773 machine_mode inner = TYPE_MODE (TREE_TYPE (type));
3774 machine_mode simd
3775 = targetm.vectorize.preferred_simd_mode (inner);
3776 int simd_mode_size = GET_MODE_SIZE (simd);
3777 return ((GET_MODE_SIZE (TYPE_MODE (type)) + simd_mode_size - 1)
3778 / simd_mode_size);
3781 size = int_size_in_bytes (type);
3783 if (size < 0 || size > MOVE_MAX_PIECES * MOVE_RATIO (speed_p))
3784 /* Cost of a memcpy call, 3 arguments and the call. */
3785 return 4;
3786 else
3787 return ((size + MOVE_MAX_PIECES - 1) / MOVE_MAX_PIECES);
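/* Worked example (figures invented for illustration): suppose the target
   has MOVE_MAX_PIECES == 8 and MOVE_RATIO (speed_p) == 4, i.e. anything
   larger than 32 bytes is assumed to go through memcpy.  Then:

     struct { char c[24]; }  ->  size 24 <= 32, cost (24 + 7) / 8 = 3
     struct { char c[100]; } ->  size 100 > 32, cost 4 (the memcpy call)

   For vector types the cost is instead the number of preferred-SIMD-mode
   chunks needed to hold the vector, as computed above.  */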
3790 /* Returns the cost of operation CODE, according to WEIGHTS. */
3792 static int
3793 estimate_operator_cost (enum tree_code code, eni_weights *weights,
3794 tree op1 ATTRIBUTE_UNUSED, tree op2)
3796 switch (code)
3798 /* These are "free" conversions, or their presumed cost
3799 is folded into other operations. */
3800 case RANGE_EXPR:
3801 CASE_CONVERT:
3802 case COMPLEX_EXPR:
3803 case PAREN_EXPR:
3804 case VIEW_CONVERT_EXPR:
3805 return 0;
3807 /* Assign cost of 1 to usual operations.
3808 ??? We may consider mapping RTL costs to this. */
3809 case COND_EXPR:
3810 case VEC_COND_EXPR:
3811 case VEC_PERM_EXPR:
3813 case PLUS_EXPR:
3814 case POINTER_PLUS_EXPR:
3815 case MINUS_EXPR:
3816 case MULT_EXPR:
3817 case MULT_HIGHPART_EXPR:
3818 case FMA_EXPR:
3820 case ADDR_SPACE_CONVERT_EXPR:
3821 case FIXED_CONVERT_EXPR:
3822 case FIX_TRUNC_EXPR:
3824 case NEGATE_EXPR:
3825 case FLOAT_EXPR:
3826 case MIN_EXPR:
3827 case MAX_EXPR:
3828 case ABS_EXPR:
3830 case LSHIFT_EXPR:
3831 case RSHIFT_EXPR:
3832 case LROTATE_EXPR:
3833 case RROTATE_EXPR:
3835 case BIT_IOR_EXPR:
3836 case BIT_XOR_EXPR:
3837 case BIT_AND_EXPR:
3838 case BIT_NOT_EXPR:
3840 case TRUTH_ANDIF_EXPR:
3841 case TRUTH_ORIF_EXPR:
3842 case TRUTH_AND_EXPR:
3843 case TRUTH_OR_EXPR:
3844 case TRUTH_XOR_EXPR:
3845 case TRUTH_NOT_EXPR:
3847 case LT_EXPR:
3848 case LE_EXPR:
3849 case GT_EXPR:
3850 case GE_EXPR:
3851 case EQ_EXPR:
3852 case NE_EXPR:
3853 case ORDERED_EXPR:
3854 case UNORDERED_EXPR:
3856 case UNLT_EXPR:
3857 case UNLE_EXPR:
3858 case UNGT_EXPR:
3859 case UNGE_EXPR:
3860 case UNEQ_EXPR:
3861 case LTGT_EXPR:
3863 case CONJ_EXPR:
3865 case PREDECREMENT_EXPR:
3866 case PREINCREMENT_EXPR:
3867 case POSTDECREMENT_EXPR:
3868 case POSTINCREMENT_EXPR:
3870 case REALIGN_LOAD_EXPR:
3872 case REDUC_MAX_EXPR:
3873 case REDUC_MIN_EXPR:
3874 case REDUC_PLUS_EXPR:
3875 case WIDEN_SUM_EXPR:
3876 case WIDEN_MULT_EXPR:
3877 case DOT_PROD_EXPR:
3878 case SAD_EXPR:
3879 case WIDEN_MULT_PLUS_EXPR:
3880 case WIDEN_MULT_MINUS_EXPR:
3881 case WIDEN_LSHIFT_EXPR:
3883 case VEC_WIDEN_MULT_HI_EXPR:
3884 case VEC_WIDEN_MULT_LO_EXPR:
3885 case VEC_WIDEN_MULT_EVEN_EXPR:
3886 case VEC_WIDEN_MULT_ODD_EXPR:
3887 case VEC_UNPACK_HI_EXPR:
3888 case VEC_UNPACK_LO_EXPR:
3889 case VEC_UNPACK_FLOAT_HI_EXPR:
3890 case VEC_UNPACK_FLOAT_LO_EXPR:
3891 case VEC_PACK_TRUNC_EXPR:
3892 case VEC_PACK_SAT_EXPR:
3893 case VEC_PACK_FIX_TRUNC_EXPR:
3894 case VEC_WIDEN_LSHIFT_HI_EXPR:
3895 case VEC_WIDEN_LSHIFT_LO_EXPR:
3897 return 1;
3899 /* A few special cases of expensive operations. This is useful
3900 to avoid inlining functions that have too many of these. */
3901 case TRUNC_DIV_EXPR:
3902 case CEIL_DIV_EXPR:
3903 case FLOOR_DIV_EXPR:
3904 case ROUND_DIV_EXPR:
3905 case EXACT_DIV_EXPR:
3906 case TRUNC_MOD_EXPR:
3907 case CEIL_MOD_EXPR:
3908 case FLOOR_MOD_EXPR:
3909 case ROUND_MOD_EXPR:
3910 case RDIV_EXPR:
3911 if (TREE_CODE (op2) != INTEGER_CST)
3912 return weights->div_mod_cost;
3913 return 1;
3915 default:
3916 /* We expect a copy assignment with no operator. */
3917 gcc_assert (get_gimple_rhs_class (code) == GIMPLE_SINGLE_RHS);
3918 return 0;
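/* Illustration (invented statements): for "x = a / b" where B is not a
   compile-time constant, the TRUNC_DIV_EXPR case above charges
   weights->div_mod_cost (10 under the time-based weights set up in
   init_inline_once below, 1 under the size weights), while "x = a / 16"
   with a constant divisor is charged 1, since it can usually be
   strength-reduced to shifts and adds.  */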
3923 /* Estimate number of instructions that will be created by expanding
3924 the statements in the statement sequence STMTS.
3925 WEIGHTS contains weights attributed to various constructs. */
3927 static
3928 int estimate_num_insns_seq (gimple_seq stmts, eni_weights *weights)
3930 int cost;
3931 gimple_stmt_iterator gsi;
3933 cost = 0;
3934 for (gsi = gsi_start (stmts); !gsi_end_p (gsi); gsi_next (&gsi))
3935 cost += estimate_num_insns (gsi_stmt (gsi), weights);
3937 return cost;
3941 /* Estimate number of instructions that will be created by expanding STMT.
3942 WEIGHTS contains weights attributed to various constructs. */
3944 int
3945 estimate_num_insns (gimple stmt, eni_weights *weights)
3947 unsigned cost, i;
3948 enum gimple_code code = gimple_code (stmt);
3949 tree lhs;
3950 tree rhs;
3952 switch (code)
3954 case GIMPLE_ASSIGN:
3955 /* Try to estimate the cost of assignments. We have two cases to
3956 deal with:
3957 1) Simple assignments to registers;
3958 2) Stores to things that must live in memory. This includes
3959 "normal" stores to scalars, but also assignments of large
3960 structures, or constructors of big arrays;
3962 Let us look at these two cases, assuming we have "a = b + C":
3963 <GIMPLE_ASSIGN <var_decl "a">
3964 <plus_expr <var_decl "b"> <constant C>>
3965 If "a" is a GIMPLE register, the assignment to it is free on almost
3966 any target, because "a" usually ends up in a real register. Hence
3967 the only cost of this expression comes from the PLUS_EXPR, and we
3968 can ignore the GIMPLE_ASSIGN.
3969 If "a" is not a GIMPLE register, the assignment to "a" will most
3970 likely be a real store, so the cost of the GIMPLE_ASSIGN is the cost
3971 of moving something into "a", which we compute using the function
3972 estimate_move_cost. */
3973 if (gimple_clobber_p (stmt))
3974 return 0; /* ={v} {CLOBBER} stmt expands to nothing. */
3976 lhs = gimple_assign_lhs (stmt);
3977 rhs = gimple_assign_rhs1 (stmt);
3979 cost = 0;
3981 /* Account for the cost of moving to / from memory. */
3982 if (gimple_store_p (stmt))
3983 cost += estimate_move_cost (TREE_TYPE (lhs), weights->time_based);
3984 if (gimple_assign_load_p (stmt))
3985 cost += estimate_move_cost (TREE_TYPE (rhs), weights->time_based);
3987 cost += estimate_operator_cost (gimple_assign_rhs_code (stmt), weights,
3988 gimple_assign_rhs1 (stmt),
3989 get_gimple_rhs_class (gimple_assign_rhs_code (stmt))
3990 == GIMPLE_BINARY_RHS
3991 ? gimple_assign_rhs2 (stmt) : NULL);
3992 break;
3994 case GIMPLE_COND:
3995 cost = 1 + estimate_operator_cost (gimple_cond_code (stmt), weights,
3996 gimple_op (stmt, 0),
3997 gimple_op (stmt, 1));
3998 break;
4000 case GIMPLE_SWITCH:
4002 gswitch *switch_stmt = as_a <gswitch *> (stmt);
4003 /* Take into account cost of the switch + guess 2 conditional jumps for
4004 each case label.
4006 TODO: once the switch expansion logic is sufficiently separated, we can
4007 do a better job of estimating the cost of the switch. */
4008 if (weights->time_based)
4009 cost = floor_log2 (gimple_switch_num_labels (switch_stmt)) * 2;
4010 else
4011 cost = gimple_switch_num_labels (switch_stmt) * 2;
4013 break;
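/* Illustration (invented figures): a gswitch with 32 labels is charged
   32 * 2 = 64 when estimating size, but only floor_log2 (32) * 2 = 10
   when estimating time, reflecting that expansion typically emits a
   balanced decision tree or a jump table rather than a linear chain.  */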
4015 case GIMPLE_CALL:
4017 tree decl;
4019 if (gimple_call_internal_p (stmt))
4020 return 0;
4021 else if ((decl = gimple_call_fndecl (stmt))
4022 && DECL_BUILT_IN (decl))
4024 /* Do not special-case builtins where we see the body.
4025 This just confuses the inliner. */
4026 struct cgraph_node *node;
4027 if (!(node = cgraph_node::get (decl))
4028 || node->definition)
4030 /* For builtins that are likely expanded to nothing or
4031 inlined, do not account for operand costs. */
4032 else if (is_simple_builtin (decl))
4033 return 0;
4034 else if (is_inexpensive_builtin (decl))
4035 return weights->target_builtin_call_cost;
4036 else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
4038 /* We canonicalize x * x to pow (x, 2.0) with -ffast-math, so
4039 specialize the cheap expansion we do here.
4040 ??? This asks for a more general solution. */
4041 switch (DECL_FUNCTION_CODE (decl))
4043 case BUILT_IN_POW:
4044 case BUILT_IN_POWF:
4045 case BUILT_IN_POWL:
4046 if (TREE_CODE (gimple_call_arg (stmt, 1)) == REAL_CST
4047 && REAL_VALUES_EQUAL
4048 (TREE_REAL_CST (gimple_call_arg (stmt, 1)), dconst2))
4049 return estimate_operator_cost
4050 (MULT_EXPR, weights, gimple_call_arg (stmt, 0),
4051 gimple_call_arg (stmt, 0));
4052 break;
4054 default:
4055 break;
4060 cost = decl ? weights->call_cost : weights->indirect_call_cost;
4061 if (gimple_call_lhs (stmt))
4062 cost += estimate_move_cost (TREE_TYPE (gimple_call_lhs (stmt)),
4063 weights->time_based);
4064 for (i = 0; i < gimple_call_num_args (stmt); i++)
4066 tree arg = gimple_call_arg (stmt, i);
4067 cost += estimate_move_cost (TREE_TYPE (arg),
4068 weights->time_based);
4070 break;
4073 case GIMPLE_RETURN:
4074 return weights->return_cost;
4076 case GIMPLE_GOTO:
4077 case GIMPLE_LABEL:
4078 case GIMPLE_NOP:
4079 case GIMPLE_PHI:
4080 case GIMPLE_PREDICT:
4081 case GIMPLE_DEBUG:
4082 return 0;
4084 case GIMPLE_ASM:
4086 int count = asm_str_count (gimple_asm_string (as_a <gasm *> (stmt)));
4087 /* 1000 means infinity. This avoids overflows later
4088 with very long asm statements. */
4089 if (count > 1000)
4090 count = 1000;
4091 return count;
4094 case GIMPLE_RESX:
4095 /* This is either going to be an external function call with one
4096 argument, or two register copy statements plus a goto. */
4097 return 2;
4099 case GIMPLE_EH_DISPATCH:
4100 /* ??? This is going to turn into a switch statement. Ideally
4101 we'd have a look at the eh region and estimate the number of
4102 edges involved. */
4103 return 10;
4105 case GIMPLE_BIND:
4106 return estimate_num_insns_seq (
4107 gimple_bind_body (as_a <gbind *> (stmt)),
4108 weights);
4110 case GIMPLE_EH_FILTER:
4111 return estimate_num_insns_seq (gimple_eh_filter_failure (stmt), weights);
4113 case GIMPLE_CATCH:
4114 return estimate_num_insns_seq (gimple_catch_handler (
4115 as_a <gcatch *> (stmt)),
4116 weights);
4118 case GIMPLE_TRY:
4119 return (estimate_num_insns_seq (gimple_try_eval (stmt), weights)
4120 + estimate_num_insns_seq (gimple_try_cleanup (stmt), weights));
4122 /* OpenMP directives are generally very expensive. */
4124 case GIMPLE_OMP_RETURN:
4125 case GIMPLE_OMP_SECTIONS_SWITCH:
4126 case GIMPLE_OMP_ATOMIC_STORE:
4127 case GIMPLE_OMP_CONTINUE:
4128 /* ...except these, which are cheap. */
4129 return 0;
4131 case GIMPLE_OMP_ATOMIC_LOAD:
4132 return weights->omp_cost;
4134 case GIMPLE_OMP_FOR:
4135 return (weights->omp_cost
4136 + estimate_num_insns_seq (gimple_omp_body (stmt), weights)
4137 + estimate_num_insns_seq (gimple_omp_for_pre_body (stmt), weights));
4139 case GIMPLE_OMP_PARALLEL:
4140 case GIMPLE_OMP_TASK:
4141 case GIMPLE_OMP_CRITICAL:
4142 case GIMPLE_OMP_MASTER:
4143 case GIMPLE_OMP_TASKGROUP:
4144 case GIMPLE_OMP_ORDERED:
4145 case GIMPLE_OMP_SECTION:
4146 case GIMPLE_OMP_SECTIONS:
4147 case GIMPLE_OMP_SINGLE:
4148 case GIMPLE_OMP_TARGET:
4149 case GIMPLE_OMP_TEAMS:
4150 return (weights->omp_cost
4151 + estimate_num_insns_seq (gimple_omp_body (stmt), weights));
4153 case GIMPLE_TRANSACTION:
4154 return (weights->tm_cost
4155 + estimate_num_insns_seq (gimple_transaction_body (
4156 as_a <gtransaction *> (stmt)),
4157 weights));
4159 default:
4160 gcc_unreachable ();
4163 return cost;
4166 /* Estimate number of instructions that will be created by expanding
4167 function FNDECL. WEIGHTS contains weights attributed to various
4168 constructs. */
4170 int
4171 estimate_num_insns_fn (tree fndecl, eni_weights *weights)
4173 struct function *my_function = DECL_STRUCT_FUNCTION (fndecl);
4174 gimple_stmt_iterator bsi;
4175 basic_block bb;
4176 int n = 0;
4178 gcc_assert (my_function && my_function->cfg);
4179 FOR_EACH_BB_FN (bb, my_function)
4181 for (bsi = gsi_start_bb (bb); !gsi_end_p (bsi); gsi_next (&bsi))
4182 n += estimate_num_insns (gsi_stmt (bsi), weights);
4185 return n;
4189 /* Initializes weights used by estimate_num_insns. */
4191 void
4192 init_inline_once (void)
4194 eni_size_weights.call_cost = 1;
4195 eni_size_weights.indirect_call_cost = 3;
4196 eni_size_weights.target_builtin_call_cost = 1;
4197 eni_size_weights.div_mod_cost = 1;
4198 eni_size_weights.omp_cost = 40;
4199 eni_size_weights.tm_cost = 10;
4200 eni_size_weights.time_based = false;
4201 eni_size_weights.return_cost = 1;
4203 /* Estimating time for call is difficult, since we have no idea what the
4204 called function does. In the current uses of eni_time_weights,
4205 underestimating the cost does less harm than overestimating it, so
4206 we choose a rather small value here. */
4207 eni_time_weights.call_cost = 10;
4208 eni_time_weights.indirect_call_cost = 15;
4209 eni_time_weights.target_builtin_call_cost = 1;
4210 eni_time_weights.div_mod_cost = 10;
4211 eni_time_weights.omp_cost = 40;
4212 eni_time_weights.tm_cost = 40;
4213 eni_time_weights.time_based = true;
4214 eni_time_weights.return_cost = 2;
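/* Worked example (illustrative, assuming an int fits in a single move
   piece so estimate_move_cost returns 1 for it): for a call
   "a = f (x, y)" with int operands, the GIMPLE_CALL case above charges

     size weights:  1 (call) + 1 (lhs) + 1 + 1 (args) = 4
     time weights: 10 (call) + 1 (lhs) + 1 + 1 (args) = 13

   so the time estimate deliberately stays on the low side, as explained
   in the comment above.  */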
4217 /* Estimate the number of instructions in a gimple_seq. */
4219 int
4220 count_insns_seq (gimple_seq seq, eni_weights *weights)
4222 gimple_stmt_iterator gsi;
4223 int n = 0;
4224 for (gsi = gsi_start (seq); !gsi_end_p (gsi); gsi_next (&gsi))
4225 n += estimate_num_insns (gsi_stmt (gsi), weights);
4227 return n;
4231 /* Install new lexical TREE_BLOCK underneath 'current_block'. */
4233 static void
4234 prepend_lexical_block (tree current_block, tree new_block)
4236 BLOCK_CHAIN (new_block) = BLOCK_SUBBLOCKS (current_block);
4237 BLOCK_SUBBLOCKS (current_block) = new_block;
4238 BLOCK_SUPERCONTEXT (new_block) = current_block;
4241 /* Add local variables from CALLEE to CALLER. */
4243 static inline void
4244 add_local_variables (struct function *callee, struct function *caller,
4245 copy_body_data *id)
4247 tree var;
4248 unsigned ix;
4250 FOR_EACH_LOCAL_DECL (callee, ix, var)
4251 if (!can_be_nonlocal (var, id))
4253 tree new_var = remap_decl (var, id);
4255 /* Remap debug-expressions. */
4256 if (TREE_CODE (new_var) == VAR_DECL
4257 && DECL_HAS_DEBUG_EXPR_P (var)
4258 && new_var != var)
4260 tree tem = DECL_DEBUG_EXPR (var);
4261 bool old_regimplify = id->regimplify;
4262 id->remapping_type_depth++;
4263 walk_tree (&tem, copy_tree_body_r, id, NULL);
4264 id->remapping_type_depth--;
4265 id->regimplify = old_regimplify;
4266 SET_DECL_DEBUG_EXPR (new_var, tem);
4267 DECL_HAS_DEBUG_EXPR_P (new_var) = 1;
4269 add_local_decl (caller, new_var);
4273 /* If STMT is a GIMPLE_CALL, replace it with its inline expansion. */
4275 static bool
4276 expand_call_inline (basic_block bb, gimple stmt, copy_body_data *id)
4278 tree use_retvar;
4279 tree fn;
4280 hash_map<tree, tree> *dst;
4281 hash_map<tree, tree> *st = NULL;
4282 tree return_slot;
4283 tree modify_dest;
4284 tree return_bounds = NULL;
4285 location_t saved_location;
4286 struct cgraph_edge *cg_edge;
4287 cgraph_inline_failed_t reason;
4288 basic_block return_block;
4289 edge e;
4290 gimple_stmt_iterator gsi, stmt_gsi;
4291 bool successfully_inlined = FALSE;
4292 bool purge_dead_abnormal_edges;
4293 gcall *call_stmt;
4294 unsigned int i;
4296 /* Set input_location here so we get the right instantiation context
4297 if we call instantiate_decl from inlinable_function_p. */
4298 /* FIXME: instantiate_decl isn't called by inlinable_function_p. */
4299 saved_location = input_location;
4300 input_location = gimple_location (stmt);
4302 /* From here on, we're only interested in CALL_EXPRs. */
4303 call_stmt = dyn_cast <gcall *> (stmt);
4304 if (!call_stmt)
4305 goto egress;
4307 cg_edge = id->dst_node->get_edge (stmt);
4308 gcc_checking_assert (cg_edge);
4309 /* First, see if we can figure out what function is being called.
4310 If we cannot, then there is no hope of inlining the function. */
4311 if (cg_edge->indirect_unknown_callee)
4312 goto egress;
4313 fn = cg_edge->callee->decl;
4314 gcc_checking_assert (fn);
4316 /* If FN is a declaration of a function in a nested scope that was
4317 globally declared inline, we don't set its DECL_INITIAL.
4318 However, we can't blindly follow DECL_ABSTRACT_ORIGIN because the
4319 C++ front-end uses it for cdtors to refer to their internal
4320 declarations, that are not real functions. Fortunately those
4321 don't have trees to be saved, so we can tell by checking their
4322 gimple_body. */
4323 if (!DECL_INITIAL (fn)
4324 && DECL_ABSTRACT_ORIGIN (fn)
4325 && gimple_has_body_p (DECL_ABSTRACT_ORIGIN (fn)))
4326 fn = DECL_ABSTRACT_ORIGIN (fn);
4328 /* Don't try to inline functions that are not well-suited to inlining. */
4329 if (cg_edge->inline_failed)
4331 reason = cg_edge->inline_failed;
4332 /* If this call was originally indirect, we do not want to emit any
4333 inlining related warnings or sorry messages because there are no
4334 guarantees regarding those. */
4335 if (cg_edge->indirect_inlining_edge)
4336 goto egress;
4338 if (lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn))
4339 /* For extern inline functions that get redefined we have always
4340 silently ignored the always_inline flag. Better behaviour would
4341 be to be able to keep both bodies and use extern inline body
4342 for inlining, but we can't do that because frontends overwrite
4343 the body. */
4344 && !cg_edge->callee->local.redefined_extern_inline
4345 /* During early inline pass, report only when optimization is
4346 not turned on. */
4347 && (symtab->global_info_ready
4348 || !optimize
4349 || cgraph_inline_failed_type (reason) == CIF_FINAL_ERROR)
4350 /* PR 20090218-1_0.c. Body can be provided by another module. */
4351 && (reason != CIF_BODY_NOT_AVAILABLE || !flag_generate_lto))
4353 error ("inlining failed in call to always_inline %q+F: %s", fn,
4354 cgraph_inline_failed_string (reason));
4355 error ("called from here");
4357 else if (warn_inline
4358 && DECL_DECLARED_INLINE_P (fn)
4359 && !DECL_NO_INLINE_WARNING_P (fn)
4360 && !DECL_IN_SYSTEM_HEADER (fn)
4361 && reason != CIF_UNSPECIFIED
4362 && !lookup_attribute ("noinline", DECL_ATTRIBUTES (fn))
4363 /* Do not warn about not inlined recursive calls. */
4364 && !cg_edge->recursive_p ()
4365 /* Avoid warnings during early inline pass. */
4366 && symtab->global_info_ready)
4368 warning (OPT_Winline, "inlining failed in call to %q+F: %s",
4369 fn, _(cgraph_inline_failed_string (reason)));
4370 warning (OPT_Winline, "called from here");
4372 goto egress;
4374 fn = cg_edge->callee->decl;
4375 cg_edge->callee->get_untransformed_body ();
4377 #ifdef ENABLE_CHECKING
4378 if (cg_edge->callee->decl != id->dst_node->decl)
4379 cg_edge->callee->verify ();
4380 #endif
4382 /* We will be inlining this callee. */
4383 id->eh_lp_nr = lookup_stmt_eh_lp (stmt);
4384 id->assign_stmts.create (0);
4386 /* Update the callers EH personality. */
4387 if (DECL_FUNCTION_PERSONALITY (cg_edge->callee->decl))
4388 DECL_FUNCTION_PERSONALITY (cg_edge->caller->decl)
4389 = DECL_FUNCTION_PERSONALITY (cg_edge->callee->decl);
4391 /* Split the block holding the GIMPLE_CALL. */
4392 e = split_block (bb, stmt);
4393 bb = e->src;
4394 return_block = e->dest;
4395 remove_edge (e);
4397 /* split_block splits after the statement; work around this by
4398 moving the call into the second block manually. Not pretty,
4399 but seems easier than doing the CFG manipulation by hand
4400 when the GIMPLE_CALL is in the last statement of BB. */
4401 stmt_gsi = gsi_last_bb (bb);
4402 gsi_remove (&stmt_gsi, false);
4404 /* If the GIMPLE_CALL was in the last statement of BB, it may have
4405 been the source of abnormal edges. In this case, schedule
4406 the removal of dead abnormal edges. */
4407 gsi = gsi_start_bb (return_block);
4408 if (gsi_end_p (gsi))
4410 gsi_insert_after (&gsi, stmt, GSI_NEW_STMT);
4411 purge_dead_abnormal_edges = true;
4413 else
4415 gsi_insert_before (&gsi, stmt, GSI_NEW_STMT);
4416 purge_dead_abnormal_edges = false;
4419 stmt_gsi = gsi_start_bb (return_block);
4421 /* Build a block containing code to initialize the arguments, the
4422 actual inline expansion of the body, and a label for the return
4423 statements within the function to jump to. The type of the
4424 statement expression is the return type of the function call.
4425 ??? If the call does not have an associated block then we will
4426 remap all callee blocks to NULL, effectively dropping most of
4427 its debug information. This should only happen for calls to
4428 artificial decls inserted by the compiler itself. We need to
4429 either link the inlined blocks into the caller block tree or
4430 not refer to them in any way to not break GC for locations. */
4431 if (gimple_block (stmt))
4433 id->block = make_node (BLOCK);
4434 BLOCK_ABSTRACT_ORIGIN (id->block) = fn;
4435 BLOCK_SOURCE_LOCATION (id->block) = LOCATION_LOCUS (input_location);
4436 prepend_lexical_block (gimple_block (stmt), id->block);
4439 /* Local declarations will be replaced by their equivalents in this
4440 map. */
4441 st = id->decl_map;
4442 id->decl_map = new hash_map<tree, tree>;
4443 dst = id->debug_map;
4444 id->debug_map = NULL;
4446 /* Record the function we are about to inline. */
4447 id->src_fn = fn;
4448 id->src_node = cg_edge->callee;
4449 id->src_cfun = DECL_STRUCT_FUNCTION (fn);
4450 id->call_stmt = stmt;
4452 gcc_assert (!id->src_cfun->after_inlining);
4454 id->entry_bb = bb;
4455 if (lookup_attribute ("cold", DECL_ATTRIBUTES (fn)))
4457 gimple_stmt_iterator si = gsi_last_bb (bb);
4458 gsi_insert_after (&si, gimple_build_predict (PRED_COLD_FUNCTION,
4459 NOT_TAKEN),
4460 GSI_NEW_STMT);
4462 initialize_inlined_parameters (id, stmt, fn, bb);
4464 if (DECL_INITIAL (fn))
4466 if (gimple_block (stmt))
4468 tree *var;
4470 prepend_lexical_block (id->block,
4471 remap_blocks (DECL_INITIAL (fn), id));
4472 gcc_checking_assert (BLOCK_SUBBLOCKS (id->block)
4473 && (BLOCK_CHAIN (BLOCK_SUBBLOCKS (id->block))
4474 == NULL_TREE));
4475 /* Move vars for PARM_DECLs from the DECL_INITIAL block to id->block;
4476 otherwise, for DWARF, the DW_TAG_formal_parameter DIEs will not be
4477 children of DW_TAG_inlined_subroutine, but of a DW_TAG_lexical_block
4478 under it. The parameters can then be evaluated in the debugger,
4479 but don't show up in backtraces. */
4480 for (var = &BLOCK_VARS (BLOCK_SUBBLOCKS (id->block)); *var; )
4481 if (TREE_CODE (DECL_ORIGIN (*var)) == PARM_DECL)
4483 tree v = *var;
4484 *var = TREE_CHAIN (v);
4485 TREE_CHAIN (v) = BLOCK_VARS (id->block);
4486 BLOCK_VARS (id->block) = v;
4488 else
4489 var = &TREE_CHAIN (*var);
4491 else
4492 remap_blocks_to_null (DECL_INITIAL (fn), id);
4495 /* Return statements in the function body will be replaced by jumps
4496 to the RET_LABEL. */
4497 gcc_assert (DECL_INITIAL (fn));
4498 gcc_assert (TREE_CODE (DECL_INITIAL (fn)) == BLOCK);
4500 /* Find the LHS to which the result of this call is assigned. */
4501 return_slot = NULL;
4502 if (gimple_call_lhs (stmt))
4504 modify_dest = gimple_call_lhs (stmt);
4506 /* Remember where to copy returned bounds. */
4507 if (gimple_call_with_bounds_p (stmt)
4508 && TREE_CODE (modify_dest) == SSA_NAME)
4510 gcall *retbnd = chkp_retbnd_call_by_val (modify_dest);
4511 if (retbnd)
4513 return_bounds = gimple_call_lhs (retbnd);
4514 /* If the returned bounds are not used then just
4515 remove the unused call. */
4516 if (!return_bounds)
4518 gimple_stmt_iterator iter = gsi_for_stmt (retbnd);
4519 gsi_remove (&iter, true);
4524 /* The function which we are inlining might not return a value,
4525 in which case we should issue a warning that the function
4526 does not return a value. In that case the optimizers will
4527 see that the variable to which the value is assigned was not
4528 initialized. We do not want to issue a warning about that
4529 uninitialized variable. */
4530 if (DECL_P (modify_dest))
4531 TREE_NO_WARNING (modify_dest) = 1;
4533 if (gimple_call_return_slot_opt_p (call_stmt))
4535 return_slot = modify_dest;
4536 modify_dest = NULL;
4539 else
4540 modify_dest = NULL;
4542 /* If we are inlining a call to the C++ operator new, we don't want
4543 to use type based alias analysis on the return value. Otherwise
4544 we may get confused if the compiler sees that the inlined new
4545 function returns a pointer which was just deleted. See bug
4546 33407. */
4547 if (DECL_IS_OPERATOR_NEW (fn))
4549 return_slot = NULL;
4550 modify_dest = NULL;
4553 /* Declare the return variable for the function. */
4554 use_retvar = declare_return_variable (id, return_slot, modify_dest,
4555 return_bounds, bb);
4557 /* Add local vars in this inlined callee to caller. */
4558 add_local_variables (id->src_cfun, cfun, id);
4560 if (dump_file && (dump_flags & TDF_DETAILS))
4562 fprintf (dump_file, "Inlining ");
4563 print_generic_expr (dump_file, id->src_fn, 0);
4564 fprintf (dump_file, " to ");
4565 print_generic_expr (dump_file, id->dst_fn, 0);
4566 fprintf (dump_file, " with frequency %i\n", cg_edge->frequency);
4569 /* This is it. Duplicate the callee body. Assume callee is
4570 pre-gimplified. Note that we must not alter the caller
4571 function in any way before this point, as this CALL_EXPR may be
4572 a self-referential call; if we're calling ourselves, we need to
4573 duplicate our body before altering anything. */
4574 copy_body (id, cg_edge->callee->count,
4575 GCOV_COMPUTE_SCALE (cg_edge->frequency, CGRAPH_FREQ_BASE),
4576 bb, return_block, NULL);
4578 /* Reset the escaped solution. */
4579 if (cfun->gimple_df)
4580 pt_solution_reset (&cfun->gimple_df->escaped);
4582 /* Clean up. */
4583 if (id->debug_map)
4585 delete id->debug_map;
4586 id->debug_map = dst;
4588 delete id->decl_map;
4589 id->decl_map = st;
4591 /* Unlink the call's virtual operands before replacing it. */
4592 unlink_stmt_vdef (stmt);
4593 if (gimple_vdef (stmt)
4594 && TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
4595 release_ssa_name (gimple_vdef (stmt));
4597 /* If the inlined function returns a result that we care about,
4598 substitute the GIMPLE_CALL with an assignment of the return
4599 variable to the LHS of the call. That is, if STMT was
4600 'a = foo (...)', substitute the call with 'a = USE_RETVAR'. */
4601 if (use_retvar && gimple_call_lhs (stmt))
4603 gimple old_stmt = stmt;
4604 stmt = gimple_build_assign (gimple_call_lhs (stmt), use_retvar);
4605 gsi_replace (&stmt_gsi, stmt, false);
4606 maybe_clean_or_replace_eh_stmt (old_stmt, stmt);
4608 /* Copy bounds if we copy structure with bounds. */
4609 if (chkp_function_instrumented_p (id->dst_fn)
4610 && !BOUNDED_P (use_retvar)
4611 && chkp_type_has_pointer (TREE_TYPE (use_retvar)))
4612 id->assign_stmts.safe_push (stmt);
4614 else
4616 /* Handle the case of inlining a function with no return
4617 statement, which causes the return value to become undefined. */
4618 if (gimple_call_lhs (stmt)
4619 && TREE_CODE (gimple_call_lhs (stmt)) == SSA_NAME)
4621 tree name = gimple_call_lhs (stmt);
4622 tree var = SSA_NAME_VAR (name);
4623 tree def = ssa_default_def (cfun, var);
4625 if (def)
4627 /* If the variable is used undefined, make this name
4628 undefined via a move. */
4629 stmt = gimple_build_assign (gimple_call_lhs (stmt), def);
4630 gsi_replace (&stmt_gsi, stmt, true);
4632 else
4634 /* Otherwise make this variable undefined. */
4635 gsi_remove (&stmt_gsi, true);
4636 set_ssa_default_def (cfun, var, name);
4637 SSA_NAME_DEF_STMT (name) = gimple_build_nop ();
4640 else
4641 gsi_remove (&stmt_gsi, true);
4644 /* Put returned bounds into the correct place if required. */
4645 if (return_bounds)
4647 gimple old_stmt = SSA_NAME_DEF_STMT (return_bounds);
4648 gimple new_stmt = gimple_build_assign (return_bounds, id->retbnd);
4649 gimple_stmt_iterator bnd_gsi = gsi_for_stmt (old_stmt);
4650 unlink_stmt_vdef (old_stmt);
4651 gsi_replace (&bnd_gsi, new_stmt, false);
4652 maybe_clean_or_replace_eh_stmt (old_stmt, new_stmt);
4653 cgraph_update_edges_for_call_stmt (old_stmt,
4654 gimple_call_fndecl (old_stmt),
4655 new_stmt);
4658 if (purge_dead_abnormal_edges)
4660 gimple_purge_dead_eh_edges (return_block);
4661 gimple_purge_dead_abnormal_call_edges (return_block);
4664 /* If the value of the new expression is ignored, that's OK. We
4665 don't warn about this for CALL_EXPRs, so we shouldn't warn about
4666 the equivalent inlined version either. */
4667 if (is_gimple_assign (stmt))
4669 gcc_assert (gimple_assign_single_p (stmt)
4670 || CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt)));
4671 TREE_USED (gimple_assign_rhs1 (stmt)) = 1;
4674 /* Copy bounds for all generated assigns that need it. */
4675 for (i = 0; i < id->assign_stmts.length (); i++)
4676 chkp_copy_bounds_for_assign (id->assign_stmts[i], cg_edge);
4677 id->assign_stmts.release ();
4679 /* Output the inlining info for this abstract function, since it has been
4680 inlined. If we don't do this now, we can lose the information about the
4681 variables in the function when the blocks get blown away as soon as we
4682 remove the cgraph node. */
4683 if (gimple_block (stmt))
4684 (*debug_hooks->outlining_inline_function) (cg_edge->callee->decl);
4686 /* Update callgraph if needed. */
4687 cg_edge->callee->remove ();
4689 id->block = NULL_TREE;
4690 successfully_inlined = TRUE;
4692 egress:
4693 input_location = saved_location;
4694 return successfully_inlined;
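/* A rough before/after sketch of the transformation above (identifiers
   invented for illustration):

     before:   a_1 = foo (x_2);            in basic block BB
     after:    parm_3 = x_2;               argument initialization
               ... remapped copy of foo's body ...
               retvar_4 = <return value>;  return statements became
                                           assignments plus edges to
                                           RETURN_BLOCK
               a_1 = retvar_4;             the original call replaced

   The callee's EH regions are duplicated into the caller, and the cgraph
   node behind this edge is removed once the body has been copied.  */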
4697 /* Expand call statements found in basic block BB.
4698 We can only have CALL_EXPRs as the "toplevel" tree code or nested
4699 in a MODIFY_EXPR. */
4701 static bool
4702 gimple_expand_calls_inline (basic_block bb, copy_body_data *id)
4704 gimple_stmt_iterator gsi;
4706 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
4708 gimple stmt = gsi_stmt (gsi);
4710 if (is_gimple_call (stmt)
4711 && !gimple_call_internal_p (stmt)
4712 && expand_call_inline (bb, stmt, id))
4713 return true;
4716 return false;
4720 /* Walk all basic blocks created after FIRST and try to fold every statement
4721 in the STATEMENTS pointer set. */
4723 static void
4724 fold_marked_statements (int first, hash_set<gimple> *statements)
4726 for (; first < n_basic_blocks_for_fn (cfun); first++)
4727 if (BASIC_BLOCK_FOR_FN (cfun, first))
4729 gimple_stmt_iterator gsi;
4731 for (gsi = gsi_start_bb (BASIC_BLOCK_FOR_FN (cfun, first));
4732 !gsi_end_p (gsi);
4733 gsi_next (&gsi))
4734 if (statements->contains (gsi_stmt (gsi)))
4736 gimple old_stmt = gsi_stmt (gsi);
4737 tree old_decl = is_gimple_call (old_stmt) ? gimple_call_fndecl (old_stmt) : 0;
4739 if (old_decl && DECL_BUILT_IN (old_decl))
4741 /* Folding builtins can create multiple instructions,
4742 we need to look at all of them. */
4743 gimple_stmt_iterator i2 = gsi;
4744 gsi_prev (&i2);
4745 if (fold_stmt (&gsi))
4747 gimple new_stmt;
4748 /* If a builtin at the end of a bb folded into nothing,
4749 the following loop won't work. */
4750 if (gsi_end_p (gsi))
4752 cgraph_update_edges_for_call_stmt (old_stmt,
4753 old_decl, NULL);
4754 break;
4756 if (gsi_end_p (i2))
4757 i2 = gsi_start_bb (BASIC_BLOCK_FOR_FN (cfun, first));
4758 else
4759 gsi_next (&i2);
4760 while (1)
4762 new_stmt = gsi_stmt (i2);
4763 update_stmt (new_stmt);
4764 cgraph_update_edges_for_call_stmt (old_stmt, old_decl,
4765 new_stmt);
4767 if (new_stmt == gsi_stmt (gsi))
4769 /* It is okay to check only for the very last
4770 of these statements. If it is a throwing
4771 statement nothing will change. If it isn't,
4772 this can remove EH edges. The only way that
4773 could be wrong is if some intermediate stmts
4774 threw, but not the last one. That would mean
4775 we'd have to split the block, which we can't
4776 do here and we'd lose anyway. And as builtins
4777 probably never throw, this all
4778 is moot anyway. */
4779 if (maybe_clean_or_replace_eh_stmt (old_stmt,
4780 new_stmt))
4781 gimple_purge_dead_eh_edges (
4782 BASIC_BLOCK_FOR_FN (cfun, first));
4783 break;
4785 gsi_next (&i2);
4789 else if (fold_stmt (&gsi))
4791 /* Re-read the statement from GSI as fold_stmt() may
4792 have changed it. */
4793 gimple new_stmt = gsi_stmt (gsi);
4794 update_stmt (new_stmt);
4796 if (is_gimple_call (old_stmt)
4797 || is_gimple_call (new_stmt))
4798 cgraph_update_edges_for_call_stmt (old_stmt, old_decl,
4799 new_stmt);
4801 if (maybe_clean_or_replace_eh_stmt (old_stmt, new_stmt))
4802 gimple_purge_dead_eh_edges (BASIC_BLOCK_FOR_FN (cfun,
4803 first));
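/* Illustration (invented statement): after inlining has turned an
   argument into a constant, a marked statement such as

     n_1 = strlen ("abc");

   folds to "n_1 = 3;".  When a folded builtin expands into several new
   statements, the inner loop above walks all of them so the cgraph edges
   of OLD_DECL are transferred to whatever calls remain.  */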
4809 /* Expand calls to inline functions in the body of FN. */
4811 unsigned int
4812 optimize_inline_calls (tree fn)
4814 copy_body_data id;
4815 basic_block bb;
4816 int last = n_basic_blocks_for_fn (cfun);
4817 bool inlined_p = false;
4819 /* Clear out ID. */
4820 memset (&id, 0, sizeof (id));
4822 id.src_node = id.dst_node = cgraph_node::get (fn);
4823 gcc_assert (id.dst_node->definition);
4824 id.dst_fn = fn;
4825 /* Or any functions that aren't finished yet. */
4826 if (current_function_decl)
4827 id.dst_fn = current_function_decl;
4829 id.copy_decl = copy_decl_maybe_to_var;
4830 id.transform_call_graph_edges = CB_CGE_DUPLICATE;
4831 id.transform_new_cfg = false;
4832 id.transform_return_to_modify = true;
4833 id.transform_parameter = true;
4834 id.transform_lang_insert_block = NULL;
4835 id.statements_to_fold = new hash_set<gimple>;
4837 push_gimplify_context ();
4839 /* We make no attempts to keep dominance info up-to-date. */
4840 free_dominance_info (CDI_DOMINATORS);
4841 free_dominance_info (CDI_POST_DOMINATORS);
4843 /* Register specific gimple functions. */
4844 gimple_register_cfg_hooks ();
4846 /* Reach the trees by walking over the CFG, and note the
4847 enclosing basic-blocks in the call edges. */
4848 /* We walk the blocks going forward, because inlined function bodies
4849 will split id->current_basic_block, and the new blocks will
4850 follow it; we'll trudge through them, processing their CALL_EXPRs
4851 along the way. */
4852 FOR_EACH_BB_FN (bb, cfun)
4853 inlined_p |= gimple_expand_calls_inline (bb, &id);
4855 pop_gimplify_context (NULL);
4857 #ifdef ENABLE_CHECKING
4859 struct cgraph_edge *e;
4861 id.dst_node->verify ();
4863 /* Double check that we inlined everything we are supposed to inline. */
4864 for (e = id.dst_node->callees; e; e = e->next_callee)
4865 gcc_assert (e->inline_failed);
4867 #endif
4869 /* Fold queued statements. */
4870 fold_marked_statements (last, id.statements_to_fold);
4871 delete id.statements_to_fold;
4873 gcc_assert (!id.debug_stmts.exists ());
4875 /* If we didn't inline into the function there is nothing to do. */
4876 if (!inlined_p)
4877 return 0;
4879 /* Renumber the lexical scoping (non-code) blocks consecutively. */
4880 number_blocks (fn);
4882 delete_unreachable_blocks_update_callgraph (&id);
4883 #ifdef ENABLE_CHECKING
4884 id.dst_node->verify ();
4885 #endif
4887 /* It would be nice to check SSA/CFG/statement consistency here, but it is
4888 not possible yet - the IPA passes might make various functions not
4889 throw, and they don't care to proactively update local EH info. This is
4890 done later in the fixup_cfg pass, which also executes the verification. */
4891 return (TODO_update_ssa
4892 | TODO_cleanup_cfg
4893 | (gimple_in_ssa_p (cfun) ? TODO_remove_unused_locals : 0)
4894 | (gimple_in_ssa_p (cfun) ? TODO_update_address_taken : 0)
4895 | (profile_status_for_fn (cfun) != PROFILE_ABSENT
4896 ? TODO_rebuild_frequencies : 0));
4899 /* Passed to walk_tree. Copies the node pointed to, if appropriate. */
4901 tree
4902 copy_tree_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
4904 enum tree_code code = TREE_CODE (*tp);
4905 enum tree_code_class cl = TREE_CODE_CLASS (code);
4907 /* We make copies of most nodes. */
4908 if (IS_EXPR_CODE_CLASS (cl)
4909 || code == TREE_LIST
4910 || code == TREE_VEC
4911 || code == TYPE_DECL
4912 || code == OMP_CLAUSE)
4914 /* Because the chain gets clobbered when we make a copy, we save it
4915 here. */
4916 tree chain = NULL_TREE, new_tree;
4918 if (CODE_CONTAINS_STRUCT (code, TS_COMMON))
4919 chain = TREE_CHAIN (*tp);
4921 /* Copy the node. */
4922 new_tree = copy_node (*tp);
4924 *tp = new_tree;
4926 /* Now, restore the chain, if appropriate. That will cause
4927 walk_tree to walk into the chain as well. */
4928 if (code == PARM_DECL
4929 || code == TREE_LIST
4930 || code == OMP_CLAUSE)
4931 TREE_CHAIN (*tp) = chain;
4933 /* For now, we don't update BLOCKs when we make copies. So, we
4934 have to nullify all BIND_EXPRs. */
4935 if (TREE_CODE (*tp) == BIND_EXPR)
4936 BIND_EXPR_BLOCK (*tp) = NULL_TREE;
4938 else if (code == CONSTRUCTOR)
4940 /* CONSTRUCTOR nodes need special handling because
4941 we need to duplicate the vector of elements. */
4942 tree new_tree;
4944 new_tree = copy_node (*tp);
4945 CONSTRUCTOR_ELTS (new_tree) = vec_safe_copy (CONSTRUCTOR_ELTS (*tp));
4946 *tp = new_tree;
4948 else if (code == STATEMENT_LIST)
4949 /* We used to just abort on STATEMENT_LIST, but we can run into them
4950 with statement-expressions (c++/40975). */
4951 copy_statement_list (tp);
4952 else if (TREE_CODE_CLASS (code) == tcc_type)
4953 *walk_subtrees = 0;
4954 else if (TREE_CODE_CLASS (code) == tcc_declaration)
4955 *walk_subtrees = 0;
4956 else if (TREE_CODE_CLASS (code) == tcc_constant)
4957 *walk_subtrees = 0;
4958 return NULL_TREE;
4961 /* The SAVE_EXPR pointed to by TP is being copied. If ST contains
4962 information indicating to what new SAVE_EXPR this one should be mapped,
4963 use that one. Otherwise, create a new node and enter it in ST so that
4964 later encounters of the same SAVE_EXPR map to the same copy. */
4966 static void
4967 remap_save_expr (tree *tp, hash_map<tree, tree> *st, int *walk_subtrees)
4969 tree *n;
4970 tree t;
4972 /* See if we already encountered this SAVE_EXPR. */
4973 n = st->get (*tp);
4975 /* If we didn't already remap this SAVE_EXPR, do so now. */
4976 if (!n)
4978 t = copy_node (*tp);
4980 /* Remember this SAVE_EXPR. */
4981 st->put (*tp, t);
4982 /* Make sure we don't remap an already-remapped SAVE_EXPR. */
4983 st->put (t, t);
4985 else
4987 /* We've already walked into this SAVE_EXPR; don't do it again. */
4988 *walk_subtrees = 0;
4989 t = *n;
4992 /* Replace this SAVE_EXPR with the copy. */
4993 *tp = t;
4996 /* Called via walk_gimple_seq. If *GSIP points to a GIMPLE_LABEL for a local
4997 label, copies the declaration and enters it in the decl map in DATA (which
4998 is really a 'copy_body_data *'). */
5000 static tree
5001 mark_local_labels_stmt (gimple_stmt_iterator *gsip,
5002 bool *handled_ops_p ATTRIBUTE_UNUSED,
5003 struct walk_stmt_info *wi)
5005 copy_body_data *id = (copy_body_data *) wi->info;
5006 glabel *stmt = dyn_cast <glabel *> (gsi_stmt (*gsip));
5008 if (stmt)
5010 tree decl = gimple_label_label (stmt);
5012 /* Copy the decl and remember the copy. */
5013 insert_decl_map (id, decl, id->copy_decl (decl, id));
5016 return NULL_TREE;
5020 /* Called via walk_gimple_seq by copy_gimple_seq_and_replace_locals.
5021 Using the hash map pointed to by ST (the decl map of a copy_body_data),
5022 remaps all local declarations to appropriate replacements in gimple
5023 operands. */
5025 static tree
5026 replace_locals_op (tree *tp, int *walk_subtrees, void *data)
5028 struct walk_stmt_info *wi = (struct walk_stmt_info*) data;
5029 copy_body_data *id = (copy_body_data *) wi->info;
5030 hash_map<tree, tree> *st = id->decl_map;
5031 tree *n;
5032 tree expr = *tp;
5034 /* Only a local declaration (variable or label). */
5035 if ((TREE_CODE (expr) == VAR_DECL
5036 && !TREE_STATIC (expr))
5037 || TREE_CODE (expr) == LABEL_DECL)
5039 /* Lookup the declaration. */
5040 n = st->get (expr);
5042 /* If it's there, remap it. */
5043 if (n)
5044 *tp = *n;
5045 *walk_subtrees = 0;
5047 else if (TREE_CODE (expr) == STATEMENT_LIST
5048 || TREE_CODE (expr) == BIND_EXPR
5049 || TREE_CODE (expr) == SAVE_EXPR)
5050 gcc_unreachable ();
5051 else if (TREE_CODE (expr) == TARGET_EXPR)
5053 /* Don't mess with a TARGET_EXPR that hasn't been expanded.
5054 It's OK for this to happen if it was part of a subtree that
5055 isn't immediately expanded, such as operand 2 of another
5056 TARGET_EXPR. */
5057 if (!TREE_OPERAND (expr, 1))
5059 TREE_OPERAND (expr, 1) = TREE_OPERAND (expr, 3);
5060 TREE_OPERAND (expr, 3) = NULL_TREE;
5064 /* Keep iterating. */
5065 return NULL_TREE;
5069 /* Called via walk_gimple_seq by copy_gimple_seq_and_replace_locals.
5070 Using the decl map of the copy_body_data pointed to by WI->info,
5071 remaps all local declarations to appropriate replacements in gimple
5072 statements. */
5074 static tree
5075 replace_locals_stmt (gimple_stmt_iterator *gsip,
5076 bool *handled_ops_p ATTRIBUTE_UNUSED,
5077 struct walk_stmt_info *wi)
5079 copy_body_data *id = (copy_body_data *) wi->info;
5080 gimple gs = gsi_stmt (*gsip);
5082 if (gbind *stmt = dyn_cast <gbind *> (gs))
5084 tree block = gimple_bind_block (stmt);
5086 if (block)
5088 remap_block (&block, id);
5089 gimple_bind_set_block (stmt, block);
5092 /* This will remap a lot of the same decls again, but this should be
5093 harmless. */
5094 if (gimple_bind_vars (stmt))
5095 gimple_bind_set_vars (stmt, remap_decls (gimple_bind_vars (stmt),
5096 NULL, id));
5099 /* Keep iterating. */
5100 return NULL_TREE;
5104 /* Copies everything in SEQ and replaces variables and labels local to
5105 current_function_decl. */
5107 gimple_seq
5108 copy_gimple_seq_and_replace_locals (gimple_seq seq)
5110 copy_body_data id;
5111 struct walk_stmt_info wi;
5112 gimple_seq copy;
5114 /* There's nothing to do for NULL_TREE. */
5115 if (seq == NULL)
5116 return seq;
5118 /* Set up ID. */
5119 memset (&id, 0, sizeof (id));
5120 id.src_fn = current_function_decl;
5121 id.dst_fn = current_function_decl;
5122 id.decl_map = new hash_map<tree, tree>;
5123 id.debug_map = NULL;
5125 id.copy_decl = copy_decl_no_change;
5126 id.transform_call_graph_edges = CB_CGE_DUPLICATE;
5127 id.transform_new_cfg = false;
5128 id.transform_return_to_modify = false;
5129 id.transform_parameter = false;
5130 id.transform_lang_insert_block = NULL;
5132 /* Walk the tree once to find local labels. */
5133 memset (&wi, 0, sizeof (wi));
5134 hash_set<tree> visited;
5135 wi.info = &id;
5136 wi.pset = &visited;
5137 walk_gimple_seq (seq, mark_local_labels_stmt, NULL, &wi);
5139 copy = gimple_seq_copy (seq);
5141 /* Walk the copy, remapping decls. */
5142 memset (&wi, 0, sizeof (wi));
5143 wi.info = &id;
5144 walk_gimple_seq (copy, replace_locals_stmt, replace_locals_op, &wi);
5146 /* Clean up. */
5147 delete id.decl_map;
5148 if (id.debug_map)
5149 delete id.debug_map;
5151 return copy;
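/* Hypothetical usage sketch (CLEANUP and OTHER_SEQ are invented names):

     gimple_seq copy = copy_gimple_seq_and_replace_locals (cleanup);
     gimple_seq_add_seq (&other_seq, copy);

   Because non-static locals and labels in the copy are remapped to fresh
   declarations, the same sequence can be emitted on two paths without the
   two copies sharing VAR_DECLs or LABEL_DECLs.  */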
5155 /* Allow someone to determine if SEARCH is a child of TOP from gdb. */
5157 static tree
5158 debug_find_tree_1 (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED, void *data)
5160 if (*tp == data)
5161 return (tree) data;
5162 else
5163 return NULL;
5166 DEBUG_FUNCTION bool
5167 debug_find_tree (tree top, tree search)
5169 return walk_tree_without_duplicates (&top, debug_find_tree_1, search) != 0;
5173 /* Declare the variables created by the inliner. Add all the variables in
5174 VARS to BLOCK. */
5176 static void
5177 declare_inline_vars (tree block, tree vars)
5179 tree t;
5180 for (t = vars; t; t = DECL_CHAIN (t))
5182 DECL_SEEN_IN_BIND_EXPR_P (t) = 1;
5183 gcc_assert (!TREE_STATIC (t) && !TREE_ASM_WRITTEN (t));
5184 add_local_decl (cfun, t);
5187 if (block)
5188 BLOCK_VARS (block) = chainon (BLOCK_VARS (block), vars);
5191 /* Finish the copy COPY of DECL. DECL originally lived in ID->src_fn,
5192 but the copy will live in ID->dst_fn. Shared tail of the copy_decl_*
5193 routines below. */
5195 static tree
5196 copy_decl_for_dup_finish (copy_body_data *id, tree decl, tree copy)
5198 /* Don't generate debug information for the copy if we wouldn't have
5199 generated it for the original either. */
5200 DECL_ARTIFICIAL (copy) = DECL_ARTIFICIAL (decl);
5201 DECL_IGNORED_P (copy) = DECL_IGNORED_P (decl);
5203 /* Set the DECL_ABSTRACT_ORIGIN so the debugging routines know what
5204 declaration inspired this copy. */
5205 DECL_ABSTRACT_ORIGIN (copy) = DECL_ORIGIN (decl);
5207 /* The new variable/label has no RTL, yet. */
5208 if (CODE_CONTAINS_STRUCT (TREE_CODE (copy), TS_DECL_WRTL)
5209 && !TREE_STATIC (copy) && !DECL_EXTERNAL (copy))
5210 SET_DECL_RTL (copy, 0);
5212 /* These args would always appear unused, if not for this. */
5213 TREE_USED (copy) = 1;
5215 /* Set the context for the new declaration. */
5216 if (!DECL_CONTEXT (decl))
5217 /* Globals stay global. */
5219 else if (DECL_CONTEXT (decl) != id->src_fn)
5220 /* Things that weren't in the scope of the function we're inlining
5221 from aren't in the scope we're inlining to, either. */
5223 else if (TREE_STATIC (decl))
5224 /* Function-scoped static variables should stay in the original
5225 function. */
5227 else
5228 /* Ordinary automatic local variables are now in the scope of the
5229 new function. */
5230 DECL_CONTEXT (copy) = id->dst_fn;
5232 return copy;
5235 static tree
5236 copy_decl_to_var (tree decl, copy_body_data *id)
5238 tree copy, type;
5240 gcc_assert (TREE_CODE (decl) == PARM_DECL
5241 || TREE_CODE (decl) == RESULT_DECL);
5243 type = TREE_TYPE (decl);
5245 copy = build_decl (DECL_SOURCE_LOCATION (id->dst_fn),
5246 VAR_DECL, DECL_NAME (decl), type);
5247 if (DECL_PT_UID_SET_P (decl))
5248 SET_DECL_PT_UID (copy, DECL_PT_UID (decl));
5249 TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (decl);
5250 TREE_READONLY (copy) = TREE_READONLY (decl);
5251 TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (decl);
5252 DECL_GIMPLE_REG_P (copy) = DECL_GIMPLE_REG_P (decl);
5254 return copy_decl_for_dup_finish (id, decl, copy);
5257 /* Like copy_decl_to_var, but create a return slot object instead of a
5258 pointer variable for return by invisible reference. */
5260 static tree
5261 copy_result_decl_to_var (tree decl, copy_body_data *id)
5263 tree copy, type;
5265 gcc_assert (TREE_CODE (decl) == PARM_DECL
5266 || TREE_CODE (decl) == RESULT_DECL);
5268 type = TREE_TYPE (decl);
5269 if (DECL_BY_REFERENCE (decl))
5270 type = TREE_TYPE (type);
5272 copy = build_decl (DECL_SOURCE_LOCATION (id->dst_fn),
5273 VAR_DECL, DECL_NAME (decl), type);
5274 if (DECL_PT_UID_SET_P (decl))
5275 SET_DECL_PT_UID (copy, DECL_PT_UID (decl));
5276 TREE_READONLY (copy) = TREE_READONLY (decl);
5277 TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (decl);
5278 if (!DECL_BY_REFERENCE (decl))
5280 TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (decl);
5281 DECL_GIMPLE_REG_P (copy) = DECL_GIMPLE_REG_P (decl);
5284 return copy_decl_for_dup_finish (id, decl, copy);
5287 tree
5288 copy_decl_no_change (tree decl, copy_body_data *id)
5290 tree copy;
5292 copy = copy_node (decl);
5294 /* The COPY is not abstract; it will be generated in DST_FN. */
5295 DECL_ABSTRACT_P (copy) = false;
5296 lang_hooks.dup_lang_specific_decl (copy);
5298 /* TREE_ADDRESSABLE isn't used to indicate that a label's address has
5299 been taken; it's for internal bookkeeping in expand_goto_internal. */
5300 if (TREE_CODE (copy) == LABEL_DECL)
5302 TREE_ADDRESSABLE (copy) = 0;
5303 LABEL_DECL_UID (copy) = -1;
5306 return copy_decl_for_dup_finish (id, decl, copy);
5309 static tree
5310 copy_decl_maybe_to_var (tree decl, copy_body_data *id)
5312 if (TREE_CODE (decl) == PARM_DECL || TREE_CODE (decl) == RESULT_DECL)
5313 return copy_decl_to_var (decl, id);
5314 else
5315 return copy_decl_no_change (decl, id);
5318 /* Return a copy of the function's argument tree. */
5319 static tree
5320 copy_arguments_for_versioning (tree orig_parm, copy_body_data * id,
5321 bitmap args_to_skip, tree *vars)
5323 tree arg, *parg;
5324 tree new_parm = NULL;
5325 int i = 0;
5327 parg = &new_parm;
5329 for (arg = orig_parm; arg; arg = DECL_CHAIN (arg), i++)
5330 if (!args_to_skip || !bitmap_bit_p (args_to_skip, i))
5332 tree new_tree = remap_decl (arg, id);
5333 if (TREE_CODE (new_tree) != PARM_DECL)
5334 new_tree = id->copy_decl (arg, id);
5335 lang_hooks.dup_lang_specific_decl (new_tree);
5336 *parg = new_tree;
5337 parg = &DECL_CHAIN (new_tree);
5339 else if (!id->decl_map->get (arg))
5341 /* Make an equivalent VAR_DECL. If the argument was used
5342 as a temporary variable later in the function, the uses will be
5343 replaced by the local variable. */
5344 tree var = copy_decl_to_var (arg, id);
5345 insert_decl_map (id, arg, var);
5346 /* Declare this new variable. */
5347 DECL_CHAIN (var) = *vars;
5348 *vars = var;
5350 return new_parm;
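/* Illustration (invented example): versioning "int f (int a, int b, int c)"
   with bit 1 set in ARGS_TO_SKIP yields a clone whose DECL_ARGUMENTS are
   copies of A and C only; B is instead mapped to a fresh VAR_DECL chained
   onto *VARS, so any remaining uses of B in the copied body still refer
   to a declared local.  */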
5353 /* Return a copy of the function's static chain. */
5354 static tree
5355 copy_static_chain (tree static_chain, copy_body_data * id)
5357 tree *chain_copy, *pvar;
5359 chain_copy = &static_chain;
5360 for (pvar = chain_copy; *pvar; pvar = &DECL_CHAIN (*pvar))
5362 tree new_tree = remap_decl (*pvar, id);
5363 lang_hooks.dup_lang_specific_decl (new_tree);
5364 DECL_CHAIN (new_tree) = DECL_CHAIN (*pvar);
5365 *pvar = new_tree;
5367 return static_chain;
5370 /* Return true if the function is allowed to be versioned.
5371 This is a guard for the versioning functionality. */
5373 bool
5374 tree_versionable_function_p (tree fndecl)
5376 return (!lookup_attribute ("noclone", DECL_ATTRIBUTES (fndecl))
5377 && copy_forbidden (DECL_STRUCT_FUNCTION (fndecl), fndecl) == NULL);
5380 /* Delete all unreachable basic blocks and update the callgraph.
5381 Doing so is somewhat nontrivial because we need to update all clones and
5382 remove inline functions that become unreachable. */
5384 static bool
5385 delete_unreachable_blocks_update_callgraph (copy_body_data *id)
5387 bool changed = false;
5388 basic_block b, next_bb;
5390 find_unreachable_blocks ();
5392 /* Delete all unreachable basic blocks. */
5394 for (b = ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb; b
5395 != EXIT_BLOCK_PTR_FOR_FN (cfun); b = next_bb)
5397 next_bb = b->next_bb;
5399 if (!(b->flags & BB_REACHABLE))
5401 gimple_stmt_iterator bsi;
5403 for (bsi = gsi_start_bb (b); !gsi_end_p (bsi); gsi_next (&bsi))
5405 struct cgraph_edge *e;
5406 struct cgraph_node *node;
5408 id->dst_node->remove_stmt_references (gsi_stmt (bsi));
5410 if (gimple_code (gsi_stmt (bsi)) == GIMPLE_CALL
5411 &&(e = id->dst_node->get_edge (gsi_stmt (bsi))) != NULL)
5413 if (!e->inline_failed)
5414 e->callee->remove_symbol_and_inline_clones (id->dst_node);
5415 else
5416 e->remove ();
5418 if (id->transform_call_graph_edges == CB_CGE_MOVE_CLONES
5419 && id->dst_node->clones)
5420 for (node = id->dst_node->clones; node != id->dst_node;)
5422 node->remove_stmt_references (gsi_stmt (bsi));
5423 if (gimple_code (gsi_stmt (bsi)) == GIMPLE_CALL
5424 && (e = node->get_edge (gsi_stmt (bsi))) != NULL)
5426 if (!e->inline_failed)
5427 e->callee->remove_symbol_and_inline_clones (id->dst_node);
5428 else
5429 e->remove ();
5432 if (node->clones)
5433 node = node->clones;
5434 else if (node->next_sibling_clone)
5435 node = node->next_sibling_clone;
5436 else
5438 while (node != id->dst_node && !node->next_sibling_clone)
5439 node = node->clone_of;
5440 if (node != id->dst_node)
5441 node = node->next_sibling_clone;
5445 delete_basic_block (b);
5446 changed = true;
5450 return changed;
5453 /* Update clone info after duplication. */
5455 static void
5456 update_clone_info (copy_body_data * id)
5458 struct cgraph_node *node;
5459 if (!id->dst_node->clones)
5460 return;
5461 for (node = id->dst_node->clones; node != id->dst_node;)
5463 /* First update replace maps to match the new body. */
5464 if (node->clone.tree_map)
5466 unsigned int i;
5467 for (i = 0; i < vec_safe_length (node->clone.tree_map); i++)
5469 struct ipa_replace_map *replace_info;
5470 replace_info = (*node->clone.tree_map)[i];
5471 walk_tree (&replace_info->old_tree, copy_tree_body_r, id, NULL);
5472 walk_tree (&replace_info->new_tree, copy_tree_body_r, id, NULL);
5475 if (node->clones)
5476 node = node->clones;
5477 else if (node->next_sibling_clone)
5478 node = node->next_sibling_clone;
5479 else
5481 while (node != id->dst_node && !node->next_sibling_clone)
5482 node = node->clone_of;
5483 if (node != id->dst_node)
5484 node = node->next_sibling_clone;
5489 /* Create a copy of a function's tree.
5490 OLD_DECL and NEW_DECL are FUNCTION_DECL tree nodes
5491 of the original function and the new copied function
5492 respectively. In case we want to replace a DECL
5493 tree with another tree while duplicating the function's
5494 body, TREE_MAP represents the mapping between these
5495 trees. If UPDATE_CLONES is set, the call_stmt fields
5496 of edges of clones of the function will be updated.
5498 If non-NULL, ARGS_TO_SKIP determines which function parameters to remove
5499 from the new version.
5500 If SKIP_RETURN is true, the new version will return void.
5501 If non-NULL, BLOCKS_TO_COPY determines which basic blocks to copy.
5502 If non-NULL, NEW_ENTRY determines the new entry BB of the clone. */
5504 void
5505 tree_function_versioning (tree old_decl, tree new_decl,
5506 vec<ipa_replace_map *, va_gc> *tree_map,
5507 bool update_clones, bitmap args_to_skip,
5508 bool skip_return, bitmap blocks_to_copy,
5509 basic_block new_entry)
5511 struct cgraph_node *old_version_node;
5512 struct cgraph_node *new_version_node;
5513 copy_body_data id;
5514 tree p;
5515 unsigned i;
5516 struct ipa_replace_map *replace_info;
5517 basic_block old_entry_block, bb;
5518 auto_vec<gimple, 10> init_stmts;
5519 tree vars = NULL_TREE;
5521 gcc_assert (TREE_CODE (old_decl) == FUNCTION_DECL
5522 && TREE_CODE (new_decl) == FUNCTION_DECL);
5523 DECL_POSSIBLY_INLINED (old_decl) = 1;
5525 old_version_node = cgraph_node::get (old_decl);
5526 gcc_checking_assert (old_version_node);
5527 new_version_node = cgraph_node::get (new_decl);
5528 gcc_checking_assert (new_version_node);
5530 /* Copy over debug args. */
5531   if (DECL_HAS_DEBUG_ARGS_P (old_decl))
5532     {
5533       vec<tree, va_gc> **new_debug_args, **old_debug_args;
5534       gcc_checking_assert (decl_debug_args_lookup (new_decl) == NULL);
5535       DECL_HAS_DEBUG_ARGS_P (new_decl) = 0;
5536       old_debug_args = decl_debug_args_lookup (old_decl);
5537       if (old_debug_args)
5538 	{
5539 	  new_debug_args = decl_debug_args_insert (new_decl);
5540 	  *new_debug_args = vec_safe_copy (*old_debug_args);
5541 	}
5542     }
5544 /* Output the inlining info for this abstract function, since it has been
5545 inlined. If we don't do this now, we can lose the information about the
5546 variables in the function when the blocks get blown away as soon as we
5547 remove the cgraph node. */
5548 (*debug_hooks->outlining_inline_function) (old_decl);
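/* The new version is compiler-generated; pointing its abstract origin
   at the original decl keeps debug info attributing the copied code to
   the user-written function, and copying the personality routine keeps
   exception handling in the copied body using the same personality.  */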
5550 DECL_ARTIFICIAL (new_decl) = 1;
5551 DECL_ABSTRACT_ORIGIN (new_decl) = DECL_ORIGIN (old_decl);
5552 if (DECL_ORIGIN (old_decl) == old_decl)
5553 old_version_node->used_as_abstract_origin = true;
5554 DECL_FUNCTION_PERSONALITY (new_decl) = DECL_FUNCTION_PERSONALITY (old_decl);
5556 /* Prepare the data structures for the tree copy. */
5557 memset (&id, 0, sizeof (id));
5559 /* Generate a new name for the new version. */
5560 id.statements_to_fold = new hash_set<gimple>;
5562 id.decl_map = new hash_map<tree, tree>;
5563 id.debug_map = NULL;
5564 id.src_fn = old_decl;
5565 id.dst_fn = new_decl;
5566 id.src_node = old_version_node;
5567 id.dst_node = new_version_node;
5568 id.src_cfun = DECL_STRUCT_FUNCTION (old_decl);
5569 id.blocks_to_copy = blocks_to_copy;
5571 id.copy_decl = copy_decl_no_change;
5572 id.transform_call_graph_edges
5573 = update_clones ? CB_CGE_MOVE_CLONES : CB_CGE_MOVE;
5574 id.transform_new_cfg = true;
5575 id.transform_return_to_modify = false;
5576 id.transform_parameter = false;
5577 id.transform_lang_insert_block = NULL;
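/* For versioning the whole body is copied into a fresh CFG
   (transform_new_cfg), RETURN_EXPRs are kept rather than rewritten
   into assignments (transform_return_to_modify is false), and call
   graph edges are moved to the new node; when UPDATE_CLONES is set,
   edges of existing clones are moved as well.  */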
5579 old_entry_block = ENTRY_BLOCK_PTR_FOR_FN
5580 (DECL_STRUCT_FUNCTION (old_decl));
5581 DECL_RESULT (new_decl) = DECL_RESULT (old_decl);
5582 DECL_ARGUMENTS (new_decl) = DECL_ARGUMENTS (old_decl);
5583 initialize_cfun (new_decl, old_decl,
5584 old_entry_block->count);
5585 if (DECL_STRUCT_FUNCTION (new_decl)->gimple_df)
5586 DECL_STRUCT_FUNCTION (new_decl)->gimple_df->ipa_pta
5587 = id.src_cfun->gimple_df->ipa_pta;
5589 /* Copy the function's static chain. */
5590 p = DECL_STRUCT_FUNCTION (old_decl)->static_chain_decl;
5591 if (p)
5592 DECL_STRUCT_FUNCTION (new_decl)->static_chain_decl =
5593 copy_static_chain (DECL_STRUCT_FUNCTION (old_decl)->static_chain_decl,
5594 &id);
5596 /* If there's a tree_map, prepare for substitution. */
5597   if (tree_map)
5598     for (i = 0; i < tree_map->length (); i++)
5599       {
5600 	gimple init;
5601 	replace_info = (*tree_map)[i];
5602 	if (replace_info->replace_p)
5603 	  {
5604 	    if (!replace_info->old_tree)
5605 	      {
5606 		int i = replace_info->parm_num;
5607 		tree parm;
5608 		tree req_type;
5610 		for (parm = DECL_ARGUMENTS (old_decl); i; parm = DECL_CHAIN (parm))
5611 		  i --;
5612 		replace_info->old_tree = parm;
5613 		req_type = TREE_TYPE (parm);
5614 		if (!useless_type_conversion_p (req_type, TREE_TYPE (replace_info->new_tree)))
5615 		  {
5616 		    if (fold_convertible_p (req_type, replace_info->new_tree))
5617 		      replace_info->new_tree = fold_build1 (NOP_EXPR, req_type, replace_info->new_tree);
5618 		    else if (TYPE_SIZE (req_type) == TYPE_SIZE (TREE_TYPE (replace_info->new_tree)))
5619 		      replace_info->new_tree = fold_build1 (VIEW_CONVERT_EXPR, req_type, replace_info->new_tree);
5620 		    else
5621 		      {
5622 			if (dump_file)
5623 			  {
5624 			    fprintf (dump_file, "    const ");
5625 			    print_generic_expr (dump_file, replace_info->new_tree, 0);
5626 			    fprintf (dump_file, "  can't be converted to param ");
5627 			    print_generic_expr (dump_file, parm, 0);
5628 			    fprintf (dump_file, "\n");
5629 			  }
5630 			replace_info->old_tree = NULL;
5631 		      }
5632 		  }
5633 	      }
5634 	    else
5635 	      gcc_assert (TREE_CODE (replace_info->old_tree) == PARM_DECL);
5636 	    if (replace_info->old_tree)
5637 	      {
5638 		init = setup_one_parameter (&id, replace_info->old_tree,
5639 					    replace_info->new_tree, id.src_fn,
5640 					    NULL,
5641 					    &vars);
5642 		if (init)
5643 		  init_stmts.safe_push (init);
5644 	      }
5645 	  }
5646       }
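/* At this point every active replace_info has a PARM_DECL in old_tree
   and a type-compatible replacement value in new_tree;
   setup_one_parameter has recorded the mapping, and any initialization
   statements it produced are kept in init_stmts to be emitted into the
   block split off the entry edge further below.  */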
5647 /* Copy the function's arguments. */
5648 if (DECL_ARGUMENTS (old_decl) != NULL_TREE)
5649 DECL_ARGUMENTS (new_decl) =
5650 copy_arguments_for_versioning (DECL_ARGUMENTS (old_decl), &id,
5651 args_to_skip, &vars);
5653 DECL_INITIAL (new_decl) = remap_blocks (DECL_INITIAL (id.src_fn), &id);
5654 BLOCK_SUPERCONTEXT (DECL_INITIAL (new_decl)) = new_decl;
5656 declare_inline_vars (DECL_INITIAL (new_decl), vars);
5658 if (!vec_safe_is_empty (DECL_STRUCT_FUNCTION (old_decl)->local_decls))
5659 /* Add local vars. */
5660 add_local_variables (DECL_STRUCT_FUNCTION (old_decl), cfun, &id);
5662   if (DECL_RESULT (old_decl) == NULL_TREE)
5663     ;
5664   else if (skip_return && !VOID_TYPE_P (TREE_TYPE (DECL_RESULT (old_decl))))
5665     {
5666       DECL_RESULT (new_decl)
5667 	= build_decl (DECL_SOURCE_LOCATION (DECL_RESULT (old_decl)),
5668 		      RESULT_DECL, NULL_TREE, void_type_node);
5669       DECL_CONTEXT (DECL_RESULT (new_decl)) = new_decl;
5670       cfun->returns_struct = 0;
5671       cfun->returns_pcc_struct = 0;
5672     }
5673   else
5674     {
5675       tree old_name;
5676       DECL_RESULT (new_decl) = remap_decl (DECL_RESULT (old_decl), &id);
5677       lang_hooks.dup_lang_specific_decl (DECL_RESULT (new_decl));
5678       if (gimple_in_ssa_p (id.src_cfun)
5679 	  && DECL_BY_REFERENCE (DECL_RESULT (old_decl))
5680 	  && (old_name = ssa_default_def (id.src_cfun, DECL_RESULT (old_decl))))
5681 	{
5682 	  tree new_name = make_ssa_name (DECL_RESULT (new_decl), NULL);
5683 	  insert_decl_map (&id, old_name, new_name);
5684 	  SSA_NAME_DEF_STMT (new_name) = gimple_build_nop ();
5685 	  set_ssa_default_def (cfun, DECL_RESULT (new_decl), new_name);
5686 	}
5687     }
5689   /* Set up the destination function's loop tree.  */
5690   if (loops_for_fn (DECL_STRUCT_FUNCTION (old_decl)) != NULL)
5691     {
5692       cfun->curr_properties &= ~PROP_loops;
5693       loop_optimizer_init (AVOID_CFG_MODIFICATIONS);
5694       cfun->curr_properties |= PROP_loops;
5695     }

5697   /* Copy the function's body.  */
5698 copy_body (&id, old_entry_block->count, REG_BR_PROB_BASE,
5699 ENTRY_BLOCK_PTR_FOR_FN (cfun), EXIT_BLOCK_PTR_FOR_FN (cfun),
5700 new_entry);
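/* copy_body duplicates the source function's basic blocks and edges
   into the new CFG, restricted to id.blocks_to_copy when that bitmap
   is non-NULL, and uses NEW_ENTRY as the entry of the copied region
   for partial clones.  */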
5702 /* Renumber the lexical scoping (non-code) blocks consecutively. */
5703 number_blocks (new_decl);
5705 /* We want to create the BB unconditionally, so that the addition of
5706 debug stmts doesn't affect BB count, which may in the end cause
5707 codegen differences. */
5708 bb = split_edge (single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
5709 while (init_stmts.length ())
5710 insert_init_stmt (&id, bb, init_stmts.pop ());
5711 update_clone_info (&id);
5713 /* Remap the nonlocal_goto_save_area, if any. */
5714   if (cfun->nonlocal_goto_save_area)
5715     {
5716       struct walk_stmt_info wi;
5718       memset (&wi, 0, sizeof (wi));
5719       wi.info = &id;
5720       walk_tree (&cfun->nonlocal_goto_save_area, remap_gimple_op_r, &wi, NULL);
5721     }
5723 /* Clean up. */
5724 delete id.decl_map;
5725 if (id.debug_map)
5726 delete id.debug_map;
5727 free_dominance_info (CDI_DOMINATORS);
5728 free_dominance_info (CDI_POST_DOMINATORS);
5730 fold_marked_statements (0, id.statements_to_fold);
5731 delete id.statements_to_fold;
5732 fold_cond_expr_cond ();
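/* delete_unreachable_blocks_update_callgraph (defined above) removes
   blocks that are no longer reachable (for instance blocks excluded by
   blocks_to_copy) and drops the corresponding references and call
   graph edges of the new node and its clones.  */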
5733 delete_unreachable_blocks_update_callgraph (&id);
5734 if (id.dst_node->definition)
5735 cgraph_edge::rebuild_references ();
5736   if (loops_state_satisfies_p (LOOPS_NEED_FIXUP))
5737     {
5738       calculate_dominance_info (CDI_DOMINATORS);
5739       fix_loop_structure (NULL);
5740     }
5741 update_ssa (TODO_update_ssa);
5743 /* After partial cloning we need to rescale frequencies, so they are
5744 within proper range in the cloned function. */
5745   if (new_entry)
5746     {
5747       struct cgraph_edge *e;
5748       rebuild_frequencies ();
5750       new_version_node->count = ENTRY_BLOCK_PTR_FOR_FN (cfun)->count;
5751       for (e = new_version_node->callees; e; e = e->next_callee)
5752 	{
5753 	  basic_block bb = gimple_bb (e->call_stmt);
5754 	  e->frequency = compute_call_stmt_bb_frequency (current_function_decl,
5755 							 bb);
5756 	  e->count = bb->count;
5757 	}
5758       for (e = new_version_node->indirect_calls; e; e = e->next_callee)
5759 	{
5760 	  basic_block bb = gimple_bb (e->call_stmt);
5761 	  e->frequency = compute_call_stmt_bb_frequency (current_function_decl,
5762 							 bb);
5763 	  e->count = bb->count;
5764 	}
5765     }
5767   free_dominance_info (CDI_DOMINATORS);
5768   free_dominance_info (CDI_POST_DOMINATORS);
5770   gcc_assert (!id.debug_stmts.exists ());
5771   pop_cfun ();
5772   return;
5773 }
5775 /* EXP is a CALL_EXPR present in a GENERIC expression tree.  Try to integrate
5776 the callee and return the inlined body on success. */
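/* Illustrative only: for a "const" callee such as

     static int add1 (int x) __attribute__ ((const));

   a front end can hand the CALL_EXPR add1 (e) to this function and, if
   the copied body reduces to a single MODIFY_EXPR of the result, get
   back the corresponding value expression with X replaced by E.  The
   only requirements checked below are TREE_READONLY (fn) and a
   non-NULL DECL_SAVED_TREE (fn).  */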
5778 tree
5779 maybe_inline_call_in_expr (tree exp)
5780 {
5781 tree fn = get_callee_fndecl (exp);
5783 /* We can only try to inline "const" functions. */
5784   if (fn && TREE_READONLY (fn) && DECL_SAVED_TREE (fn))
5785     {
5786       call_expr_arg_iterator iter;
5787 copy_body_data id;
5788 tree param, arg, t;
5789 hash_map<tree, tree> decl_map;
5791 /* Remap the parameters. */
5792 for (param = DECL_ARGUMENTS (fn), arg = first_call_expr_arg (exp, &iter);
5793 param;
5794 param = DECL_CHAIN (param), arg = next_call_expr_arg (&iter))
5795 decl_map.put (param, arg);
5797 memset (&id, 0, sizeof (id));
5798 id.src_fn = fn;
5799 id.dst_fn = current_function_decl;
5800 id.src_cfun = DECL_STRUCT_FUNCTION (fn);
5801 id.decl_map = &decl_map;
5803 id.copy_decl = copy_decl_no_change;
5804 id.transform_call_graph_edges = CB_CGE_DUPLICATE;
5805 id.transform_new_cfg = false;
5806 id.transform_return_to_modify = true;
5807 id.transform_parameter = true;
5808 id.transform_lang_insert_block = NULL;
5810 /* Make sure not to unshare trees behind the front-end's back
5811 since front-end specific mechanisms may rely on sharing. */
5812 id.regimplify = false;
5813 id.do_not_unshare = true;
5815 /* We're not inside any EH region. */
5816 id.eh_lp_nr = 0;
5818 t = copy_tree_body (&id);
5820 /* We can only return something suitable for use in a GENERIC
5821 expression tree. */
5822 if (TREE_CODE (t) == MODIFY_EXPR)
5823 	return TREE_OPERAND (t, 1);
5824     }

5826   return NULL_TREE;
5827 }
5829 /* Duplicate a type, fields and all. */
5831 tree
5832 build_duplicate_type (tree type)
5833 {
5834   struct copy_body_data id;
5836 memset (&id, 0, sizeof (id));
5837 id.src_fn = current_function_decl;
5838 id.dst_fn = current_function_decl;
5839 id.src_cfun = cfun;
5840 id.decl_map = new hash_map<tree, tree>;
5841 id.debug_map = NULL;
5842 id.copy_decl = copy_decl_no_change;
5844 type = remap_type_1 (type, &id);
5846 delete id.decl_map;
5847 if (id.debug_map)
5848 delete id.debug_map;
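/* Make the duplicate its own canonical type, so canonical type
   comparisons treat it as distinct from the type it was copied from.  */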
5850 TYPE_CANONICAL (type) = type;
5852   return type;
5853 }
5855 /* Unshare the entire DECL_SAVED_TREE of FN and return the remapped
5856 parameters and RESULT_DECL in PARMS and RESULT. Used by C++ constexpr
5857 evaluation. */
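/* Illustrative sketch only: a caller (such as the C++ constexpr
   evaluator) would typically do

     tree parms, result;
     tree body = copy_fn (fndecl, parms, result);

   then walk the PARMS chain in parallel with the actual arguments,
   bind each remapped parameter to its argument value, evaluate BODY,
   and read the function's value through RESULT.  */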
5859 tree
5860 copy_fn (tree fn, tree& parms, tree& result)
5861 {
5862 copy_body_data id;
5863 tree param;
5864 hash_map<tree, tree> decl_map;
5866 tree *p = &parms;
5867 *p = NULL_TREE;
5869 memset (&id, 0, sizeof (id));
5870 id.src_fn = fn;
5871 id.dst_fn = current_function_decl;
5872 id.src_cfun = DECL_STRUCT_FUNCTION (fn);
5873 id.decl_map = &decl_map;
5875 id.copy_decl = copy_decl_no_change;
5876 id.transform_call_graph_edges = CB_CGE_DUPLICATE;
5877 id.transform_new_cfg = false;
5878 id.transform_return_to_modify = false;
5879 id.transform_parameter = true;
5880 id.transform_lang_insert_block = NULL;
5882 /* Make sure not to unshare trees behind the front-end's back
5883 since front-end specific mechanisms may rely on sharing. */
5884 id.regimplify = false;
5885 id.do_not_unshare = true;
5887 /* We're not inside any EH region. */
5888 id.eh_lp_nr = 0;
5890 /* Remap the parameters and result and return them to the caller. */
5891   for (param = DECL_ARGUMENTS (fn);
5892        param;
5893        param = DECL_CHAIN (param))
5894     {
5895       *p = remap_decl (param, &id);
5896       p = &DECL_CHAIN (*p);
5897     }
5899 if (DECL_RESULT (fn))
5900 result = remap_decl (DECL_RESULT (fn), &id);
5901 else
5902 result = NULL_TREE;
5904 return copy_tree_body (&id);