gcc/tree-inline.c
/* Tree inlining.
   Copyright (C) 2001-2015 Free Software Foundation, Inc.
   Contributed by Alexandre Oliva <aoliva@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "diagnostic-core.h"
#include "input.h"
#include "alias.h"
#include "symtab.h"
#include "tree.h"
#include "fold-const.h"
#include "stor-layout.h"
#include "calls.h"
#include "tree-inline.h"
#include "flags.h"
#include "params.h"
#include "insn-config.h"
#include "langhooks.h"
#include "predict.h"
#include "hard-reg-set.h"
#include "function.h"
#include "dominance.h"
#include "cfg.h"
#include "cfganal.h"
#include "basic-block.h"
#include "tree-iterator.h"
#include "intl.h"
#include "tree-ssa-alias.h"
#include "internal-fn.h"
#include "gimple-fold.h"
#include "tree-eh.h"
#include "gimple-expr.h"
#include "is-a.h"
#include "gimple.h"
#include "gimplify.h"
#include "gimple-iterator.h"
#include "gimplify-me.h"
#include "gimple-walk.h"
#include "gimple-ssa.h"
#include "tree-cfg.h"
#include "tree-phinodes.h"
#include "ssa-iterators.h"
#include "stringpool.h"
#include "tree-ssanames.h"
#include "tree-into-ssa.h"
#include "rtl.h"
#include "expmed.h"
#include "dojump.h"
#include "explow.h"
#include "emit-rtl.h"
#include "varasm.h"
#include "stmt.h"
#include "expr.h"
#include "tree-dfa.h"
#include "tree-ssa.h"
#include "tree-pretty-print.h"
#include "except.h"
#include "debug.h"
#include "plugin-api.h"
#include "ipa-ref.h"
#include "cgraph.h"
#include "alloc-pool.h"
#include "symbol-summary.h"
#include "ipa-prop.h"
#include "value-prof.h"
#include "tree-pass.h"
#include "target.h"
#include "cfgloop.h"
#include "builtins.h"
#include "tree-chkp.h"

#include "rtl.h"  /* FIXME: For asm_str_count.  */
/* I'm not real happy about this, but we need to handle gimple and
   non-gimple trees.  */

/* Inlining, Cloning, Versioning, Parallelization

   Inlining: a function body is duplicated, but the PARM_DECLs are
   remapped into VAR_DECLs, and non-void RETURN_EXPRs become
   MODIFY_EXPRs that store to a dedicated returned-value variable.
   The duplicated eh_region info of the copy will later be appended
   to the info for the caller; the eh_region info in copied throwing
   statements and RESX statements is adjusted accordingly.

   Cloning: (only in C++) We have one body for a con/de/structor, and
   multiple function decls, each with a unique parameter list.
   Duplicate the body, using the given splay tree; some parameters
   will become constants (like 0 or 1).

   Versioning: a function body is duplicated and the result is a new
   function, rather than being inlined into blocks of an existing
   function as with inlining.  Some parameters will become constants.

   Parallelization: a region of a function is duplicated, resulting in
   a new function.  Variables may be replaced with complex expressions
   to enable shared variable semantics.

   All of these will simultaneously look up any callgraph edges.  If
   we're going to inline the duplicated function body, and the given
   function has some cloned callgraph nodes (one for each place this
   function will be inlined) those callgraph edges will be duplicated.
   If we're cloning the body, those callgraph edges will be
   updated to point into the new body.  (Note that the original
   callgraph node and edge list will not be altered.)

   See the CALL_EXPR handling case in copy_tree_body_r ().  */
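
/* For illustration only (not part of the original sources): given a
   caller and callee such as

       static int inc (int a) { return a + 1; }
       int use (int x) { return inc (x); }

   inlining duplicates inc's body into use, remaps the PARM_DECL 'a'
   to a local VAR_DECL initialized from 'x', and turns the RETURN_EXPR
   into a MODIFY_EXPR of a dedicated return variable whose value then
   replaces the CALL_EXPR in the caller.  The remapping helpers below
   (remap_decl, remap_ssa_name, copy_bb, ...) implement exactly this
   substitution; the example names here are hypothetical.  */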
/* To Do:

   o In order to make inlining-on-trees work, we pessimized
     function-local static constants.  In particular, they are now
     always output, even when not addressed.  Fix this by treating
     function-local static constants just like global static
     constants; the back-end already knows not to output them if they
     are not needed.

   o Provide heuristics to clamp inlining of recursive template
     calls?  */
/* Weights that estimate_num_insns uses to estimate the size of the
   produced code.  */

eni_weights eni_size_weights;

/* Weights that estimate_num_insns uses to estimate the time necessary
   to execute the produced code.  */

eni_weights eni_time_weights;
/* Prototypes.  */

static tree declare_return_variable (copy_body_data *, tree, tree, tree,
                                     basic_block);
static void remap_block (tree *, copy_body_data *);
static void copy_bind_expr (tree *, int *, copy_body_data *);
static void declare_inline_vars (tree, tree);
static void remap_save_expr (tree *, hash_map<tree, tree> *, int *);
static void prepend_lexical_block (tree current_block, tree new_block);
static tree copy_decl_to_var (tree, copy_body_data *);
static tree copy_result_decl_to_var (tree, copy_body_data *);
static tree copy_decl_maybe_to_var (tree, copy_body_data *);
static gimple_seq remap_gimple_stmt (gimple, copy_body_data *);
static bool delete_unreachable_blocks_update_callgraph (copy_body_data *id);
static void insert_init_stmt (copy_body_data *, basic_block, gimple);
/* Insert a tree->tree mapping for ID.  Although the name suggests that
   the trees should be variables, it is used for more than that.  */

void
insert_decl_map (copy_body_data *id, tree key, tree value)
{
  id->decl_map->put (key, value);

  /* Always insert an identity map as well.  If we see this same new
     node again, we won't want to duplicate it a second time.  */
  if (key != value)
    id->decl_map->put (value, value);
}
/* Insert a tree->tree mapping for ID.  This is only used for
   variables.  */

static void
insert_debug_decl_map (copy_body_data *id, tree key, tree value)
{
  if (!gimple_in_ssa_p (id->src_cfun))
    return;

  if (!opt_for_fn (id->dst_fn, flag_var_tracking_assignments))
    return;

  if (!target_for_debug_bind (key))
    return;

  gcc_assert (TREE_CODE (key) == PARM_DECL);
  gcc_assert (TREE_CODE (value) == VAR_DECL);

  if (!id->debug_map)
    id->debug_map = new hash_map<tree, tree>;

  id->debug_map->put (key, value);
}
/* If nonzero, we're remapping the contents of inlined debug
   statements.  If negative, an error has occurred, such as a
   reference to a variable that isn't available in the inlined
   context.  */
static int processing_debug_stmt = 0;
/* Construct new SSA name for old NAME.  ID is the inline context.  */

static tree
remap_ssa_name (tree name, copy_body_data *id)
{
  tree new_tree, var;
  tree *n;

  gcc_assert (TREE_CODE (name) == SSA_NAME);

  n = id->decl_map->get (name);
  if (n)
    return unshare_expr (*n);

  if (processing_debug_stmt)
    {
      if (SSA_NAME_IS_DEFAULT_DEF (name)
          && TREE_CODE (SSA_NAME_VAR (name)) == PARM_DECL
          && id->entry_bb == NULL
          && single_succ_p (ENTRY_BLOCK_PTR_FOR_FN (cfun)))
        {
          tree vexpr = make_node (DEBUG_EXPR_DECL);
          gimple def_temp;
          gimple_stmt_iterator gsi;
          tree val = SSA_NAME_VAR (name);

          n = id->decl_map->get (val);
          if (n != NULL)
            val = *n;
          if (TREE_CODE (val) != PARM_DECL)
            {
              processing_debug_stmt = -1;
              return name;
            }
          def_temp = gimple_build_debug_source_bind (vexpr, val, NULL);
          DECL_ARTIFICIAL (vexpr) = 1;
          TREE_TYPE (vexpr) = TREE_TYPE (name);
          DECL_MODE (vexpr) = DECL_MODE (SSA_NAME_VAR (name));
          gsi = gsi_after_labels (single_succ (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
          gsi_insert_before (&gsi, def_temp, GSI_SAME_STMT);
          return vexpr;
        }

      processing_debug_stmt = -1;
      return name;
    }

  /* Remap anonymous SSA names or SSA names of anonymous decls.  */
  var = SSA_NAME_VAR (name);
  if (!var
      || (!SSA_NAME_IS_DEFAULT_DEF (name)
          && TREE_CODE (var) == VAR_DECL
          && !VAR_DECL_IS_VIRTUAL_OPERAND (var)
          && DECL_ARTIFICIAL (var)
          && DECL_IGNORED_P (var)
          && !DECL_NAME (var)))
    {
      struct ptr_info_def *pi;
      new_tree = make_ssa_name (remap_type (TREE_TYPE (name), id));
      if (!var && SSA_NAME_IDENTIFIER (name))
        SET_SSA_NAME_VAR_OR_IDENTIFIER (new_tree, SSA_NAME_IDENTIFIER (name));
      insert_decl_map (id, name, new_tree);
      SSA_NAME_OCCURS_IN_ABNORMAL_PHI (new_tree)
        = SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name);
      /* At least IPA points-to info can be directly transferred.  */
      if (id->src_cfun->gimple_df
          && id->src_cfun->gimple_df->ipa_pta
          && (pi = SSA_NAME_PTR_INFO (name))
          && !pi->pt.anything)
        {
          struct ptr_info_def *new_pi = get_ptr_info (new_tree);
          new_pi->pt = pi->pt;
        }
      return new_tree;
    }

  /* Do not set DEF_STMT yet as statement is not copied yet.  We do that
     in copy_bb.  */
  new_tree = remap_decl (var, id);

  /* We might've substituted a constant or another SSA_NAME for
     the variable.

     Replace the SSA name representing RESULT_DECL by the variable during
     inlining: this saves us from having to introduce a PHI node when the
     return value is only partly initialized.  */
  if ((TREE_CODE (new_tree) == VAR_DECL || TREE_CODE (new_tree) == PARM_DECL)
      && (!SSA_NAME_VAR (name)
          || TREE_CODE (SSA_NAME_VAR (name)) != RESULT_DECL
          || !id->transform_return_to_modify))
    {
      struct ptr_info_def *pi;
      new_tree = make_ssa_name (new_tree);
      insert_decl_map (id, name, new_tree);
      SSA_NAME_OCCURS_IN_ABNORMAL_PHI (new_tree)
        = SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name);
      /* At least IPA points-to info can be directly transferred.  */
      if (id->src_cfun->gimple_df
          && id->src_cfun->gimple_df->ipa_pta
          && (pi = SSA_NAME_PTR_INFO (name))
          && !pi->pt.anything)
        {
          struct ptr_info_def *new_pi = get_ptr_info (new_tree);
          new_pi->pt = pi->pt;
        }
      if (SSA_NAME_IS_DEFAULT_DEF (name))
        {
          /* Inlining a function with an uninitialized variable can extend
             its lifetime (the variable might get reused).  This causes an
             ICE if we end up extending the lifetime of an SSA name across
             an abnormal edge, and it also increases register pressure.

             We simply initialize all uninitialized vars by 0, except for
             the case where we are inlining into the very first BB.  We
             could avoid this for all BBs that are not inside strongly
             connected regions of the CFG, but that is expensive to test.  */
          if (id->entry_bb
              && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name)
              && (!SSA_NAME_VAR (name)
                  || TREE_CODE (SSA_NAME_VAR (name)) != PARM_DECL)
              && (id->entry_bb != EDGE_SUCC (ENTRY_BLOCK_PTR_FOR_FN (cfun),
                                             0)->dest
                  || EDGE_COUNT (id->entry_bb->preds) != 1))
            {
              gimple_stmt_iterator gsi = gsi_last_bb (id->entry_bb);
              gimple init_stmt;
              tree zero = build_zero_cst (TREE_TYPE (new_tree));

              init_stmt = gimple_build_assign (new_tree, zero);
              gsi_insert_after (&gsi, init_stmt, GSI_NEW_STMT);
              SSA_NAME_IS_DEFAULT_DEF (new_tree) = 0;
            }
          else
            {
              SSA_NAME_DEF_STMT (new_tree) = gimple_build_nop ();
              set_ssa_default_def (cfun, SSA_NAME_VAR (new_tree), new_tree);
            }
        }
    }
  else
    insert_decl_map (id, name, new_tree);
  return new_tree;
}
/* Remap DECL during the copying of the BLOCK tree for the function.  */

tree
remap_decl (tree decl, copy_body_data *id)
{
  tree *n;

  /* We only remap local variables in the current function.  */

  /* See if we have remapped this declaration.  */

  n = id->decl_map->get (decl);

  if (!n && processing_debug_stmt)
    {
      processing_debug_stmt = -1;
      return decl;
    }

  /* If we didn't already have an equivalent for this declaration,
     create one now.  */
  if (!n)
    {
      /* Make a copy of the variable or label.  */
      tree t = id->copy_decl (decl, id);

      /* Remember it, so that if we encounter this local entity again
         we can reuse this copy.  Do this early because remap_type may
         need this decl for TYPE_STUB_DECL.  */
      insert_decl_map (id, decl, t);

      if (!DECL_P (t))
        return t;

      /* Remap types, if necessary.  */
      TREE_TYPE (t) = remap_type (TREE_TYPE (t), id);
      if (TREE_CODE (t) == TYPE_DECL)
        DECL_ORIGINAL_TYPE (t) = remap_type (DECL_ORIGINAL_TYPE (t), id);

      /* Remap sizes as necessary.  */
      walk_tree (&DECL_SIZE (t), copy_tree_body_r, id, NULL);
      walk_tree (&DECL_SIZE_UNIT (t), copy_tree_body_r, id, NULL);

      /* If fields, do likewise for offset and qualifier.  */
      if (TREE_CODE (t) == FIELD_DECL)
        {
          walk_tree (&DECL_FIELD_OFFSET (t), copy_tree_body_r, id, NULL);
          if (TREE_CODE (DECL_CONTEXT (t)) == QUAL_UNION_TYPE)
            walk_tree (&DECL_QUALIFIER (t), copy_tree_body_r, id, NULL);
        }

      return t;
    }

  if (id->do_not_unshare)
    return *n;
  else
    return unshare_expr (*n);
}
static tree
remap_type_1 (tree type, copy_body_data *id)
{
  tree new_tree, t;

  /* We do need a copy.  Build and register it now.  If this is a pointer or
     reference type, remap the designated type and make a new pointer or
     reference type.  */
  if (TREE_CODE (type) == POINTER_TYPE)
    {
      new_tree = build_pointer_type_for_mode (remap_type (TREE_TYPE (type), id),
                                              TYPE_MODE (type),
                                              TYPE_REF_CAN_ALIAS_ALL (type));
      if (TYPE_ATTRIBUTES (type) || TYPE_QUALS (type))
        new_tree = build_type_attribute_qual_variant (new_tree,
                                                      TYPE_ATTRIBUTES (type),
                                                      TYPE_QUALS (type));
      insert_decl_map (id, type, new_tree);
      return new_tree;
    }
  else if (TREE_CODE (type) == REFERENCE_TYPE)
    {
      new_tree = build_reference_type_for_mode (remap_type (TREE_TYPE (type), id),
                                                TYPE_MODE (type),
                                                TYPE_REF_CAN_ALIAS_ALL (type));
      if (TYPE_ATTRIBUTES (type) || TYPE_QUALS (type))
        new_tree = build_type_attribute_qual_variant (new_tree,
                                                      TYPE_ATTRIBUTES (type),
                                                      TYPE_QUALS (type));
      insert_decl_map (id, type, new_tree);
      return new_tree;
    }
  else
    new_tree = copy_node (type);

  insert_decl_map (id, type, new_tree);

  /* This is a new type, not a copy of an old type.  Need to reassociate
     variants.  We can handle everything except the main variant lazily.  */
  t = TYPE_MAIN_VARIANT (type);
  if (type != t)
    {
      t = remap_type (t, id);
      TYPE_MAIN_VARIANT (new_tree) = t;
      TYPE_NEXT_VARIANT (new_tree) = TYPE_NEXT_VARIANT (t);
      TYPE_NEXT_VARIANT (t) = new_tree;
    }
  else
    {
      TYPE_MAIN_VARIANT (new_tree) = new_tree;
      TYPE_NEXT_VARIANT (new_tree) = NULL;
    }

  if (TYPE_STUB_DECL (type))
    TYPE_STUB_DECL (new_tree) = remap_decl (TYPE_STUB_DECL (type), id);

  /* Lazily create pointer and reference types.  */
  TYPE_POINTER_TO (new_tree) = NULL;
  TYPE_REFERENCE_TO (new_tree) = NULL;

  /* Copy all types that may contain references to local variables; be sure to
     preserve sharing in between type and its main variant when possible.  */
  switch (TREE_CODE (new_tree))
    {
    case INTEGER_TYPE:
    case REAL_TYPE:
    case FIXED_POINT_TYPE:
    case ENUMERAL_TYPE:
    case BOOLEAN_TYPE:
      if (TYPE_MAIN_VARIANT (new_tree) != new_tree)
        {
          gcc_checking_assert (TYPE_MIN_VALUE (type) == TYPE_MIN_VALUE (TYPE_MAIN_VARIANT (type)));
          gcc_checking_assert (TYPE_MAX_VALUE (type) == TYPE_MAX_VALUE (TYPE_MAIN_VARIANT (type)));

          TYPE_MIN_VALUE (new_tree) = TYPE_MIN_VALUE (TYPE_MAIN_VARIANT (new_tree));
          TYPE_MAX_VALUE (new_tree) = TYPE_MAX_VALUE (TYPE_MAIN_VARIANT (new_tree));
        }
      else
        {
          t = TYPE_MIN_VALUE (new_tree);
          if (t && TREE_CODE (t) != INTEGER_CST)
            walk_tree (&TYPE_MIN_VALUE (new_tree), copy_tree_body_r, id, NULL);

          t = TYPE_MAX_VALUE (new_tree);
          if (t && TREE_CODE (t) != INTEGER_CST)
            walk_tree (&TYPE_MAX_VALUE (new_tree), copy_tree_body_r, id, NULL);
        }
      return new_tree;

    case FUNCTION_TYPE:
      if (TYPE_MAIN_VARIANT (new_tree) != new_tree
          && TREE_TYPE (type) == TREE_TYPE (TYPE_MAIN_VARIANT (type)))
        TREE_TYPE (new_tree) = TREE_TYPE (TYPE_MAIN_VARIANT (new_tree));
      else
        TREE_TYPE (new_tree) = remap_type (TREE_TYPE (new_tree), id);
      if (TYPE_MAIN_VARIANT (new_tree) != new_tree
          && TYPE_ARG_TYPES (type) == TYPE_ARG_TYPES (TYPE_MAIN_VARIANT (type)))
        TYPE_ARG_TYPES (new_tree) = TYPE_ARG_TYPES (TYPE_MAIN_VARIANT (new_tree));
      else
        walk_tree (&TYPE_ARG_TYPES (new_tree), copy_tree_body_r, id, NULL);
      return new_tree;

    case ARRAY_TYPE:
      if (TYPE_MAIN_VARIANT (new_tree) != new_tree
          && TREE_TYPE (type) == TREE_TYPE (TYPE_MAIN_VARIANT (type)))
        TREE_TYPE (new_tree) = TREE_TYPE (TYPE_MAIN_VARIANT (new_tree));
      else
        TREE_TYPE (new_tree) = remap_type (TREE_TYPE (new_tree), id);

      if (TYPE_MAIN_VARIANT (new_tree) != new_tree)
        {
          gcc_checking_assert (TYPE_DOMAIN (type) == TYPE_DOMAIN (TYPE_MAIN_VARIANT (type)));
          TYPE_DOMAIN (new_tree) = TYPE_DOMAIN (TYPE_MAIN_VARIANT (new_tree));
        }
      else
        TYPE_DOMAIN (new_tree) = remap_type (TYPE_DOMAIN (new_tree), id);
      break;

    case RECORD_TYPE:
    case UNION_TYPE:
    case QUAL_UNION_TYPE:
      if (TYPE_MAIN_VARIANT (type) != type
          && TYPE_FIELDS (type) == TYPE_FIELDS (TYPE_MAIN_VARIANT (type)))
        TYPE_FIELDS (new_tree) = TYPE_FIELDS (TYPE_MAIN_VARIANT (new_tree));
      else
        {
          tree f, nf = NULL;

          for (f = TYPE_FIELDS (new_tree); f ; f = DECL_CHAIN (f))
            {
              t = remap_decl (f, id);
              DECL_CONTEXT (t) = new_tree;
              DECL_CHAIN (t) = nf;
              nf = t;
            }
          TYPE_FIELDS (new_tree) = nreverse (nf);
        }
      break;

    case OFFSET_TYPE:
    default:
      /* Shouldn't have been thought variable sized.  */
      gcc_unreachable ();
    }

  /* All variants of type share the same size, so use the already remapped data.  */
  if (TYPE_MAIN_VARIANT (new_tree) != new_tree)
    {
      gcc_checking_assert (TYPE_SIZE (type) == TYPE_SIZE (TYPE_MAIN_VARIANT (type)));
      gcc_checking_assert (TYPE_SIZE_UNIT (type) == TYPE_SIZE_UNIT (TYPE_MAIN_VARIANT (type)));

      TYPE_SIZE (new_tree) = TYPE_SIZE (TYPE_MAIN_VARIANT (new_tree));
      TYPE_SIZE_UNIT (new_tree) = TYPE_SIZE_UNIT (TYPE_MAIN_VARIANT (new_tree));
    }
  else
    {
      walk_tree (&TYPE_SIZE (new_tree), copy_tree_body_r, id, NULL);
      walk_tree (&TYPE_SIZE_UNIT (new_tree), copy_tree_body_r, id, NULL);
    }

  return new_tree;
}
tree
remap_type (tree type, copy_body_data *id)
{
  tree *node;
  tree tmp;

  if (type == NULL)
    return type;

  /* See if we have remapped this type.  */
  node = id->decl_map->get (type);
  if (node)
    return *node;

  /* The type only needs remapping if it's variably modified.  */
  if (! variably_modified_type_p (type, id->src_fn))
    {
      insert_decl_map (id, type, type);
      return type;
    }

  id->remapping_type_depth++;
  tmp = remap_type_1 (type, id);
  id->remapping_type_depth--;

  return tmp;
}
/* Decide if DECL can be put into BLOCK_NONLOCAL_VARs.  */

static bool
can_be_nonlocal (tree decl, copy_body_data *id)
{
  /* We can not duplicate function decls.  */
  if (TREE_CODE (decl) == FUNCTION_DECL)
    return true;

  /* Local static vars must be non-local or we get multiple declaration
     problems.  */
  if (TREE_CODE (decl) == VAR_DECL
      && !auto_var_in_fn_p (decl, id->src_fn))
    return true;

  return false;
}
624 static tree
625 remap_decls (tree decls, vec<tree, va_gc> **nonlocalized_list,
626 copy_body_data *id)
628 tree old_var;
629 tree new_decls = NULL_TREE;
631 /* Remap its variables. */
632 for (old_var = decls; old_var; old_var = DECL_CHAIN (old_var))
634 tree new_var;
636 if (can_be_nonlocal (old_var, id))
638 /* We need to add this variable to the local decls as otherwise
639 nothing else will do so. */
640 if (TREE_CODE (old_var) == VAR_DECL
641 && ! DECL_EXTERNAL (old_var))
642 add_local_decl (cfun, old_var);
643 if ((!optimize || debug_info_level > DINFO_LEVEL_TERSE)
644 && !DECL_IGNORED_P (old_var)
645 && nonlocalized_list)
646 vec_safe_push (*nonlocalized_list, old_var);
647 continue;
650 /* Remap the variable. */
651 new_var = remap_decl (old_var, id);
653 /* If we didn't remap this variable, we can't mess with its
654 TREE_CHAIN. If we remapped this variable to the return slot, it's
655 already declared somewhere else, so don't declare it here. */
657 if (new_var == id->retvar)
659 else if (!new_var)
661 if ((!optimize || debug_info_level > DINFO_LEVEL_TERSE)
662 && !DECL_IGNORED_P (old_var)
663 && nonlocalized_list)
664 vec_safe_push (*nonlocalized_list, old_var);
666 else
668 gcc_assert (DECL_P (new_var));
669 DECL_CHAIN (new_var) = new_decls;
670 new_decls = new_var;
672 /* Also copy value-expressions. */
673 if (TREE_CODE (new_var) == VAR_DECL
674 && DECL_HAS_VALUE_EXPR_P (new_var))
676 tree tem = DECL_VALUE_EXPR (new_var);
677 bool old_regimplify = id->regimplify;
678 id->remapping_type_depth++;
679 walk_tree (&tem, copy_tree_body_r, id, NULL);
680 id->remapping_type_depth--;
681 id->regimplify = old_regimplify;
682 SET_DECL_VALUE_EXPR (new_var, tem);
687 return nreverse (new_decls);
690 /* Copy the BLOCK to contain remapped versions of the variables
691 therein. And hook the new block into the block-tree. */
693 static void
694 remap_block (tree *block, copy_body_data *id)
696 tree old_block;
697 tree new_block;
699 /* Make the new block. */
700 old_block = *block;
701 new_block = make_node (BLOCK);
702 TREE_USED (new_block) = TREE_USED (old_block);
703 BLOCK_ABSTRACT_ORIGIN (new_block) = old_block;
704 BLOCK_SOURCE_LOCATION (new_block) = BLOCK_SOURCE_LOCATION (old_block);
705 BLOCK_NONLOCALIZED_VARS (new_block)
706 = vec_safe_copy (BLOCK_NONLOCALIZED_VARS (old_block));
707 *block = new_block;
709 /* Remap its variables. */
710 BLOCK_VARS (new_block) = remap_decls (BLOCK_VARS (old_block),
711 &BLOCK_NONLOCALIZED_VARS (new_block),
712 id);
714 if (id->transform_lang_insert_block)
715 id->transform_lang_insert_block (new_block);
717 /* Remember the remapped block. */
718 insert_decl_map (id, old_block, new_block);
721 /* Copy the whole block tree and root it in id->block. */
722 static tree
723 remap_blocks (tree block, copy_body_data *id)
725 tree t;
726 tree new_tree = block;
728 if (!block)
729 return NULL;
731 remap_block (&new_tree, id);
732 gcc_assert (new_tree != block);
733 for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
734 prepend_lexical_block (new_tree, remap_blocks (t, id));
735 /* Blocks are in arbitrary order, but make things slightly prettier and do
736 not swap order when producing a copy. */
737 BLOCK_SUBBLOCKS (new_tree) = blocks_nreverse (BLOCK_SUBBLOCKS (new_tree));
738 return new_tree;
741 /* Remap the block tree rooted at BLOCK to nothing. */
742 static void
743 remap_blocks_to_null (tree block, copy_body_data *id)
745 tree t;
746 insert_decl_map (id, block, NULL_TREE);
747 for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
748 remap_blocks_to_null (t, id);
751 static void
752 copy_statement_list (tree *tp)
754 tree_stmt_iterator oi, ni;
755 tree new_tree;
757 new_tree = alloc_stmt_list ();
758 ni = tsi_start (new_tree);
759 oi = tsi_start (*tp);
760 TREE_TYPE (new_tree) = TREE_TYPE (*tp);
761 *tp = new_tree;
763 for (; !tsi_end_p (oi); tsi_next (&oi))
765 tree stmt = tsi_stmt (oi);
766 if (TREE_CODE (stmt) == STATEMENT_LIST)
767 /* This copy is not redundant; tsi_link_after will smash this
768 STATEMENT_LIST into the end of the one we're building, and we
769 don't want to do that with the original. */
770 copy_statement_list (&stmt);
771 tsi_link_after (&ni, stmt, TSI_CONTINUE_LINKING);
775 static void
776 copy_bind_expr (tree *tp, int *walk_subtrees, copy_body_data *id)
778 tree block = BIND_EXPR_BLOCK (*tp);
779 /* Copy (and replace) the statement. */
780 copy_tree_r (tp, walk_subtrees, NULL);
781 if (block)
783 remap_block (&block, id);
784 BIND_EXPR_BLOCK (*tp) = block;
787 if (BIND_EXPR_VARS (*tp))
788 /* This will remap a lot of the same decls again, but this should be
789 harmless. */
790 BIND_EXPR_VARS (*tp) = remap_decls (BIND_EXPR_VARS (*tp), NULL, id);
794 /* Create a new gimple_seq by remapping all the statements in BODY
795 using the inlining information in ID. */
797 static gimple_seq
798 remap_gimple_seq (gimple_seq body, copy_body_data *id)
800 gimple_stmt_iterator si;
801 gimple_seq new_body = NULL;
803 for (si = gsi_start (body); !gsi_end_p (si); gsi_next (&si))
805 gimple_seq new_stmts = remap_gimple_stmt (gsi_stmt (si), id);
806 gimple_seq_add_seq (&new_body, new_stmts);
809 return new_body;
813 /* Copy a GIMPLE_BIND statement STMT, remapping all the symbols in its
814 block using the mapping information in ID. */
816 static gimple
817 copy_gimple_bind (gbind *stmt, copy_body_data *id)
819 gimple new_bind;
820 tree new_block, new_vars;
821 gimple_seq body, new_body;
823 /* Copy the statement. Note that we purposely don't use copy_stmt
824 here because we need to remap statements as we copy. */
825 body = gimple_bind_body (stmt);
826 new_body = remap_gimple_seq (body, id);
828 new_block = gimple_bind_block (stmt);
829 if (new_block)
830 remap_block (&new_block, id);
832 /* This will remap a lot of the same decls again, but this should be
833 harmless. */
834 new_vars = gimple_bind_vars (stmt);
835 if (new_vars)
836 new_vars = remap_decls (new_vars, NULL, id);
838 new_bind = gimple_build_bind (new_vars, new_body, new_block);
840 return new_bind;
843 /* Return true if DECL is a parameter or a SSA_NAME for a parameter. */
845 static bool
846 is_parm (tree decl)
848 if (TREE_CODE (decl) == SSA_NAME)
850 decl = SSA_NAME_VAR (decl);
851 if (!decl)
852 return false;
855 return (TREE_CODE (decl) == PARM_DECL);
858 /* Remap the dependence CLIQUE from the source to the destination function
859 as specified in ID. */
861 static unsigned short
862 remap_dependence_clique (copy_body_data *id, unsigned short clique)
864 if (clique == 0)
865 return 0;
866 if (!id->dependence_map)
867 id->dependence_map
868 = new hash_map<unsigned short, unsigned short, dependence_hasher>;
869 bool existed;
870 unsigned short &newc = id->dependence_map->get_or_insert (clique, &existed);
871 if (!existed)
872 newc = ++cfun->last_clique;
873 return newc;
876 /* Remap the GIMPLE operand pointed to by *TP. DATA is really a
877 'struct walk_stmt_info *'. DATA->INFO is a 'copy_body_data *'.
878 WALK_SUBTREES is used to indicate walk_gimple_op whether to keep
879 recursing into the children nodes of *TP. */
881 static tree
882 remap_gimple_op_r (tree *tp, int *walk_subtrees, void *data)
884 struct walk_stmt_info *wi_p = (struct walk_stmt_info *) data;
885 copy_body_data *id = (copy_body_data *) wi_p->info;
886 tree fn = id->src_fn;
888 if (TREE_CODE (*tp) == SSA_NAME)
890 *tp = remap_ssa_name (*tp, id);
891 *walk_subtrees = 0;
892 return NULL;
894 else if (auto_var_in_fn_p (*tp, fn))
896 /* Local variables and labels need to be replaced by equivalent
897 variables. We don't want to copy static variables; there's
898 only one of those, no matter how many times we inline the
899 containing function. Similarly for globals from an outer
900 function. */
901 tree new_decl;
903 /* Remap the declaration. */
904 new_decl = remap_decl (*tp, id);
905 gcc_assert (new_decl);
906 /* Replace this variable with the copy. */
907 STRIP_TYPE_NOPS (new_decl);
908 /* ??? The C++ frontend uses void * pointer zero to initialize
909 any other type. This confuses the middle-end type verification.
910 As cloned bodies do not go through gimplification again the fixup
911 there doesn't trigger. */
912 if (TREE_CODE (new_decl) == INTEGER_CST
913 && !useless_type_conversion_p (TREE_TYPE (*tp), TREE_TYPE (new_decl)))
914 new_decl = fold_convert (TREE_TYPE (*tp), new_decl);
915 *tp = new_decl;
916 *walk_subtrees = 0;
918 else if (TREE_CODE (*tp) == STATEMENT_LIST)
919 gcc_unreachable ();
920 else if (TREE_CODE (*tp) == SAVE_EXPR)
921 gcc_unreachable ();
922 else if (TREE_CODE (*tp) == LABEL_DECL
923 && (!DECL_CONTEXT (*tp)
924 || decl_function_context (*tp) == id->src_fn))
925 /* These may need to be remapped for EH handling. */
926 *tp = remap_decl (*tp, id);
927 else if (TREE_CODE (*tp) == FIELD_DECL)
929 /* If the enclosing record type is variably_modified_type_p, the field
930 has already been remapped. Otherwise, it need not be. */
931 tree *n = id->decl_map->get (*tp);
932 if (n)
933 *tp = *n;
934 *walk_subtrees = 0;
936 else if (TYPE_P (*tp))
937 /* Types may need remapping as well. */
938 *tp = remap_type (*tp, id);
939 else if (CONSTANT_CLASS_P (*tp))
941 /* If this is a constant, we have to copy the node iff the type
942 will be remapped. copy_tree_r will not copy a constant. */
943 tree new_type = remap_type (TREE_TYPE (*tp), id);
945 if (new_type == TREE_TYPE (*tp))
946 *walk_subtrees = 0;
948 else if (TREE_CODE (*tp) == INTEGER_CST)
949 *tp = wide_int_to_tree (new_type, *tp);
950 else
952 *tp = copy_node (*tp);
953 TREE_TYPE (*tp) = new_type;
956 else
958 /* Otherwise, just copy the node. Note that copy_tree_r already
959 knows not to copy VAR_DECLs, etc., so this is safe. */
961 if (TREE_CODE (*tp) == MEM_REF)
963 /* We need to re-canonicalize MEM_REFs from inline substitutions
964 that can happen when a pointer argument is an ADDR_EXPR.
965 Recurse here manually to allow that. */
966 tree ptr = TREE_OPERAND (*tp, 0);
967 tree type = remap_type (TREE_TYPE (*tp), id);
968 tree old = *tp;
969 walk_tree (&ptr, remap_gimple_op_r, data, NULL);
970 *tp = fold_build2 (MEM_REF, type, ptr, TREE_OPERAND (*tp, 1));
971 TREE_THIS_VOLATILE (*tp) = TREE_THIS_VOLATILE (old);
972 TREE_SIDE_EFFECTS (*tp) = TREE_SIDE_EFFECTS (old);
973 TREE_NO_WARNING (*tp) = TREE_NO_WARNING (old);
974 if (MR_DEPENDENCE_CLIQUE (old) != 0)
976 MR_DEPENDENCE_CLIQUE (*tp)
977 = remap_dependence_clique (id, MR_DEPENDENCE_CLIQUE (old));
978 MR_DEPENDENCE_BASE (*tp) = MR_DEPENDENCE_BASE (old);
980 /* We cannot propagate the TREE_THIS_NOTRAP flag if we have
981 remapped a parameter as the property might be valid only
982 for the parameter itself. */
983 if (TREE_THIS_NOTRAP (old)
984 && (!is_parm (TREE_OPERAND (old, 0))
985 || (!id->transform_parameter && is_parm (ptr))))
986 TREE_THIS_NOTRAP (*tp) = 1;
987 *walk_subtrees = 0;
988 return NULL;
991 /* Here is the "usual case". Copy this tree node, and then
992 tweak some special cases. */
993 copy_tree_r (tp, walk_subtrees, NULL);
995 if (TREE_CODE (*tp) != OMP_CLAUSE)
996 TREE_TYPE (*tp) = remap_type (TREE_TYPE (*tp), id);
998 if (TREE_CODE (*tp) == TARGET_EXPR && TREE_OPERAND (*tp, 3))
1000 /* The copied TARGET_EXPR has never been expanded, even if the
1001 original node was expanded already. */
1002 TREE_OPERAND (*tp, 1) = TREE_OPERAND (*tp, 3);
1003 TREE_OPERAND (*tp, 3) = NULL_TREE;
1005 else if (TREE_CODE (*tp) == ADDR_EXPR)
1007 /* Variable substitution need not be simple. In particular,
1008 the MEM_REF substitution above. Make sure that
1009 TREE_CONSTANT and friends are up-to-date. */
1010 int invariant = is_gimple_min_invariant (*tp);
1011 walk_tree (&TREE_OPERAND (*tp, 0), remap_gimple_op_r, data, NULL);
1012 recompute_tree_invariant_for_addr_expr (*tp);
1014 /* If this used to be invariant, but is not any longer,
1015 then regimplification is probably needed. */
1016 if (invariant && !is_gimple_min_invariant (*tp))
1017 id->regimplify = true;
1019 *walk_subtrees = 0;
1023 /* Update the TREE_BLOCK for the cloned expr. */
1024 if (EXPR_P (*tp))
1026 tree new_block = id->remapping_type_depth == 0 ? id->block : NULL;
1027 tree old_block = TREE_BLOCK (*tp);
1028 if (old_block)
1030 tree *n;
1031 n = id->decl_map->get (TREE_BLOCK (*tp));
1032 if (n)
1033 new_block = *n;
1035 TREE_SET_BLOCK (*tp, new_block);
1038 /* Keep iterating. */
1039 return NULL_TREE;
1043 /* Called from copy_body_id via walk_tree. DATA is really a
1044 `copy_body_data *'. */
1046 tree
1047 copy_tree_body_r (tree *tp, int *walk_subtrees, void *data)
1049 copy_body_data *id = (copy_body_data *) data;
1050 tree fn = id->src_fn;
1051 tree new_block;
1053 /* Begin by recognizing trees that we'll completely rewrite for the
1054 inlining context. Our output for these trees is completely
1055 different from out input (e.g. RETURN_EXPR is deleted, and morphs
1056 into an edge). Further down, we'll handle trees that get
1057 duplicated and/or tweaked. */
1059 /* When requested, RETURN_EXPRs should be transformed to just the
1060 contained MODIFY_EXPR. The branch semantics of the return will
1061 be handled elsewhere by manipulating the CFG rather than a statement. */
1062 if (TREE_CODE (*tp) == RETURN_EXPR && id->transform_return_to_modify)
1064 tree assignment = TREE_OPERAND (*tp, 0);
1066 /* If we're returning something, just turn that into an
1067 assignment into the equivalent of the original RESULT_DECL.
1068 If the "assignment" is just the result decl, the result
1069 decl has already been set (e.g. a recent "foo (&result_decl,
1070 ...)"); just toss the entire RETURN_EXPR. */
1071 if (assignment && TREE_CODE (assignment) == MODIFY_EXPR)
1073 /* Replace the RETURN_EXPR with (a copy of) the
1074 MODIFY_EXPR hanging underneath. */
1075 *tp = copy_node (assignment);
1077 else /* Else the RETURN_EXPR returns no value. */
1079 *tp = NULL;
1080 return (tree) (void *)1;
1083 else if (TREE_CODE (*tp) == SSA_NAME)
1085 *tp = remap_ssa_name (*tp, id);
1086 *walk_subtrees = 0;
1087 return NULL;
1090 /* Local variables and labels need to be replaced by equivalent
1091 variables. We don't want to copy static variables; there's only
1092 one of those, no matter how many times we inline the containing
1093 function. Similarly for globals from an outer function. */
1094 else if (auto_var_in_fn_p (*tp, fn))
1096 tree new_decl;
1098 /* Remap the declaration. */
1099 new_decl = remap_decl (*tp, id);
1100 gcc_assert (new_decl);
1101 /* Replace this variable with the copy. */
1102 STRIP_TYPE_NOPS (new_decl);
1103 *tp = new_decl;
1104 *walk_subtrees = 0;
1106 else if (TREE_CODE (*tp) == STATEMENT_LIST)
1107 copy_statement_list (tp);
1108 else if (TREE_CODE (*tp) == SAVE_EXPR
1109 || TREE_CODE (*tp) == TARGET_EXPR)
1110 remap_save_expr (tp, id->decl_map, walk_subtrees);
1111 else if (TREE_CODE (*tp) == LABEL_DECL
1112 && (! DECL_CONTEXT (*tp)
1113 || decl_function_context (*tp) == id->src_fn))
1114 /* These may need to be remapped for EH handling. */
1115 *tp = remap_decl (*tp, id);
1116 else if (TREE_CODE (*tp) == BIND_EXPR)
1117 copy_bind_expr (tp, walk_subtrees, id);
1118 /* Types may need remapping as well. */
1119 else if (TYPE_P (*tp))
1120 *tp = remap_type (*tp, id);
1122 /* If this is a constant, we have to copy the node iff the type will be
1123 remapped. copy_tree_r will not copy a constant. */
1124 else if (CONSTANT_CLASS_P (*tp))
1126 tree new_type = remap_type (TREE_TYPE (*tp), id);
1128 if (new_type == TREE_TYPE (*tp))
1129 *walk_subtrees = 0;
1131 else if (TREE_CODE (*tp) == INTEGER_CST)
1132 *tp = wide_int_to_tree (new_type, *tp);
1133 else
1135 *tp = copy_node (*tp);
1136 TREE_TYPE (*tp) = new_type;
1140 /* Otherwise, just copy the node. Note that copy_tree_r already
1141 knows not to copy VAR_DECLs, etc., so this is safe. */
1142 else
1144 /* Here we handle trees that are not completely rewritten.
1145 First we detect some inlining-induced bogosities for
1146 discarding. */
1147 if (TREE_CODE (*tp) == MODIFY_EXPR
1148 && TREE_OPERAND (*tp, 0) == TREE_OPERAND (*tp, 1)
1149 && (auto_var_in_fn_p (TREE_OPERAND (*tp, 0), fn)))
1151 /* Some assignments VAR = VAR; don't generate any rtl code
1152 and thus don't count as variable modification. Avoid
1153 keeping bogosities like 0 = 0. */
1154 tree decl = TREE_OPERAND (*tp, 0), value;
1155 tree *n;
1157 n = id->decl_map->get (decl);
1158 if (n)
1160 value = *n;
1161 STRIP_TYPE_NOPS (value);
1162 if (TREE_CONSTANT (value) || TREE_READONLY (value))
1164 *tp = build_empty_stmt (EXPR_LOCATION (*tp));
1165 return copy_tree_body_r (tp, walk_subtrees, data);
1169 else if (TREE_CODE (*tp) == INDIRECT_REF)
1171 /* Get rid of *& from inline substitutions that can happen when a
1172 pointer argument is an ADDR_EXPR. */
1173 tree decl = TREE_OPERAND (*tp, 0);
1174 tree *n = id->decl_map->get (decl);
1175 if (n)
1177 /* If we happen to get an ADDR_EXPR in n->value, strip
1178 it manually here as we'll eventually get ADDR_EXPRs
1179 which lie about their types pointed to. In this case
1180 build_fold_indirect_ref wouldn't strip the INDIRECT_REF,
1181 but we absolutely rely on that. As fold_indirect_ref
1182 does other useful transformations, try that first, though. */
1183 tree type = TREE_TYPE (*tp);
1184 tree ptr = id->do_not_unshare ? *n : unshare_expr (*n);
1185 tree old = *tp;
1186 *tp = gimple_fold_indirect_ref (ptr);
1187 if (! *tp)
1189 if (TREE_CODE (ptr) == ADDR_EXPR)
1192 = fold_indirect_ref_1 (EXPR_LOCATION (ptr), type, ptr);
1193 /* ??? We should either assert here or build
1194 a VIEW_CONVERT_EXPR instead of blindly leaking
1195 incompatible types to our IL. */
1196 if (! *tp)
1197 *tp = TREE_OPERAND (ptr, 0);
1199 else
1201 *tp = build1 (INDIRECT_REF, type, ptr);
1202 TREE_THIS_VOLATILE (*tp) = TREE_THIS_VOLATILE (old);
1203 TREE_SIDE_EFFECTS (*tp) = TREE_SIDE_EFFECTS (old);
1204 TREE_READONLY (*tp) = TREE_READONLY (old);
1205 /* We cannot propagate the TREE_THIS_NOTRAP flag if we
1206 have remapped a parameter as the property might be
1207 valid only for the parameter itself. */
1208 if (TREE_THIS_NOTRAP (old)
1209 && (!is_parm (TREE_OPERAND (old, 0))
1210 || (!id->transform_parameter && is_parm (ptr))))
1211 TREE_THIS_NOTRAP (*tp) = 1;
1214 *walk_subtrees = 0;
1215 return NULL;
1218 else if (TREE_CODE (*tp) == MEM_REF)
1220 /* We need to re-canonicalize MEM_REFs from inline substitutions
1221 that can happen when a pointer argument is an ADDR_EXPR.
1222 Recurse here manually to allow that. */
1223 tree ptr = TREE_OPERAND (*tp, 0);
1224 tree type = remap_type (TREE_TYPE (*tp), id);
1225 tree old = *tp;
1226 walk_tree (&ptr, copy_tree_body_r, data, NULL);
1227 *tp = fold_build2 (MEM_REF, type, ptr, TREE_OPERAND (*tp, 1));
1228 TREE_THIS_VOLATILE (*tp) = TREE_THIS_VOLATILE (old);
1229 TREE_SIDE_EFFECTS (*tp) = TREE_SIDE_EFFECTS (old);
1230 TREE_NO_WARNING (*tp) = TREE_NO_WARNING (old);
1231 if (MR_DEPENDENCE_CLIQUE (old) != 0)
1233 MR_DEPENDENCE_CLIQUE (*tp)
1234 = remap_dependence_clique (id, MR_DEPENDENCE_CLIQUE (old));
1235 MR_DEPENDENCE_BASE (*tp) = MR_DEPENDENCE_BASE (old);
1237 /* We cannot propagate the TREE_THIS_NOTRAP flag if we have
1238 remapped a parameter as the property might be valid only
1239 for the parameter itself. */
1240 if (TREE_THIS_NOTRAP (old)
1241 && (!is_parm (TREE_OPERAND (old, 0))
1242 || (!id->transform_parameter && is_parm (ptr))))
1243 TREE_THIS_NOTRAP (*tp) = 1;
1244 *walk_subtrees = 0;
1245 return NULL;
1248 /* Here is the "usual case". Copy this tree node, and then
1249 tweak some special cases. */
1250 copy_tree_r (tp, walk_subtrees, NULL);
1252 /* If EXPR has block defined, map it to newly constructed block.
1253 When inlining we want EXPRs without block appear in the block
1254 of function call if we are not remapping a type. */
1255 if (EXPR_P (*tp))
1257 new_block = id->remapping_type_depth == 0 ? id->block : NULL;
1258 if (TREE_BLOCK (*tp))
1260 tree *n;
1261 n = id->decl_map->get (TREE_BLOCK (*tp));
1262 if (n)
1263 new_block = *n;
1265 TREE_SET_BLOCK (*tp, new_block);
1268 if (TREE_CODE (*tp) != OMP_CLAUSE)
1269 TREE_TYPE (*tp) = remap_type (TREE_TYPE (*tp), id);
1271 /* The copied TARGET_EXPR has never been expanded, even if the
1272 original node was expanded already. */
1273 if (TREE_CODE (*tp) == TARGET_EXPR && TREE_OPERAND (*tp, 3))
1275 TREE_OPERAND (*tp, 1) = TREE_OPERAND (*tp, 3);
1276 TREE_OPERAND (*tp, 3) = NULL_TREE;
1279 /* Variable substitution need not be simple. In particular, the
1280 INDIRECT_REF substitution above. Make sure that TREE_CONSTANT
1281 and friends are up-to-date. */
1282 else if (TREE_CODE (*tp) == ADDR_EXPR)
1284 int invariant = is_gimple_min_invariant (*tp);
1285 walk_tree (&TREE_OPERAND (*tp, 0), copy_tree_body_r, id, NULL);
1287 /* Handle the case where we substituted an INDIRECT_REF
1288 into the operand of the ADDR_EXPR. */
1289 if (TREE_CODE (TREE_OPERAND (*tp, 0)) == INDIRECT_REF)
1290 *tp = TREE_OPERAND (TREE_OPERAND (*tp, 0), 0);
1291 else
1292 recompute_tree_invariant_for_addr_expr (*tp);
1294 /* If this used to be invariant, but is not any longer,
1295 then regimplification is probably needed. */
1296 if (invariant && !is_gimple_min_invariant (*tp))
1297 id->regimplify = true;
1299 *walk_subtrees = 0;
1303 /* Keep iterating. */
1304 return NULL_TREE;
1307 /* Helper for remap_gimple_stmt. Given an EH region number for the
1308 source function, map that to the duplicate EH region number in
1309 the destination function. */
1311 static int
1312 remap_eh_region_nr (int old_nr, copy_body_data *id)
1314 eh_region old_r, new_r;
1316 old_r = get_eh_region_from_number_fn (id->src_cfun, old_nr);
1317 new_r = static_cast<eh_region> (*id->eh_map->get (old_r));
1319 return new_r->index;
1322 /* Similar, but operate on INTEGER_CSTs. */
1324 static tree
1325 remap_eh_region_tree_nr (tree old_t_nr, copy_body_data *id)
1327 int old_nr, new_nr;
1329 old_nr = tree_to_shwi (old_t_nr);
1330 new_nr = remap_eh_region_nr (old_nr, id);
1332 return build_int_cst (integer_type_node, new_nr);
1335 /* Helper for copy_bb. Remap statement STMT using the inlining
1336 information in ID. Return the new statement copy. */
1338 static gimple_seq
1339 remap_gimple_stmt (gimple stmt, copy_body_data *id)
1341 gimple copy = NULL;
1342 struct walk_stmt_info wi;
1343 bool skip_first = false;
1344 gimple_seq stmts = NULL;
1346 if (is_gimple_debug (stmt)
1347 && !opt_for_fn (id->dst_fn, flag_var_tracking_assignments))
1348 return stmts;
1350 /* Begin by recognizing trees that we'll completely rewrite for the
1351 inlining context. Our output for these trees is completely
1352 different from out input (e.g. RETURN_EXPR is deleted, and morphs
1353 into an edge). Further down, we'll handle trees that get
1354 duplicated and/or tweaked. */
1356 /* When requested, GIMPLE_RETURNs should be transformed to just the
1357 contained GIMPLE_ASSIGN. The branch semantics of the return will
1358 be handled elsewhere by manipulating the CFG rather than the
1359 statement. */
1360 if (gimple_code (stmt) == GIMPLE_RETURN && id->transform_return_to_modify)
1362 tree retval = gimple_return_retval (as_a <greturn *> (stmt));
1363 tree retbnd = gimple_return_retbnd (stmt);
1364 tree bndslot = id->retbnd;
1366 if (retbnd && bndslot)
1368 gimple bndcopy = gimple_build_assign (bndslot, retbnd);
1369 memset (&wi, 0, sizeof (wi));
1370 wi.info = id;
1371 walk_gimple_op (bndcopy, remap_gimple_op_r, &wi);
1372 gimple_seq_add_stmt (&stmts, bndcopy);
1375 /* If we're returning something, just turn that into an
1376 assignment into the equivalent of the original RESULT_DECL.
1377 If RETVAL is just the result decl, the result decl has
1378 already been set (e.g. a recent "foo (&result_decl, ...)");
1379 just toss the entire GIMPLE_RETURN. */
1380 if (retval
1381 && (TREE_CODE (retval) != RESULT_DECL
1382 && (TREE_CODE (retval) != SSA_NAME
1383 || ! SSA_NAME_VAR (retval)
1384 || TREE_CODE (SSA_NAME_VAR (retval)) != RESULT_DECL)))
1386 copy = gimple_build_assign (id->do_not_unshare
1387 ? id->retvar : unshare_expr (id->retvar),
1388 retval);
1389 /* id->retvar is already substituted. Skip it on later remapping. */
1390 skip_first = true;
1392 /* We need to copy bounds if return structure with pointers into
1393 instrumented function. */
1394 if (chkp_function_instrumented_p (id->dst_fn)
1395 && !bndslot
1396 && !BOUNDED_P (id->retvar)
1397 && chkp_type_has_pointer (TREE_TYPE (id->retvar)))
1398 id->assign_stmts.safe_push (copy);
1401 else
1402 return stmts;
1404 else if (gimple_has_substatements (stmt))
1406 gimple_seq s1, s2;
1408 /* When cloning bodies from the C++ front end, we will be handed bodies
1409 in High GIMPLE form. Handle here all the High GIMPLE statements that
1410 have embedded statements. */
1411 switch (gimple_code (stmt))
1413 case GIMPLE_BIND:
1414 copy = copy_gimple_bind (as_a <gbind *> (stmt), id);
1415 break;
1417 case GIMPLE_CATCH:
1419 gcatch *catch_stmt = as_a <gcatch *> (stmt);
1420 s1 = remap_gimple_seq (gimple_catch_handler (catch_stmt), id);
1421 copy = gimple_build_catch (gimple_catch_types (catch_stmt), s1);
1423 break;
1425 case GIMPLE_EH_FILTER:
1426 s1 = remap_gimple_seq (gimple_eh_filter_failure (stmt), id);
1427 copy = gimple_build_eh_filter (gimple_eh_filter_types (stmt), s1);
1428 break;
1430 case GIMPLE_TRY:
1431 s1 = remap_gimple_seq (gimple_try_eval (stmt), id);
1432 s2 = remap_gimple_seq (gimple_try_cleanup (stmt), id);
1433 copy = gimple_build_try (s1, s2, gimple_try_kind (stmt));
1434 break;
1436 case GIMPLE_WITH_CLEANUP_EXPR:
1437 s1 = remap_gimple_seq (gimple_wce_cleanup (stmt), id);
1438 copy = gimple_build_wce (s1);
1439 break;
1441 case GIMPLE_OMP_PARALLEL:
1443 gomp_parallel *omp_par_stmt = as_a <gomp_parallel *> (stmt);
1444 s1 = remap_gimple_seq (gimple_omp_body (omp_par_stmt), id);
1445 copy = gimple_build_omp_parallel
1446 (s1,
1447 gimple_omp_parallel_clauses (omp_par_stmt),
1448 gimple_omp_parallel_child_fn (omp_par_stmt),
1449 gimple_omp_parallel_data_arg (omp_par_stmt));
1451 break;
1453 case GIMPLE_OMP_TASK:
1454 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1455 copy = gimple_build_omp_task
1456 (s1,
1457 gimple_omp_task_clauses (stmt),
1458 gimple_omp_task_child_fn (stmt),
1459 gimple_omp_task_data_arg (stmt),
1460 gimple_omp_task_copy_fn (stmt),
1461 gimple_omp_task_arg_size (stmt),
1462 gimple_omp_task_arg_align (stmt));
1463 break;
1465 case GIMPLE_OMP_FOR:
1466 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1467 s2 = remap_gimple_seq (gimple_omp_for_pre_body (stmt), id);
1468 copy = gimple_build_omp_for (s1, gimple_omp_for_kind (stmt),
1469 gimple_omp_for_clauses (stmt),
1470 gimple_omp_for_collapse (stmt), s2);
1472 size_t i;
1473 for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
1475 gimple_omp_for_set_index (copy, i,
1476 gimple_omp_for_index (stmt, i));
1477 gimple_omp_for_set_initial (copy, i,
1478 gimple_omp_for_initial (stmt, i));
1479 gimple_omp_for_set_final (copy, i,
1480 gimple_omp_for_final (stmt, i));
1481 gimple_omp_for_set_incr (copy, i,
1482 gimple_omp_for_incr (stmt, i));
1483 gimple_omp_for_set_cond (copy, i,
1484 gimple_omp_for_cond (stmt, i));
1487 break;
1489 case GIMPLE_OMP_MASTER:
1490 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1491 copy = gimple_build_omp_master (s1);
1492 break;
1494 case GIMPLE_OMP_TASKGROUP:
1495 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1496 copy = gimple_build_omp_taskgroup (s1);
1497 break;
1499 case GIMPLE_OMP_ORDERED:
1500 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1501 copy = gimple_build_omp_ordered (s1);
1502 break;
1504 case GIMPLE_OMP_SECTION:
1505 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1506 copy = gimple_build_omp_section (s1);
1507 break;
1509 case GIMPLE_OMP_SECTIONS:
1510 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1511 copy = gimple_build_omp_sections
1512 (s1, gimple_omp_sections_clauses (stmt));
1513 break;
1515 case GIMPLE_OMP_SINGLE:
1516 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1517 copy = gimple_build_omp_single
1518 (s1, gimple_omp_single_clauses (stmt));
1519 break;
1521 case GIMPLE_OMP_TARGET:
1522 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1523 copy = gimple_build_omp_target
1524 (s1, gimple_omp_target_kind (stmt),
1525 gimple_omp_target_clauses (stmt));
1526 break;
1528 case GIMPLE_OMP_TEAMS:
1529 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1530 copy = gimple_build_omp_teams
1531 (s1, gimple_omp_teams_clauses (stmt));
1532 break;
1534 case GIMPLE_OMP_CRITICAL:
1535 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1536 copy = gimple_build_omp_critical (s1,
1537 gimple_omp_critical_name (
1538 as_a <gomp_critical *> (stmt)));
1539 break;
1541 case GIMPLE_TRANSACTION:
1543 gtransaction *old_trans_stmt = as_a <gtransaction *> (stmt);
1544 gtransaction *new_trans_stmt;
1545 s1 = remap_gimple_seq (gimple_transaction_body (old_trans_stmt),
1546 id);
1547 copy = new_trans_stmt
1548 = gimple_build_transaction (
1550 gimple_transaction_label (old_trans_stmt));
1551 gimple_transaction_set_subcode (
1552 new_trans_stmt,
1553 gimple_transaction_subcode (old_trans_stmt));
1555 break;
1557 default:
1558 gcc_unreachable ();
1561 else
1563 if (gimple_assign_copy_p (stmt)
1564 && gimple_assign_lhs (stmt) == gimple_assign_rhs1 (stmt)
1565 && auto_var_in_fn_p (gimple_assign_lhs (stmt), id->src_fn))
1567 /* Here we handle statements that are not completely rewritten.
1568 First we detect some inlining-induced bogosities for
1569 discarding. */
1571 /* Some assignments VAR = VAR; don't generate any rtl code
1572 and thus don't count as variable modification. Avoid
1573 keeping bogosities like 0 = 0. */
1574 tree decl = gimple_assign_lhs (stmt), value;
1575 tree *n;
1577 n = id->decl_map->get (decl);
1578 if (n)
1580 value = *n;
1581 STRIP_TYPE_NOPS (value);
1582 if (TREE_CONSTANT (value) || TREE_READONLY (value))
1583 return NULL;
1587 /* For *ptr_N ={v} {CLOBBER}, if ptr_N is SSA_NAME defined
1588 in a block that we aren't copying during tree_function_versioning,
1589 just drop the clobber stmt. */
1590 if (id->blocks_to_copy && gimple_clobber_p (stmt))
1592 tree lhs = gimple_assign_lhs (stmt);
1593 if (TREE_CODE (lhs) == MEM_REF
1594 && TREE_CODE (TREE_OPERAND (lhs, 0)) == SSA_NAME)
1596 gimple def_stmt = SSA_NAME_DEF_STMT (TREE_OPERAND (lhs, 0));
1597 if (gimple_bb (def_stmt)
1598 && !bitmap_bit_p (id->blocks_to_copy,
1599 gimple_bb (def_stmt)->index))
1600 return NULL;
1604 if (gimple_debug_bind_p (stmt))
1606 gdebug *copy
1607 = gimple_build_debug_bind (gimple_debug_bind_get_var (stmt),
1608 gimple_debug_bind_get_value (stmt),
1609 stmt);
1610 id->debug_stmts.safe_push (copy);
1611 gimple_seq_add_stmt (&stmts, copy);
1612 return stmts;
1614 if (gimple_debug_source_bind_p (stmt))
1616 gdebug *copy = gimple_build_debug_source_bind
1617 (gimple_debug_source_bind_get_var (stmt),
1618 gimple_debug_source_bind_get_value (stmt),
1619 stmt);
1620 id->debug_stmts.safe_push (copy);
1621 gimple_seq_add_stmt (&stmts, copy);
1622 return stmts;
1625 /* Create a new deep copy of the statement. */
1626 copy = gimple_copy (stmt);
1628 /* Clear flags that need revisiting. */
1629 if (gcall *call_stmt = dyn_cast <gcall *> (copy))
1631 if (gimple_call_tail_p (call_stmt))
1632 gimple_call_set_tail (call_stmt, false);
1633 if (gimple_call_from_thunk_p (call_stmt))
1634 gimple_call_set_from_thunk (call_stmt, false);
1637 /* Remap the region numbers for __builtin_eh_{pointer,filter},
1638 RESX and EH_DISPATCH. */
1639 if (id->eh_map)
1640 switch (gimple_code (copy))
1642 case GIMPLE_CALL:
1644 tree r, fndecl = gimple_call_fndecl (copy);
1645 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
1646 switch (DECL_FUNCTION_CODE (fndecl))
1648 case BUILT_IN_EH_COPY_VALUES:
1649 r = gimple_call_arg (copy, 1);
1650 r = remap_eh_region_tree_nr (r, id);
1651 gimple_call_set_arg (copy, 1, r);
1652 /* FALLTHRU */
1654 case BUILT_IN_EH_POINTER:
1655 case BUILT_IN_EH_FILTER:
1656 r = gimple_call_arg (copy, 0);
1657 r = remap_eh_region_tree_nr (r, id);
1658 gimple_call_set_arg (copy, 0, r);
1659 break;
1661 default:
1662 break;
1665 /* Reset alias info if we didn't apply measures to
1666 keep it valid over inlining by setting DECL_PT_UID. */
1667 if (!id->src_cfun->gimple_df
1668 || !id->src_cfun->gimple_df->ipa_pta)
1669 gimple_call_reset_alias_info (as_a <gcall *> (copy));
1671 break;
1673 case GIMPLE_RESX:
1675 gresx *resx_stmt = as_a <gresx *> (copy);
1676 int r = gimple_resx_region (resx_stmt);
1677 r = remap_eh_region_nr (r, id);
1678 gimple_resx_set_region (resx_stmt, r);
1680 break;
1682 case GIMPLE_EH_DISPATCH:
1684 geh_dispatch *eh_dispatch = as_a <geh_dispatch *> (copy);
1685 int r = gimple_eh_dispatch_region (eh_dispatch);
1686 r = remap_eh_region_nr (r, id);
1687 gimple_eh_dispatch_set_region (eh_dispatch, r);
1689 break;
1691 default:
1692 break;
1696 /* If STMT has a block defined, map it to the newly constructed
1697 block. */
1698 if (gimple_block (copy))
1700 tree *n;
1701 n = id->decl_map->get (gimple_block (copy));
1702 gcc_assert (n);
1703 gimple_set_block (copy, *n);
1706 if (gimple_debug_bind_p (copy) || gimple_debug_source_bind_p (copy))
1708 gimple_seq_add_stmt (&stmts, copy);
1709 return stmts;
1712 /* Remap all the operands in COPY. */
1713 memset (&wi, 0, sizeof (wi));
1714 wi.info = id;
1715 if (skip_first)
1716 walk_tree (gimple_op_ptr (copy, 1), remap_gimple_op_r, &wi, NULL);
1717 else
1718 walk_gimple_op (copy, remap_gimple_op_r, &wi);
1720 /* Clear the copied virtual operands. We are not remapping them here
1721 but are going to recreate them from scratch. */
1722 if (gimple_has_mem_ops (copy))
1724 gimple_set_vdef (copy, NULL_TREE);
1725 gimple_set_vuse (copy, NULL_TREE);
1728 gimple_seq_add_stmt (&stmts, copy);
1729 return stmts;
1733 /* Copy basic block, scale profile accordingly. Edges will be taken care of
1734 later */
1736 static basic_block
1737 copy_bb (copy_body_data *id, basic_block bb, int frequency_scale,
1738 gcov_type count_scale)
1740 gimple_stmt_iterator gsi, copy_gsi, seq_gsi;
1741 basic_block copy_basic_block;
1742 tree decl;
1743 gcov_type freq;
1744 basic_block prev;
1746 /* Search for previous copied basic block. */
1747 prev = bb->prev_bb;
1748 while (!prev->aux)
1749 prev = prev->prev_bb;
1751 /* create_basic_block() will append every new block to
1752 basic_block_info automatically. */
1753 copy_basic_block = create_basic_block (NULL, (basic_block) prev->aux);
1754 copy_basic_block->count = apply_scale (bb->count, count_scale);
1756 /* We are going to rebuild frequencies from scratch. These values
1757 have just small importance to drive canonicalize_loop_headers. */
1758 freq = apply_scale ((gcov_type)bb->frequency, frequency_scale);
1760 /* We recompute frequencies after inlining, so this is quite safe. */
1761 if (freq > BB_FREQ_MAX)
1762 freq = BB_FREQ_MAX;
1763 copy_basic_block->frequency = freq;
1765 copy_gsi = gsi_start_bb (copy_basic_block);
1767 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
1769 gimple_seq stmts;
1770 gimple stmt = gsi_stmt (gsi);
1771 gimple orig_stmt = stmt;
1772 gimple_stmt_iterator stmts_gsi;
1773 bool stmt_added = false;
1775 id->regimplify = false;
1776 stmts = remap_gimple_stmt (stmt, id);
1778 if (gimple_seq_empty_p (stmts))
1779 continue;
1781 seq_gsi = copy_gsi;
1783 for (stmts_gsi = gsi_start (stmts);
1784 !gsi_end_p (stmts_gsi); )
1786 stmt = gsi_stmt (stmts_gsi);
1788 /* Advance iterator now before stmt is moved to seq_gsi. */
1789 gsi_next (&stmts_gsi);
1791 if (gimple_nop_p (stmt))
1792 continue;
1794 gimple_duplicate_stmt_histograms (cfun, stmt, id->src_cfun,
1795 orig_stmt);
1797 /* With return slot optimization we can end up with
1798 non-gimple (foo *)&this->m, fix that here. */
1799 if (is_gimple_assign (stmt)
1800 && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt))
1801 && !is_gimple_val (gimple_assign_rhs1 (stmt)))
1803 tree new_rhs;
1804 new_rhs = force_gimple_operand_gsi (&seq_gsi,
1805 gimple_assign_rhs1 (stmt),
1806 true, NULL, false,
1807 GSI_CONTINUE_LINKING);
1808 gimple_assign_set_rhs1 (stmt, new_rhs);
1809 id->regimplify = false;
1812 gsi_insert_after (&seq_gsi, stmt, GSI_NEW_STMT);
1814 if (id->regimplify)
1815 gimple_regimplify_operands (stmt, &seq_gsi);
1817 stmt_added = true;
1820 if (!stmt_added)
1821 continue;
1823 /* If copy_basic_block has been empty at the start of this iteration,
1824 call gsi_start_bb again to get at the newly added statements. */
1825 if (gsi_end_p (copy_gsi))
1826 copy_gsi = gsi_start_bb (copy_basic_block);
1827 else
1828 gsi_next (&copy_gsi);
1830 /* Process the new statement. The call to gimple_regimplify_operands
1831 possibly turned the statement into multiple statements, we
1832 need to process all of them. */
1835 tree fn;
1836 gcall *call_stmt;
1838 stmt = gsi_stmt (copy_gsi);
1839 call_stmt = dyn_cast <gcall *> (stmt);
1840 if (call_stmt
1841 && gimple_call_va_arg_pack_p (call_stmt)
1842 && id->call_stmt)
1844 /* __builtin_va_arg_pack () should be replaced by
1845 all arguments corresponding to ... in the caller. */
1846 tree p;
1847 gcall *new_call;
1848 vec<tree> argarray;
1849 size_t nargs = gimple_call_num_args (id->call_stmt);
1850 size_t n, i, nargs_to_copy;
1851 bool remove_bounds = false;
1853 for (p = DECL_ARGUMENTS (id->src_fn); p; p = DECL_CHAIN (p))
1854 nargs--;
1856 /* Bounds should be removed from the arg pack in case
1857 we handle a non-instrumented call in an instrumented
1858 function. */
1859 nargs_to_copy = nargs;
1860 if (gimple_call_with_bounds_p (id->call_stmt)
1861 && !gimple_call_with_bounds_p (stmt))
1863 for (i = gimple_call_num_args (id->call_stmt) - nargs;
1864 i < gimple_call_num_args (id->call_stmt);
1865 i++)
1866 if (POINTER_BOUNDS_P (gimple_call_arg (id->call_stmt, i)))
1867 nargs_to_copy--;
1868 remove_bounds = true;
1871 /* Create the new array of arguments. */
1872 n = nargs_to_copy + gimple_call_num_args (call_stmt);
1873 argarray.create (n);
1874 argarray.safe_grow_cleared (n);
1876 /* Copy all the arguments before '...' */
1877 memcpy (argarray.address (),
1878 gimple_call_arg_ptr (call_stmt, 0),
1879 gimple_call_num_args (call_stmt) * sizeof (tree));
1881 if (remove_bounds)
1883 /* Append the rest of the arguments, removing bounds. */
1884 unsigned cur = gimple_call_num_args (call_stmt);
1885 i = gimple_call_num_args (id->call_stmt) - nargs;
1886 for (i = gimple_call_num_args (id->call_stmt) - nargs;
1887 i < gimple_call_num_args (id->call_stmt);
1888 i++)
1889 if (!POINTER_BOUNDS_P (gimple_call_arg (id->call_stmt, i)))
1890 argarray[cur++] = gimple_call_arg (id->call_stmt, i);
1891 gcc_assert (cur == n);
1893 else
1895 /* Append the arguments passed in '...' */
1896 memcpy (argarray.address () + gimple_call_num_args (call_stmt),
1897 gimple_call_arg_ptr (id->call_stmt, 0)
1898 + (gimple_call_num_args (id->call_stmt) - nargs),
1899 nargs * sizeof (tree));
1902 new_call = gimple_build_call_vec (gimple_call_fn (call_stmt),
1903 argarray);
1905 argarray.release ();
1907 /* Copy all GIMPLE_CALL flags, location and block, except
1908 GF_CALL_VA_ARG_PACK. */
1909 gimple_call_copy_flags (new_call, call_stmt);
1910 gimple_call_set_va_arg_pack (new_call, false);
1911 gimple_set_location (new_call, gimple_location (stmt));
1912 gimple_set_block (new_call, gimple_block (stmt));
1913 gimple_call_set_lhs (new_call, gimple_call_lhs (call_stmt));
1915 gsi_replace (&copy_gsi, new_call, false);
1916 stmt = new_call;
1918 else if (call_stmt
1919 && id->call_stmt
1920 && (decl = gimple_call_fndecl (stmt))
1921 && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
1922 && DECL_FUNCTION_CODE (decl) == BUILT_IN_VA_ARG_PACK_LEN)
1924 /* __builtin_va_arg_pack_len () should be replaced by
1925 the number of anonymous arguments. */
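/* Illustrative arithmetic (numbers assumed): if the function being inlined
   is declared int f (int a, ...) and the call being inlined is f (1, 2, 3),
   then nargs starts at 3, the loop below subtracts the one named parameter,
   and the remaining value 2 replaces the __builtin_va_arg_pack_len ()
   call.  */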
1926 size_t nargs = gimple_call_num_args (id->call_stmt), i;
1927 tree count, p;
1928 gimple new_stmt;
1930 for (p = DECL_ARGUMENTS (id->src_fn); p; p = DECL_CHAIN (p))
1931 nargs--;
1933 /* For instrumented calls we should ignore bounds. */
1934 for (i = gimple_call_num_args (id->call_stmt) - nargs;
1935 i < gimple_call_num_args (id->call_stmt);
1936 i++)
1937 if (POINTER_BOUNDS_P (gimple_call_arg (id->call_stmt, i)))
1938 nargs--;
1940 count = build_int_cst (integer_type_node, nargs);
1941 new_stmt = gimple_build_assign (gimple_call_lhs (stmt), count);
1942 gsi_replace (&copy_gsi, new_stmt, false);
1943 stmt = new_stmt;
1945 else if (call_stmt
1946 && id->call_stmt
1947 && gimple_call_internal_p (stmt)
1948 && gimple_call_internal_fn (stmt) == IFN_TSAN_FUNC_EXIT)
1950 /* Drop TSAN_FUNC_EXIT () internal calls during inlining. */
1951 gsi_remove (&copy_gsi, false);
1952 continue;
1955 /* Statements produced by inlining can be unfolded, especially
1956 when we constant propagated some operands. We can't fold
1957 them right now for two reasons:
1958 1) folding requires SSA_NAME_DEF_STMTs to be correct
1959 2) we can't change function calls to builtins.
1960 So we just mark the statement for later folding. We mark
1961 all new statements, instead of just statements that have changed
1962 by some nontrivial substitution, so even statements made
1963 foldable indirectly are updated. If this turns out to be
1964 expensive, copy_body can be told to watch for nontrivial
1965 changes. */
1966 if (id->statements_to_fold)
1967 id->statements_to_fold->add (stmt);
1969 /* We're duplicating a CALL_EXPR. Find any corresponding
1970 callgraph edges and update or duplicate them. */
1971 if (gcall *call_stmt = dyn_cast <gcall *> (stmt))
1973 struct cgraph_edge *edge;
1975 switch (id->transform_call_graph_edges)
1977 case CB_CGE_DUPLICATE:
1978 edge = id->src_node->get_edge (orig_stmt);
1979 if (edge)
1981 int edge_freq = edge->frequency;
1982 int new_freq;
1983 struct cgraph_edge *old_edge = edge;
1984 edge = edge->clone (id->dst_node, call_stmt,
1985 gimple_uid (stmt),
1986 REG_BR_PROB_BASE, CGRAPH_FREQ_BASE,
1987 true);
1988 /* We could also just rescale the frequency, but
1989 doing so would introduce roundoff errors and make
1990 the verifier unhappy. */
1991 new_freq = compute_call_stmt_bb_frequency (id->dst_node->decl,
1992 copy_basic_block);
1994 /* Speculative calls consist of two edges - direct and indirect.
1995 Duplicate the whole thing and distribute frequencies accordingly. */
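/* Worked example (illustrative numbers only): if the original direct edge
   had frequency 60, the indirect edge frequency 40, and the recomputed
   block frequency is 50, the cloned direct edge gets 50 * 60 / 100 == 30
   and the cloned indirect edge 50 * 40 / 100 == 20.  */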
1996 if (edge->speculative)
1998 struct cgraph_edge *direct, *indirect;
1999 struct ipa_ref *ref;
2001 gcc_assert (!edge->indirect_unknown_callee);
2002 old_edge->speculative_call_info (direct, indirect, ref);
2003 indirect = indirect->clone (id->dst_node, call_stmt,
2004 gimple_uid (stmt),
2005 REG_BR_PROB_BASE, CGRAPH_FREQ_BASE,
2006 true);
2007 if (old_edge->frequency + indirect->frequency)
2009 edge->frequency = MIN (RDIV ((gcov_type)new_freq * old_edge->frequency,
2010 (old_edge->frequency + indirect->frequency)),
2011 CGRAPH_FREQ_MAX);
2012 indirect->frequency = MIN (RDIV ((gcov_type)new_freq * indirect->frequency,
2013 (old_edge->frequency + indirect->frequency)),
2014 CGRAPH_FREQ_MAX);
2016 id->dst_node->clone_reference (ref, stmt);
2018 else
2020 edge->frequency = new_freq;
2021 if (dump_file
2022 && profile_status_for_fn (cfun) != PROFILE_ABSENT
2023 && (edge_freq > edge->frequency + 10
2024 || edge_freq < edge->frequency - 10))
2026 fprintf (dump_file, "Edge frequency estimated by "
2027 "cgraph %i diverge from inliner's estimate %i\n",
2028 edge_freq,
2029 edge->frequency);
2030 fprintf (dump_file,
2031 "Orig bb: %i, orig bb freq %i, new bb freq %i\n",
2032 bb->index,
2033 bb->frequency,
2034 copy_basic_block->frequency);
2038 break;
2040 case CB_CGE_MOVE_CLONES:
2041 id->dst_node->set_call_stmt_including_clones (orig_stmt,
2042 call_stmt);
2043 edge = id->dst_node->get_edge (stmt);
2044 break;
2046 case CB_CGE_MOVE:
2047 edge = id->dst_node->get_edge (orig_stmt);
2048 if (edge)
2049 edge->set_call_stmt (call_stmt);
2050 break;
2052 default:
2053 gcc_unreachable ();
2056 /* Constant propagation on arguments done during inlining
2057 may create a new direct call. Produce an edge for it. */
2058 if ((!edge
2059 || (edge->indirect_inlining_edge
2060 && id->transform_call_graph_edges == CB_CGE_MOVE_CLONES))
2061 && id->dst_node->definition
2062 && (fn = gimple_call_fndecl (stmt)) != NULL)
2064 struct cgraph_node *dest = cgraph_node::get (fn);
2066 /* We have a missing edge in the callgraph. This can happen
2067 when previous inlining turned an indirect call into a
2068 direct call by constant propagating arguments or we are
2069 producing a dead clone (for further cloning). In all
2070 other cases we hit a bug (incorrect node sharing is the
2071 most common reason for missing edges). */
2072 gcc_assert (!dest->definition
2073 || dest->address_taken
2074 || !id->src_node->definition
2075 || !id->dst_node->definition);
2076 if (id->transform_call_graph_edges == CB_CGE_MOVE_CLONES)
2077 id->dst_node->create_edge_including_clones
2078 (dest, orig_stmt, call_stmt, bb->count,
2079 compute_call_stmt_bb_frequency (id->dst_node->decl,
2080 copy_basic_block),
2081 CIF_ORIGINALLY_INDIRECT_CALL);
2082 else
2083 id->dst_node->create_edge (dest, call_stmt,
2084 bb->count,
2085 compute_call_stmt_bb_frequency
2086 (id->dst_node->decl,
2087 copy_basic_block))->inline_failed
2088 = CIF_ORIGINALLY_INDIRECT_CALL;
2089 if (dump_file)
2091 fprintf (dump_file, "Created new direct edge to %s\n",
2092 dest->name ());
2096 notice_special_calls (as_a <gcall *> (stmt));
2099 maybe_duplicate_eh_stmt_fn (cfun, stmt, id->src_cfun, orig_stmt,
2100 id->eh_map, id->eh_lp_nr);
2102 if (gimple_in_ssa_p (cfun) && !is_gimple_debug (stmt))
2104 ssa_op_iter i;
2105 tree def;
2107 FOR_EACH_SSA_TREE_OPERAND (def, stmt, i, SSA_OP_DEF)
2108 if (TREE_CODE (def) == SSA_NAME)
2109 SSA_NAME_DEF_STMT (def) = stmt;
2112 gsi_next (&copy_gsi);
2114 while (!gsi_end_p (copy_gsi));
2116 copy_gsi = gsi_last_bb (copy_basic_block);
2119 return copy_basic_block;
2122 /* Inserting a Single Entry Multiple Exit region in SSA form into code in SSA
2123 form is quite easy, since the dominator relationship for the old basic blocks
2124 does not change.
2126 There is, however, an exception where inlining might change the dominator
2127 relation across EH edges going from basic blocks within the inlined function
2128 to landing pads in the function we inline into.
2130 The function fills in the PHI_RESULTs of such PHI nodes if they refer
2131 to gimple regs. Otherwise, the function marks the PHI_RESULT of such
2132 PHI nodes for renaming. For non-gimple regs, renaming is safe: the
2133 EH edges are abnormal and SSA_NAME_OCCURS_IN_ABNORMAL_PHI must be
2134 set, which means that there will be no overlapping live ranges
2135 for the underlying symbol.
2137 This might change in the future if we allow redirecting of EH edges, and
2138 we might then want to change the way we build the CFG before inlining to
2139 include all the possible edges. */
2140 static void
2141 update_ssa_across_abnormal_edges (basic_block bb, basic_block ret_bb,
2142 bool can_throw, bool nonlocal_goto)
2144 edge e;
2145 edge_iterator ei;
2147 FOR_EACH_EDGE (e, ei, bb->succs)
2148 if (!e->dest->aux
2149 || ((basic_block)e->dest->aux)->index == ENTRY_BLOCK)
2151 gphi *phi;
2152 gphi_iterator si;
2154 if (!nonlocal_goto)
2155 gcc_assert (e->flags & EDGE_EH);
2157 if (!can_throw)
2158 gcc_assert (!(e->flags & EDGE_EH));
2160 for (si = gsi_start_phis (e->dest); !gsi_end_p (si); gsi_next (&si))
2162 edge re;
2164 phi = si.phi ();
2166 /* For abnormal goto/call edges the receiver can be the
2167 ENTRY_BLOCK. Do not assert this cannot happen. */
2169 gcc_assert ((e->flags & EDGE_EH)
2170 || SSA_NAME_OCCURS_IN_ABNORMAL_PHI (PHI_RESULT (phi)));
2172 re = find_edge (ret_bb, e->dest);
2173 gcc_checking_assert (re);
2174 gcc_assert ((re->flags & (EDGE_EH | EDGE_ABNORMAL))
2175 == (e->flags & (EDGE_EH | EDGE_ABNORMAL)));
2177 SET_USE (PHI_ARG_DEF_PTR_FROM_EDGE (phi, e),
2178 USE_FROM_PTR (PHI_ARG_DEF_PTR_FROM_EDGE (phi, re)));
2184 /* Copy edges from BB into its copy constructed earlier, scaling the profile
2185 accordingly. Edges will be taken care of later. Assume the aux
2186 pointers point to the copies of each BB. Return true if any
2187 debug stmts are left after a statement that must end the basic block. */
2189 static bool
2190 copy_edges_for_bb (basic_block bb, gcov_type count_scale, basic_block ret_bb,
2191 basic_block abnormal_goto_dest)
2193 basic_block new_bb = (basic_block) bb->aux;
2194 edge_iterator ei;
2195 edge old_edge;
2196 gimple_stmt_iterator si;
2197 int flags;
2198 bool need_debug_cleanup = false;
2200 /* Use the indices from the original blocks to create edges for the
2201 new ones. */
2202 FOR_EACH_EDGE (old_edge, ei, bb->succs)
2203 if (!(old_edge->flags & EDGE_EH))
2205 edge new_edge;
2207 flags = old_edge->flags;
2209 /* Return edges do get a FALLTHRU flag when they get inlined. */
2210 if (old_edge->dest->index == EXIT_BLOCK
2211 && !(old_edge->flags & (EDGE_TRUE_VALUE|EDGE_FALSE_VALUE|EDGE_FAKE))
2212 && old_edge->dest->aux != EXIT_BLOCK_PTR_FOR_FN (cfun))
2213 flags |= EDGE_FALLTHRU;
2214 new_edge = make_edge (new_bb, (basic_block) old_edge->dest->aux, flags);
2215 new_edge->count = apply_scale (old_edge->count, count_scale);
2216 new_edge->probability = old_edge->probability;
2219 if (bb->index == ENTRY_BLOCK || bb->index == EXIT_BLOCK)
2220 return false;
2222 for (si = gsi_start_bb (new_bb); !gsi_end_p (si);)
2224 gimple copy_stmt;
2225 bool can_throw, nonlocal_goto;
2227 copy_stmt = gsi_stmt (si);
2228 if (!is_gimple_debug (copy_stmt))
2229 update_stmt (copy_stmt);
2231 /* Do this before the possible split_block. */
2232 gsi_next (&si);
2234 /* If this tree could throw an exception, there are two
2235 cases where we need to add abnormal edge(s): the
2236 tree wasn't in a region and there is a "current
2237 region" in the caller; or the original tree had
2238 EH edges. In both cases split the block after the tree,
2239 and add abnormal edge(s) as needed; we need both
2240 those from the callee and the caller.
2241 We check whether the copy can throw, because the const
2242 propagation can change an INDIRECT_REF which throws
2243 into a COMPONENT_REF which doesn't. If the copy
2244 can throw, the original could also throw. */
2245 can_throw = stmt_can_throw_internal (copy_stmt);
2246 nonlocal_goto
2247 = (stmt_can_make_abnormal_goto (copy_stmt)
2248 && !computed_goto_p (copy_stmt));
2250 if (can_throw || nonlocal_goto)
2252 if (!gsi_end_p (si))
2254 while (!gsi_end_p (si) && is_gimple_debug (gsi_stmt (si)))
2255 gsi_next (&si);
2256 if (gsi_end_p (si))
2257 need_debug_cleanup = true;
2259 if (!gsi_end_p (si))
2260 /* Note that bb's predecessor edges aren't necessarily
2261 right at this point; split_block doesn't care. */
2263 edge e = split_block (new_bb, copy_stmt);
2265 new_bb = e->dest;
2266 new_bb->aux = e->src->aux;
2267 si = gsi_start_bb (new_bb);
2271 if (gimple_code (copy_stmt) == GIMPLE_EH_DISPATCH)
2272 make_eh_dispatch_edges (as_a <geh_dispatch *> (copy_stmt));
2273 else if (can_throw)
2274 make_eh_edges (copy_stmt);
2276 /* If the call we inline cannot make abnormal goto do not add
2277 additional abnormal edges but only retain those already present
2278 in the original function body. */
2279 if (abnormal_goto_dest == NULL)
2280 nonlocal_goto = false;
2281 if (nonlocal_goto)
2283 basic_block copy_stmt_bb = gimple_bb (copy_stmt);
2285 if (get_abnormal_succ_dispatcher (copy_stmt_bb))
2286 nonlocal_goto = false;
2287 /* ABNORMAL_DISPATCHER (1) is for longjmp/setjmp or nonlocal gotos
2288 in OpenMP regions which aren't allowed to be left abnormally.
2289 So, no need to add abnormal edge in that case. */
2290 else if (is_gimple_call (copy_stmt)
2291 && gimple_call_internal_p (copy_stmt)
2292 && (gimple_call_internal_fn (copy_stmt)
2293 == IFN_ABNORMAL_DISPATCHER)
2294 && gimple_call_arg (copy_stmt, 0) == boolean_true_node)
2295 nonlocal_goto = false;
2296 else
2297 make_edge (copy_stmt_bb, abnormal_goto_dest, EDGE_ABNORMAL);
2300 if ((can_throw || nonlocal_goto)
2301 && gimple_in_ssa_p (cfun))
2302 update_ssa_across_abnormal_edges (gimple_bb (copy_stmt), ret_bb,
2303 can_throw, nonlocal_goto);
2305 return need_debug_cleanup;
2308 /* Copy the PHIs. All blocks and edges are copied; some blocks
2309 were possibly split and new outgoing EH edges inserted.
2310 BB points to the block of the original function and AUX pointers link
2311 the original and newly copied blocks. */
2313 static void
2314 copy_phis_for_bb (basic_block bb, copy_body_data *id)
2316 basic_block const new_bb = (basic_block) bb->aux;
2317 edge_iterator ei;
2318 gphi *phi;
2319 gphi_iterator si;
2320 edge new_edge;
2321 bool inserted = false;
2323 for (si = gsi_start_phis (bb); !gsi_end_p (si); gsi_next (&si))
2325 tree res, new_res;
2326 gphi *new_phi;
2328 phi = si.phi ();
2329 res = PHI_RESULT (phi);
2330 new_res = res;
2331 if (!virtual_operand_p (res))
2333 walk_tree (&new_res, copy_tree_body_r, id, NULL);
2334 new_phi = create_phi_node (new_res, new_bb);
2335 FOR_EACH_EDGE (new_edge, ei, new_bb->preds)
2337 edge old_edge = find_edge ((basic_block) new_edge->src->aux, bb);
2338 tree arg;
2339 tree new_arg;
2340 edge_iterator ei2;
2341 location_t locus;
2343 /* When doing partial cloning, we allow PHIs on the entry block
2344 as long as all the arguments are the same. Find any input
2345 edge to see the argument to copy. */
2346 if (!old_edge)
2347 FOR_EACH_EDGE (old_edge, ei2, bb->preds)
2348 if (!old_edge->src->aux)
2349 break;
2351 arg = PHI_ARG_DEF_FROM_EDGE (phi, old_edge);
2352 new_arg = arg;
2353 walk_tree (&new_arg, copy_tree_body_r, id, NULL);
2354 gcc_assert (new_arg);
2355 /* With return slot optimization we can end up with
2356 non-gimple (foo *)&this->m; fix that here. */
2357 if (TREE_CODE (new_arg) != SSA_NAME
2358 && TREE_CODE (new_arg) != FUNCTION_DECL
2359 && !is_gimple_val (new_arg))
2361 gimple_seq stmts = NULL;
2362 new_arg = force_gimple_operand (new_arg, &stmts, true, NULL);
2363 gsi_insert_seq_on_edge (new_edge, stmts);
2364 inserted = true;
2366 locus = gimple_phi_arg_location_from_edge (phi, old_edge);
2367 if (LOCATION_BLOCK (locus))
2369 tree *n;
2370 n = id->decl_map->get (LOCATION_BLOCK (locus));
2371 gcc_assert (n);
2372 if (*n)
2373 locus = COMBINE_LOCATION_DATA (line_table, locus, *n);
2374 else
2375 locus = LOCATION_LOCUS (locus);
2377 else
2378 locus = LOCATION_LOCUS (locus);
2380 add_phi_arg (new_phi, new_arg, new_edge, locus);
2385 /* Commit the delayed edge insertions. */
2386 if (inserted)
2387 FOR_EACH_EDGE (new_edge, ei, new_bb->preds)
2388 gsi_commit_one_edge_insert (new_edge, NULL);
2392 /* Wrapper for remap_decl so it can be used as a callback. */
2394 static tree
2395 remap_decl_1 (tree decl, void *data)
2397 return remap_decl (decl, (copy_body_data *) data);
2400 /* Build the struct function and associated data structures for the new clone
2401 NEW_FNDECL to be built. CALLEE_FNDECL is the original. This function changes
2402 cfun to the function of NEW_FNDECL (and current_function_decl too). */
2404 static void
2405 initialize_cfun (tree new_fndecl, tree callee_fndecl, gcov_type count)
2407 struct function *src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
2408 gcov_type count_scale;
2410 if (!DECL_ARGUMENTS (new_fndecl))
2411 DECL_ARGUMENTS (new_fndecl) = DECL_ARGUMENTS (callee_fndecl);
2412 if (!DECL_RESULT (new_fndecl))
2413 DECL_RESULT (new_fndecl) = DECL_RESULT (callee_fndecl);
2415 if (ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count)
2416 count_scale
2417 = GCOV_COMPUTE_SCALE (count,
2418 ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count);
2419 else
2420 count_scale = REG_BR_PROB_BASE;
2422 /* Register specific tree functions. */
2423 gimple_register_cfg_hooks ();
2425 /* Get clean struct function. */
2426 push_struct_function (new_fndecl);
2428 /* We will rebuild these, so just sanity check that they are empty. */
2429 gcc_assert (VALUE_HISTOGRAMS (cfun) == NULL);
2430 gcc_assert (cfun->local_decls == NULL);
2431 gcc_assert (cfun->cfg == NULL);
2432 gcc_assert (cfun->decl == new_fndecl);
2434 /* Copy items we preserve during cloning. */
2435 cfun->static_chain_decl = src_cfun->static_chain_decl;
2436 cfun->nonlocal_goto_save_area = src_cfun->nonlocal_goto_save_area;
2437 cfun->function_end_locus = src_cfun->function_end_locus;
2438 cfun->curr_properties = src_cfun->curr_properties;
2439 cfun->last_verified = src_cfun->last_verified;
2440 cfun->va_list_gpr_size = src_cfun->va_list_gpr_size;
2441 cfun->va_list_fpr_size = src_cfun->va_list_fpr_size;
2442 cfun->has_nonlocal_label = src_cfun->has_nonlocal_label;
2443 cfun->stdarg = src_cfun->stdarg;
2444 cfun->after_inlining = src_cfun->after_inlining;
2445 cfun->can_throw_non_call_exceptions
2446 = src_cfun->can_throw_non_call_exceptions;
2447 cfun->can_delete_dead_exceptions = src_cfun->can_delete_dead_exceptions;
2448 cfun->returns_struct = src_cfun->returns_struct;
2449 cfun->returns_pcc_struct = src_cfun->returns_pcc_struct;
2451 init_empty_tree_cfg ();
2453 profile_status_for_fn (cfun) = profile_status_for_fn (src_cfun);
2454 ENTRY_BLOCK_PTR_FOR_FN (cfun)->count =
2455 (ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count * count_scale /
2456 REG_BR_PROB_BASE);
2457 ENTRY_BLOCK_PTR_FOR_FN (cfun)->frequency
2458 = ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->frequency;
2459 EXIT_BLOCK_PTR_FOR_FN (cfun)->count =
2460 (EXIT_BLOCK_PTR_FOR_FN (src_cfun)->count * count_scale /
2461 REG_BR_PROB_BASE);
2462 EXIT_BLOCK_PTR_FOR_FN (cfun)->frequency =
2463 EXIT_BLOCK_PTR_FOR_FN (src_cfun)->frequency;
2464 if (src_cfun->eh)
2465 init_eh_for_function ();
2467 if (src_cfun->gimple_df)
2469 init_tree_ssa (cfun);
2470 cfun->gimple_df->in_ssa_p = true;
2471 init_ssa_operands (cfun);
2475 /* Helper function for copy_cfg_body. Move debug stmts from the end
2476 of NEW_BB to the beginning of successor basic blocks when needed. If the
2477 successor has multiple predecessors, reset them; otherwise keep
2478 their value. */
2480 static void
2481 maybe_move_debug_stmts_to_successors (copy_body_data *id, basic_block new_bb)
2483 edge e;
2484 edge_iterator ei;
2485 gimple_stmt_iterator si = gsi_last_nondebug_bb (new_bb);
2487 if (gsi_end_p (si)
2488 || gsi_one_before_end_p (si)
2489 || !(stmt_can_throw_internal (gsi_stmt (si))
2490 || stmt_can_make_abnormal_goto (gsi_stmt (si))))
2491 return;
2493 FOR_EACH_EDGE (e, ei, new_bb->succs)
2495 gimple_stmt_iterator ssi = gsi_last_bb (new_bb);
2496 gimple_stmt_iterator dsi = gsi_after_labels (e->dest);
2497 while (is_gimple_debug (gsi_stmt (ssi)))
2499 gimple stmt = gsi_stmt (ssi);
2500 gdebug *new_stmt;
2501 tree var;
2502 tree value;
2504 /* For the last edge move the debug stmts instead of copying
2505 them. */
2506 if (ei_one_before_end_p (ei))
2508 si = ssi;
2509 gsi_prev (&ssi);
2510 if (!single_pred_p (e->dest) && gimple_debug_bind_p (stmt))
2511 gimple_debug_bind_reset_value (stmt);
2512 gsi_remove (&si, false);
2513 gsi_insert_before (&dsi, stmt, GSI_SAME_STMT);
2514 continue;
2517 if (gimple_debug_bind_p (stmt))
2519 var = gimple_debug_bind_get_var (stmt);
2520 if (single_pred_p (e->dest))
2522 value = gimple_debug_bind_get_value (stmt);
2523 value = unshare_expr (value);
2525 else
2526 value = NULL_TREE;
2527 new_stmt = gimple_build_debug_bind (var, value, stmt);
2529 else if (gimple_debug_source_bind_p (stmt))
2531 var = gimple_debug_source_bind_get_var (stmt);
2532 value = gimple_debug_source_bind_get_value (stmt);
2533 new_stmt = gimple_build_debug_source_bind (var, value, stmt);
2535 else
2536 gcc_unreachable ();
2537 gsi_insert_before (&dsi, new_stmt, GSI_SAME_STMT);
2538 id->debug_stmts.safe_push (new_stmt);
2539 gsi_prev (&ssi);
2544 /* Make a copy of the sub-loops of SRC_PARENT and place them
2545 as siblings of DEST_PARENT. */
2547 static void
2548 copy_loops (copy_body_data *id,
2549 struct loop *dest_parent, struct loop *src_parent)
2551 struct loop *src_loop = src_parent->inner;
2552 while (src_loop)
2554 if (!id->blocks_to_copy
2555 || bitmap_bit_p (id->blocks_to_copy, src_loop->header->index))
2557 struct loop *dest_loop = alloc_loop ();
2559 /* Assign the new loop its header and latch and associate
2560 those with the new loop. */
2561 dest_loop->header = (basic_block)src_loop->header->aux;
2562 dest_loop->header->loop_father = dest_loop;
2563 if (src_loop->latch != NULL)
2565 dest_loop->latch = (basic_block)src_loop->latch->aux;
2566 dest_loop->latch->loop_father = dest_loop;
2569 /* Copy loop meta-data. */
2570 copy_loop_info (src_loop, dest_loop);
2572 /* Finally place it into the loop array and the loop tree. */
2573 place_new_loop (cfun, dest_loop);
2574 flow_loop_tree_node_add (dest_parent, dest_loop);
2576 dest_loop->safelen = src_loop->safelen;
2577 dest_loop->dont_vectorize = src_loop->dont_vectorize;
2578 if (src_loop->force_vectorize)
2580 dest_loop->force_vectorize = true;
2581 cfun->has_force_vectorize_loops = true;
2583 if (src_loop->simduid)
2585 dest_loop->simduid = remap_decl (src_loop->simduid, id);
2586 cfun->has_simduid_loops = true;
2589 /* Recurse. */
2590 copy_loops (id, dest_loop, src_loop);
2592 src_loop = src_loop->next;
2596 /* Call cgraph_redirect_edge_call_stmt_to_callee on all calls in BB. */
2598 void
2599 redirect_all_calls (copy_body_data * id, basic_block bb)
2601 gimple_stmt_iterator si;
2602 gimple last = last_stmt (bb);
2603 for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
2605 gimple stmt = gsi_stmt (si);
2606 if (is_gimple_call (stmt))
2608 struct cgraph_edge *edge = id->dst_node->get_edge (stmt);
2609 if (edge)
2611 edge->redirect_call_stmt_to_callee ();
2612 if (stmt == last && id->call_stmt && maybe_clean_eh_stmt (stmt))
2613 gimple_purge_dead_eh_edges (bb);
2619 /* Convert estimated frequencies into counts for NODE, scaling COUNT
2620 with each bb's frequency. Used when NODE has a 0-weight entry
2621 but we are about to inline it into a non-zero count call bb.
2622 See the comments for handle_missing_profiles() in predict.c for
2623 when this can happen for COMDATs. */
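/* Illustrative arithmetic (numbers assumed): with COUNT == 1000 and a block
   whose frequency is BB_FREQ_MAX / 4, the scaling below gives the block a
   count of roughly 250; each outgoing edge then receives that count
   multiplied by its probability.  */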
2625 void
2626 freqs_to_counts (struct cgraph_node *node, gcov_type count)
2628 basic_block bb;
2629 edge_iterator ei;
2630 edge e;
2631 struct function *fn = DECL_STRUCT_FUNCTION (node->decl);
2633 FOR_ALL_BB_FN(bb, fn)
2635 bb->count = apply_scale (count,
2636 GCOV_COMPUTE_SCALE (bb->frequency, BB_FREQ_MAX));
2637 FOR_EACH_EDGE (e, ei, bb->succs)
2638 e->count = apply_probability (e->src->count, e->probability);
2642 /* Make a copy of the body of FN so that it can be inserted inline in
2643 another function. Walks FN via CFG, returns new fndecl. */
2645 static tree
2646 copy_cfg_body (copy_body_data * id, gcov_type count, int frequency_scale,
2647 basic_block entry_block_map, basic_block exit_block_map,
2648 basic_block new_entry)
2650 tree callee_fndecl = id->src_fn;
2651 /* Original cfun for the callee, doesn't change. */
2652 struct function *src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
2653 struct function *cfun_to_copy;
2654 basic_block bb;
2655 tree new_fndecl = NULL;
2656 bool need_debug_cleanup = false;
2657 gcov_type count_scale;
2658 int last;
2659 int incoming_frequency = 0;
2660 gcov_type incoming_count = 0;
2662 /* This can happen for COMDAT routines that end up with 0 counts
2663 despite being called (see the comments for handle_missing_profiles()
2664 in predict.c as to why). Apply counts to the blocks in the callee
2665 before inlining, using the guessed edge frequencies, so that we don't
2666 end up with a 0-count inline body which can confuse downstream
2667 optimizations such as function splitting. */
2668 if (!ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count && count)
2670 /* Apply the larger of the call bb count and the total incoming
2671 call edge count to the callee. */
2672 gcov_type in_count = 0;
2673 struct cgraph_edge *in_edge;
2674 for (in_edge = id->src_node->callers; in_edge;
2675 in_edge = in_edge->next_caller)
2676 in_count += in_edge->count;
2677 freqs_to_counts (id->src_node, count > in_count ? count : in_count);
2680 if (ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count)
2681 count_scale
2682 = GCOV_COMPUTE_SCALE (count,
2683 ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count);
2684 else
2685 count_scale = REG_BR_PROB_BASE;
2687 /* Register specific tree functions. */
2688 gimple_register_cfg_hooks ();
2690 /* If we are inlining just a region of the function, make sure to connect
2691 the new entry to ENTRY_BLOCK_PTR_FOR_FN (cfun). Since the new entry can be
2692 part of a loop, we must compute the frequency and probability of
2693 ENTRY_BLOCK_PTR_FOR_FN (cfun) based on the frequencies and
2694 probabilities of edges incoming from the nonduplicated region. */
2695 if (new_entry)
2697 edge e;
2698 edge_iterator ei;
2700 FOR_EACH_EDGE (e, ei, new_entry->preds)
2701 if (!e->src->aux)
2703 incoming_frequency += EDGE_FREQUENCY (e);
2704 incoming_count += e->count;
2706 incoming_count = apply_scale (incoming_count, count_scale);
2707 incoming_frequency
2708 = apply_scale ((gcov_type)incoming_frequency, frequency_scale);
2709 ENTRY_BLOCK_PTR_FOR_FN (cfun)->count = incoming_count;
2710 ENTRY_BLOCK_PTR_FOR_FN (cfun)->frequency = incoming_frequency;
2713 /* Must have a CFG here at this point. */
2714 gcc_assert (ENTRY_BLOCK_PTR_FOR_FN
2715 (DECL_STRUCT_FUNCTION (callee_fndecl)));
2717 cfun_to_copy = id->src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
2719 ENTRY_BLOCK_PTR_FOR_FN (cfun_to_copy)->aux = entry_block_map;
2720 EXIT_BLOCK_PTR_FOR_FN (cfun_to_copy)->aux = exit_block_map;
2721 entry_block_map->aux = ENTRY_BLOCK_PTR_FOR_FN (cfun_to_copy);
2722 exit_block_map->aux = EXIT_BLOCK_PTR_FOR_FN (cfun_to_copy);
2724 /* Duplicate any exception-handling regions. */
2725 if (cfun->eh)
2726 id->eh_map = duplicate_eh_regions (cfun_to_copy, NULL, id->eh_lp_nr,
2727 remap_decl_1, id);
2729 /* Use aux pointers to map the original blocks to copy. */
2730 FOR_EACH_BB_FN (bb, cfun_to_copy)
2731 if (!id->blocks_to_copy || bitmap_bit_p (id->blocks_to_copy, bb->index))
2733 basic_block new_bb = copy_bb (id, bb, frequency_scale, count_scale);
2734 bb->aux = new_bb;
2735 new_bb->aux = bb;
2736 new_bb->loop_father = entry_block_map->loop_father;
2739 last = last_basic_block_for_fn (cfun);
2741 /* Now that we've duplicated the blocks, duplicate their edges. */
2742 basic_block abnormal_goto_dest = NULL;
2743 if (id->call_stmt
2744 && stmt_can_make_abnormal_goto (id->call_stmt))
2746 gimple_stmt_iterator gsi = gsi_for_stmt (id->call_stmt);
2748 bb = gimple_bb (id->call_stmt);
2749 gsi_next (&gsi);
2750 if (gsi_end_p (gsi))
2751 abnormal_goto_dest = get_abnormal_succ_dispatcher (bb);
2753 FOR_ALL_BB_FN (bb, cfun_to_copy)
2754 if (!id->blocks_to_copy
2755 || (bb->index > 0 && bitmap_bit_p (id->blocks_to_copy, bb->index)))
2756 need_debug_cleanup |= copy_edges_for_bb (bb, count_scale, exit_block_map,
2757 abnormal_goto_dest);
2759 if (new_entry)
2761 edge e = make_edge (entry_block_map, (basic_block)new_entry->aux, EDGE_FALLTHRU);
2762 e->probability = REG_BR_PROB_BASE;
2763 e->count = incoming_count;
2766 /* Duplicate the loop tree, if available and wanted. */
2767 if (loops_for_fn (src_cfun) != NULL
2768 && current_loops != NULL)
2770 copy_loops (id, entry_block_map->loop_father,
2771 get_loop (src_cfun, 0));
2772 /* Defer to cfgcleanup to update loop-father fields of basic-blocks. */
2773 loops_state_set (LOOPS_NEED_FIXUP);
2776 /* If the loop tree in the source function needed fixup, mark the
2777 destination loop tree for fixup, too. */
2778 if (loops_for_fn (src_cfun)->state & LOOPS_NEED_FIXUP)
2779 loops_state_set (LOOPS_NEED_FIXUP);
2781 if (gimple_in_ssa_p (cfun))
2782 FOR_ALL_BB_FN (bb, cfun_to_copy)
2783 if (!id->blocks_to_copy
2784 || (bb->index > 0 && bitmap_bit_p (id->blocks_to_copy, bb->index)))
2785 copy_phis_for_bb (bb, id);
2787 FOR_ALL_BB_FN (bb, cfun_to_copy)
2788 if (bb->aux)
2790 if (need_debug_cleanup
2791 && bb->index != ENTRY_BLOCK
2792 && bb->index != EXIT_BLOCK)
2793 maybe_move_debug_stmts_to_successors (id, (basic_block) bb->aux);
2794 /* Update call edge destinations. This cannot be done before loop
2795 info is updated, because we may split basic blocks. */
2796 if (id->transform_call_graph_edges == CB_CGE_DUPLICATE
2797 && bb->index != ENTRY_BLOCK
2798 && bb->index != EXIT_BLOCK)
2799 redirect_all_calls (id, (basic_block)bb->aux);
2800 ((basic_block)bb->aux)->aux = NULL;
2801 bb->aux = NULL;
2804 /* Zero out AUX fields of blocks newly created during EH edge
2805 insertion. */
2806 for (; last < last_basic_block_for_fn (cfun); last++)
2808 if (need_debug_cleanup)
2809 maybe_move_debug_stmts_to_successors (id,
2810 BASIC_BLOCK_FOR_FN (cfun, last));
2811 BASIC_BLOCK_FOR_FN (cfun, last)->aux = NULL;
2812 /* Update call edge destinations. This cannot be done before loop
2813 info is updated, because we may split basic blocks. */
2814 if (id->transform_call_graph_edges == CB_CGE_DUPLICATE)
2815 redirect_all_calls (id, BASIC_BLOCK_FOR_FN (cfun, last));
2817 entry_block_map->aux = NULL;
2818 exit_block_map->aux = NULL;
2820 if (id->eh_map)
2822 delete id->eh_map;
2823 id->eh_map = NULL;
2825 if (id->dependence_map)
2827 delete id->dependence_map;
2828 id->dependence_map = NULL;
2831 return new_fndecl;
2834 /* Copy the debug STMT using ID. We deal with these statements in a
2835 special way: if any variable in their VALUE expression wasn't
2836 remapped yet, we won't remap it, because that would get decl uids
2837 out of sync, causing codegen differences between -g and -g0. If
2838 this arises, we drop the VALUE expression altogether. */
2840 static void
2841 copy_debug_stmt (gdebug *stmt, copy_body_data *id)
2843 tree t, *n;
2844 struct walk_stmt_info wi;
2846 if (gimple_block (stmt))
2848 n = id->decl_map->get (gimple_block (stmt));
2849 gimple_set_block (stmt, n ? *n : id->block);
2852 /* Remap all the operands in COPY. */
2853 memset (&wi, 0, sizeof (wi));
2854 wi.info = id;
2856 processing_debug_stmt = 1;
2858 if (gimple_debug_source_bind_p (stmt))
2859 t = gimple_debug_source_bind_get_var (stmt);
2860 else
2861 t = gimple_debug_bind_get_var (stmt);
2863 if (TREE_CODE (t) == PARM_DECL && id->debug_map
2864 && (n = id->debug_map->get (t)))
2866 gcc_assert (TREE_CODE (*n) == VAR_DECL);
2867 t = *n;
2869 else if (TREE_CODE (t) == VAR_DECL
2870 && !is_global_var (t)
2871 && !id->decl_map->get (t))
2872 /* T is a non-localized variable. */;
2873 else
2874 walk_tree (&t, remap_gimple_op_r, &wi, NULL);
2876 if (gimple_debug_bind_p (stmt))
2878 gimple_debug_bind_set_var (stmt, t);
2880 if (gimple_debug_bind_has_value_p (stmt))
2881 walk_tree (gimple_debug_bind_get_value_ptr (stmt),
2882 remap_gimple_op_r, &wi, NULL);
2884 /* Punt if any decl couldn't be remapped. */
2885 if (processing_debug_stmt < 0)
2886 gimple_debug_bind_reset_value (stmt);
2888 else if (gimple_debug_source_bind_p (stmt))
2890 gimple_debug_source_bind_set_var (stmt, t);
2891 walk_tree (gimple_debug_source_bind_get_value_ptr (stmt),
2892 remap_gimple_op_r, &wi, NULL);
2893 /* When inlining and the source bind refers to one of the optimized-away
2894 parameters, change the source bind into a normal debug bind
2895 referring to the corresponding DEBUG_EXPR_DECL that should have
2896 been bound before the call stmt. */
2897 t = gimple_debug_source_bind_get_value (stmt);
2898 if (t != NULL_TREE
2899 && TREE_CODE (t) == PARM_DECL
2900 && id->call_stmt)
2902 vec<tree, va_gc> **debug_args = decl_debug_args_lookup (id->src_fn);
2903 unsigned int i;
2904 if (debug_args != NULL)
2906 for (i = 0; i < vec_safe_length (*debug_args); i += 2)
2907 if ((**debug_args)[i] == DECL_ORIGIN (t)
2908 && TREE_CODE ((**debug_args)[i + 1]) == DEBUG_EXPR_DECL)
2910 t = (**debug_args)[i + 1];
2911 stmt->subcode = GIMPLE_DEBUG_BIND;
2912 gimple_debug_bind_set_value (stmt, t);
2913 break;
2919 processing_debug_stmt = 0;
2921 update_stmt (stmt);
2924 /* Process deferred debug stmts. In order to give values better odds
2925 of being successfully remapped, we delay the processing of debug
2926 stmts until all other stmts that might require remapping are
2927 processed. */
2929 static void
2930 copy_debug_stmts (copy_body_data *id)
2932 size_t i;
2933 gdebug *stmt;
2935 if (!id->debug_stmts.exists ())
2936 return;
2938 FOR_EACH_VEC_ELT (id->debug_stmts, i, stmt)
2939 copy_debug_stmt (stmt, id);
2941 id->debug_stmts.release ();
2944 /* Make a copy of the body of SRC_FN so that it can be inserted inline in
2945 another function. */
2947 static tree
2948 copy_tree_body (copy_body_data *id)
2950 tree fndecl = id->src_fn;
2951 tree body = DECL_SAVED_TREE (fndecl);
2953 walk_tree (&body, copy_tree_body_r, id, NULL);
2955 return body;
2958 /* Make a copy of the body of FN so that it can be inserted inline in
2959 another function. */
2961 static tree
2962 copy_body (copy_body_data *id, gcov_type count, int frequency_scale,
2963 basic_block entry_block_map, basic_block exit_block_map,
2964 basic_block new_entry)
2966 tree fndecl = id->src_fn;
2967 tree body;
2969 /* If this body has a CFG, walk CFG and copy. */
2970 gcc_assert (ENTRY_BLOCK_PTR_FOR_FN (DECL_STRUCT_FUNCTION (fndecl)));
2971 body = copy_cfg_body (id, count, frequency_scale, entry_block_map, exit_block_map,
2972 new_entry);
2973 copy_debug_stmts (id);
2975 return body;
2978 /* Return true if VALUE is an ADDR_EXPR of an automatic variable
2979 defined in function FN, or of a data member thereof. */
2981 static bool
2982 self_inlining_addr_expr (tree value, tree fn)
2984 tree var;
2986 if (TREE_CODE (value) != ADDR_EXPR)
2987 return false;
2989 var = get_base_address (TREE_OPERAND (value, 0));
2991 return var && auto_var_in_fn_p (var, fn);
2994 /* Append to BB a debug annotation that binds VAR to VALUE, inheriting
2995 lexical block and line number information from base_stmt, if given,
2996 or from the last stmt of the block otherwise. */
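/* For illustration: the annotation built below shows up in GIMPLE dumps as
   a statement of the form

     # DEBUG var => value

   bound to whatever variable target_for_debug_bind returns.  */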
2998 static gimple
2999 insert_init_debug_bind (copy_body_data *id,
3000 basic_block bb, tree var, tree value,
3001 gimple base_stmt)
3003 gimple note;
3004 gimple_stmt_iterator gsi;
3005 tree tracked_var;
3007 if (!gimple_in_ssa_p (id->src_cfun))
3008 return NULL;
3010 if (!opt_for_fn (id->dst_fn, flag_var_tracking_assignments))
3011 return NULL;
3013 tracked_var = target_for_debug_bind (var);
3014 if (!tracked_var)
3015 return NULL;
3017 if (bb)
3019 gsi = gsi_last_bb (bb);
3020 if (!base_stmt && !gsi_end_p (gsi))
3021 base_stmt = gsi_stmt (gsi);
3024 note = gimple_build_debug_bind (tracked_var, unshare_expr (value), base_stmt);
3026 if (bb)
3028 if (!gsi_end_p (gsi))
3029 gsi_insert_after (&gsi, note, GSI_SAME_STMT);
3030 else
3031 gsi_insert_before (&gsi, note, GSI_SAME_STMT);
3034 return note;
3037 static void
3038 insert_init_stmt (copy_body_data *id, basic_block bb, gimple init_stmt)
3040 /* If VAR represents a zero-sized variable, it's possible that the
3041 assignment statement may result in no gimple statements. */
3042 if (init_stmt)
3044 gimple_stmt_iterator si = gsi_last_bb (bb);
3046 /* We can end up with init statements that store to a non-register
3047 from a rhs with a conversion. Handle that here by forcing the
3048 rhs into a temporary. gimple_regimplify_operands is not
3049 prepared to do this for us. */
3050 if (!is_gimple_debug (init_stmt)
3051 && !is_gimple_reg (gimple_assign_lhs (init_stmt))
3052 && is_gimple_reg_type (TREE_TYPE (gimple_assign_lhs (init_stmt)))
3053 && gimple_assign_rhs_class (init_stmt) == GIMPLE_UNARY_RHS)
3055 tree rhs = build1 (gimple_assign_rhs_code (init_stmt),
3056 gimple_expr_type (init_stmt),
3057 gimple_assign_rhs1 (init_stmt));
3058 rhs = force_gimple_operand_gsi (&si, rhs, true, NULL_TREE, false,
3059 GSI_NEW_STMT);
3060 gimple_assign_set_rhs_code (init_stmt, TREE_CODE (rhs));
3061 gimple_assign_set_rhs1 (init_stmt, rhs);
3063 gsi_insert_after (&si, init_stmt, GSI_NEW_STMT);
3064 gimple_regimplify_operands (init_stmt, &si);
3066 if (!is_gimple_debug (init_stmt))
3068 tree def = gimple_assign_lhs (init_stmt);
3069 insert_init_debug_bind (id, bb, def, def, init_stmt);
3074 /* Initialize parameter P with VALUE. If needed, produce the init statement
3075 at the end of BB. When BB is NULL, we return the init statement to be
3076 output later. */
3077 static gimple
3078 setup_one_parameter (copy_body_data *id, tree p, tree value, tree fn,
3079 basic_block bb, tree *vars)
3081 gimple init_stmt = NULL;
3082 tree var;
3083 tree rhs = value;
3084 tree def = (gimple_in_ssa_p (cfun)
3085 ? ssa_default_def (id->src_cfun, p) : NULL);
3087 if (value
3088 && value != error_mark_node
3089 && !useless_type_conversion_p (TREE_TYPE (p), TREE_TYPE (value)))
3091 /* If we can match up types by promotion/demotion do so. */
3092 if (fold_convertible_p (TREE_TYPE (p), value))
3093 rhs = fold_convert (TREE_TYPE (p), value);
3094 else
3096 /* ??? For valid programs we should not end up here.
3097 Still if we end up with truly mismatched types here, fall back
3098 to using a VIEW_CONVERT_EXPR or a literal zero to not leak invalid
3099 GIMPLE to the following passes. */
3100 if (!is_gimple_reg_type (TREE_TYPE (value))
3101 || TYPE_SIZE (TREE_TYPE (p)) == TYPE_SIZE (TREE_TYPE (value)))
3102 rhs = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (p), value);
3103 else
3104 rhs = build_zero_cst (TREE_TYPE (p));
3108 /* Make an equivalent VAR_DECL. Note that we must NOT remap the type
3109 here since the type of this decl must be visible to the calling
3110 function. */
3111 var = copy_decl_to_var (p, id);
3113 /* Declare this new variable. */
3114 DECL_CHAIN (var) = *vars;
3115 *vars = var;
3117 /* Make gimplifier happy about this variable. */
3118 DECL_SEEN_IN_BIND_EXPR_P (var) = 1;
3120 /* If the parameter is never assigned to and has no SSA_NAMEs created,
3121 we would not need to create a new variable here at all, if it
3122 weren't for debug info. Still, we can just use the argument
3123 value. */
3124 if (TREE_READONLY (p)
3125 && !TREE_ADDRESSABLE (p)
3126 && value && !TREE_SIDE_EFFECTS (value)
3127 && !def)
3129 /* We may produce non-gimple trees by adding NOPs or introduce
3130 invalid sharing when the operand is not really constant.
3131 It is not a big deal to prohibit constant propagation here, as
3132 we will constant propagate in the DOM1 pass anyway. */
3133 if (is_gimple_min_invariant (value)
3134 && useless_type_conversion_p (TREE_TYPE (p),
3135 TREE_TYPE (value))
3136 /* We have to be very careful about ADDR_EXPR. Make sure
3137 the base variable isn't a local variable of the inlined
3138 function, e.g., when doing recursive inlining, direct or
3139 mutually-recursive or whatever, which is why we don't
3140 just test whether fn == current_function_decl. */
3141 && ! self_inlining_addr_expr (value, fn))
3143 insert_decl_map (id, p, value);
3144 insert_debug_decl_map (id, p, var);
3145 return insert_init_debug_bind (id, bb, var, value, NULL);
3149 /* Register the VAR_DECL as the equivalent for the PARM_DECL;
3150 that way, when the PARM_DECL is encountered, it will be
3151 automatically replaced by the VAR_DECL. */
3152 insert_decl_map (id, p, var);
3154 /* Even if P was TREE_READONLY, the new VAR should not be.
3155 In the original code, we would have constructed a
3156 temporary, and then the function body would never have
3157 changed the value of P. However, now, we will be
3158 constructing VAR directly. The constructor body may
3159 change its value multiple times as it is being
3160 constructed. Therefore, it must not be TREE_READONLY;
3161 the back-end assumes that a TREE_READONLY variable is
3162 assigned to only once. */
3163 if (TYPE_NEEDS_CONSTRUCTING (TREE_TYPE (p)))
3164 TREE_READONLY (var) = 0;
3166 /* If there is no setup required and we are in SSA, take the easy route
3167 and replace all SSA names representing the function parameter by the
3168 SSA name passed to the function.
3170 We need to construct a map for the variable anyway, as it might be used
3171 in different SSA names when the parameter is set in the function.
3173 Do the replacement at -O0 for const arguments replaced by constants.
3174 This is important for builtin_constant_p and other constructs requiring
3175 a constant argument to be visible in the inlined function body. */
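/* Hypothetical example of the builtin_constant_p case mentioned above: for

     static inline __attribute__ ((always_inline)) int
     is_const (const int x)
     {
       return __builtin_constant_p (x);
     }

   inlined at a call is_const (42), mapping the default definition of X
   directly to the constant 42 lets the builtin fold to 1 in the copied
   body, even at -O0.  */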
3176 if (gimple_in_ssa_p (cfun) && rhs && def && is_gimple_reg (p)
3177 && (optimize
3178 || (TREE_READONLY (p)
3179 && is_gimple_min_invariant (rhs)))
3180 && (TREE_CODE (rhs) == SSA_NAME
3181 || is_gimple_min_invariant (rhs))
3182 && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (def))
3184 insert_decl_map (id, def, rhs);
3185 return insert_init_debug_bind (id, bb, var, rhs, NULL);
3188 /* If the value of the argument is never used, don't bother initializing
3189 it. */
3190 if (optimize && gimple_in_ssa_p (cfun) && !def && is_gimple_reg (p))
3192 gcc_assert (!value || !TREE_SIDE_EFFECTS (value));
3193 return insert_init_debug_bind (id, bb, var, rhs, NULL);
3196 /* Initialize this VAR_DECL from the equivalent argument. Convert
3197 the argument to the proper type in case it was promoted. */
3198 if (value)
3200 if (rhs == error_mark_node)
3202 insert_decl_map (id, p, var);
3203 return insert_init_debug_bind (id, bb, var, rhs, NULL);
3206 STRIP_USELESS_TYPE_CONVERSION (rhs);
3208 /* If we are in SSA form properly remap the default definition
3209 or assign to a dummy SSA name if the parameter is unused and
3210 we are not optimizing. */
3211 if (gimple_in_ssa_p (cfun) && is_gimple_reg (p))
3213 if (def)
3215 def = remap_ssa_name (def, id);
3216 init_stmt = gimple_build_assign (def, rhs);
3217 SSA_NAME_IS_DEFAULT_DEF (def) = 0;
3218 set_ssa_default_def (cfun, var, NULL);
3220 else if (!optimize)
3222 def = make_ssa_name (var);
3223 init_stmt = gimple_build_assign (def, rhs);
3226 else
3227 init_stmt = gimple_build_assign (var, rhs);
3229 if (bb && init_stmt)
3230 insert_init_stmt (id, bb, init_stmt);
3232 return init_stmt;
3235 /* Generate code to initialize the parameters of the function at the
3236 top of the stack in ID from the GIMPLE_CALL STMT. */
3238 static void
3239 initialize_inlined_parameters (copy_body_data *id, gimple stmt,
3240 tree fn, basic_block bb)
3242 tree parms;
3243 size_t i;
3244 tree p;
3245 tree vars = NULL_TREE;
3246 tree static_chain = gimple_call_chain (stmt);
3248 /* Figure out what the parameters are. */
3249 parms = DECL_ARGUMENTS (fn);
3251 /* Loop through the parameter declarations, replacing each with an
3252 equivalent VAR_DECL, appropriately initialized. */
3253 for (p = parms, i = 0; p; p = DECL_CHAIN (p), i++)
3255 tree val;
3256 val = i < gimple_call_num_args (stmt) ? gimple_call_arg (stmt, i) : NULL;
3257 setup_one_parameter (id, p, val, fn, bb, &vars);
3259 /* After remapping parameters remap their types. This has to be done
3260 in a second loop over all parameters to appropriately remap
3261 variable sized arrays when the size is specified in a
3262 parameter following the array. */
3263 for (p = parms, i = 0; p; p = DECL_CHAIN (p), i++)
3265 tree *varp = id->decl_map->get (p);
3266 if (varp
3267 && TREE_CODE (*varp) == VAR_DECL)
3269 tree def = (gimple_in_ssa_p (cfun) && is_gimple_reg (p)
3270 ? ssa_default_def (id->src_cfun, p) : NULL);
3271 tree var = *varp;
3272 TREE_TYPE (var) = remap_type (TREE_TYPE (var), id);
3273 /* Also remap the default definition if it was remapped
3274 to the default definition of the parameter replacement
3275 by the parameter setup. */
3276 if (def)
3278 tree *defp = id->decl_map->get (def);
3279 if (defp
3280 && TREE_CODE (*defp) == SSA_NAME
3281 && SSA_NAME_VAR (*defp) == var)
3282 TREE_TYPE (*defp) = TREE_TYPE (var);
3287 /* Initialize the static chain. */
3288 p = DECL_STRUCT_FUNCTION (fn)->static_chain_decl;
3289 gcc_assert (fn != current_function_decl);
3290 if (p)
3292 /* No static chain? Seems like a bug in tree-nested.c. */
3293 gcc_assert (static_chain);
3295 setup_one_parameter (id, p, static_chain, fn, bb, &vars);
3298 declare_inline_vars (id->block, vars);
3302 /* Declare a return variable to replace the RESULT_DECL for the
3303 function we are calling. An appropriate DECL_STMT is returned.
3304 The USE_STMT is filled to contain a use of the declaration to
3305 indicate the return value of the function.
3307 RETURN_SLOT, if non-null, is the place where the result is to be stored. It
3308 is set only for CALL_EXPR_RETURN_SLOT_OPT. MODIFY_DEST, if non-null,
3309 was the LHS of the MODIFY_EXPR to which this call is the RHS.
3311 RETURN_BOUNDS holds a destination for returned bounds.
3313 The return value is a (possibly null) value that holds the result
3314 as seen by the caller. */
3316 static tree
3317 declare_return_variable (copy_body_data *id, tree return_slot, tree modify_dest,
3318 tree return_bounds, basic_block entry_bb)
3320 tree callee = id->src_fn;
3321 tree result = DECL_RESULT (callee);
3322 tree callee_type = TREE_TYPE (result);
3323 tree caller_type;
3324 tree var, use;
3326 /* Handle type-mismatches in the function declaration return type
3327 vs. the call expression. */
3328 if (modify_dest)
3329 caller_type = TREE_TYPE (modify_dest);
3330 else
3331 caller_type = TREE_TYPE (TREE_TYPE (callee));
3333 /* We don't need to do anything for functions that don't return anything. */
3334 if (VOID_TYPE_P (callee_type))
3335 return NULL_TREE;
3337 /* If there was a return slot, then the return value is the
3338 dereferenced address of that object. */
3339 if (return_slot)
3341 /* The front end shouldn't have used both return_slot and
3342 a modify expression. */
3343 gcc_assert (!modify_dest);
3344 if (DECL_BY_REFERENCE (result))
3346 tree return_slot_addr = build_fold_addr_expr (return_slot);
3347 STRIP_USELESS_TYPE_CONVERSION (return_slot_addr);
3349 /* We are going to construct *&return_slot and we can't do that
3350 for variables believed not to be addressable.
3352 FIXME: This check can possibly match, because values returned
3353 via return slot optimization are not believed to have their address
3354 taken by alias analysis. */
3355 gcc_assert (TREE_CODE (return_slot) != SSA_NAME);
3356 var = return_slot_addr;
3358 else
3360 var = return_slot;
3361 gcc_assert (TREE_CODE (var) != SSA_NAME);
3362 if (TREE_ADDRESSABLE (result))
3363 mark_addressable (var);
3365 if ((TREE_CODE (TREE_TYPE (result)) == COMPLEX_TYPE
3366 || TREE_CODE (TREE_TYPE (result)) == VECTOR_TYPE)
3367 && !DECL_GIMPLE_REG_P (result)
3368 && DECL_P (var))
3369 DECL_GIMPLE_REG_P (var) = 0;
3370 use = NULL;
3371 goto done;
3374 /* All types requiring non-trivial constructors should have been handled. */
3375 gcc_assert (!TREE_ADDRESSABLE (callee_type));
3377 /* Attempt to avoid creating a new temporary variable. */
3378 if (modify_dest
3379 && TREE_CODE (modify_dest) != SSA_NAME)
3381 bool use_it = false;
3383 /* We can't use MODIFY_DEST if there's type promotion involved. */
3384 if (!useless_type_conversion_p (callee_type, caller_type))
3385 use_it = false;
3387 /* ??? If we're assigning to a variable sized type, then we must
3388 reuse the destination variable, because we've no good way to
3389 create variable sized temporaries at this point. */
3390 else if (TREE_CODE (TYPE_SIZE_UNIT (caller_type)) != INTEGER_CST)
3391 use_it = true;
3393 /* If the callee cannot possibly modify MODIFY_DEST, then we can
3394 reuse it as the result of the call directly. Don't do this if
3395 it would promote MODIFY_DEST to addressable. */
3396 else if (TREE_ADDRESSABLE (result))
3397 use_it = false;
3398 else
3400 tree base_m = get_base_address (modify_dest);
3402 /* If the base isn't a decl, then it's a pointer, and we don't
3403 know where that's going to go. */
3404 if (!DECL_P (base_m))
3405 use_it = false;
3406 else if (is_global_var (base_m))
3407 use_it = false;
3408 else if ((TREE_CODE (TREE_TYPE (result)) == COMPLEX_TYPE
3409 || TREE_CODE (TREE_TYPE (result)) == VECTOR_TYPE)
3410 && !DECL_GIMPLE_REG_P (result)
3411 && DECL_GIMPLE_REG_P (base_m))
3412 use_it = false;
3413 else if (!TREE_ADDRESSABLE (base_m))
3414 use_it = true;
3417 if (use_it)
3419 var = modify_dest;
3420 use = NULL;
3421 goto done;
3425 gcc_assert (TREE_CODE (TYPE_SIZE_UNIT (callee_type)) == INTEGER_CST);
3427 var = copy_result_decl_to_var (result, id);
3428 DECL_SEEN_IN_BIND_EXPR_P (var) = 1;
3430 /* Do not have the rest of GCC warn about this variable as it should
3431 not be visible to the user. */
3432 TREE_NO_WARNING (var) = 1;
3434 declare_inline_vars (id->block, var);
3436 /* Build the use expr. If the return type of the function was
3437 promoted, convert it back to the expected type. */
3438 use = var;
3439 if (!useless_type_conversion_p (caller_type, TREE_TYPE (var)))
3441 /* If we can match up types by promotion/demotion do so. */
3442 if (fold_convertible_p (caller_type, var))
3443 use = fold_convert (caller_type, var);
3444 else
3446 /* ??? For valid programs we should not end up here.
3447 Still if we end up with truly mismatched types here, fall back
3448 to using a MEM_REF to not leak invalid GIMPLE to the following
3449 passes. */
3450 /* Prevent var from being written into SSA form. */
3451 if (TREE_CODE (TREE_TYPE (var)) == VECTOR_TYPE
3452 || TREE_CODE (TREE_TYPE (var)) == COMPLEX_TYPE)
3453 DECL_GIMPLE_REG_P (var) = false;
3454 else if (is_gimple_reg_type (TREE_TYPE (var)))
3455 TREE_ADDRESSABLE (var) = true;
3456 use = fold_build2 (MEM_REF, caller_type,
3457 build_fold_addr_expr (var),
3458 build_int_cst (ptr_type_node, 0));
3462 STRIP_USELESS_TYPE_CONVERSION (use);
3464 if (DECL_BY_REFERENCE (result))
3466 TREE_ADDRESSABLE (var) = 1;
3467 var = build_fold_addr_expr (var);
3470 done:
3471 /* Register the VAR_DECL as the equivalent for the RESULT_DECL; that
3472 way, when the RESULT_DECL is encountered, it will be
3473 automatically replaced by the VAR_DECL.
3475 When returning by reference, ensure that RESULT_DECL remaps to
3476 a gimple val. */
3477 if (DECL_BY_REFERENCE (result)
3478 && !is_gimple_val (var))
3480 tree temp = create_tmp_var (TREE_TYPE (result), "retvalptr");
3481 insert_decl_map (id, result, temp);
3482 /* When RESULT_DECL is in SSA form, we need to remap and initialize
3483 its default def SSA_NAME. */
3484 if (gimple_in_ssa_p (id->src_cfun)
3485 && is_gimple_reg (result))
3487 temp = make_ssa_name (temp);
3488 insert_decl_map (id, ssa_default_def (id->src_cfun, result), temp);
3490 insert_init_stmt (id, entry_bb, gimple_build_assign (temp, var));
3492 else
3493 insert_decl_map (id, result, var);
3495 /* Remember this so we can ignore it in remap_decls. */
3496 id->retvar = var;
3498 /* If returned bounds are used, then make var for them. */
3499 if (return_bounds)
3501 tree bndtemp = create_tmp_var (pointer_bounds_type_node, "retbnd");
3502 DECL_SEEN_IN_BIND_EXPR_P (bndtemp) = 1;
3503 TREE_NO_WARNING (bndtemp) = 1;
3504 declare_inline_vars (id->block, bndtemp);
3506 id->retbnd = bndtemp;
3507 insert_init_stmt (id, entry_bb,
3508 gimple_build_assign (bndtemp, chkp_get_zero_bounds_var ()));
3511 return use;
3514 /* Callback through walk_tree. Determine if a DECL_INITIAL makes reference
3515 to a local label. */
3517 static tree
3518 has_label_address_in_static_1 (tree *nodep, int *walk_subtrees, void *fnp)
3520 tree node = *nodep;
3521 tree fn = (tree) fnp;
3523 if (TREE_CODE (node) == LABEL_DECL && DECL_CONTEXT (node) == fn)
3524 return node;
3526 if (TYPE_P (node))
3527 *walk_subtrees = 0;
3529 return NULL_TREE;
3532 /* Determine if the function can be copied. If so, return NULL. If
3533 not, return a string describing the reason for failure. */
3535 const char *
3536 copy_forbidden (struct function *fun, tree fndecl)
3538 const char *reason = fun->cannot_be_copied_reason;
3539 tree decl;
3540 unsigned ix;
3542 /* Only examine the function once. */
3543 if (fun->cannot_be_copied_set)
3544 return reason;
3546 /* We cannot copy a function that receives a non-local goto
3547 because we cannot remap the destination label used in the
3548 function that is performing the non-local goto. */
3549 /* ??? Actually, this should be possible, if we work at it.
3550 No doubt there's just a handful of places that simply
3551 assume it doesn't happen and don't substitute properly. */
3552 if (fun->has_nonlocal_label)
3554 reason = G_("function %q+F can never be copied "
3555 "because it receives a non-local goto");
3556 goto fail;
3559 FOR_EACH_LOCAL_DECL (fun, ix, decl)
3560 if (TREE_CODE (decl) == VAR_DECL
3561 && TREE_STATIC (decl)
3562 && !DECL_EXTERNAL (decl)
3563 && DECL_INITIAL (decl)
3564 && walk_tree_without_duplicates (&DECL_INITIAL (decl),
3565 has_label_address_in_static_1,
3566 fndecl))
3568 reason = G_("function %q+F can never be copied because it saves "
3569 "address of local label in a static variable");
3570 goto fail;
3573 fail:
3574 fun->cannot_be_copied_reason = reason;
3575 fun->cannot_be_copied_set = true;
3576 return reason;
3580 static const char *inline_forbidden_reason;
3582 /* A callback for walk_gimple_seq to handle statements. Returns non-null
3583 iff a function cannot be inlined. Also sets the reason why. */
3585 static tree
3586 inline_forbidden_p_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
3587 struct walk_stmt_info *wip)
3589 tree fn = (tree) wip->info;
3590 tree t;
3591 gimple stmt = gsi_stmt (*gsi);
3593 switch (gimple_code (stmt))
3595 case GIMPLE_CALL:
3596 /* Refuse to inline an alloca call unless the user explicitly forced it,
3597 as this may change the program's memory overhead drastically when the
3598 function using alloca is called in a loop. In the GCC present in
3599 SPEC2000, inlining into schedule_block caused it to require 2GB of
3600 RAM instead of 256MB. Don't do so for alloca calls emitted for
3601 VLA objects, as those can't cause unbounded growth (they're always
3602 wrapped inside stack_save/stack_restore regions). */
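/* Illustrative scenario (helper names assumed): if

     void g (unsigned n) { char *p = alloca (n); use (p); }

   is inlined into the body of a loop, every iteration performs the alloca
   in the caller's frame and the space is only reclaimed when the caller
   returns, so stack usage can grow without bound.  */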
3603 if (gimple_alloca_call_p (stmt)
3604 && !gimple_call_alloca_for_var_p (as_a <gcall *> (stmt))
3605 && !lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn)))
3607 inline_forbidden_reason
3608 = G_("function %q+F can never be inlined because it uses "
3609 "alloca (override using the always_inline attribute)");
3610 *handled_ops_p = true;
3611 return fn;
3614 t = gimple_call_fndecl (stmt);
3615 if (t == NULL_TREE)
3616 break;
3618 /* We cannot inline functions that call setjmp. */
3619 if (setjmp_call_p (t))
3621 inline_forbidden_reason
3622 = G_("function %q+F can never be inlined because it uses setjmp");
3623 *handled_ops_p = true;
3624 return t;
3627 if (DECL_BUILT_IN_CLASS (t) == BUILT_IN_NORMAL)
3628 switch (DECL_FUNCTION_CODE (t))
3630 /* We cannot inline functions that take a variable number of
3631 arguments. */
3632 case BUILT_IN_VA_START:
3633 case BUILT_IN_NEXT_ARG:
3634 case BUILT_IN_VA_END:
3635 inline_forbidden_reason
3636 = G_("function %q+F can never be inlined because it "
3637 "uses variable argument lists");
3638 *handled_ops_p = true;
3639 return t;
3641 case BUILT_IN_LONGJMP:
3642 /* We can't inline functions that call __builtin_longjmp at
3643 all. The non-local goto machinery really requires the
3644 destination be in a different function. If we allow the
3645 function calling __builtin_longjmp to be inlined into the
3646 function calling __builtin_setjmp, Things will Go Awry. */
3647 inline_forbidden_reason
3648 = G_("function %q+F can never be inlined because "
3649 "it uses setjmp-longjmp exception handling");
3650 *handled_ops_p = true;
3651 return t;
3653 case BUILT_IN_NONLOCAL_GOTO:
3654 /* Similarly. */
3655 inline_forbidden_reason
3656 = G_("function %q+F can never be inlined because "
3657 "it uses non-local goto");
3658 *handled_ops_p = true;
3659 return t;
3661 case BUILT_IN_RETURN:
3662 case BUILT_IN_APPLY_ARGS:
3663 /* If a __builtin_apply_args caller would be inlined,
3664 it would be saving arguments of the function it has
3665      been inlined into.  Similarly, __builtin_return would
3666      return from the function it has been inlined into.  */
3667 inline_forbidden_reason
3668 = G_("function %q+F can never be inlined because "
3669 "it uses __builtin_return or __builtin_apply_args");
3670 *handled_ops_p = true;
3671 return t;
3673 default:
3674 break;
3676 break;
3678 case GIMPLE_GOTO:
3679 t = gimple_goto_dest (stmt);
3681 /* We will not inline a function which uses computed goto. The
3682 addresses of its local labels, which may be tucked into
3683 global storage, are of course not constant across
3684 instantiations, which causes unexpected behavior. */
3685 if (TREE_CODE (t) != LABEL_DECL)
3687 inline_forbidden_reason
3688 = G_("function %q+F can never be inlined "
3689 "because it contains a computed goto");
3690 *handled_ops_p = true;
3691 return t;
3693 break;
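/* For illustration (hypothetical code), the construct rejected above:

     static void *const table[] = { &&op_add, &&op_sub };
     goto *table[opcode];
   op_add: ...
   op_sub: ...

   The computed goto becomes a GIMPLE_GOTO whose destination is not a
   LABEL_DECL, and the &&label values may already have escaped, so a
   copied body could still branch to labels of the original function.  */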
3695 default:
3696 break;
3699 *handled_ops_p = false;
3700 return NULL_TREE;
3703 /* Return true if FNDECL is a function that cannot be inlined into
3704 another one. */
3706 static bool
3707 inline_forbidden_p (tree fndecl)
3709 struct function *fun = DECL_STRUCT_FUNCTION (fndecl);
3710 struct walk_stmt_info wi;
3711 basic_block bb;
3712 bool forbidden_p = false;
3714 /* First check for shared reasons not to copy the code. */
3715 inline_forbidden_reason = copy_forbidden (fun, fndecl);
3716 if (inline_forbidden_reason != NULL)
3717 return true;
3719 /* Next, walk the statements of the function looking for
3720    constructs we can't handle, or that are non-optimal for inlining.  */
3721 hash_set<tree> visited_nodes;
3722 memset (&wi, 0, sizeof (wi));
3723 wi.info = (void *) fndecl;
3724 wi.pset = &visited_nodes;
3726 FOR_EACH_BB_FN (bb, fun)
3728 gimple ret;
3729 gimple_seq seq = bb_seq (bb);
3730 ret = walk_gimple_seq (seq, inline_forbidden_p_stmt, NULL, &wi);
3731 forbidden_p = (ret != NULL);
3732 if (forbidden_p)
3733 break;
3736 return forbidden_p;
3739 /* Return false if the function FNDECL cannot be inlined on account of its
3740 attributes, true otherwise. */
3741 static bool
3742 function_attribute_inlinable_p (const_tree fndecl)
3744 if (targetm.attribute_table)
3746 const_tree a;
3748 for (a = DECL_ATTRIBUTES (fndecl); a; a = TREE_CHAIN (a))
3750 const_tree name = TREE_PURPOSE (a);
3751 int i;
3753 for (i = 0; targetm.attribute_table[i].name != NULL; i++)
3754 if (is_attribute_p (targetm.attribute_table[i].name, name))
3755 return targetm.function_attribute_inlinable_p (fndecl);
3759 return true;
3762 /* Returns nonzero if FN is a function that does not have any
3763 fundamental inline blocking properties. */
3765 bool
3766 tree_inlinable_function_p (tree fn)
3768 bool inlinable = true;
3769 bool do_warning;
3770 tree always_inline;
3772 /* If we've already decided this function shouldn't be inlined,
3773 there's no need to check again. */
3774 if (DECL_UNINLINABLE (fn))
3775 return false;
3777 /* We only warn for functions declared `inline' by the user. */
3778 do_warning = (warn_inline
3779 && DECL_DECLARED_INLINE_P (fn)
3780 && !DECL_NO_INLINE_WARNING_P (fn)
3781 && !DECL_IN_SYSTEM_HEADER (fn));
3783 always_inline = lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn));
3785 if (flag_no_inline
3786 && always_inline == NULL)
3788 if (do_warning)
3789 warning (OPT_Winline, "function %q+F can never be inlined because it "
3790 "is suppressed using -fno-inline", fn);
3791 inlinable = false;
3794 else if (!function_attribute_inlinable_p (fn))
3796 if (do_warning)
3797 warning (OPT_Winline, "function %q+F can never be inlined because it "
3798 "uses attributes conflicting with inlining", fn);
3799 inlinable = false;
3802 else if (inline_forbidden_p (fn))
3804 /* See if we should warn about uninlinable functions. Previously,
3805 some of these warnings would be issued while trying to expand
3806 the function inline, but that would cause multiple warnings
3807 about functions that would for example call alloca. But since
3808      this is a property of the function, just one warning is enough.
3809 As a bonus we can now give more details about the reason why a
3810 function is not inlinable. */
3811 if (always_inline)
3812 error (inline_forbidden_reason, fn);
3813 else if (do_warning)
3814 warning (OPT_Winline, inline_forbidden_reason, fn);
3816 inlinable = false;
3819 /* Squirrel away the result so that we don't have to check again. */
3820 DECL_UNINLINABLE (fn) = !inlinable;
3822 return inlinable;
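/* For illustration, a hypothetical pair of declarations:

     static inline int add (int a, int b) { return a + b; }
     static inline int __attribute__ ((always_inline))
     add2 (int a, int b) { return a + b; }

   Under -fno-inline only ADD2 remains an inline candidate, because an
   explicit always_inline attribute overrides the flag; ADD is rejected
   here and, being declared `inline' by the user, may trigger the
   -Winline warning above.  */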
3825 /* Estimate the cost of a memory move of type TYPE.  Use the machine-dependent
3826    word size, take a possible memcpy call into account, and return the
3827    cost based on whether we optimize for size or speed according to SPEED_P.  */
3830 estimate_move_cost (tree type, bool ARG_UNUSED (speed_p))
3832 HOST_WIDE_INT size;
3834 gcc_assert (!VOID_TYPE_P (type));
3836 if (TREE_CODE (type) == VECTOR_TYPE)
3838 machine_mode inner = TYPE_MODE (TREE_TYPE (type));
3839 machine_mode simd
3840 = targetm.vectorize.preferred_simd_mode (inner);
3841 int simd_mode_size = GET_MODE_SIZE (simd);
3842 return ((GET_MODE_SIZE (TYPE_MODE (type)) + simd_mode_size - 1)
3843 / simd_mode_size);
3846 size = int_size_in_bytes (type);
3848 if (size < 0 || size > MOVE_MAX_PIECES * MOVE_RATIO (speed_p))
3849 /* Cost of a memcpy call, 3 arguments and the call. */
3850 return 4;
3851 else
3852 return ((size + MOVE_MAX_PIECES - 1) / MOVE_MAX_PIECES);
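/* A worked example of the computation above, assuming (purely for
   illustration) MOVE_MAX_PIECES == 8 and MOVE_RATIO (speed_p) == 4:
   a 24-byte structure satisfies 24 <= 8 * 4 and is assumed to be moved
   by pieces, costing (24 + 8 - 1) / 8 == 3, while a 64-byte structure
   exceeds the limit and is assumed to become a memcpy call, costing 4.  */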
3855 /* Returns cost of operation CODE, according to WEIGHTS */
3857 static int
3858 estimate_operator_cost (enum tree_code code, eni_weights *weights,
3859 tree op1 ATTRIBUTE_UNUSED, tree op2)
3861 switch (code)
3863 /* These are "free" conversions, or their presumed cost
3864 is folded into other operations. */
3865 case RANGE_EXPR:
3866 CASE_CONVERT:
3867 case COMPLEX_EXPR:
3868 case PAREN_EXPR:
3869 case VIEW_CONVERT_EXPR:
3870 return 0;
3872 /* Assign cost of 1 to usual operations.
3873 ??? We may consider mapping RTL costs to this. */
3874 case COND_EXPR:
3875 case VEC_COND_EXPR:
3876 case VEC_PERM_EXPR:
3878 case PLUS_EXPR:
3879 case POINTER_PLUS_EXPR:
3880 case MINUS_EXPR:
3881 case MULT_EXPR:
3882 case MULT_HIGHPART_EXPR:
3883 case FMA_EXPR:
3885 case ADDR_SPACE_CONVERT_EXPR:
3886 case FIXED_CONVERT_EXPR:
3887 case FIX_TRUNC_EXPR:
3889 case NEGATE_EXPR:
3890 case FLOAT_EXPR:
3891 case MIN_EXPR:
3892 case MAX_EXPR:
3893 case ABS_EXPR:
3895 case LSHIFT_EXPR:
3896 case RSHIFT_EXPR:
3897 case LROTATE_EXPR:
3898 case RROTATE_EXPR:
3900 case BIT_IOR_EXPR:
3901 case BIT_XOR_EXPR:
3902 case BIT_AND_EXPR:
3903 case BIT_NOT_EXPR:
3905 case TRUTH_ANDIF_EXPR:
3906 case TRUTH_ORIF_EXPR:
3907 case TRUTH_AND_EXPR:
3908 case TRUTH_OR_EXPR:
3909 case TRUTH_XOR_EXPR:
3910 case TRUTH_NOT_EXPR:
3912 case LT_EXPR:
3913 case LE_EXPR:
3914 case GT_EXPR:
3915 case GE_EXPR:
3916 case EQ_EXPR:
3917 case NE_EXPR:
3918 case ORDERED_EXPR:
3919 case UNORDERED_EXPR:
3921 case UNLT_EXPR:
3922 case UNLE_EXPR:
3923 case UNGT_EXPR:
3924 case UNGE_EXPR:
3925 case UNEQ_EXPR:
3926 case LTGT_EXPR:
3928 case CONJ_EXPR:
3930 case PREDECREMENT_EXPR:
3931 case PREINCREMENT_EXPR:
3932 case POSTDECREMENT_EXPR:
3933 case POSTINCREMENT_EXPR:
3935 case REALIGN_LOAD_EXPR:
3937 case REDUC_MAX_EXPR:
3938 case REDUC_MIN_EXPR:
3939 case REDUC_PLUS_EXPR:
3940 case WIDEN_SUM_EXPR:
3941 case WIDEN_MULT_EXPR:
3942 case DOT_PROD_EXPR:
3943 case SAD_EXPR:
3944 case WIDEN_MULT_PLUS_EXPR:
3945 case WIDEN_MULT_MINUS_EXPR:
3946 case WIDEN_LSHIFT_EXPR:
3948 case VEC_WIDEN_MULT_HI_EXPR:
3949 case VEC_WIDEN_MULT_LO_EXPR:
3950 case VEC_WIDEN_MULT_EVEN_EXPR:
3951 case VEC_WIDEN_MULT_ODD_EXPR:
3952 case VEC_UNPACK_HI_EXPR:
3953 case VEC_UNPACK_LO_EXPR:
3954 case VEC_UNPACK_FLOAT_HI_EXPR:
3955 case VEC_UNPACK_FLOAT_LO_EXPR:
3956 case VEC_PACK_TRUNC_EXPR:
3957 case VEC_PACK_SAT_EXPR:
3958 case VEC_PACK_FIX_TRUNC_EXPR:
3959 case VEC_WIDEN_LSHIFT_HI_EXPR:
3960 case VEC_WIDEN_LSHIFT_LO_EXPR:
3962 return 1;
3964 /* A few special cases of expensive operations.  This is useful
3965    for avoiding the inlining of functions that have too many of these.  */
3966 case TRUNC_DIV_EXPR:
3967 case CEIL_DIV_EXPR:
3968 case FLOOR_DIV_EXPR:
3969 case ROUND_DIV_EXPR:
3970 case EXACT_DIV_EXPR:
3971 case TRUNC_MOD_EXPR:
3972 case CEIL_MOD_EXPR:
3973 case FLOOR_MOD_EXPR:
3974 case ROUND_MOD_EXPR:
3975 case RDIV_EXPR:
3976 if (TREE_CODE (op2) != INTEGER_CST)
3977 return weights->div_mod_cost;
3978 return 1;
3980 default:
3981 /* We expect a copy assignment with no operator. */
3982 gcc_assert (get_gimple_rhs_class (code) == GIMPLE_SINGLE_RHS);
3983 return 0;
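/* For example, with the time-based weights set up in init_inline_once
   below (div_mod_cost == 10), a statement like "x = a / b" whose divisor
   is not an INTEGER_CST is weighted ten times as heavily as "x = a / 8",
   since division by a constant is normally strength-reduced at expansion
   time; with the size-based weights the two cost the same.  */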
3988 /* Estimate number of instructions that will be created by expanding
3989 the statements in the statement sequence STMTS.
3990 WEIGHTS contains weights attributed to various constructs. */
3992 static
3993 int estimate_num_insns_seq (gimple_seq stmts, eni_weights *weights)
3995 int cost;
3996 gimple_stmt_iterator gsi;
3998 cost = 0;
3999 for (gsi = gsi_start (stmts); !gsi_end_p (gsi); gsi_next (&gsi))
4000 cost += estimate_num_insns (gsi_stmt (gsi), weights);
4002 return cost;
4006 /* Estimate number of instructions that will be created by expanding STMT.
4007 WEIGHTS contains weights attributed to various constructs. */
4010 estimate_num_insns (gimple stmt, eni_weights *weights)
4012 unsigned cost, i;
4013 enum gimple_code code = gimple_code (stmt);
4014 tree lhs;
4015 tree rhs;
4017 switch (code)
4019 case GIMPLE_ASSIGN:
4020     /* Try to estimate the cost of assignments.  We have two cases to
4021 deal with:
4022 1) Simple assignments to registers;
4023 2) Stores to things that must live in memory. This includes
4024 "normal" stores to scalars, but also assignments of large
4025 structures, or constructors of big arrays;
4027        Let us look at both cases, assuming we have "a = b + C":
4028 <GIMPLE_ASSIGN <var_decl "a">
4029 <plus_expr <var_decl "b"> <constant C>>
4030 If "a" is a GIMPLE register, the assignment to it is free on almost
4031 any target, because "a" usually ends up in a real register. Hence
4032 the only cost of this expression comes from the PLUS_EXPR, and we
4033 can ignore the GIMPLE_ASSIGN.
4034 If "a" is not a GIMPLE register, the assignment to "a" will most
4035 likely be a real store, so the cost of the GIMPLE_ASSIGN is the cost
4036 of moving something into "a", which we compute using the function
4037 estimate_move_cost. */
4038 if (gimple_clobber_p (stmt))
4039 return 0; /* ={v} {CLOBBER} stmt expands to nothing. */
4041 lhs = gimple_assign_lhs (stmt);
4042 rhs = gimple_assign_rhs1 (stmt);
4044 cost = 0;
4046 /* Account for the cost of moving to / from memory. */
4047 if (gimple_store_p (stmt))
4048 cost += estimate_move_cost (TREE_TYPE (lhs), weights->time_based);
4049 if (gimple_assign_load_p (stmt))
4050 cost += estimate_move_cost (TREE_TYPE (rhs), weights->time_based);
4052 cost += estimate_operator_cost (gimple_assign_rhs_code (stmt), weights,
4053 gimple_assign_rhs1 (stmt),
4054 get_gimple_rhs_class (gimple_assign_rhs_code (stmt))
4055 == GIMPLE_BINARY_RHS
4056 ? gimple_assign_rhs2 (stmt) : NULL);
4057 break;
4059 case GIMPLE_COND:
4060 cost = 1 + estimate_operator_cost (gimple_cond_code (stmt), weights,
4061 gimple_op (stmt, 0),
4062 gimple_op (stmt, 1));
4063 break;
4065 case GIMPLE_SWITCH:
4067 gswitch *switch_stmt = as_a <gswitch *> (stmt);
4068 /* Take into account cost of the switch + guess 2 conditional jumps for
4069 each case label.
4071        TODO: once the switch expansion logic is sufficiently separated, we can
4072        do a better job of estimating the cost of the switch.  */
4073 if (weights->time_based)
4074 cost = floor_log2 (gimple_switch_num_labels (switch_stmt)) * 2;
4075 else
4076 cost = gimple_switch_num_labels (switch_stmt) * 2;
4078 break;
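/* For example, a GIMPLE_SWITCH with 10 labels (including the default) is
   estimated at floor_log2 (10) * 2 == 6 when WEIGHTS->time_based, roughly
   the depth of a balanced decision tree, and at 10 * 2 == 20 when
   estimating size.  */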
4080 case GIMPLE_CALL:
4082 tree decl;
4084 if (gimple_call_internal_p (stmt))
4085 return 0;
4086 else if ((decl = gimple_call_fndecl (stmt))
4087 && DECL_BUILT_IN (decl))
4089         /* Do not special-case builtins where we see the body.
4090            This just confuses the inliner.  */
4091 struct cgraph_node *node;
4092 if (!(node = cgraph_node::get (decl))
4093 || node->definition)
4095         /* For builtins that are likely expanded to nothing or
4096            inlined, do not account for operand costs.  */
4097 else if (is_simple_builtin (decl))
4098 return 0;
4099 else if (is_inexpensive_builtin (decl))
4100 return weights->target_builtin_call_cost;
4101 else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
4103 /* We canonicalize x * x to pow (x, 2.0) with -ffast-math, so
4104 specialize the cheap expansion we do here.
4105 ??? This asks for a more general solution. */
4106 switch (DECL_FUNCTION_CODE (decl))
4108 case BUILT_IN_POW:
4109 case BUILT_IN_POWF:
4110 case BUILT_IN_POWL:
4111 if (TREE_CODE (gimple_call_arg (stmt, 1)) == REAL_CST
4112 && REAL_VALUES_EQUAL
4113 (TREE_REAL_CST (gimple_call_arg (stmt, 1)), dconst2))
4114 return estimate_operator_cost
4115 (MULT_EXPR, weights, gimple_call_arg (stmt, 0),
4116 gimple_call_arg (stmt, 0));
4117 break;
4119 default:
4120 break;
4125 cost = decl ? weights->call_cost : weights->indirect_call_cost;
4126 if (gimple_call_lhs (stmt))
4127 cost += estimate_move_cost (TREE_TYPE (gimple_call_lhs (stmt)),
4128 weights->time_based);
4129 for (i = 0; i < gimple_call_num_args (stmt); i++)
4131 tree arg = gimple_call_arg (stmt, i);
4132 cost += estimate_move_cost (TREE_TYPE (arg),
4133 weights->time_based);
4135 break;
4138 case GIMPLE_RETURN:
4139 return weights->return_cost;
4141 case GIMPLE_GOTO:
4142 case GIMPLE_LABEL:
4143 case GIMPLE_NOP:
4144 case GIMPLE_PHI:
4145 case GIMPLE_PREDICT:
4146 case GIMPLE_DEBUG:
4147 return 0;
4149 case GIMPLE_ASM:
4151 int count = asm_str_count (gimple_asm_string (as_a <gasm *> (stmt)));
4152 /* 1000 means infinity. This avoids overflows later
4153 with very long asm statements. */
4154 if (count > 1000)
4155 count = 1000;
4156 return count;
4159 case GIMPLE_RESX:
4160 /* This is either going to be an external function call with one
4161 argument, or two register copy statements plus a goto. */
4162 return 2;
4164 case GIMPLE_EH_DISPATCH:
4165 /* ??? This is going to turn into a switch statement. Ideally
4166 we'd have a look at the eh region and estimate the number of
4167 edges involved. */
4168 return 10;
4170 case GIMPLE_BIND:
4171 return estimate_num_insns_seq (
4172 gimple_bind_body (as_a <gbind *> (stmt)),
4173 weights);
4175 case GIMPLE_EH_FILTER:
4176 return estimate_num_insns_seq (gimple_eh_filter_failure (stmt), weights);
4178 case GIMPLE_CATCH:
4179 return estimate_num_insns_seq (gimple_catch_handler (
4180 as_a <gcatch *> (stmt)),
4181 weights);
4183 case GIMPLE_TRY:
4184 return (estimate_num_insns_seq (gimple_try_eval (stmt), weights)
4185 + estimate_num_insns_seq (gimple_try_cleanup (stmt), weights));
4187 /* OMP directives are generally very expensive. */
4189 case GIMPLE_OMP_RETURN:
4190 case GIMPLE_OMP_SECTIONS_SWITCH:
4191 case GIMPLE_OMP_ATOMIC_STORE:
4192 case GIMPLE_OMP_CONTINUE:
4193 /* ...except these, which are cheap. */
4194 return 0;
4196 case GIMPLE_OMP_ATOMIC_LOAD:
4197 return weights->omp_cost;
4199 case GIMPLE_OMP_FOR:
4200 return (weights->omp_cost
4201 + estimate_num_insns_seq (gimple_omp_body (stmt), weights)
4202 + estimate_num_insns_seq (gimple_omp_for_pre_body (stmt), weights));
4204 case GIMPLE_OMP_PARALLEL:
4205 case GIMPLE_OMP_TASK:
4206 case GIMPLE_OMP_CRITICAL:
4207 case GIMPLE_OMP_MASTER:
4208 case GIMPLE_OMP_TASKGROUP:
4209 case GIMPLE_OMP_ORDERED:
4210 case GIMPLE_OMP_SECTION:
4211 case GIMPLE_OMP_SECTIONS:
4212 case GIMPLE_OMP_SINGLE:
4213 case GIMPLE_OMP_TARGET:
4214 case GIMPLE_OMP_TEAMS:
4215 return (weights->omp_cost
4216 + estimate_num_insns_seq (gimple_omp_body (stmt), weights));
4218 case GIMPLE_TRANSACTION:
4219 return (weights->tm_cost
4220 + estimate_num_insns_seq (gimple_transaction_body (
4221 as_a <gtransaction *> (stmt)),
4222 weights));
4224 default:
4225 gcc_unreachable ();
4228 return cost;
4231 /* Estimate number of instructions that will be created by expanding
4232 function FNDECL. WEIGHTS contains weights attributed to various
4233 constructs. */
4236 estimate_num_insns_fn (tree fndecl, eni_weights *weights)
4238 struct function *my_function = DECL_STRUCT_FUNCTION (fndecl);
4239 gimple_stmt_iterator bsi;
4240 basic_block bb;
4241 int n = 0;
4243 gcc_assert (my_function && my_function->cfg);
4244 FOR_EACH_BB_FN (bb, my_function)
4246 for (bsi = gsi_start_bb (bb); !gsi_end_p (bsi); gsi_next (&bsi))
4247 n += estimate_num_insns (gsi_stmt (bsi), weights);
4250 return n;
4254 /* Initializes weights used by estimate_num_insns. */
4256 void
4257 init_inline_once (void)
4259 eni_size_weights.call_cost = 1;
4260 eni_size_weights.indirect_call_cost = 3;
4261 eni_size_weights.target_builtin_call_cost = 1;
4262 eni_size_weights.div_mod_cost = 1;
4263 eni_size_weights.omp_cost = 40;
4264 eni_size_weights.tm_cost = 10;
4265 eni_size_weights.time_based = false;
4266 eni_size_weights.return_cost = 1;
4268 /* Estimating time for call is difficult, since we have no idea what the
4269 called function does. In the current uses of eni_time_weights,
4270 underestimating the cost does less harm than overestimating it, so
4271 we choose a rather small value here. */
4272 eni_time_weights.call_cost = 10;
4273 eni_time_weights.indirect_call_cost = 15;
4274 eni_time_weights.target_builtin_call_cost = 1;
4275 eni_time_weights.div_mod_cost = 10;
4276 eni_time_weights.omp_cost = 40;
4277 eni_time_weights.tm_cost = 40;
4278 eni_time_weights.time_based = true;
4279 eni_time_weights.return_cost = 2;
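/* As a rough example of how these weights combine in estimate_num_insns,
   assuming scalar moves cost 1 on the target: a statement such as
   "res = foo (a, b)" (a direct call with scalar arguments and result)
   is estimated at call_cost plus three moves, i.e. about 13 units with
   eni_time_weights and about 4 units with eni_size_weights.  */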
4282 /* Estimate the number of instructions in a gimple_seq. */
4285 count_insns_seq (gimple_seq seq, eni_weights *weights)
4287 gimple_stmt_iterator gsi;
4288 int n = 0;
4289 for (gsi = gsi_start (seq); !gsi_end_p (gsi); gsi_next (&gsi))
4290 n += estimate_num_insns (gsi_stmt (gsi), weights);
4292 return n;
4296 /* Install new lexical TREE_BLOCK underneath 'current_block'. */
4298 static void
4299 prepend_lexical_block (tree current_block, tree new_block)
4301 BLOCK_CHAIN (new_block) = BLOCK_SUBBLOCKS (current_block);
4302 BLOCK_SUBBLOCKS (current_block) = new_block;
4303 BLOCK_SUPERCONTEXT (new_block) = current_block;
4306 /* Add local variables from CALLEE to CALLER. */
4308 static inline void
4309 add_local_variables (struct function *callee, struct function *caller,
4310 copy_body_data *id)
4312 tree var;
4313 unsigned ix;
4315 FOR_EACH_LOCAL_DECL (callee, ix, var)
4316 if (!can_be_nonlocal (var, id))
4318 tree new_var = remap_decl (var, id);
4320 /* Remap debug-expressions. */
4321 if (TREE_CODE (new_var) == VAR_DECL
4322 && DECL_HAS_DEBUG_EXPR_P (var)
4323 && new_var != var)
4325 tree tem = DECL_DEBUG_EXPR (var);
4326 bool old_regimplify = id->regimplify;
4327 id->remapping_type_depth++;
4328 walk_tree (&tem, copy_tree_body_r, id, NULL);
4329 id->remapping_type_depth--;
4330 id->regimplify = old_regimplify;
4331 SET_DECL_DEBUG_EXPR (new_var, tem);
4332 DECL_HAS_DEBUG_EXPR_P (new_var) = 1;
4334 add_local_decl (caller, new_var);
4338 /* Add to BINDINGS a debug stmt resetting SRCVAR if inlining might
4339 have brought in or introduced any debug stmts for SRCVAR. */
4341 static inline void
4342 reset_debug_binding (copy_body_data *id, tree srcvar, gimple_seq *bindings)
4344 tree *remappedvarp = id->decl_map->get (srcvar);
4346 if (!remappedvarp)
4347 return;
4349 if (TREE_CODE (*remappedvarp) != VAR_DECL)
4350 return;
4352 if (*remappedvarp == id->retvar || *remappedvarp == id->retbnd)
4353 return;
4355 tree tvar = target_for_debug_bind (*remappedvarp);
4356 if (!tvar)
4357 return;
4359 gdebug *stmt = gimple_build_debug_bind (tvar, NULL_TREE,
4360 id->call_stmt);
4361 gimple_seq_add_stmt (bindings, stmt);
4364 /* For each inlined variable for which we may have debug bind stmts,
4365 add before GSI a final debug stmt resetting it, marking the end of
4366 its life, so that var-tracking knows it doesn't have to compute
4367 further locations for it. */
4369 static inline void
4370 reset_debug_bindings (copy_body_data *id, gimple_stmt_iterator gsi)
4372 tree var;
4373 unsigned ix;
4374 gimple_seq bindings = NULL;
4376 if (!gimple_in_ssa_p (id->src_cfun))
4377 return;
4379 if (!opt_for_fn (id->dst_fn, flag_var_tracking_assignments))
4380 return;
4382 for (var = DECL_ARGUMENTS (id->src_fn);
4383 var; var = DECL_CHAIN (var))
4384 reset_debug_binding (id, var, &bindings);
4386 FOR_EACH_LOCAL_DECL (id->src_cfun, ix, var)
4387 reset_debug_binding (id, var, &bindings);
4389 gsi_insert_seq_before_without_update (&gsi, bindings, GSI_SAME_STMT);
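/* For illustration (variable names are made up): after inlining a callee
   with a parameter N and a local TMP, the sequence inserted here looks
   roughly like

     # DEBUG n => NULL
     # DEBUG tmp => NULL

   placed at the point where the call statement used to be, so that
   var-tracking stops extending locations for the inlined copies.  */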
4392 /* If STMT is a GIMPLE_CALL, replace it with its inline expansion. */
4394 static bool
4395 expand_call_inline (basic_block bb, gimple stmt, copy_body_data *id)
4397 tree use_retvar;
4398 tree fn;
4399 hash_map<tree, tree> *dst;
4400 hash_map<tree, tree> *st = NULL;
4401 tree return_slot;
4402 tree modify_dest;
4403 tree return_bounds = NULL;
4404 location_t saved_location;
4405 struct cgraph_edge *cg_edge;
4406 cgraph_inline_failed_t reason;
4407 basic_block return_block;
4408 edge e;
4409 gimple_stmt_iterator gsi, stmt_gsi;
4410 bool successfully_inlined = FALSE;
4411 bool purge_dead_abnormal_edges;
4412 gcall *call_stmt;
4413 unsigned int i;
4415 /* Set input_location here so we get the right instantiation context
4416 if we call instantiate_decl from inlinable_function_p. */
4417 /* FIXME: instantiate_decl isn't called by inlinable_function_p. */
4418 saved_location = input_location;
4419 input_location = gimple_location (stmt);
4421 /* From here on, we're only interested in CALL_EXPRs. */
4422 call_stmt = dyn_cast <gcall *> (stmt);
4423 if (!call_stmt)
4424 goto egress;
4426 cg_edge = id->dst_node->get_edge (stmt);
4427 gcc_checking_assert (cg_edge);
4428 /* First, see if we can figure out what function is being called.
4429 If we cannot, then there is no hope of inlining the function. */
4430 if (cg_edge->indirect_unknown_callee)
4431 goto egress;
4432 fn = cg_edge->callee->decl;
4433 gcc_checking_assert (fn);
4435 /* If FN is a declaration of a function in a nested scope that was
4436 globally declared inline, we don't set its DECL_INITIAL.
4437 However, we can't blindly follow DECL_ABSTRACT_ORIGIN because the
4438 C++ front-end uses it for cdtors to refer to their internal
4439      declarations, which are not real functions.  Fortunately those
4440 don't have trees to be saved, so we can tell by checking their
4441 gimple_body. */
4442 if (!DECL_INITIAL (fn)
4443 && DECL_ABSTRACT_ORIGIN (fn)
4444 && gimple_has_body_p (DECL_ABSTRACT_ORIGIN (fn)))
4445 fn = DECL_ABSTRACT_ORIGIN (fn);
4447 /* Don't try to inline functions that are not well-suited to inlining. */
4448 if (cg_edge->inline_failed)
4450 reason = cg_edge->inline_failed;
4451 /* If this call was originally indirect, we do not want to emit any
4452 inlining related warnings or sorry messages because there are no
4453 guarantees regarding those. */
4454 if (cg_edge->indirect_inlining_edge)
4455 goto egress;
4457 if (lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn))
4458          /* For extern inline functions that get redefined we have always
4459             silently ignored the always_inline flag.  Better behaviour would
4460             be to keep both bodies and use the extern inline body
4461             for inlining, but we can't do that because frontends overwrite
4462             the body.  */
4463 && !cg_edge->callee->local.redefined_extern_inline
4464 /* During early inline pass, report only when optimization is
4465 not turned on. */
4466 && (symtab->global_info_ready
4467 || !optimize
4468 || cgraph_inline_failed_type (reason) == CIF_FINAL_ERROR)
4469 /* PR 20090218-1_0.c. Body can be provided by another module. */
4470 && (reason != CIF_BODY_NOT_AVAILABLE || !flag_generate_lto))
4472 error ("inlining failed in call to always_inline %q+F: %s", fn,
4473 cgraph_inline_failed_string (reason));
4474 error ("called from here");
4476 else if (warn_inline
4477 && DECL_DECLARED_INLINE_P (fn)
4478 && !DECL_NO_INLINE_WARNING_P (fn)
4479 && !DECL_IN_SYSTEM_HEADER (fn)
4480 && reason != CIF_UNSPECIFIED
4481 && !lookup_attribute ("noinline", DECL_ATTRIBUTES (fn))
4482 /* Do not warn about not inlined recursive calls. */
4483 && !cg_edge->recursive_p ()
4484 /* Avoid warnings during early inline pass. */
4485 && symtab->global_info_ready)
4487 warning (OPT_Winline, "inlining failed in call to %q+F: %s",
4488 fn, _(cgraph_inline_failed_string (reason)));
4489 warning (OPT_Winline, "called from here");
4491 goto egress;
4493 fn = cg_edge->callee->decl;
4494 cg_edge->callee->get_untransformed_body ();
4496 #ifdef ENABLE_CHECKING
4497 if (cg_edge->callee->decl != id->dst_node->decl)
4498 cg_edge->callee->verify ();
4499 #endif
4501 /* We will be inlining this callee. */
4502 id->eh_lp_nr = lookup_stmt_eh_lp (stmt);
4503 id->assign_stmts.create (0);
4505   /* Update the caller's EH personality.  */
4506 if (DECL_FUNCTION_PERSONALITY (cg_edge->callee->decl))
4507 DECL_FUNCTION_PERSONALITY (cg_edge->caller->decl)
4508 = DECL_FUNCTION_PERSONALITY (cg_edge->callee->decl);
4510 /* Split the block holding the GIMPLE_CALL. */
4511 e = split_block (bb, stmt);
4512 bb = e->src;
4513 return_block = e->dest;
4514 remove_edge (e);
4516 /* split_block splits after the statement; work around this by
4517 moving the call into the second block manually. Not pretty,
4518 but seems easier than doing the CFG manipulation by hand
4519 when the GIMPLE_CALL is in the last statement of BB. */
4520 stmt_gsi = gsi_last_bb (bb);
4521 gsi_remove (&stmt_gsi, false);
4523 /* If the GIMPLE_CALL was in the last statement of BB, it may have
4524 been the source of abnormal edges. In this case, schedule
4525 the removal of dead abnormal edges. */
4526 gsi = gsi_start_bb (return_block);
4527 if (gsi_end_p (gsi))
4529 gsi_insert_after (&gsi, stmt, GSI_NEW_STMT);
4530 purge_dead_abnormal_edges = true;
4532 else
4534 gsi_insert_before (&gsi, stmt, GSI_NEW_STMT);
4535 purge_dead_abnormal_edges = false;
4538 stmt_gsi = gsi_start_bb (return_block);
4540 /* Build a block containing code to initialize the arguments, the
4541 actual inline expansion of the body, and a label for the return
4542 statements within the function to jump to. The type of the
4543 statement expression is the return type of the function call.
4544 ??? If the call does not have an associated block then we will
4545 remap all callee blocks to NULL, effectively dropping most of
4546 its debug information. This should only happen for calls to
4547 artificial decls inserted by the compiler itself. We need to
4548 either link the inlined blocks into the caller block tree or
4549 not refer to them in any way to not break GC for locations. */
4550 if (gimple_block (stmt))
4552 id->block = make_node (BLOCK);
4553 BLOCK_ABSTRACT_ORIGIN (id->block) = fn;
4554 BLOCK_SOURCE_LOCATION (id->block) = LOCATION_LOCUS (input_location);
4555 prepend_lexical_block (gimple_block (stmt), id->block);
4558 /* Local declarations will be replaced by their equivalents in this
4559 map. */
4560 st = id->decl_map;
4561 id->decl_map = new hash_map<tree, tree>;
4562 dst = id->debug_map;
4563 id->debug_map = NULL;
4565 /* Record the function we are about to inline. */
4566 id->src_fn = fn;
4567 id->src_node = cg_edge->callee;
4568 id->src_cfun = DECL_STRUCT_FUNCTION (fn);
4569 id->call_stmt = stmt;
4571   /* If the src function contains an IFN_VA_ARG, then so will the dst
4572 function after inlining. */
4573 if ((id->src_cfun->curr_properties & PROP_gimple_lva) == 0)
4575 struct function *dst_cfun = DECL_STRUCT_FUNCTION (id->dst_fn);
4576 dst_cfun->curr_properties &= ~PROP_gimple_lva;
4579 gcc_assert (!id->src_cfun->after_inlining);
4581 id->entry_bb = bb;
4582 if (lookup_attribute ("cold", DECL_ATTRIBUTES (fn)))
4584 gimple_stmt_iterator si = gsi_last_bb (bb);
4585 gsi_insert_after (&si, gimple_build_predict (PRED_COLD_FUNCTION,
4586 NOT_TAKEN),
4587 GSI_NEW_STMT);
4589 initialize_inlined_parameters (id, stmt, fn, bb);
4591 if (DECL_INITIAL (fn))
4593 if (gimple_block (stmt))
4595 tree *var;
4597 prepend_lexical_block (id->block,
4598 remap_blocks (DECL_INITIAL (fn), id));
4599 gcc_checking_assert (BLOCK_SUBBLOCKS (id->block)
4600 && (BLOCK_CHAIN (BLOCK_SUBBLOCKS (id->block))
4601 == NULL_TREE));
4602       /* Move vars for PARM_DECLs from the DECL_INITIAL block to id->block;
4603          otherwise, in DWARF, the DW_TAG_formal_parameter DIEs will not be
4604          children of DW_TAG_inlined_subroutine but of a DW_TAG_lexical_block
4605          under it.  The parameters can then be evaluated in the debugger,
4606          but don't show up in backtraces.  */
4607 for (var = &BLOCK_VARS (BLOCK_SUBBLOCKS (id->block)); *var; )
4608 if (TREE_CODE (DECL_ORIGIN (*var)) == PARM_DECL)
4610 tree v = *var;
4611 *var = TREE_CHAIN (v);
4612 TREE_CHAIN (v) = BLOCK_VARS (id->block);
4613 BLOCK_VARS (id->block) = v;
4615 else
4616 var = &TREE_CHAIN (*var);
4618 else
4619 remap_blocks_to_null (DECL_INITIAL (fn), id);
4622 /* Return statements in the function body will be replaced by jumps
4623 to the RET_LABEL. */
4624 gcc_assert (DECL_INITIAL (fn));
4625 gcc_assert (TREE_CODE (DECL_INITIAL (fn)) == BLOCK);
4627 /* Find the LHS to which the result of this call is assigned. */
4628 return_slot = NULL;
4629 if (gimple_call_lhs (stmt))
4631 modify_dest = gimple_call_lhs (stmt);
4633 /* Remember where to copy returned bounds. */
4634 if (gimple_call_with_bounds_p (stmt)
4635 && TREE_CODE (modify_dest) == SSA_NAME)
4637 gcall *retbnd = chkp_retbnd_call_by_val (modify_dest);
4638 if (retbnd)
4640 return_bounds = gimple_call_lhs (retbnd);
4641 /* If returned bounds are not used then just
4642 remove unused call. */
4643 if (!return_bounds)
4645 gimple_stmt_iterator iter = gsi_for_stmt (retbnd);
4646 gsi_remove (&iter, true);
4651 /* The function which we are inlining might not return a value,
4652 in which case we should issue a warning that the function
4653 does not return a value. In that case the optimizers will
4654 see that the variable to which the value is assigned was not
4655 initialized. We do not want to issue a warning about that
4656 uninitialized variable. */
4657 if (DECL_P (modify_dest))
4658 TREE_NO_WARNING (modify_dest) = 1;
4660 if (gimple_call_return_slot_opt_p (call_stmt))
4662 return_slot = modify_dest;
4663 modify_dest = NULL;
4666 else
4667 modify_dest = NULL;
4669 /* If we are inlining a call to the C++ operator new, we don't want
4670 to use type based alias analysis on the return value. Otherwise
4671 we may get confused if the compiler sees that the inlined new
4672 function returns a pointer which was just deleted. See bug
4673 33407. */
4674 if (DECL_IS_OPERATOR_NEW (fn))
4676 return_slot = NULL;
4677 modify_dest = NULL;
4680 /* Declare the return variable for the function. */
4681 use_retvar = declare_return_variable (id, return_slot, modify_dest,
4682 return_bounds, bb);
4684 /* Add local vars in this inlined callee to caller. */
4685 add_local_variables (id->src_cfun, cfun, id);
4687 if (dump_file && (dump_flags & TDF_DETAILS))
4689 fprintf (dump_file, "Inlining ");
4690 print_generic_expr (dump_file, id->src_fn, 0);
4691 fprintf (dump_file, " to ");
4692 print_generic_expr (dump_file, id->dst_fn, 0);
4693 fprintf (dump_file, " with frequency %i\n", cg_edge->frequency);
4696 /* This is it. Duplicate the callee body. Assume callee is
4697 pre-gimplified. Note that we must not alter the caller
4698 function in any way before this point, as this CALL_EXPR may be
4699 a self-referential call; if we're calling ourselves, we need to
4700 duplicate our body before altering anything. */
4701 copy_body (id, cg_edge->callee->count,
4702 GCOV_COMPUTE_SCALE (cg_edge->frequency, CGRAPH_FREQ_BASE),
4703 bb, return_block, NULL);
4705 reset_debug_bindings (id, stmt_gsi);
4707 /* Reset the escaped solution. */
4708 if (cfun->gimple_df)
4709 pt_solution_reset (&cfun->gimple_df->escaped);
4711 /* Clean up. */
4712 if (id->debug_map)
4714 delete id->debug_map;
4715 id->debug_map = dst;
4717 delete id->decl_map;
4718 id->decl_map = st;
4720   /* Unlink the call's virtual operands before replacing it.  */
4721 unlink_stmt_vdef (stmt);
4722 if (gimple_vdef (stmt)
4723 && TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
4724 release_ssa_name (gimple_vdef (stmt));
4726 /* If the inlined function returns a result that we care about,
4727 substitute the GIMPLE_CALL with an assignment of the return
4728 variable to the LHS of the call. That is, if STMT was
4729 'a = foo (...)', substitute the call with 'a = USE_RETVAR'. */
4730 if (use_retvar && gimple_call_lhs (stmt))
4732 gimple old_stmt = stmt;
4733 stmt = gimple_build_assign (gimple_call_lhs (stmt), use_retvar);
4734 gsi_replace (&stmt_gsi, stmt, false);
4735 maybe_clean_or_replace_eh_stmt (old_stmt, stmt);
4737 /* Copy bounds if we copy structure with bounds. */
4738 if (chkp_function_instrumented_p (id->dst_fn)
4739 && !BOUNDED_P (use_retvar)
4740 && chkp_type_has_pointer (TREE_TYPE (use_retvar)))
4741 id->assign_stmts.safe_push (stmt);
4743 else
4745 /* Handle the case of inlining a function with no return
4746 statement, which causes the return value to become undefined. */
4747 if (gimple_call_lhs (stmt)
4748 && TREE_CODE (gimple_call_lhs (stmt)) == SSA_NAME)
4750 tree name = gimple_call_lhs (stmt);
4751 tree var = SSA_NAME_VAR (name);
4752 tree def = ssa_default_def (cfun, var);
4754 if (def)
4756 /* If the variable is used undefined, make this name
4757 undefined via a move. */
4758 stmt = gimple_build_assign (gimple_call_lhs (stmt), def);
4759 gsi_replace (&stmt_gsi, stmt, true);
4761 else
4763 /* Otherwise make this variable undefined. */
4764 gsi_remove (&stmt_gsi, true);
4765 set_ssa_default_def (cfun, var, name);
4766 SSA_NAME_DEF_STMT (name) = gimple_build_nop ();
4769 else
4770 gsi_remove (&stmt_gsi, true);
4773 /* Put returned bounds into the correct place if required. */
4774 if (return_bounds)
4776 gimple old_stmt = SSA_NAME_DEF_STMT (return_bounds);
4777 gimple new_stmt = gimple_build_assign (return_bounds, id->retbnd);
4778 gimple_stmt_iterator bnd_gsi = gsi_for_stmt (old_stmt);
4779 unlink_stmt_vdef (old_stmt);
4780 gsi_replace (&bnd_gsi, new_stmt, false);
4781 maybe_clean_or_replace_eh_stmt (old_stmt, new_stmt);
4782 cgraph_update_edges_for_call_stmt (old_stmt,
4783 gimple_call_fndecl (old_stmt),
4784 new_stmt);
4787 if (purge_dead_abnormal_edges)
4789 gimple_purge_dead_eh_edges (return_block);
4790 gimple_purge_dead_abnormal_call_edges (return_block);
4793 /* If the value of the new expression is ignored, that's OK. We
4794 don't warn about this for CALL_EXPRs, so we shouldn't warn about
4795 the equivalent inlined version either. */
4796 if (is_gimple_assign (stmt))
4798 gcc_assert (gimple_assign_single_p (stmt)
4799 || CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt)));
4800 TREE_USED (gimple_assign_rhs1 (stmt)) = 1;
4803 /* Copy bounds for all generated assigns that need it. */
4804 for (i = 0; i < id->assign_stmts.length (); i++)
4805 chkp_copy_bounds_for_assign (id->assign_stmts[i], cg_edge);
4806 id->assign_stmts.release ();
4808 /* Output the inlining info for this abstract function, since it has been
4809 inlined. If we don't do this now, we can lose the information about the
4810 variables in the function when the blocks get blown away as soon as we
4811 remove the cgraph node. */
4812 if (gimple_block (stmt))
4813 (*debug_hooks->outlining_inline_function) (cg_edge->callee->decl);
4815 /* Update callgraph if needed. */
4816 cg_edge->callee->remove ();
4818 id->block = NULL_TREE;
4819 successfully_inlined = TRUE;
4821 egress:
4822 input_location = saved_location;
4823 return successfully_inlined;
4826 /* Expand calls to inline functions found in basic block BB.
4827    We can only have CALL_EXPRs as the "toplevel" tree code or nested
4828    in a MODIFY_EXPR.  */
4830 static bool
4831 gimple_expand_calls_inline (basic_block bb, copy_body_data *id)
4833 gimple_stmt_iterator gsi;
4834 bool inlined = false;
4836 for (gsi = gsi_last_bb (bb); !gsi_end_p (gsi);)
4838 gimple stmt = gsi_stmt (gsi);
4839 gsi_prev (&gsi);
4841 if (is_gimple_call (stmt)
4842 && !gimple_call_internal_p (stmt))
4843 inlined |= expand_call_inline (bb, stmt, id);
4846 return inlined;
4850 /* Walk all basic blocks created after FIRST and try to fold every statement
4851 in the STATEMENTS pointer set. */
4853 static void
4854 fold_marked_statements (int first, hash_set<gimple> *statements)
4856 for (; first < n_basic_blocks_for_fn (cfun); first++)
4857 if (BASIC_BLOCK_FOR_FN (cfun, first))
4859 gimple_stmt_iterator gsi;
4861 for (gsi = gsi_start_bb (BASIC_BLOCK_FOR_FN (cfun, first));
4862 !gsi_end_p (gsi);
4863 gsi_next (&gsi))
4864 if (statements->contains (gsi_stmt (gsi)))
4866 gimple old_stmt = gsi_stmt (gsi);
4867 tree old_decl = is_gimple_call (old_stmt) ? gimple_call_fndecl (old_stmt) : 0;
4869 if (old_decl && DECL_BUILT_IN (old_decl))
4871             /* Folding builtins can create multiple instructions;
4872                we need to look at all of them.  */
4873 gimple_stmt_iterator i2 = gsi;
4874 gsi_prev (&i2);
4875 if (fold_stmt (&gsi))
4877 gimple new_stmt;
4878 /* If a builtin at the end of a bb folded into nothing,
4879 the following loop won't work. */
4880 if (gsi_end_p (gsi))
4882 cgraph_update_edges_for_call_stmt (old_stmt,
4883 old_decl, NULL);
4884 break;
4886 if (gsi_end_p (i2))
4887 i2 = gsi_start_bb (BASIC_BLOCK_FOR_FN (cfun, first));
4888 else
4889 gsi_next (&i2);
4890 while (1)
4892 new_stmt = gsi_stmt (i2);
4893 update_stmt (new_stmt);
4894 cgraph_update_edges_for_call_stmt (old_stmt, old_decl,
4895 new_stmt);
4897 if (new_stmt == gsi_stmt (gsi))
4899                   /* It is okay to check only the very last
4900                      of these statements.  If it is a throwing
4901                      statement nothing will change.  If it isn't,
4902                      this can remove EH edges.  That would only be
4903                      wrong if some intermediate stmt could throw
4904                      while the last one could not; that would mean
4905                      we'd have to split the block, which we can't
4906                      do here, and we'd lose anyway.  And as builtins
4907                      probably never throw, this all
4908                      is moot anyway.  */
4909 if (maybe_clean_or_replace_eh_stmt (old_stmt,
4910 new_stmt))
4911 gimple_purge_dead_eh_edges (
4912 BASIC_BLOCK_FOR_FN (cfun, first));
4913 break;
4915 gsi_next (&i2);
4919 else if (fold_stmt (&gsi))
4921 /* Re-read the statement from GSI as fold_stmt() may
4922 have changed it. */
4923 gimple new_stmt = gsi_stmt (gsi);
4924 update_stmt (new_stmt);
4926 if (is_gimple_call (old_stmt)
4927 || is_gimple_call (new_stmt))
4928 cgraph_update_edges_for_call_stmt (old_stmt, old_decl,
4929 new_stmt);
4931 if (maybe_clean_or_replace_eh_stmt (old_stmt, new_stmt))
4932 gimple_purge_dead_eh_edges (BASIC_BLOCK_FOR_FN (cfun,
4933 first));
4939 /* Expand calls to inline functions in the body of FN. */
4941 unsigned int
4942 optimize_inline_calls (tree fn)
4944 copy_body_data id;
4945 basic_block bb;
4946 int last = n_basic_blocks_for_fn (cfun);
4947 bool inlined_p = false;
4949 /* Clear out ID. */
4950 memset (&id, 0, sizeof (id));
4952 id.src_node = id.dst_node = cgraph_node::get (fn);
4953 gcc_assert (id.dst_node->definition);
4954 id.dst_fn = fn;
4955 /* Or any functions that aren't finished yet. */
4956 if (current_function_decl)
4957 id.dst_fn = current_function_decl;
4959 id.copy_decl = copy_decl_maybe_to_var;
4960 id.transform_call_graph_edges = CB_CGE_DUPLICATE;
4961 id.transform_new_cfg = false;
4962 id.transform_return_to_modify = true;
4963 id.transform_parameter = true;
4964 id.transform_lang_insert_block = NULL;
4965 id.statements_to_fold = new hash_set<gimple>;
4967 push_gimplify_context ();
4969 /* We make no attempts to keep dominance info up-to-date. */
4970 free_dominance_info (CDI_DOMINATORS);
4971 free_dominance_info (CDI_POST_DOMINATORS);
4973 /* Register specific gimple functions. */
4974 gimple_register_cfg_hooks ();
4976 /* Reach the trees by walking over the CFG, and note the
4977 enclosing basic-blocks in the call edges. */
4978 /* We walk the blocks going forward, because inlined function bodies
4979 will split id->current_basic_block, and the new blocks will
4980 follow it; we'll trudge through them, processing their CALL_EXPRs
4981 along the way. */
4982 FOR_EACH_BB_FN (bb, cfun)
4983 inlined_p |= gimple_expand_calls_inline (bb, &id);
4985 pop_gimplify_context (NULL);
4987 #ifdef ENABLE_CHECKING
4989 struct cgraph_edge *e;
4991 id.dst_node->verify ();
4993 /* Double check that we inlined everything we are supposed to inline. */
4994 for (e = id.dst_node->callees; e; e = e->next_callee)
4995 gcc_assert (e->inline_failed);
4997 #endif
4999 /* Fold queued statements. */
5000 fold_marked_statements (last, id.statements_to_fold);
5001 delete id.statements_to_fold;
5003 gcc_assert (!id.debug_stmts.exists ());
5005 /* If we didn't inline into the function there is nothing to do. */
5006 if (!inlined_p)
5007 return 0;
5009 /* Renumber the lexical scoping (non-code) blocks consecutively. */
5010 number_blocks (fn);
5012 delete_unreachable_blocks_update_callgraph (&id);
5013 #ifdef ENABLE_CHECKING
5014 id.dst_node->verify ();
5015 #endif
5017   /* It would be nice to check SSA/CFG/statement consistency here, but it is
5018      not possible yet - the IPA passes might make various functions not
5019      throw and they don't care to proactively update local EH info.  This is
5020      done later in the fixup_cfg pass, which also executes the verification.  */
5021 return (TODO_update_ssa
5022 | TODO_cleanup_cfg
5023 | (gimple_in_ssa_p (cfun) ? TODO_remove_unused_locals : 0)
5024 | (gimple_in_ssa_p (cfun) ? TODO_update_address_taken : 0)
5025 | (profile_status_for_fn (cfun) != PROFILE_ABSENT
5026 ? TODO_rebuild_frequencies : 0));
5029 /* Passed to walk_tree. Copies the node pointed to, if appropriate. */
5031 tree
5032 copy_tree_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
5034 enum tree_code code = TREE_CODE (*tp);
5035 enum tree_code_class cl = TREE_CODE_CLASS (code);
5037 /* We make copies of most nodes. */
5038 if (IS_EXPR_CODE_CLASS (cl)
5039 || code == TREE_LIST
5040 || code == TREE_VEC
5041 || code == TYPE_DECL
5042 || code == OMP_CLAUSE)
5044 /* Because the chain gets clobbered when we make a copy, we save it
5045 here. */
5046 tree chain = NULL_TREE, new_tree;
5048 if (CODE_CONTAINS_STRUCT (code, TS_COMMON))
5049 chain = TREE_CHAIN (*tp);
5051 /* Copy the node. */
5052 new_tree = copy_node (*tp);
5054 *tp = new_tree;
5056 /* Now, restore the chain, if appropriate. That will cause
5057 walk_tree to walk into the chain as well. */
5058 if (code == PARM_DECL
5059 || code == TREE_LIST
5060 || code == OMP_CLAUSE)
5061 TREE_CHAIN (*tp) = chain;
5063 /* For now, we don't update BLOCKs when we make copies. So, we
5064 have to nullify all BIND_EXPRs. */
5065 if (TREE_CODE (*tp) == BIND_EXPR)
5066 BIND_EXPR_BLOCK (*tp) = NULL_TREE;
5068 else if (code == CONSTRUCTOR)
5070 /* CONSTRUCTOR nodes need special handling because
5071 we need to duplicate the vector of elements. */
5072 tree new_tree;
5074 new_tree = copy_node (*tp);
5075 CONSTRUCTOR_ELTS (new_tree) = vec_safe_copy (CONSTRUCTOR_ELTS (*tp));
5076 *tp = new_tree;
5078 else if (code == STATEMENT_LIST)
5079 /* We used to just abort on STATEMENT_LIST, but we can run into them
5080 with statement-expressions (c++/40975). */
5081 copy_statement_list (tp);
5082 else if (TREE_CODE_CLASS (code) == tcc_type)
5083 *walk_subtrees = 0;
5084 else if (TREE_CODE_CLASS (code) == tcc_declaration)
5085 *walk_subtrees = 0;
5086 else if (TREE_CODE_CLASS (code) == tcc_constant)
5087 *walk_subtrees = 0;
5088 return NULL_TREE;
5091 /* The SAVE_EXPR pointed to by TP is being copied. If ST contains
5092 information indicating to what new SAVE_EXPR this one should be mapped,
5093    use that one.  Otherwise, create a new node and enter it in ST.  */
5096 static void
5097 remap_save_expr (tree *tp, hash_map<tree, tree> *st, int *walk_subtrees)
5099 tree *n;
5100 tree t;
5102 /* See if we already encountered this SAVE_EXPR. */
5103 n = st->get (*tp);
5105 /* If we didn't already remap this SAVE_EXPR, do so now. */
5106 if (!n)
5108 t = copy_node (*tp);
5110 /* Remember this SAVE_EXPR. */
5111 st->put (*tp, t);
5112 /* Make sure we don't remap an already-remapped SAVE_EXPR. */
5113 st->put (t, t);
5115 else
5117 /* We've already walked into this SAVE_EXPR; don't do it again. */
5118 *walk_subtrees = 0;
5119 t = *n;
5122 /* Replace this SAVE_EXPR with the copy. */
5123 *tp = t;
5126 /* Called via walk_gimple_seq.  If *GSIP points to a GIMPLE_LABEL for a local
5127    label, copies the declaration and enters it in the decl map carried in
5128    WI->info (which is really a 'copy_body_data *').  */
5130 static tree
5131 mark_local_labels_stmt (gimple_stmt_iterator *gsip,
5132 bool *handled_ops_p ATTRIBUTE_UNUSED,
5133 struct walk_stmt_info *wi)
5135 copy_body_data *id = (copy_body_data *) wi->info;
5136 glabel *stmt = dyn_cast <glabel *> (gsi_stmt (*gsip));
5138 if (stmt)
5140 tree decl = gimple_label_label (stmt);
5142 /* Copy the decl and remember the copy. */
5143 insert_decl_map (id, decl, id->copy_decl (decl, id));
5146 return NULL_TREE;
5150 /* Called via walk_gimple_seq by copy_gimple_seq_and_replace_locals.
5151    Using the decl map stored in the 'copy_body_data *' passed in DATA,
5152    remaps all local declarations to appropriate replacements in gimple
5153    operands.  */
5155 static tree
5156 replace_locals_op (tree *tp, int *walk_subtrees, void *data)
5158 struct walk_stmt_info *wi = (struct walk_stmt_info*) data;
5159 copy_body_data *id = (copy_body_data *) wi->info;
5160 hash_map<tree, tree> *st = id->decl_map;
5161 tree *n;
5162 tree expr = *tp;
5164 /* Only a local declaration (variable or label). */
5165 if ((TREE_CODE (expr) == VAR_DECL
5166 && !TREE_STATIC (expr))
5167 || TREE_CODE (expr) == LABEL_DECL)
5169 /* Lookup the declaration. */
5170 n = st->get (expr);
5172 /* If it's there, remap it. */
5173 if (n)
5174 *tp = *n;
5175 *walk_subtrees = 0;
5177 else if (TREE_CODE (expr) == STATEMENT_LIST
5178 || TREE_CODE (expr) == BIND_EXPR
5179 || TREE_CODE (expr) == SAVE_EXPR)
5180 gcc_unreachable ();
5181 else if (TREE_CODE (expr) == TARGET_EXPR)
5183 /* Don't mess with a TARGET_EXPR that hasn't been expanded.
5184 It's OK for this to happen if it was part of a subtree that
5185 isn't immediately expanded, such as operand 2 of another
5186 TARGET_EXPR. */
5187 if (!TREE_OPERAND (expr, 1))
5189 TREE_OPERAND (expr, 1) = TREE_OPERAND (expr, 3);
5190 TREE_OPERAND (expr, 3) = NULL_TREE;
5194 /* Keep iterating. */
5195 return NULL_TREE;
5199 /* Called via walk_gimple_seq by copy_gimple_seq_and_replace_locals.
5200    Using the decl map stored in the 'copy_body_data *' in WI->info,
5201    remaps all local declarations to appropriate replacements in gimple
5202    statements.  */
5204 static tree
5205 replace_locals_stmt (gimple_stmt_iterator *gsip,
5206 bool *handled_ops_p ATTRIBUTE_UNUSED,
5207 struct walk_stmt_info *wi)
5209 copy_body_data *id = (copy_body_data *) wi->info;
5210 gimple gs = gsi_stmt (*gsip);
5212 if (gbind *stmt = dyn_cast <gbind *> (gs))
5214 tree block = gimple_bind_block (stmt);
5216 if (block)
5218 remap_block (&block, id);
5219 gimple_bind_set_block (stmt, block);
5222 /* This will remap a lot of the same decls again, but this should be
5223 harmless. */
5224 if (gimple_bind_vars (stmt))
5225 gimple_bind_set_vars (stmt, remap_decls (gimple_bind_vars (stmt),
5226 NULL, id));
5229 /* Keep iterating. */
5230 return NULL_TREE;
5234 /* Copies everything in SEQ and replaces variables and labels local to
5235 current_function_decl. */
5237 gimple_seq
5238 copy_gimple_seq_and_replace_locals (gimple_seq seq)
5240 copy_body_data id;
5241 struct walk_stmt_info wi;
5242 gimple_seq copy;
5244 /* There's nothing to do for NULL_TREE. */
5245 if (seq == NULL)
5246 return seq;
5248 /* Set up ID. */
5249 memset (&id, 0, sizeof (id));
5250 id.src_fn = current_function_decl;
5251 id.dst_fn = current_function_decl;
5252 id.decl_map = new hash_map<tree, tree>;
5253 id.debug_map = NULL;
5255 id.copy_decl = copy_decl_no_change;
5256 id.transform_call_graph_edges = CB_CGE_DUPLICATE;
5257 id.transform_new_cfg = false;
5258 id.transform_return_to_modify = false;
5259 id.transform_parameter = false;
5260 id.transform_lang_insert_block = NULL;
5262 /* Walk the tree once to find local labels. */
5263 memset (&wi, 0, sizeof (wi));
5264 hash_set<tree> visited;
5265 wi.info = &id;
5266 wi.pset = &visited;
5267 walk_gimple_seq (seq, mark_local_labels_stmt, NULL, &wi);
5269 copy = gimple_seq_copy (seq);
5271 /* Walk the copy, remapping decls. */
5272 memset (&wi, 0, sizeof (wi));
5273 wi.info = &id;
5274 walk_gimple_seq (copy, replace_locals_stmt, replace_locals_op, &wi);
5276 /* Clean up. */
5277 delete id.decl_map;
5278 if (id.debug_map)
5279 delete id.debug_map;
5280 if (id.dependence_map)
5282 delete id.dependence_map;
5283 id.dependence_map = NULL;
5286 return copy;
5290 /* Allow someone to determine if SEARCH is a child of TOP from gdb. */
5292 static tree
5293 debug_find_tree_1 (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED, void *data)
5295 if (*tp == data)
5296 return (tree) data;
5297 else
5298 return NULL;
5301 DEBUG_FUNCTION bool
5302 debug_find_tree (tree top, tree search)
5304 return walk_tree_without_duplicates (&top, debug_find_tree_1, search) != 0;
5308 /* Declare the variables created by the inliner.  Add all the variables in
5309    VARS to BLOCK.  */
5311 static void
5312 declare_inline_vars (tree block, tree vars)
5314 tree t;
5315 for (t = vars; t; t = DECL_CHAIN (t))
5317 DECL_SEEN_IN_BIND_EXPR_P (t) = 1;
5318 gcc_assert (!TREE_STATIC (t) && !TREE_ASM_WRITTEN (t));
5319 add_local_decl (cfun, t);
5322 if (block)
5323 BLOCK_VARS (block) = chainon (BLOCK_VARS (block), vars);
5326 /* Finish up the copying of DECL into COPY.  DECL originally lived in
5327    ID->src_fn, but the copy will be used in ID->dst_fn; fill in the debug,
5328    RTL and context information of COPY accordingly.  */
5330 static tree
5331 copy_decl_for_dup_finish (copy_body_data *id, tree decl, tree copy)
5333   /* Don't generate debug information for the copy if we wouldn't have
5334      generated it for the original either.  */
5335 DECL_ARTIFICIAL (copy) = DECL_ARTIFICIAL (decl);
5336 DECL_IGNORED_P (copy) = DECL_IGNORED_P (decl);
5338 /* Set the DECL_ABSTRACT_ORIGIN so the debugging routines know what
5339 declaration inspired this copy. */
5340 DECL_ABSTRACT_ORIGIN (copy) = DECL_ORIGIN (decl);
5342 /* The new variable/label has no RTL, yet. */
5343 if (CODE_CONTAINS_STRUCT (TREE_CODE (copy), TS_DECL_WRTL)
5344 && !TREE_STATIC (copy) && !DECL_EXTERNAL (copy))
5345 SET_DECL_RTL (copy, 0);
5347 /* These args would always appear unused, if not for this. */
5348 TREE_USED (copy) = 1;
5350 /* Set the context for the new declaration. */
5351 if (!DECL_CONTEXT (decl))
5352 /* Globals stay global. */
5354 else if (DECL_CONTEXT (decl) != id->src_fn)
5355 /* Things that weren't in the scope of the function we're inlining
5356 from aren't in the scope we're inlining to, either. */
5358 else if (TREE_STATIC (decl))
5359 /* Function-scoped static variables should stay in the original
5360 function. */
5362 else
5363 /* Ordinary automatic local variables are now in the scope of the
5364 new function. */
5365 DECL_CONTEXT (copy) = id->dst_fn;
5367 return copy;
5370 static tree
5371 copy_decl_to_var (tree decl, copy_body_data *id)
5373 tree copy, type;
5375 gcc_assert (TREE_CODE (decl) == PARM_DECL
5376 || TREE_CODE (decl) == RESULT_DECL);
5378 type = TREE_TYPE (decl);
5380 copy = build_decl (DECL_SOURCE_LOCATION (id->dst_fn),
5381 VAR_DECL, DECL_NAME (decl), type);
5382 if (DECL_PT_UID_SET_P (decl))
5383 SET_DECL_PT_UID (copy, DECL_PT_UID (decl));
5384 TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (decl);
5385 TREE_READONLY (copy) = TREE_READONLY (decl);
5386 TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (decl);
5387 DECL_GIMPLE_REG_P (copy) = DECL_GIMPLE_REG_P (decl);
5389 return copy_decl_for_dup_finish (id, decl, copy);
5392 /* Like copy_decl_to_var, but create a return slot object instead of a
5393 pointer variable for return by invisible reference. */
5395 static tree
5396 copy_result_decl_to_var (tree decl, copy_body_data *id)
5398 tree copy, type;
5400 gcc_assert (TREE_CODE (decl) == PARM_DECL
5401 || TREE_CODE (decl) == RESULT_DECL);
5403 type = TREE_TYPE (decl);
5404 if (DECL_BY_REFERENCE (decl))
5405 type = TREE_TYPE (type);
5407 copy = build_decl (DECL_SOURCE_LOCATION (id->dst_fn),
5408 VAR_DECL, DECL_NAME (decl), type);
5409 if (DECL_PT_UID_SET_P (decl))
5410 SET_DECL_PT_UID (copy, DECL_PT_UID (decl));
5411 TREE_READONLY (copy) = TREE_READONLY (decl);
5412 TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (decl);
5413 if (!DECL_BY_REFERENCE (decl))
5415 TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (decl);
5416 DECL_GIMPLE_REG_P (copy) = DECL_GIMPLE_REG_P (decl);
5419 return copy_decl_for_dup_finish (id, decl, copy);
5422 tree
5423 copy_decl_no_change (tree decl, copy_body_data *id)
5425 tree copy;
5427 copy = copy_node (decl);
5429 /* The COPY is not abstract; it will be generated in DST_FN. */
5430 DECL_ABSTRACT_P (copy) = false;
5431 lang_hooks.dup_lang_specific_decl (copy);
5433 /* TREE_ADDRESSABLE isn't used to indicate that a label's address has
5434 been taken; it's for internal bookkeeping in expand_goto_internal. */
5435 if (TREE_CODE (copy) == LABEL_DECL)
5437 TREE_ADDRESSABLE (copy) = 0;
5438 LABEL_DECL_UID (copy) = -1;
5441 return copy_decl_for_dup_finish (id, decl, copy);
5444 static tree
5445 copy_decl_maybe_to_var (tree decl, copy_body_data *id)
5447 if (TREE_CODE (decl) == PARM_DECL || TREE_CODE (decl) == RESULT_DECL)
5448 return copy_decl_to_var (decl, id);
5449 else
5450 return copy_decl_no_change (decl, id);
5453 /* Return a copy of the function's argument tree. */
5454 static tree
5455 copy_arguments_for_versioning (tree orig_parm, copy_body_data * id,
5456 bitmap args_to_skip, tree *vars)
5458 tree arg, *parg;
5459 tree new_parm = NULL;
5460 int i = 0;
5462 parg = &new_parm;
5464 for (arg = orig_parm; arg; arg = DECL_CHAIN (arg), i++)
5465 if (!args_to_skip || !bitmap_bit_p (args_to_skip, i))
5467 tree new_tree = remap_decl (arg, id);
5468 if (TREE_CODE (new_tree) != PARM_DECL)
5469 new_tree = id->copy_decl (arg, id);
5470 lang_hooks.dup_lang_specific_decl (new_tree);
5471 *parg = new_tree;
5472 parg = &DECL_CHAIN (new_tree);
5474 else if (!id->decl_map->get (arg))
5476       /* Make an equivalent VAR_DECL.  If the argument was used
5477          as a temporary variable later in the function, the uses will be
5478          replaced by the local variable.  */
5479 tree var = copy_decl_to_var (arg, id);
5480 insert_decl_map (id, arg, var);
5481 /* Declare this new variable. */
5482 DECL_CHAIN (var) = *vars;
5483 *vars = var;
5485 return new_parm;
5488 /* Return a copy of the function's static chain. */
5489 static tree
5490 copy_static_chain (tree static_chain, copy_body_data * id)
5492 tree *chain_copy, *pvar;
5494 chain_copy = &static_chain;
5495 for (pvar = chain_copy; *pvar; pvar = &DECL_CHAIN (*pvar))
5497 tree new_tree = remap_decl (*pvar, id);
5498 lang_hooks.dup_lang_specific_decl (new_tree);
5499 DECL_CHAIN (new_tree) = DECL_CHAIN (*pvar);
5500 *pvar = new_tree;
5502 return static_chain;
5505 /* Return true if the function is allowed to be versioned.
5506 This is a guard for the versioning functionality. */
5508 bool
5509 tree_versionable_function_p (tree fndecl)
5511 return (!lookup_attribute ("noclone", DECL_ATTRIBUTES (fndecl))
5512 && copy_forbidden (DECL_STRUCT_FUNCTION (fndecl), fndecl) == NULL);
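/* For illustration: a definition such as

     int __attribute__ ((noclone)) hook (int x) { ... }

   is not versionable, nor is any function for which copy_forbidden
   reports a reason (e.g. one that receives a non-local goto), so the
   IPA passes will not create specialized copies of it.  */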
5515 /* Delete all unreachable basic blocks and update the callgraph.
5516    Doing so is somewhat nontrivial because we need to update all clones and
5517    remove inline functions that become unreachable.  */
5519 static bool
5520 delete_unreachable_blocks_update_callgraph (copy_body_data *id)
5522 bool changed = false;
5523 basic_block b, next_bb;
5525 find_unreachable_blocks ();
5527 /* Delete all unreachable basic blocks. */
5529 for (b = ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb; b
5530 != EXIT_BLOCK_PTR_FOR_FN (cfun); b = next_bb)
5532 next_bb = b->next_bb;
5534 if (!(b->flags & BB_REACHABLE))
5536 gimple_stmt_iterator bsi;
5538 for (bsi = gsi_start_bb (b); !gsi_end_p (bsi); gsi_next (&bsi))
5540 struct cgraph_edge *e;
5541 struct cgraph_node *node;
5543 id->dst_node->remove_stmt_references (gsi_stmt (bsi));
5545 if (gimple_code (gsi_stmt (bsi)) == GIMPLE_CALL
5546 &&(e = id->dst_node->get_edge (gsi_stmt (bsi))) != NULL)
5548 if (!e->inline_failed)
5549 e->callee->remove_symbol_and_inline_clones (id->dst_node);
5550 else
5551 e->remove ();
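              /* In CB_CGE_MOVE_CLONES mode the clones of DST_NODE refer to
                 the same statements, so drop their references and call edges
                 for this statement as well, visiting every node in the clone
                 tree below DST_NODE.  */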
              if (id->transform_call_graph_edges == CB_CGE_MOVE_CLONES
                  && id->dst_node->clones)
                for (node = id->dst_node->clones; node != id->dst_node;)
                  {
                    node->remove_stmt_references (gsi_stmt (bsi));
                    if (gimple_code (gsi_stmt (bsi)) == GIMPLE_CALL
                        && (e = node->get_edge (gsi_stmt (bsi))) != NULL)
                      {
                        if (!e->inline_failed)
                          e->callee->remove_symbol_and_inline_clones (id->dst_node);
                        else
                          e->remove ();
                      }

                    if (node->clones)
                      node = node->clones;
                    else if (node->next_sibling_clone)
                      node = node->next_sibling_clone;
                    else
                      {
                        while (node != id->dst_node && !node->next_sibling_clone)
                          node = node->clone_of;
                        if (node != id->dst_node)
                          node = node->next_sibling_clone;
                      }
                  }
            }
          delete_basic_block (b);
          changed = true;
        }
    }

  return changed;
}
/* Update clone info after duplication.  */

static void
update_clone_info (copy_body_data * id)
{
  struct cgraph_node *node;
  if (!id->dst_node->clones)
    return;
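  /* Walk the clone tree of DST_NODE in preorder: visit a node's clones
     first, then its next siblings, and climb back through clone_of once a
     subtree has been exhausted.  */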
  for (node = id->dst_node->clones; node != id->dst_node;)
    {
      /* First update replace maps to match the new body.  */
      if (node->clone.tree_map)
        {
          unsigned int i;
          for (i = 0; i < vec_safe_length (node->clone.tree_map); i++)
            {
              struct ipa_replace_map *replace_info;
              replace_info = (*node->clone.tree_map)[i];
              walk_tree (&replace_info->old_tree, copy_tree_body_r, id, NULL);
              walk_tree (&replace_info->new_tree, copy_tree_body_r, id, NULL);
            }
        }
      if (node->clones)
        node = node->clones;
      else if (node->next_sibling_clone)
        node = node->next_sibling_clone;
      else
        {
          while (node != id->dst_node && !node->next_sibling_clone)
            node = node->clone_of;
          if (node != id->dst_node)
            node = node->next_sibling_clone;
        }
    }
}
/* Create a copy of a function's tree.
   OLD_DECL and NEW_DECL are FUNCTION_DECL tree nodes
   of the original function and the new copied function
   respectively.  In case we want to replace a DECL
   tree with another tree while duplicating the function's
   body, TREE_MAP represents the mapping between these
   trees.  If UPDATE_CLONES is set, the call_stmt fields
   of edges of clones of the function will be updated.

   If non-NULL, ARGS_TO_SKIP determines the function parameters
   to remove from the new version.
   If SKIP_RETURN is true, the new version will return void.
   If non-NULL, BLOCKS_TO_COPY determines the basic blocks to copy.
   If non-NULL, NEW_ENTRY determines the new entry BB of the clone.  */

void
tree_function_versioning (tree old_decl, tree new_decl,
                          vec<ipa_replace_map *, va_gc> *tree_map,
                          bool update_clones, bitmap args_to_skip,
                          bool skip_return, bitmap blocks_to_copy,
                          basic_block new_entry)
{
  struct cgraph_node *old_version_node;
  struct cgraph_node *new_version_node;
  copy_body_data id;
  tree p;
  unsigned i;
  struct ipa_replace_map *replace_info;
  basic_block old_entry_block, bb;
  auto_vec<gimple, 10> init_stmts;
  tree vars = NULL_TREE;

  gcc_assert (TREE_CODE (old_decl) == FUNCTION_DECL
              && TREE_CODE (new_decl) == FUNCTION_DECL);
  DECL_POSSIBLY_INLINED (old_decl) = 1;

  old_version_node = cgraph_node::get (old_decl);
  gcc_checking_assert (old_version_node);
  new_version_node = cgraph_node::get (new_decl);
  gcc_checking_assert (new_version_node);

  /* Copy over debug args.  */
  if (DECL_HAS_DEBUG_ARGS_P (old_decl))
    {
      vec<tree, va_gc> **new_debug_args, **old_debug_args;
      gcc_checking_assert (decl_debug_args_lookup (new_decl) == NULL);
      DECL_HAS_DEBUG_ARGS_P (new_decl) = 0;
      old_debug_args = decl_debug_args_lookup (old_decl);
      if (old_debug_args)
        {
          new_debug_args = decl_debug_args_insert (new_decl);
          *new_debug_args = vec_safe_copy (*old_debug_args);
        }
    }

  /* Output the inlining info for this abstract function, since it has been
     inlined.  If we don't do this now, we can lose the information about the
     variables in the function when the blocks get blown away as soon as we
     remove the cgraph node.  */
  (*debug_hooks->outlining_inline_function) (old_decl);

  DECL_ARTIFICIAL (new_decl) = 1;
  DECL_ABSTRACT_ORIGIN (new_decl) = DECL_ORIGIN (old_decl);
  if (DECL_ORIGIN (old_decl) == old_decl)
    old_version_node->used_as_abstract_origin = true;
  DECL_FUNCTION_PERSONALITY (new_decl) = DECL_FUNCTION_PERSONALITY (old_decl);
  /* Prepare the data structures for the tree copy.  */
  memset (&id, 0, sizeof (id));

  /* Generate a new name for the new version.  */
  id.statements_to_fold = new hash_set<gimple>;

  id.decl_map = new hash_map<tree, tree>;
  id.debug_map = NULL;
  id.src_fn = old_decl;
  id.dst_fn = new_decl;
  id.src_node = old_version_node;
  id.dst_node = new_version_node;
  id.src_cfun = DECL_STRUCT_FUNCTION (old_decl);
  id.blocks_to_copy = blocks_to_copy;

  id.copy_decl = copy_decl_no_change;
  id.transform_call_graph_edges
    = update_clones ? CB_CGE_MOVE_CLONES : CB_CGE_MOVE;
  id.transform_new_cfg = true;
  id.transform_return_to_modify = false;
  id.transform_parameter = false;
  id.transform_lang_insert_block = NULL;

  old_entry_block = ENTRY_BLOCK_PTR_FOR_FN
    (DECL_STRUCT_FUNCTION (old_decl));
  DECL_RESULT (new_decl) = DECL_RESULT (old_decl);
  DECL_ARGUMENTS (new_decl) = DECL_ARGUMENTS (old_decl);
  initialize_cfun (new_decl, old_decl,
                   old_entry_block->count);
  if (DECL_STRUCT_FUNCTION (new_decl)->gimple_df)
    DECL_STRUCT_FUNCTION (new_decl)->gimple_df->ipa_pta
      = id.src_cfun->gimple_df->ipa_pta;

  /* Copy the function's static chain.  */
  p = DECL_STRUCT_FUNCTION (old_decl)->static_chain_decl;
  if (p)
    DECL_STRUCT_FUNCTION (new_decl)->static_chain_decl =
      copy_static_chain (DECL_STRUCT_FUNCTION (old_decl)->static_chain_decl,
                         &id);
  /* If there's a tree_map, prepare for substitution.  */
  if (tree_map)
    for (i = 0; i < tree_map->length (); i++)
      {
        gimple init;
        replace_info = (*tree_map)[i];
        if (replace_info->replace_p)
          {
            if (!replace_info->old_tree)
              {
                int i = replace_info->parm_num;
                tree parm;
                tree req_type;

                for (parm = DECL_ARGUMENTS (old_decl); i; parm = DECL_CHAIN (parm))
                  i--;
                replace_info->old_tree = parm;
                req_type = TREE_TYPE (parm);
                if (!useless_type_conversion_p (req_type, TREE_TYPE (replace_info->new_tree)))
                  {
                    if (fold_convertible_p (req_type, replace_info->new_tree))
                      replace_info->new_tree = fold_build1 (NOP_EXPR, req_type, replace_info->new_tree);
                    else if (TYPE_SIZE (req_type) == TYPE_SIZE (TREE_TYPE (replace_info->new_tree)))
                      replace_info->new_tree = fold_build1 (VIEW_CONVERT_EXPR, req_type, replace_info->new_tree);
                    else
                      {
                        if (dump_file)
                          {
                            fprintf (dump_file, " const ");
                            print_generic_expr (dump_file, replace_info->new_tree, 0);
                            fprintf (dump_file, " can't be converted to param ");
                            print_generic_expr (dump_file, parm, 0);
                            fprintf (dump_file, "\n");
                          }
                        replace_info->old_tree = NULL;
                      }
                  }
              }
            else
              gcc_assert (TREE_CODE (replace_info->old_tree) == PARM_DECL);
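            /* If a usable replacement remains, record the mapping and queue
               the initialization statement; the queued statements are
               emitted on the entry edge once the body has been copied.  */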
            if (replace_info->old_tree)
              {
                init = setup_one_parameter (&id, replace_info->old_tree,
                                            replace_info->new_tree, id.src_fn,
                                            NULL,
                                            &vars);
                if (init)
                  init_stmts.safe_push (init);
              }
          }
      }
  /* Copy the function's arguments.  */
  if (DECL_ARGUMENTS (old_decl) != NULL_TREE)
    DECL_ARGUMENTS (new_decl) =
      copy_arguments_for_versioning (DECL_ARGUMENTS (old_decl), &id,
                                     args_to_skip, &vars);

  DECL_INITIAL (new_decl) = remap_blocks (DECL_INITIAL (id.src_fn), &id);
  BLOCK_SUPERCONTEXT (DECL_INITIAL (new_decl)) = new_decl;

  declare_inline_vars (DECL_INITIAL (new_decl), vars);

  if (!vec_safe_is_empty (DECL_STRUCT_FUNCTION (old_decl)->local_decls))
    /* Add local vars.  */
    add_local_variables (DECL_STRUCT_FUNCTION (old_decl), cfun, &id);
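  /* Copy or rebuild the result declaration: nothing to do when there is
     none, give the new version a void result when SKIP_RETURN is requested,
     and otherwise remap it into the new function.  */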
  if (DECL_RESULT (old_decl) == NULL_TREE)
    ;
  else if (skip_return && !VOID_TYPE_P (TREE_TYPE (DECL_RESULT (old_decl))))
    {
      DECL_RESULT (new_decl)
        = build_decl (DECL_SOURCE_LOCATION (DECL_RESULT (old_decl)),
                      RESULT_DECL, NULL_TREE, void_type_node);
      DECL_CONTEXT (DECL_RESULT (new_decl)) = new_decl;
      cfun->returns_struct = 0;
      cfun->returns_pcc_struct = 0;
    }
  else
    {
      tree old_name;
      DECL_RESULT (new_decl) = remap_decl (DECL_RESULT (old_decl), &id);
      lang_hooks.dup_lang_specific_decl (DECL_RESULT (new_decl));
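      /* A by-reference result has a default SSA definition in the source
         function; create a corresponding default definition for the copy
         and map the old name to it so the copied body uses the new one.  */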
      if (gimple_in_ssa_p (id.src_cfun)
          && DECL_BY_REFERENCE (DECL_RESULT (old_decl))
          && (old_name = ssa_default_def (id.src_cfun, DECL_RESULT (old_decl))))
        {
          tree new_name = make_ssa_name (DECL_RESULT (new_decl));
          insert_decl_map (&id, old_name, new_name);
          SSA_NAME_DEF_STMT (new_name) = gimple_build_nop ();
          set_ssa_default_def (cfun, DECL_RESULT (new_decl), new_name);
        }
    }

  /* Set up the destination function's loop tree.  */
  if (loops_for_fn (DECL_STRUCT_FUNCTION (old_decl)) != NULL)
    {
      cfun->curr_properties &= ~PROP_loops;
      loop_optimizer_init (AVOID_CFG_MODIFICATIONS);
      cfun->curr_properties |= PROP_loops;
    }
  /* Copy the Function's body.  */
  copy_body (&id, old_entry_block->count, REG_BR_PROB_BASE,
             ENTRY_BLOCK_PTR_FOR_FN (cfun), EXIT_BLOCK_PTR_FOR_FN (cfun),
             new_entry);

  /* Renumber the lexical scoping (non-code) blocks consecutively.  */
  number_blocks (new_decl);

  /* We want to create the BB unconditionally, so that the addition of
     debug stmts doesn't affect BB count, which may in the end cause
     codegen differences.  */
  bb = split_edge (single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
  while (init_stmts.length ())
    insert_init_stmt (&id, bb, init_stmts.pop ());
  update_clone_info (&id);

  /* Remap the nonlocal_goto_save_area, if any.  */
  if (cfun->nonlocal_goto_save_area)
    {
      struct walk_stmt_info wi;

      memset (&wi, 0, sizeof (wi));
      wi.info = &id;
      walk_tree (&cfun->nonlocal_goto_save_area, remap_gimple_op_r, &wi, NULL);
    }

  /* Clean up.  */
  delete id.decl_map;
  if (id.debug_map)
    delete id.debug_map;
  free_dominance_info (CDI_DOMINATORS);
  free_dominance_info (CDI_POST_DOMINATORS);

  fold_marked_statements (0, id.statements_to_fold);
  delete id.statements_to_fold;
  fold_cond_expr_cond ();
  delete_unreachable_blocks_update_callgraph (&id);
  if (id.dst_node->definition)
    cgraph_edge::rebuild_references ();
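  /* Deleting unreachable blocks may have invalidated the loop structures;
     repair them before updating the SSA form.  */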
  if (loops_state_satisfies_p (LOOPS_NEED_FIXUP))
    {
      calculate_dominance_info (CDI_DOMINATORS);
      fix_loop_structure (NULL);
    }
  update_ssa (TODO_update_ssa);

  /* After partial cloning we need to rescale frequencies, so they are
     within proper range in the cloned function.  */
  if (new_entry)
    {
      struct cgraph_edge *e;
      rebuild_frequencies ();

      new_version_node->count = ENTRY_BLOCK_PTR_FOR_FN (cfun)->count;
      for (e = new_version_node->callees; e; e = e->next_callee)
        {
          basic_block bb = gimple_bb (e->call_stmt);
          e->frequency = compute_call_stmt_bb_frequency (current_function_decl,
                                                         bb);
          e->count = bb->count;
        }
      for (e = new_version_node->indirect_calls; e; e = e->next_callee)
        {
          basic_block bb = gimple_bb (e->call_stmt);
          e->frequency = compute_call_stmt_bb_frequency (current_function_decl,
                                                         bb);
          e->count = bb->count;
        }
    }

  free_dominance_info (CDI_DOMINATORS);
  free_dominance_info (CDI_POST_DOMINATORS);

  gcc_assert (!id.debug_stmts.exists ());
  pop_cfun ();
  return;
}
/* EXP is a CALL_EXPR present in a GENERIC expression tree.  Try to integrate
   the callee and return the inlined body on success.  */

tree
maybe_inline_call_in_expr (tree exp)
{
  tree fn = get_callee_fndecl (exp);

  /* We can only try to inline "const" functions.  */
  if (fn && TREE_READONLY (fn) && DECL_SAVED_TREE (fn))
    {
      call_expr_arg_iterator iter;
      copy_body_data id;
      tree param, arg, t;
      hash_map<tree, tree> decl_map;

      /* Remap the parameters.  */
      for (param = DECL_ARGUMENTS (fn), arg = first_call_expr_arg (exp, &iter);
           param;
           param = DECL_CHAIN (param), arg = next_call_expr_arg (&iter))
        decl_map.put (param, arg);

      memset (&id, 0, sizeof (id));
      id.src_fn = fn;
      id.dst_fn = current_function_decl;
      id.src_cfun = DECL_STRUCT_FUNCTION (fn);
      id.decl_map = &decl_map;

      id.copy_decl = copy_decl_no_change;
      id.transform_call_graph_edges = CB_CGE_DUPLICATE;
      id.transform_new_cfg = false;
      id.transform_return_to_modify = true;
      id.transform_parameter = true;
      id.transform_lang_insert_block = NULL;

      /* Make sure not to unshare trees behind the front-end's back
         since front-end specific mechanisms may rely on sharing.  */
      id.regimplify = false;
      id.do_not_unshare = true;

      /* We're not inside any EH region.  */
      id.eh_lp_nr = 0;
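      /* Because transform_return_to_modify is set, the callee's RETURN_EXPR
         is reduced to the MODIFY_EXPR underneath it, which the check below
         relies on.  */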
      t = copy_tree_body (&id);

      /* We can only return something suitable for use in a GENERIC
         expression tree.  */
      if (TREE_CODE (t) == MODIFY_EXPR)
        return TREE_OPERAND (t, 1);
    }

  return NULL_TREE;
}
/* Duplicate a type, fields and all.  */

tree
build_duplicate_type (tree type)
{
  struct copy_body_data id;

  memset (&id, 0, sizeof (id));
  id.src_fn = current_function_decl;
  id.dst_fn = current_function_decl;
  id.src_cfun = cfun;
  id.decl_map = new hash_map<tree, tree>;
  id.debug_map = NULL;
  id.copy_decl = copy_decl_no_change;

  type = remap_type_1 (type, &id);

  delete id.decl_map;
  if (id.debug_map)
    delete id.debug_map;
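  /* The duplicate is a distinct type; make it its own canonical type.  */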
  TYPE_CANONICAL (type) = type;

  return type;
}
/* Unshare the entire DECL_SAVED_TREE of FN and return the remapped
   parameters and RESULT_DECL in PARMS and RESULT.  Used by C++ constexpr
   evaluation.  */

tree
copy_fn (tree fn, tree& parms, tree& result)
{
  copy_body_data id;
  tree param;
  hash_map<tree, tree> decl_map;

  tree *p = &parms;
  *p = NULL_TREE;

  memset (&id, 0, sizeof (id));
  id.src_fn = fn;
  id.dst_fn = current_function_decl;
  id.src_cfun = DECL_STRUCT_FUNCTION (fn);
  id.decl_map = &decl_map;

  id.copy_decl = copy_decl_no_change;
  id.transform_call_graph_edges = CB_CGE_DUPLICATE;
  id.transform_new_cfg = false;
  id.transform_return_to_modify = false;
  id.transform_parameter = true;
  id.transform_lang_insert_block = NULL;

  /* Make sure not to unshare trees behind the front-end's back
     since front-end specific mechanisms may rely on sharing.  */
  id.regimplify = false;
  id.do_not_unshare = true;

  /* We're not inside any EH region.  */
  id.eh_lp_nr = 0;

  /* Remap the parameters and result and return them to the caller.  */
  for (param = DECL_ARGUMENTS (fn);
       param;
       param = DECL_CHAIN (param))
    {
      *p = remap_decl (param, &id);
      p = &DECL_CHAIN (*p);
    }

  if (DECL_RESULT (fn))
    result = remap_decl (DECL_RESULT (fn), &id);
  else
    result = NULL_TREE;
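  /* Copy DECL_SAVED_TREE, remapping uses of the parameters and result to
     the decls created above.  */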
  return copy_tree_body (&id);
}