[ARM] fix big.LITTLE spec rewriting
[official-gcc.git] / gcc / tree-inline.c
1 /* Tree inlining.
2 Copyright (C) 2001-2014 Free Software Foundation, Inc.
3 Contributed by Alexandre Oliva <aoliva@redhat.com>
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
10 any later version.
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "tm.h"
25 #include "diagnostic-core.h"
26 #include "tree.h"
27 #include "stor-layout.h"
28 #include "calls.h"
29 #include "tree-inline.h"
30 #include "flags.h"
31 #include "params.h"
32 #include "input.h"
33 #include "insn-config.h"
34 #include "hashtab.h"
35 #include "langhooks.h"
36 #include "basic-block.h"
37 #include "tree-iterator.h"
38 #include "intl.h"
39 #include "pointer-set.h"
40 #include "tree-ssa-alias.h"
41 #include "internal-fn.h"
42 #include "gimple-fold.h"
43 #include "tree-eh.h"
44 #include "gimple-expr.h"
45 #include "is-a.h"
46 #include "gimple.h"
47 #include "gimplify.h"
48 #include "gimple-iterator.h"
49 #include "gimplify-me.h"
50 #include "gimple-walk.h"
51 #include "gimple-ssa.h"
52 #include "tree-cfg.h"
53 #include "tree-phinodes.h"
54 #include "ssa-iterators.h"
55 #include "stringpool.h"
56 #include "tree-ssanames.h"
57 #include "tree-into-ssa.h"
58 #include "expr.h"
59 #include "tree-dfa.h"
60 #include "tree-ssa.h"
61 #include "function.h"
62 #include "tree-pretty-print.h"
63 #include "except.h"
64 #include "debug.h"
65 #include "ipa-prop.h"
66 #include "value-prof.h"
67 #include "tree-pass.h"
68 #include "target.h"
69 #include "cfgloop.h"
71 #include "rtl.h" /* FIXME: For asm_str_count. */
73 /* I'm not real happy about this, but we need to handle gimple and
74 non-gimple trees. */
76 /* Inlining, Cloning, Versioning, Parallelization
78 Inlining: a function body is duplicated, but the PARM_DECLs are
79 remapped into VAR_DECLs, and non-void RETURN_EXPRs become
80 MODIFY_EXPRs that store to a dedicated returned-value variable.
81 The duplicated eh_region info of the copy will later be appended
82 to the info for the caller; the eh_region info in copied throwing
83 statements and RESX statements are adjusted accordingly.
85 Cloning: (only in C++) We have one body for a con/de/structor, and
86 multiple function decls, each with a unique parameter list.
87 Duplicate the body, using the given splay tree; some parameters
88 will become constants (like 0 or 1).
90 Versioning: a function body is duplicated and the result is a new
91 function rather than into blocks of an existing function as with
92 inlining. Some parameters will become constants.
94 Parallelization: a region of a function is duplicated resulting in
95 a new function. Variables may be replaced with complex expressions
96 to enable shared variable semantics.
98 All of these will simultaneously look up any callgraph edges. If
99 we're going to inline the duplicated function body, and the given
100 function has some cloned callgraph nodes (one for each place this
101 function will be inlined) those callgraph edges will be duplicated.
102 If we're cloning the body, those callgraph edges will be
103 updated to point into the new body. (Note that the original
104 callgraph node and edge list will not be altered.)
106 See the CALL_EXPR handling case in copy_tree_body_r (). */
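/* Editor's illustrative sketch (an assumption for exposition, not part of
   the pass): given a callee such as

       int inc (int x) { return x + 1; }

   inlining a call "y = inc (a);" conceptually remaps the PARM_DECL 'x'
   to a fresh local VAR_DECL and rewrites the RETURN_EXPR as an
   assignment to a returned-value temporary, roughly:

       x.1 = a;
       retval.2 = x.1 + 1;
       y = retval.2;

   The temporaries x.1 and retval.2 are hypothetical names; see
   declare_return_variable and remap_gimple_stmt below for the real
   mechanics.  */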
108 /* To Do:
110 o In order to make inlining-on-trees work, we pessimized
111 function-local static constants. In particular, they are now
112 always output, even when not addressed. Fix this by treating
113 function-local static constants just like global static
114 constants; the back-end already knows not to output them if they
115 are not needed.
117 o Provide heuristics to clamp inlining of recursive template
118 calls? */
121 /* Weights that estimate_num_insns uses to estimate the size of the
122 produced code. */
124 eni_weights eni_size_weights;
126 /* Weights that estimate_num_insns uses to estimate the time necessary
127 to execute the produced code. */
129 eni_weights eni_time_weights;
131 /* Prototypes. */
133 static tree declare_return_variable (copy_body_data *, tree, tree, basic_block);
134 static void remap_block (tree *, copy_body_data *);
135 static void copy_bind_expr (tree *, int *, copy_body_data *);
136 static void declare_inline_vars (tree, tree);
137 static void remap_save_expr (tree *, void *, int *);
138 static void prepend_lexical_block (tree current_block, tree new_block);
139 static tree copy_decl_to_var (tree, copy_body_data *);
140 static tree copy_result_decl_to_var (tree, copy_body_data *);
141 static tree copy_decl_maybe_to_var (tree, copy_body_data *);
142 static gimple remap_gimple_stmt (gimple, copy_body_data *);
143 static bool delete_unreachable_blocks_update_callgraph (copy_body_data *id);
145 /* Insert a tree->tree mapping for ID. Although the name suggests
146 that the trees should be variables, it is used for more than that. */
148 void
149 insert_decl_map (copy_body_data *id, tree key, tree value)
151 *pointer_map_insert (id->decl_map, key) = value;
153 /* Always insert an identity map as well. If we see this same new
154 node again, we won't want to duplicate it a second time. */
155 if (key != value)
156 *pointer_map_insert (id->decl_map, value) = value;
159 /* Insert a tree->tree mapping for ID. This is only used for
160 variables. */
162 static void
163 insert_debug_decl_map (copy_body_data *id, tree key, tree value)
165 if (!gimple_in_ssa_p (id->src_cfun))
166 return;
168 if (!MAY_HAVE_DEBUG_STMTS)
169 return;
171 if (!target_for_debug_bind (key))
172 return;
174 gcc_assert (TREE_CODE (key) == PARM_DECL);
175 gcc_assert (TREE_CODE (value) == VAR_DECL);
177 if (!id->debug_map)
178 id->debug_map = pointer_map_create ();
180 *pointer_map_insert (id->debug_map, key) = value;
183 /* If nonzero, we're remapping the contents of inlined debug
184 statements. If negative, an error has occurred, such as a
185 reference to a variable that isn't available in the inlined
186 context. */
187 static int processing_debug_stmt = 0;
189 /* Construct new SSA name for old NAME. ID is the inline context. */
191 static tree
192 remap_ssa_name (tree name, copy_body_data *id)
194 tree new_tree, var;
195 tree *n;
197 gcc_assert (TREE_CODE (name) == SSA_NAME);
199 n = (tree *) pointer_map_contains (id->decl_map, name);
200 if (n)
201 return unshare_expr (*n);
203 if (processing_debug_stmt)
205 if (SSA_NAME_IS_DEFAULT_DEF (name)
206 && TREE_CODE (SSA_NAME_VAR (name)) == PARM_DECL
207 && id->entry_bb == NULL
208 && single_succ_p (ENTRY_BLOCK_PTR_FOR_FN (cfun)))
210 tree vexpr = make_node (DEBUG_EXPR_DECL);
211 gimple def_temp;
212 gimple_stmt_iterator gsi;
213 tree val = SSA_NAME_VAR (name);
215 n = (tree *) pointer_map_contains (id->decl_map, val);
216 if (n != NULL)
217 val = *n;
218 if (TREE_CODE (val) != PARM_DECL)
220 processing_debug_stmt = -1;
221 return name;
223 def_temp = gimple_build_debug_source_bind (vexpr, val, NULL);
224 DECL_ARTIFICIAL (vexpr) = 1;
225 TREE_TYPE (vexpr) = TREE_TYPE (name);
226 DECL_MODE (vexpr) = DECL_MODE (SSA_NAME_VAR (name));
227 gsi = gsi_after_labels (single_succ (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
228 gsi_insert_before (&gsi, def_temp, GSI_SAME_STMT);
229 return vexpr;
232 processing_debug_stmt = -1;
233 return name;
236 /* Remap anonymous SSA names or SSA names of anonymous decls. */
237 var = SSA_NAME_VAR (name);
238 if (!var
239 || (!SSA_NAME_IS_DEFAULT_DEF (name)
240 && TREE_CODE (var) == VAR_DECL
241 && !VAR_DECL_IS_VIRTUAL_OPERAND (var)
242 && DECL_ARTIFICIAL (var)
243 && DECL_IGNORED_P (var)
244 && !DECL_NAME (var)))
246 struct ptr_info_def *pi;
247 new_tree = make_ssa_name (remap_type (TREE_TYPE (name), id), NULL);
248 if (!var && SSA_NAME_IDENTIFIER (name))
249 SET_SSA_NAME_VAR_OR_IDENTIFIER (new_tree, SSA_NAME_IDENTIFIER (name));
250 insert_decl_map (id, name, new_tree);
251 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (new_tree)
252 = SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name);
253 /* At least IPA points-to info can be directly transferred. */
254 if (id->src_cfun->gimple_df
255 && id->src_cfun->gimple_df->ipa_pta
256 && (pi = SSA_NAME_PTR_INFO (name))
257 && !pi->pt.anything)
259 struct ptr_info_def *new_pi = get_ptr_info (new_tree);
260 new_pi->pt = pi->pt;
262 return new_tree;
265 /* Do not set DEF_STMT yet, as the statement is not copied yet. We do that
266 in copy_bb. */
267 new_tree = remap_decl (var, id);
269 /* We might've substituted constant or another SSA_NAME for
270 the variable.
272 Replace the SSA name representing RESULT_DECL by the variable during
273 inlining: this saves us from the need to introduce a PHI node in case the
274 return value is only partly initialized. */
275 if ((TREE_CODE (new_tree) == VAR_DECL || TREE_CODE (new_tree) == PARM_DECL)
276 && (!SSA_NAME_VAR (name)
277 || TREE_CODE (SSA_NAME_VAR (name)) != RESULT_DECL
278 || !id->transform_return_to_modify))
280 struct ptr_info_def *pi;
281 new_tree = make_ssa_name (new_tree, NULL);
282 insert_decl_map (id, name, new_tree);
283 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (new_tree)
284 = SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name);
285 /* At least IPA points-to info can be directly transferred. */
286 if (id->src_cfun->gimple_df
287 && id->src_cfun->gimple_df->ipa_pta
288 && (pi = SSA_NAME_PTR_INFO (name))
289 && !pi->pt.anything)
291 struct ptr_info_def *new_pi = get_ptr_info (new_tree);
292 new_pi->pt = pi->pt;
294 if (SSA_NAME_IS_DEFAULT_DEF (name))
296 /* By inlining a function having an uninitialized variable, we might
297 extend its lifetime (the variable might get reused). This causes an
298 ICE in the case we end up extending the lifetime of an SSA name across an
299 abnormal edge, but it also increases register pressure.
301 We simply initialize all uninitialized vars by 0, except
302 for the case we are inlining to the very first BB. We can avoid
303 this for all BBs that are not inside strongly connected
304 regions of the CFG, but this is expensive to test. */
305 if (id->entry_bb
306 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name)
307 && (!SSA_NAME_VAR (name)
308 || TREE_CODE (SSA_NAME_VAR (name)) != PARM_DECL)
309 && (id->entry_bb != EDGE_SUCC (ENTRY_BLOCK_PTR_FOR_FN (cfun),
310 0)->dest
311 || EDGE_COUNT (id->entry_bb->preds) != 1))
313 gimple_stmt_iterator gsi = gsi_last_bb (id->entry_bb);
314 gimple init_stmt;
315 tree zero = build_zero_cst (TREE_TYPE (new_tree));
317 init_stmt = gimple_build_assign (new_tree, zero);
318 gsi_insert_after (&gsi, init_stmt, GSI_NEW_STMT);
319 SSA_NAME_IS_DEFAULT_DEF (new_tree) = 0;
321 else
323 SSA_NAME_DEF_STMT (new_tree) = gimple_build_nop ();
324 set_ssa_default_def (cfun, SSA_NAME_VAR (new_tree), new_tree);
328 else
329 insert_decl_map (id, name, new_tree);
330 return new_tree;
333 /* Remap DECL during the copying of the BLOCK tree for the function. */
335 tree
336 remap_decl (tree decl, copy_body_data *id)
338 tree *n;
340 /* We only remap local variables in the current function. */
342 /* See if we have remapped this declaration. */
344 n = (tree *) pointer_map_contains (id->decl_map, decl);
346 if (!n && processing_debug_stmt)
348 processing_debug_stmt = -1;
349 return decl;
352 /* If we didn't already have an equivalent for this declaration,
353 create one now. */
354 if (!n)
356 /* Make a copy of the variable or label. */
357 tree t = id->copy_decl (decl, id);
359 /* Remember it, so that if we encounter this local entity again
360 we can reuse this copy. Do this early because remap_type may
361 need this decl for TYPE_STUB_DECL. */
362 insert_decl_map (id, decl, t);
364 if (!DECL_P (t))
365 return t;
367 /* Remap types, if necessary. */
368 TREE_TYPE (t) = remap_type (TREE_TYPE (t), id);
369 if (TREE_CODE (t) == TYPE_DECL)
370 DECL_ORIGINAL_TYPE (t) = remap_type (DECL_ORIGINAL_TYPE (t), id);
372 /* Remap sizes as necessary. */
373 walk_tree (&DECL_SIZE (t), copy_tree_body_r, id, NULL);
374 walk_tree (&DECL_SIZE_UNIT (t), copy_tree_body_r, id, NULL);
376 /* If fields, do likewise for offset and qualifier. */
377 if (TREE_CODE (t) == FIELD_DECL)
379 walk_tree (&DECL_FIELD_OFFSET (t), copy_tree_body_r, id, NULL);
380 if (TREE_CODE (DECL_CONTEXT (t)) == QUAL_UNION_TYPE)
381 walk_tree (&DECL_QUALIFIER (t), copy_tree_body_r, id, NULL);
384 return t;
387 if (id->do_not_unshare)
388 return *n;
389 else
390 return unshare_expr (*n);
393 static tree
394 remap_type_1 (tree type, copy_body_data *id)
396 tree new_tree, t;
398 /* We do need a copy. Build and register it now. If this is a pointer or
399 reference type, remap the designated type and make a new pointer or
400 reference type. */
401 if (TREE_CODE (type) == POINTER_TYPE)
403 new_tree = build_pointer_type_for_mode (remap_type (TREE_TYPE (type), id),
404 TYPE_MODE (type),
405 TYPE_REF_CAN_ALIAS_ALL (type));
406 if (TYPE_ATTRIBUTES (type) || TYPE_QUALS (type))
407 new_tree = build_type_attribute_qual_variant (new_tree,
408 TYPE_ATTRIBUTES (type),
409 TYPE_QUALS (type));
410 insert_decl_map (id, type, new_tree);
411 return new_tree;
413 else if (TREE_CODE (type) == REFERENCE_TYPE)
415 new_tree = build_reference_type_for_mode (remap_type (TREE_TYPE (type), id),
416 TYPE_MODE (type),
417 TYPE_REF_CAN_ALIAS_ALL (type));
418 if (TYPE_ATTRIBUTES (type) || TYPE_QUALS (type))
419 new_tree = build_type_attribute_qual_variant (new_tree,
420 TYPE_ATTRIBUTES (type),
421 TYPE_QUALS (type));
422 insert_decl_map (id, type, new_tree);
423 return new_tree;
425 else
426 new_tree = copy_node (type);
428 insert_decl_map (id, type, new_tree);
430 /* This is a new type, not a copy of an old type. Need to reassociate
431 variants. We can handle everything except the main variant lazily. */
432 t = TYPE_MAIN_VARIANT (type);
433 if (type != t)
435 t = remap_type (t, id);
436 TYPE_MAIN_VARIANT (new_tree) = t;
437 TYPE_NEXT_VARIANT (new_tree) = TYPE_NEXT_VARIANT (t);
438 TYPE_NEXT_VARIANT (t) = new_tree;
440 else
442 TYPE_MAIN_VARIANT (new_tree) = new_tree;
443 TYPE_NEXT_VARIANT (new_tree) = NULL;
446 if (TYPE_STUB_DECL (type))
447 TYPE_STUB_DECL (new_tree) = remap_decl (TYPE_STUB_DECL (type), id);
449 /* Lazily create pointer and reference types. */
450 TYPE_POINTER_TO (new_tree) = NULL;
451 TYPE_REFERENCE_TO (new_tree) = NULL;
453 switch (TREE_CODE (new_tree))
455 case INTEGER_TYPE:
456 case REAL_TYPE:
457 case FIXED_POINT_TYPE:
458 case ENUMERAL_TYPE:
459 case BOOLEAN_TYPE:
460 t = TYPE_MIN_VALUE (new_tree);
461 if (t && TREE_CODE (t) != INTEGER_CST)
462 walk_tree (&TYPE_MIN_VALUE (new_tree), copy_tree_body_r, id, NULL);
464 t = TYPE_MAX_VALUE (new_tree);
465 if (t && TREE_CODE (t) != INTEGER_CST)
466 walk_tree (&TYPE_MAX_VALUE (new_tree), copy_tree_body_r, id, NULL);
467 return new_tree;
469 case FUNCTION_TYPE:
470 TREE_TYPE (new_tree) = remap_type (TREE_TYPE (new_tree), id);
471 walk_tree (&TYPE_ARG_TYPES (new_tree), copy_tree_body_r, id, NULL);
472 return new_tree;
474 case ARRAY_TYPE:
475 TREE_TYPE (new_tree) = remap_type (TREE_TYPE (new_tree), id);
476 TYPE_DOMAIN (new_tree) = remap_type (TYPE_DOMAIN (new_tree), id);
477 break;
479 case RECORD_TYPE:
480 case UNION_TYPE:
481 case QUAL_UNION_TYPE:
483 tree f, nf = NULL;
485 for (f = TYPE_FIELDS (new_tree); f ; f = DECL_CHAIN (f))
487 t = remap_decl (f, id);
488 DECL_CONTEXT (t) = new_tree;
489 DECL_CHAIN (t) = nf;
490 nf = t;
492 TYPE_FIELDS (new_tree) = nreverse (nf);
494 break;
496 case OFFSET_TYPE:
497 default:
498 /* Shouldn't have been thought variable sized. */
499 gcc_unreachable ();
502 walk_tree (&TYPE_SIZE (new_tree), copy_tree_body_r, id, NULL);
503 walk_tree (&TYPE_SIZE_UNIT (new_tree), copy_tree_body_r, id, NULL);
505 return new_tree;
508 tree
509 remap_type (tree type, copy_body_data *id)
511 tree *node;
512 tree tmp;
514 if (type == NULL)
515 return type;
517 /* See if we have remapped this type. */
518 node = (tree *) pointer_map_contains (id->decl_map, type);
519 if (node)
520 return *node;
522 /* The type only needs remapping if it's variably modified. */
523 if (! variably_modified_type_p (type, id->src_fn))
525 insert_decl_map (id, type, type);
526 return type;
529 id->remapping_type_depth++;
530 tmp = remap_type_1 (type, id);
531 id->remapping_type_depth--;
533 return tmp;
536 /* Decide if DECL can be put into BLOCK_NONLOCAL_VARs. */
538 static bool
539 can_be_nonlocal (tree decl, copy_body_data *id)
541 /* We cannot duplicate function decls. */
542 if (TREE_CODE (decl) == FUNCTION_DECL)
543 return true;
545 /* Local static vars must be non-local or we get multiple declaration
546 problems. */
547 if (TREE_CODE (decl) == VAR_DECL
548 && !auto_var_in_fn_p (decl, id->src_fn))
549 return true;
551 return false;
554 static tree
555 remap_decls (tree decls, vec<tree, va_gc> **nonlocalized_list,
556 copy_body_data *id)
558 tree old_var;
559 tree new_decls = NULL_TREE;
561 /* Remap its variables. */
562 for (old_var = decls; old_var; old_var = DECL_CHAIN (old_var))
564 tree new_var;
566 if (can_be_nonlocal (old_var, id))
568 /* We need to add this variable to the local decls as otherwise
569 nothing else will do so. */
570 if (TREE_CODE (old_var) == VAR_DECL
571 && ! DECL_EXTERNAL (old_var))
572 add_local_decl (cfun, old_var);
573 if ((!optimize || debug_info_level > DINFO_LEVEL_TERSE)
574 && !DECL_IGNORED_P (old_var)
575 && nonlocalized_list)
576 vec_safe_push (*nonlocalized_list, old_var);
577 continue;
580 /* Remap the variable. */
581 new_var = remap_decl (old_var, id);
583 /* If we didn't remap this variable, we can't mess with its
584 TREE_CHAIN. If we remapped this variable to the return slot, it's
585 already declared somewhere else, so don't declare it here. */
587 if (new_var == id->retvar)
589 else if (!new_var)
591 if ((!optimize || debug_info_level > DINFO_LEVEL_TERSE)
592 && !DECL_IGNORED_P (old_var)
593 && nonlocalized_list)
594 vec_safe_push (*nonlocalized_list, old_var);
596 else
598 gcc_assert (DECL_P (new_var));
599 DECL_CHAIN (new_var) = new_decls;
600 new_decls = new_var;
602 /* Also copy value-expressions. */
603 if (TREE_CODE (new_var) == VAR_DECL
604 && DECL_HAS_VALUE_EXPR_P (new_var))
606 tree tem = DECL_VALUE_EXPR (new_var);
607 bool old_regimplify = id->regimplify;
608 id->remapping_type_depth++;
609 walk_tree (&tem, copy_tree_body_r, id, NULL);
610 id->remapping_type_depth--;
611 id->regimplify = old_regimplify;
612 SET_DECL_VALUE_EXPR (new_var, tem);
617 return nreverse (new_decls);
620 /* Copy the BLOCK to contain remapped versions of the variables
621 therein. And hook the new block into the block-tree. */
623 static void
624 remap_block (tree *block, copy_body_data *id)
626 tree old_block;
627 tree new_block;
629 /* Make the new block. */
630 old_block = *block;
631 new_block = make_node (BLOCK);
632 TREE_USED (new_block) = TREE_USED (old_block);
633 BLOCK_ABSTRACT_ORIGIN (new_block) = old_block;
634 BLOCK_SOURCE_LOCATION (new_block) = BLOCK_SOURCE_LOCATION (old_block);
635 BLOCK_NONLOCALIZED_VARS (new_block)
636 = vec_safe_copy (BLOCK_NONLOCALIZED_VARS (old_block));
637 *block = new_block;
639 /* Remap its variables. */
640 BLOCK_VARS (new_block) = remap_decls (BLOCK_VARS (old_block),
641 &BLOCK_NONLOCALIZED_VARS (new_block),
642 id);
644 if (id->transform_lang_insert_block)
645 id->transform_lang_insert_block (new_block);
647 /* Remember the remapped block. */
648 insert_decl_map (id, old_block, new_block);
651 /* Copy the whole block tree and root it in id->block. */
652 static tree
653 remap_blocks (tree block, copy_body_data *id)
655 tree t;
656 tree new_tree = block;
658 if (!block)
659 return NULL;
661 remap_block (&new_tree, id);
662 gcc_assert (new_tree != block);
663 for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
664 prepend_lexical_block (new_tree, remap_blocks (t, id));
665 /* Blocks are in arbitrary order; to make things slightly prettier, do
666 not swap their order when producing a copy. */
667 BLOCK_SUBBLOCKS (new_tree) = blocks_nreverse (BLOCK_SUBBLOCKS (new_tree));
668 return new_tree;
671 /* Remap the block tree rooted at BLOCK to nothing. */
672 static void
673 remap_blocks_to_null (tree block, copy_body_data *id)
675 tree t;
676 insert_decl_map (id, block, NULL_TREE);
677 for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
678 remap_blocks_to_null (t, id);
681 static void
682 copy_statement_list (tree *tp)
684 tree_stmt_iterator oi, ni;
685 tree new_tree;
687 new_tree = alloc_stmt_list ();
688 ni = tsi_start (new_tree);
689 oi = tsi_start (*tp);
690 TREE_TYPE (new_tree) = TREE_TYPE (*tp);
691 *tp = new_tree;
693 for (; !tsi_end_p (oi); tsi_next (&oi))
695 tree stmt = tsi_stmt (oi);
696 if (TREE_CODE (stmt) == STATEMENT_LIST)
697 /* This copy is not redundant; tsi_link_after will smash this
698 STATEMENT_LIST into the end of the one we're building, and we
699 don't want to do that with the original. */
700 copy_statement_list (&stmt);
701 tsi_link_after (&ni, stmt, TSI_CONTINUE_LINKING);
705 static void
706 copy_bind_expr (tree *tp, int *walk_subtrees, copy_body_data *id)
708 tree block = BIND_EXPR_BLOCK (*tp);
709 /* Copy (and replace) the statement. */
710 copy_tree_r (tp, walk_subtrees, NULL);
711 if (block)
713 remap_block (&block, id);
714 BIND_EXPR_BLOCK (*tp) = block;
717 if (BIND_EXPR_VARS (*tp))
718 /* This will remap a lot of the same decls again, but this should be
719 harmless. */
720 BIND_EXPR_VARS (*tp) = remap_decls (BIND_EXPR_VARS (*tp), NULL, id);
724 /* Create a new gimple_seq by remapping all the statements in BODY
725 using the inlining information in ID. */
727 static gimple_seq
728 remap_gimple_seq (gimple_seq body, copy_body_data *id)
730 gimple_stmt_iterator si;
731 gimple_seq new_body = NULL;
733 for (si = gsi_start (body); !gsi_end_p (si); gsi_next (&si))
735 gimple new_stmt = remap_gimple_stmt (gsi_stmt (si), id);
736 gimple_seq_add_stmt (&new_body, new_stmt);
739 return new_body;
743 /* Copy a GIMPLE_BIND statement STMT, remapping all the symbols in its
744 block using the mapping information in ID. */
746 static gimple
747 copy_gimple_bind (gimple stmt, copy_body_data *id)
749 gimple new_bind;
750 tree new_block, new_vars;
751 gimple_seq body, new_body;
753 /* Copy the statement. Note that we purposely don't use copy_stmt
754 here because we need to remap statements as we copy. */
755 body = gimple_bind_body (stmt);
756 new_body = remap_gimple_seq (body, id);
758 new_block = gimple_bind_block (stmt);
759 if (new_block)
760 remap_block (&new_block, id);
762 /* This will remap a lot of the same decls again, but this should be
763 harmless. */
764 new_vars = gimple_bind_vars (stmt);
765 if (new_vars)
766 new_vars = remap_decls (new_vars, NULL, id);
768 new_bind = gimple_build_bind (new_vars, new_body, new_block);
770 return new_bind;
773 /* Return true if DECL is a parameter or a SSA_NAME for a parameter. */
775 static bool
776 is_parm (tree decl)
778 if (TREE_CODE (decl) == SSA_NAME)
780 decl = SSA_NAME_VAR (decl);
781 if (!decl)
782 return false;
785 return (TREE_CODE (decl) == PARM_DECL);
788 /* Remap the GIMPLE operand pointed to by *TP. DATA is really a
789 'struct walk_stmt_info *'. DATA->INFO is a 'copy_body_data *'.
790 WALK_SUBTREES is used to indicate to walk_gimple_op whether to keep
791 recursing into the children nodes of *TP. */
793 static tree
794 remap_gimple_op_r (tree *tp, int *walk_subtrees, void *data)
796 struct walk_stmt_info *wi_p = (struct walk_stmt_info *) data;
797 copy_body_data *id = (copy_body_data *) wi_p->info;
798 tree fn = id->src_fn;
800 if (TREE_CODE (*tp) == SSA_NAME)
802 *tp = remap_ssa_name (*tp, id);
803 *walk_subtrees = 0;
804 return NULL;
806 else if (auto_var_in_fn_p (*tp, fn))
808 /* Local variables and labels need to be replaced by equivalent
809 variables. We don't want to copy static variables; there's
810 only one of those, no matter how many times we inline the
811 containing function. Similarly for globals from an outer
812 function. */
813 tree new_decl;
815 /* Remap the declaration. */
816 new_decl = remap_decl (*tp, id);
817 gcc_assert (new_decl);
818 /* Replace this variable with the copy. */
819 STRIP_TYPE_NOPS (new_decl);
820 /* ??? The C++ frontend uses void * pointer zero to initialize
821 any other type. This confuses the middle-end type verification.
822 As cloned bodies do not go through gimplification again the fixup
823 there doesn't trigger. */
824 if (TREE_CODE (new_decl) == INTEGER_CST
825 && !useless_type_conversion_p (TREE_TYPE (*tp), TREE_TYPE (new_decl)))
826 new_decl = fold_convert (TREE_TYPE (*tp), new_decl);
827 *tp = new_decl;
828 *walk_subtrees = 0;
830 else if (TREE_CODE (*tp) == STATEMENT_LIST)
831 gcc_unreachable ();
832 else if (TREE_CODE (*tp) == SAVE_EXPR)
833 gcc_unreachable ();
834 else if (TREE_CODE (*tp) == LABEL_DECL
835 && (!DECL_CONTEXT (*tp)
836 || decl_function_context (*tp) == id->src_fn))
837 /* These may need to be remapped for EH handling. */
838 *tp = remap_decl (*tp, id);
839 else if (TREE_CODE (*tp) == FIELD_DECL)
841 /* If the enclosing record type is variably_modified_type_p, the field
842 has already been remapped. Otherwise, it need not be. */
843 tree *n = (tree *) pointer_map_contains (id->decl_map, *tp);
844 if (n)
845 *tp = *n;
846 *walk_subtrees = 0;
848 else if (TYPE_P (*tp))
849 /* Types may need remapping as well. */
850 *tp = remap_type (*tp, id);
851 else if (CONSTANT_CLASS_P (*tp))
853 /* If this is a constant, we have to copy the node iff the type
854 will be remapped. copy_tree_r will not copy a constant. */
855 tree new_type = remap_type (TREE_TYPE (*tp), id);
857 if (new_type == TREE_TYPE (*tp))
858 *walk_subtrees = 0;
860 else if (TREE_CODE (*tp) == INTEGER_CST)
861 *tp = build_int_cst_wide (new_type, TREE_INT_CST_LOW (*tp),
862 TREE_INT_CST_HIGH (*tp));
863 else
865 *tp = copy_node (*tp);
866 TREE_TYPE (*tp) = new_type;
869 else
871 /* Otherwise, just copy the node. Note that copy_tree_r already
872 knows not to copy VAR_DECLs, etc., so this is safe. */
874 if (TREE_CODE (*tp) == MEM_REF)
876 /* We need to re-canonicalize MEM_REFs from inline substitutions
877 that can happen when a pointer argument is an ADDR_EXPR.
878 Recurse here manually to allow that. */
879 tree ptr = TREE_OPERAND (*tp, 0);
880 tree type = remap_type (TREE_TYPE (*tp), id);
881 tree old = *tp;
882 walk_tree (&ptr, remap_gimple_op_r, data, NULL);
883 *tp = fold_build2 (MEM_REF, type, ptr, TREE_OPERAND (*tp, 1));
884 TREE_THIS_VOLATILE (*tp) = TREE_THIS_VOLATILE (old);
885 TREE_SIDE_EFFECTS (*tp) = TREE_SIDE_EFFECTS (old);
886 TREE_NO_WARNING (*tp) = TREE_NO_WARNING (old);
887 /* We cannot propagate the TREE_THIS_NOTRAP flag if we have
888 remapped a parameter as the property might be valid only
889 for the parameter itself. */
890 if (TREE_THIS_NOTRAP (old)
891 && (!is_parm (TREE_OPERAND (old, 0))
892 || (!id->transform_parameter && is_parm (ptr))))
893 TREE_THIS_NOTRAP (*tp) = 1;
894 *walk_subtrees = 0;
895 return NULL;
898 /* Here is the "usual case". Copy this tree node, and then
899 tweak some special cases. */
900 copy_tree_r (tp, walk_subtrees, NULL);
902 if (TREE_CODE (*tp) != OMP_CLAUSE)
903 TREE_TYPE (*tp) = remap_type (TREE_TYPE (*tp), id);
905 if (TREE_CODE (*tp) == TARGET_EXPR && TREE_OPERAND (*tp, 3))
907 /* The copied TARGET_EXPR has never been expanded, even if the
908 original node was expanded already. */
909 TREE_OPERAND (*tp, 1) = TREE_OPERAND (*tp, 3);
910 TREE_OPERAND (*tp, 3) = NULL_TREE;
912 else if (TREE_CODE (*tp) == ADDR_EXPR)
914 /* Variable substitution need not be simple. In particular,
915 the MEM_REF substitution above. Make sure that
916 TREE_CONSTANT and friends are up-to-date. */
917 int invariant = is_gimple_min_invariant (*tp);
918 walk_tree (&TREE_OPERAND (*tp, 0), remap_gimple_op_r, data, NULL);
919 recompute_tree_invariant_for_addr_expr (*tp);
921 /* If this used to be invariant, but is not any longer,
922 then regimplification is probably needed. */
923 if (invariant && !is_gimple_min_invariant (*tp))
924 id->regimplify = true;
926 *walk_subtrees = 0;
930 /* Update the TREE_BLOCK for the cloned expr. */
931 if (EXPR_P (*tp))
933 tree new_block = id->remapping_type_depth == 0 ? id->block : NULL;
934 tree old_block = TREE_BLOCK (*tp);
935 if (old_block)
937 tree *n;
938 n = (tree *) pointer_map_contains (id->decl_map,
939 TREE_BLOCK (*tp));
940 if (n)
941 new_block = *n;
943 TREE_SET_BLOCK (*tp, new_block);
946 /* Keep iterating. */
947 return NULL_TREE;
951 /* Called from copy_body_id via walk_tree. DATA is really a
952 `copy_body_data *'. */
954 tree
955 copy_tree_body_r (tree *tp, int *walk_subtrees, void *data)
957 copy_body_data *id = (copy_body_data *) data;
958 tree fn = id->src_fn;
959 tree new_block;
961 /* Begin by recognizing trees that we'll completely rewrite for the
962 inlining context. Our output for these trees is completely
963 different from our input (e.g. RETURN_EXPR is deleted, and morphs
964 into an edge). Further down, we'll handle trees that get
965 duplicated and/or tweaked. */
967 /* When requested, RETURN_EXPRs should be transformed to just the
968 contained MODIFY_EXPR. The branch semantics of the return will
969 be handled elsewhere by manipulating the CFG rather than a statement. */
970 if (TREE_CODE (*tp) == RETURN_EXPR && id->transform_return_to_modify)
972 tree assignment = TREE_OPERAND (*tp, 0);
974 /* If we're returning something, just turn that into an
975 assignment into the equivalent of the original RESULT_DECL.
976 If the "assignment" is just the result decl, the result
977 decl has already been set (e.g. a recent "foo (&result_decl,
978 ...)"); just toss the entire RETURN_EXPR. */
979 if (assignment && TREE_CODE (assignment) == MODIFY_EXPR)
981 /* Replace the RETURN_EXPR with (a copy of) the
982 MODIFY_EXPR hanging underneath. */
983 *tp = copy_node (assignment);
985 else /* Else the RETURN_EXPR returns no value. */
987 *tp = NULL;
988 return (tree) (void *)1;
991 else if (TREE_CODE (*tp) == SSA_NAME)
993 *tp = remap_ssa_name (*tp, id);
994 *walk_subtrees = 0;
995 return NULL;
998 /* Local variables and labels need to be replaced by equivalent
999 variables. We don't want to copy static variables; there's only
1000 one of those, no matter how many times we inline the containing
1001 function. Similarly for globals from an outer function. */
1002 else if (auto_var_in_fn_p (*tp, fn))
1004 tree new_decl;
1006 /* Remap the declaration. */
1007 new_decl = remap_decl (*tp, id);
1008 gcc_assert (new_decl);
1009 /* Replace this variable with the copy. */
1010 STRIP_TYPE_NOPS (new_decl);
1011 *tp = new_decl;
1012 *walk_subtrees = 0;
1014 else if (TREE_CODE (*tp) == STATEMENT_LIST)
1015 copy_statement_list (tp);
1016 else if (TREE_CODE (*tp) == SAVE_EXPR
1017 || TREE_CODE (*tp) == TARGET_EXPR)
1018 remap_save_expr (tp, id->decl_map, walk_subtrees);
1019 else if (TREE_CODE (*tp) == LABEL_DECL
1020 && (! DECL_CONTEXT (*tp)
1021 || decl_function_context (*tp) == id->src_fn))
1022 /* These may need to be remapped for EH handling. */
1023 *tp = remap_decl (*tp, id);
1024 else if (TREE_CODE (*tp) == BIND_EXPR)
1025 copy_bind_expr (tp, walk_subtrees, id);
1026 /* Types may need remapping as well. */
1027 else if (TYPE_P (*tp))
1028 *tp = remap_type (*tp, id);
1030 /* If this is a constant, we have to copy the node iff the type will be
1031 remapped. copy_tree_r will not copy a constant. */
1032 else if (CONSTANT_CLASS_P (*tp))
1034 tree new_type = remap_type (TREE_TYPE (*tp), id);
1036 if (new_type == TREE_TYPE (*tp))
1037 *walk_subtrees = 0;
1039 else if (TREE_CODE (*tp) == INTEGER_CST)
1040 *tp = build_int_cst_wide (new_type, TREE_INT_CST_LOW (*tp),
1041 TREE_INT_CST_HIGH (*tp));
1042 else
1044 *tp = copy_node (*tp);
1045 TREE_TYPE (*tp) = new_type;
1049 /* Otherwise, just copy the node. Note that copy_tree_r already
1050 knows not to copy VAR_DECLs, etc., so this is safe. */
1051 else
1053 /* Here we handle trees that are not completely rewritten.
1054 First we detect some inlining-induced bogosities for
1055 discarding. */
1056 if (TREE_CODE (*tp) == MODIFY_EXPR
1057 && TREE_OPERAND (*tp, 0) == TREE_OPERAND (*tp, 1)
1058 && (auto_var_in_fn_p (TREE_OPERAND (*tp, 0), fn)))
1060 /* Some assignments VAR = VAR; don't generate any rtl code
1061 and thus don't count as variable modification. Avoid
1062 keeping bogosities like 0 = 0. */
1063 tree decl = TREE_OPERAND (*tp, 0), value;
1064 tree *n;
1066 n = (tree *) pointer_map_contains (id->decl_map, decl);
1067 if (n)
1069 value = *n;
1070 STRIP_TYPE_NOPS (value);
1071 if (TREE_CONSTANT (value) || TREE_READONLY (value))
1073 *tp = build_empty_stmt (EXPR_LOCATION (*tp));
1074 return copy_tree_body_r (tp, walk_subtrees, data);
1078 else if (TREE_CODE (*tp) == INDIRECT_REF)
1080 /* Get rid of *& from inline substitutions that can happen when a
1081 pointer argument is an ADDR_EXPR. */
1082 tree decl = TREE_OPERAND (*tp, 0);
1083 tree *n = (tree *) pointer_map_contains (id->decl_map, decl);
1084 if (n)
1086 /* If we happen to get an ADDR_EXPR in n->value, strip
1087 it manually here as we'll eventually get ADDR_EXPRs
1088 which lie about their types pointed to. In this case
1089 build_fold_indirect_ref wouldn't strip the INDIRECT_REF,
1090 but we absolutely rely on that. As fold_indirect_ref
1091 does other useful transformations, try that first, though. */
1092 tree type = TREE_TYPE (*tp);
1093 tree ptr = id->do_not_unshare ? *n : unshare_expr (*n);
1094 tree old = *tp;
1095 *tp = gimple_fold_indirect_ref (ptr);
1096 if (! *tp)
1098 if (TREE_CODE (ptr) == ADDR_EXPR)
1100 *tp
1101 = fold_indirect_ref_1 (EXPR_LOCATION (ptr), type, ptr);
1102 /* ??? We should either assert here or build
1103 a VIEW_CONVERT_EXPR instead of blindly leaking
1104 incompatible types to our IL. */
1105 if (! *tp)
1106 *tp = TREE_OPERAND (ptr, 0);
1108 else
1110 *tp = build1 (INDIRECT_REF, type, ptr);
1111 TREE_THIS_VOLATILE (*tp) = TREE_THIS_VOLATILE (old);
1112 TREE_SIDE_EFFECTS (*tp) = TREE_SIDE_EFFECTS (old);
1113 TREE_READONLY (*tp) = TREE_READONLY (old);
1114 /* We cannot propagate the TREE_THIS_NOTRAP flag if we
1115 have remapped a parameter as the property might be
1116 valid only for the parameter itself. */
1117 if (TREE_THIS_NOTRAP (old)
1118 && (!is_parm (TREE_OPERAND (old, 0))
1119 || (!id->transform_parameter && is_parm (ptr))))
1120 TREE_THIS_NOTRAP (*tp) = 1;
1123 *walk_subtrees = 0;
1124 return NULL;
1127 else if (TREE_CODE (*tp) == MEM_REF)
1129 /* We need to re-canonicalize MEM_REFs from inline substitutions
1130 that can happen when a pointer argument is an ADDR_EXPR.
1131 Recurse here manually to allow that. */
1132 tree ptr = TREE_OPERAND (*tp, 0);
1133 tree type = remap_type (TREE_TYPE (*tp), id);
1134 tree old = *tp;
1135 walk_tree (&ptr, copy_tree_body_r, data, NULL);
1136 *tp = fold_build2 (MEM_REF, type, ptr, TREE_OPERAND (*tp, 1));
1137 TREE_THIS_VOLATILE (*tp) = TREE_THIS_VOLATILE (old);
1138 TREE_SIDE_EFFECTS (*tp) = TREE_SIDE_EFFECTS (old);
1139 TREE_NO_WARNING (*tp) = TREE_NO_WARNING (old);
1140 /* We cannot propagate the TREE_THIS_NOTRAP flag if we have
1141 remapped a parameter as the property might be valid only
1142 for the parameter itself. */
1143 if (TREE_THIS_NOTRAP (old)
1144 && (!is_parm (TREE_OPERAND (old, 0))
1145 || (!id->transform_parameter && is_parm (ptr))))
1146 TREE_THIS_NOTRAP (*tp) = 1;
1147 *walk_subtrees = 0;
1148 return NULL;
1151 /* Here is the "usual case". Copy this tree node, and then
1152 tweak some special cases. */
1153 copy_tree_r (tp, walk_subtrees, NULL);
1155 /* If EXPR has a block defined, map it to the newly constructed block.
1156 When inlining we want EXPRs without a block to appear in the block
1157 of the function call if we are not remapping a type. */
1158 if (EXPR_P (*tp))
1160 new_block = id->remapping_type_depth == 0 ? id->block : NULL;
1161 if (TREE_BLOCK (*tp))
1163 tree *n;
1164 n = (tree *) pointer_map_contains (id->decl_map,
1165 TREE_BLOCK (*tp));
1166 if (n)
1167 new_block = *n;
1169 TREE_SET_BLOCK (*tp, new_block);
1172 if (TREE_CODE (*tp) != OMP_CLAUSE)
1173 TREE_TYPE (*tp) = remap_type (TREE_TYPE (*tp), id);
1175 /* The copied TARGET_EXPR has never been expanded, even if the
1176 original node was expanded already. */
1177 if (TREE_CODE (*tp) == TARGET_EXPR && TREE_OPERAND (*tp, 3))
1179 TREE_OPERAND (*tp, 1) = TREE_OPERAND (*tp, 3);
1180 TREE_OPERAND (*tp, 3) = NULL_TREE;
1183 /* Variable substitution need not be simple. In particular, the
1184 INDIRECT_REF substitution above. Make sure that TREE_CONSTANT
1185 and friends are up-to-date. */
1186 else if (TREE_CODE (*tp) == ADDR_EXPR)
1188 int invariant = is_gimple_min_invariant (*tp);
1189 walk_tree (&TREE_OPERAND (*tp, 0), copy_tree_body_r, id, NULL);
1191 /* Handle the case where we substituted an INDIRECT_REF
1192 into the operand of the ADDR_EXPR. */
1193 if (TREE_CODE (TREE_OPERAND (*tp, 0)) == INDIRECT_REF)
1194 *tp = TREE_OPERAND (TREE_OPERAND (*tp, 0), 0);
1195 else
1196 recompute_tree_invariant_for_addr_expr (*tp);
1198 /* If this used to be invariant, but is not any longer,
1199 then regimplification is probably needed. */
1200 if (invariant && !is_gimple_min_invariant (*tp))
1201 id->regimplify = true;
1203 *walk_subtrees = 0;
1207 /* Keep iterating. */
1208 return NULL_TREE;
1211 /* Helper for remap_gimple_stmt. Given an EH region number for the
1212 source function, map that to the duplicate EH region number in
1213 the destination function. */
1215 static int
1216 remap_eh_region_nr (int old_nr, copy_body_data *id)
1218 eh_region old_r, new_r;
1219 void **slot;
1221 old_r = get_eh_region_from_number_fn (id->src_cfun, old_nr);
1222 slot = pointer_map_contains (id->eh_map, old_r);
1223 new_r = (eh_region) *slot;
1225 return new_r->index;
1228 /* Similar, but operate on INTEGER_CSTs. */
1230 static tree
1231 remap_eh_region_tree_nr (tree old_t_nr, copy_body_data *id)
1233 int old_nr, new_nr;
1235 old_nr = tree_to_shwi (old_t_nr);
1236 new_nr = remap_eh_region_nr (old_nr, id);
1238 return build_int_cst (integer_type_node, new_nr);
1241 /* Helper for copy_bb. Remap statement STMT using the inlining
1242 information in ID. Return the new statement copy. */
1244 static gimple
1245 remap_gimple_stmt (gimple stmt, copy_body_data *id)
1247 gimple copy = NULL;
1248 struct walk_stmt_info wi;
1249 bool skip_first = false;
1251 /* Begin by recognizing trees that we'll completely rewrite for the
1252 inlining context. Our output for these trees is completely
1253 different from our input (e.g. RETURN_EXPR is deleted, and morphs
1254 into an edge). Further down, we'll handle trees that get
1255 duplicated and/or tweaked. */
1257 /* When requested, GIMPLE_RETURNs should be transformed to just the
1258 contained GIMPLE_ASSIGN. The branch semantics of the return will
1259 be handled elsewhere by manipulating the CFG rather than the
1260 statement. */
1261 if (gimple_code (stmt) == GIMPLE_RETURN && id->transform_return_to_modify)
1263 tree retval = gimple_return_retval (stmt);
1265 /* If we're returning something, just turn that into an
1266 assignment into the equivalent of the original RESULT_DECL.
1267 If RETVAL is just the result decl, the result decl has
1268 already been set (e.g. a recent "foo (&result_decl, ...)");
1269 just toss the entire GIMPLE_RETURN. */
1270 if (retval
1271 && (TREE_CODE (retval) != RESULT_DECL
1272 && (TREE_CODE (retval) != SSA_NAME
1273 || ! SSA_NAME_VAR (retval)
1274 || TREE_CODE (SSA_NAME_VAR (retval)) != RESULT_DECL)))
1276 copy = gimple_build_assign (id->do_not_unshare
1277 ? id->retvar : unshare_expr (id->retvar),
1278 retval);
1279 /* id->retvar is already substituted. Skip it on later remapping. */
1280 skip_first = true;
1282 else
1283 return gimple_build_nop ();
1285 else if (gimple_has_substatements (stmt))
1287 gimple_seq s1, s2;
1289 /* When cloning bodies from the C++ front end, we will be handed bodies
1290 in High GIMPLE form. Handle here all the High GIMPLE statements that
1291 have embedded statements. */
1292 switch (gimple_code (stmt))
1294 case GIMPLE_BIND:
1295 copy = copy_gimple_bind (stmt, id);
1296 break;
1298 case GIMPLE_CATCH:
1299 s1 = remap_gimple_seq (gimple_catch_handler (stmt), id);
1300 copy = gimple_build_catch (gimple_catch_types (stmt), s1);
1301 break;
1303 case GIMPLE_EH_FILTER:
1304 s1 = remap_gimple_seq (gimple_eh_filter_failure (stmt), id);
1305 copy = gimple_build_eh_filter (gimple_eh_filter_types (stmt), s1);
1306 break;
1308 case GIMPLE_TRY:
1309 s1 = remap_gimple_seq (gimple_try_eval (stmt), id);
1310 s2 = remap_gimple_seq (gimple_try_cleanup (stmt), id);
1311 copy = gimple_build_try (s1, s2, gimple_try_kind (stmt));
1312 break;
1314 case GIMPLE_WITH_CLEANUP_EXPR:
1315 s1 = remap_gimple_seq (gimple_wce_cleanup (stmt), id);
1316 copy = gimple_build_wce (s1);
1317 break;
1319 case GIMPLE_OMP_PARALLEL:
1320 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1321 copy = gimple_build_omp_parallel
1322 (s1,
1323 gimple_omp_parallel_clauses (stmt),
1324 gimple_omp_parallel_child_fn (stmt),
1325 gimple_omp_parallel_data_arg (stmt));
1326 break;
1328 case GIMPLE_OMP_TASK:
1329 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1330 copy = gimple_build_omp_task
1331 (s1,
1332 gimple_omp_task_clauses (stmt),
1333 gimple_omp_task_child_fn (stmt),
1334 gimple_omp_task_data_arg (stmt),
1335 gimple_omp_task_copy_fn (stmt),
1336 gimple_omp_task_arg_size (stmt),
1337 gimple_omp_task_arg_align (stmt));
1338 break;
1340 case GIMPLE_OMP_FOR:
1341 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1342 s2 = remap_gimple_seq (gimple_omp_for_pre_body (stmt), id);
1343 copy = gimple_build_omp_for (s1, gimple_omp_for_kind (stmt),
1344 gimple_omp_for_clauses (stmt),
1345 gimple_omp_for_collapse (stmt), s2);
1347 size_t i;
1348 for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
1350 gimple_omp_for_set_index (copy, i,
1351 gimple_omp_for_index (stmt, i));
1352 gimple_omp_for_set_initial (copy, i,
1353 gimple_omp_for_initial (stmt, i));
1354 gimple_omp_for_set_final (copy, i,
1355 gimple_omp_for_final (stmt, i));
1356 gimple_omp_for_set_incr (copy, i,
1357 gimple_omp_for_incr (stmt, i));
1358 gimple_omp_for_set_cond (copy, i,
1359 gimple_omp_for_cond (stmt, i));
1362 break;
1364 case GIMPLE_OMP_MASTER:
1365 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1366 copy = gimple_build_omp_master (s1);
1367 break;
1369 case GIMPLE_OMP_TASKGROUP:
1370 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1371 copy = gimple_build_omp_taskgroup (s1);
1372 break;
1374 case GIMPLE_OMP_ORDERED:
1375 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1376 copy = gimple_build_omp_ordered (s1);
1377 break;
1379 case GIMPLE_OMP_SECTION:
1380 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1381 copy = gimple_build_omp_section (s1);
1382 break;
1384 case GIMPLE_OMP_SECTIONS:
1385 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1386 copy = gimple_build_omp_sections
1387 (s1, gimple_omp_sections_clauses (stmt));
1388 break;
1390 case GIMPLE_OMP_SINGLE:
1391 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1392 copy = gimple_build_omp_single
1393 (s1, gimple_omp_single_clauses (stmt));
1394 break;
1396 case GIMPLE_OMP_TARGET:
1397 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1398 copy = gimple_build_omp_target
1399 (s1, gimple_omp_target_kind (stmt),
1400 gimple_omp_target_clauses (stmt));
1401 break;
1403 case GIMPLE_OMP_TEAMS:
1404 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1405 copy = gimple_build_omp_teams
1406 (s1, gimple_omp_teams_clauses (stmt));
1407 break;
1409 case GIMPLE_OMP_CRITICAL:
1410 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1411 copy
1412 = gimple_build_omp_critical (s1, gimple_omp_critical_name (stmt));
1413 break;
1415 case GIMPLE_TRANSACTION:
1416 s1 = remap_gimple_seq (gimple_transaction_body (stmt), id);
1417 copy = gimple_build_transaction (s1, gimple_transaction_label (stmt));
1418 gimple_transaction_set_subcode (copy, gimple_transaction_subcode (stmt));
1419 break;
1421 default:
1422 gcc_unreachable ();
1425 else
1427 if (gimple_assign_copy_p (stmt)
1428 && gimple_assign_lhs (stmt) == gimple_assign_rhs1 (stmt)
1429 && auto_var_in_fn_p (gimple_assign_lhs (stmt), id->src_fn))
1431 /* Here we handle statements that are not completely rewritten.
1432 First we detect some inlining-induced bogosities for
1433 discarding. */
1435 /* Some assignments VAR = VAR; don't generate any rtl code
1436 and thus don't count as variable modification. Avoid
1437 keeping bogosities like 0 = 0. */
1438 tree decl = gimple_assign_lhs (stmt), value;
1439 tree *n;
1441 n = (tree *) pointer_map_contains (id->decl_map, decl);
1442 if (n)
1444 value = *n;
1445 STRIP_TYPE_NOPS (value);
1446 if (TREE_CONSTANT (value) || TREE_READONLY (value))
1447 return gimple_build_nop ();
1451 /* For *ptr_N ={v} {CLOBBER}, if ptr_N is SSA_NAME defined
1452 in a block that we aren't copying during tree_function_versioning,
1453 just drop the clobber stmt. */
1454 if (id->blocks_to_copy && gimple_clobber_p (stmt))
1456 tree lhs = gimple_assign_lhs (stmt);
1457 if (TREE_CODE (lhs) == MEM_REF
1458 && TREE_CODE (TREE_OPERAND (lhs, 0)) == SSA_NAME)
1460 gimple def_stmt = SSA_NAME_DEF_STMT (TREE_OPERAND (lhs, 0));
1461 if (gimple_bb (def_stmt)
1462 && !bitmap_bit_p (id->blocks_to_copy,
1463 gimple_bb (def_stmt)->index))
1464 return gimple_build_nop ();
1468 if (gimple_debug_bind_p (stmt))
1470 copy = gimple_build_debug_bind (gimple_debug_bind_get_var (stmt),
1471 gimple_debug_bind_get_value (stmt),
1472 stmt);
1473 id->debug_stmts.safe_push (copy);
1474 return copy;
1476 if (gimple_debug_source_bind_p (stmt))
1478 copy = gimple_build_debug_source_bind
1479 (gimple_debug_source_bind_get_var (stmt),
1480 gimple_debug_source_bind_get_value (stmt), stmt);
1481 id->debug_stmts.safe_push (copy);
1482 return copy;
1485 /* Create a new deep copy of the statement. */
1486 copy = gimple_copy (stmt);
1488 /* Remap the region numbers for __builtin_eh_{pointer,filter},
1489 RESX and EH_DISPATCH. */
1490 if (id->eh_map)
1491 switch (gimple_code (copy))
1493 case GIMPLE_CALL:
1495 tree r, fndecl = gimple_call_fndecl (copy);
1496 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
1497 switch (DECL_FUNCTION_CODE (fndecl))
1499 case BUILT_IN_EH_COPY_VALUES:
1500 r = gimple_call_arg (copy, 1);
1501 r = remap_eh_region_tree_nr (r, id);
1502 gimple_call_set_arg (copy, 1, r);
1503 /* FALLTHRU */
1505 case BUILT_IN_EH_POINTER:
1506 case BUILT_IN_EH_FILTER:
1507 r = gimple_call_arg (copy, 0);
1508 r = remap_eh_region_tree_nr (r, id);
1509 gimple_call_set_arg (copy, 0, r);
1510 break;
1512 default:
1513 break;
1516 /* Reset alias info if we didn't apply measures to
1517 keep it valid over inlining by setting DECL_PT_UID. */
1518 if (!id->src_cfun->gimple_df
1519 || !id->src_cfun->gimple_df->ipa_pta)
1520 gimple_call_reset_alias_info (copy);
1522 break;
1524 case GIMPLE_RESX:
1526 int r = gimple_resx_region (copy);
1527 r = remap_eh_region_nr (r, id);
1528 gimple_resx_set_region (copy, r);
1530 break;
1532 case GIMPLE_EH_DISPATCH:
1534 int r = gimple_eh_dispatch_region (copy);
1535 r = remap_eh_region_nr (r, id);
1536 gimple_eh_dispatch_set_region (copy, r);
1538 break;
1540 default:
1541 break;
1545 /* If STMT has a block defined, map it to the newly constructed
1546 block. */
1547 if (gimple_block (copy))
1549 tree *n;
1550 n = (tree *) pointer_map_contains (id->decl_map, gimple_block (copy));
1551 gcc_assert (n);
1552 gimple_set_block (copy, *n);
1555 if (gimple_debug_bind_p (copy) || gimple_debug_source_bind_p (copy))
1556 return copy;
1558 /* Remap all the operands in COPY. */
1559 memset (&wi, 0, sizeof (wi));
1560 wi.info = id;
1561 if (skip_first)
1562 walk_tree (gimple_op_ptr (copy, 1), remap_gimple_op_r, &wi, NULL);
1563 else
1564 walk_gimple_op (copy, remap_gimple_op_r, &wi);
1566 /* Clear the copied virtual operands. We are not remapping them here
1567 but are going to recreate them from scratch. */
1568 if (gimple_has_mem_ops (copy))
1570 gimple_set_vdef (copy, NULL_TREE);
1571 gimple_set_vuse (copy, NULL_TREE);
1574 return copy;
1578 /* Copy basic block, scale profile accordingly. Edges will be taken care of
1579 later. */
1581 static basic_block
1582 copy_bb (copy_body_data *id, basic_block bb, int frequency_scale,
1583 gcov_type count_scale)
1585 gimple_stmt_iterator gsi, copy_gsi, seq_gsi;
1586 basic_block copy_basic_block;
1587 tree decl;
1588 gcov_type freq;
1589 basic_block prev;
1591 /* Search for previous copied basic block. */
1592 prev = bb->prev_bb;
1593 while (!prev->aux)
1594 prev = prev->prev_bb;
1596 /* create_basic_block() will append every new block to
1597 basic_block_info automatically. */
1598 copy_basic_block = create_basic_block (NULL, (void *) 0,
1599 (basic_block) prev->aux);
1600 copy_basic_block->count = apply_scale (bb->count, count_scale);
1602 /* We are going to rebuild frequencies from scratch. These values
1603 have only minor importance; they merely drive canonicalize_loop_headers.
1604 freq = apply_scale ((gcov_type)bb->frequency, frequency_scale);
1606 /* We recompute frequencies after inlining, so this is quite safe. */
1607 if (freq > BB_FREQ_MAX)
1608 freq = BB_FREQ_MAX;
1609 copy_basic_block->frequency = freq;
1611 copy_gsi = gsi_start_bb (copy_basic_block);
1613 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
1615 gimple stmt = gsi_stmt (gsi);
1616 gimple orig_stmt = stmt;
1618 id->regimplify = false;
1619 stmt = remap_gimple_stmt (stmt, id);
1620 if (gimple_nop_p (stmt))
1621 continue;
1623 gimple_duplicate_stmt_histograms (cfun, stmt, id->src_cfun, orig_stmt);
1624 seq_gsi = copy_gsi;
1626 /* With return slot optimization we can end up with
1627 non-gimple (foo *)&this->m, fix that here. */
1628 if (is_gimple_assign (stmt)
1629 && gimple_assign_rhs_code (stmt) == NOP_EXPR
1630 && !is_gimple_val (gimple_assign_rhs1 (stmt)))
1632 tree new_rhs;
1633 new_rhs = force_gimple_operand_gsi (&seq_gsi,
1634 gimple_assign_rhs1 (stmt),
1635 true, NULL, false,
1636 GSI_CONTINUE_LINKING);
1637 gimple_assign_set_rhs1 (stmt, new_rhs);
1638 id->regimplify = false;
1641 gsi_insert_after (&seq_gsi, stmt, GSI_NEW_STMT);
1643 if (id->regimplify)
1644 gimple_regimplify_operands (stmt, &seq_gsi);
1646 /* If copy_basic_block has been empty at the start of this iteration,
1647 call gsi_start_bb again to get at the newly added statements. */
1648 if (gsi_end_p (copy_gsi))
1649 copy_gsi = gsi_start_bb (copy_basic_block);
1650 else
1651 gsi_next (&copy_gsi);
1653 /* Process the new statement. The call to gimple_regimplify_operands
1654 possibly turned the statement into multiple statements; we
1655 need to process all of them. */
1658 tree fn;
1660 stmt = gsi_stmt (copy_gsi);
1661 if (is_gimple_call (stmt)
1662 && gimple_call_va_arg_pack_p (stmt)
1663 && id->gimple_call)
1665 /* __builtin_va_arg_pack () should be replaced by
1666 all arguments corresponding to ... in the caller. */
1667 tree p;
1668 gimple new_call;
1669 vec<tree> argarray;
1670 size_t nargs = gimple_call_num_args (id->gimple_call);
1671 size_t n;
1673 for (p = DECL_ARGUMENTS (id->src_fn); p; p = DECL_CHAIN (p))
1674 nargs--;
1676 /* Create the new array of arguments. */
1677 n = nargs + gimple_call_num_args (stmt);
1678 argarray.create (n);
1679 argarray.safe_grow_cleared (n);
1681 /* Copy all the arguments before '...' */
1682 memcpy (argarray.address (),
1683 gimple_call_arg_ptr (stmt, 0),
1684 gimple_call_num_args (stmt) * sizeof (tree));
1686 /* Append the arguments passed in '...' */
1687 memcpy (argarray.address () + gimple_call_num_args (stmt),
1688 gimple_call_arg_ptr (id->gimple_call, 0)
1689 + (gimple_call_num_args (id->gimple_call) - nargs),
1690 nargs * sizeof (tree));
1692 new_call = gimple_build_call_vec (gimple_call_fn (stmt),
1693 argarray);
1695 argarray.release ();
1697 /* Copy all GIMPLE_CALL flags, location and block, except
1698 GF_CALL_VA_ARG_PACK. */
1699 gimple_call_copy_flags (new_call, stmt);
1700 gimple_call_set_va_arg_pack (new_call, false);
1701 gimple_set_location (new_call, gimple_location (stmt));
1702 gimple_set_block (new_call, gimple_block (stmt));
1703 gimple_call_set_lhs (new_call, gimple_call_lhs (stmt));
1705 gsi_replace (&copy_gsi, new_call, false);
1706 stmt = new_call;
1708 else if (is_gimple_call (stmt)
1709 && id->gimple_call
1710 && (decl = gimple_call_fndecl (stmt))
1711 && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
1712 && DECL_FUNCTION_CODE (decl) == BUILT_IN_VA_ARG_PACK_LEN)
1714 /* __builtin_va_arg_pack_len () should be replaced by
1715 the number of anonymous arguments. */
1716 size_t nargs = gimple_call_num_args (id->gimple_call);
1717 tree count, p;
1718 gimple new_stmt;
1720 for (p = DECL_ARGUMENTS (id->src_fn); p; p = DECL_CHAIN (p))
1721 nargs--;
1723 count = build_int_cst (integer_type_node, nargs);
1724 new_stmt = gimple_build_assign (gimple_call_lhs (stmt), count);
1725 gsi_replace (&copy_gsi, new_stmt, false);
1726 stmt = new_stmt;
1729 /* Statements produced by inlining can be unfolded, especially
1730 when we constant propagated some operands. We can't fold
1731 them right now for two reasons:
1732 1) folding requires SSA_NAME_DEF_STMTs to be correct
1733 2) we can't change function calls to builtins.
1734 So we just mark the statement for later folding. We mark
1735 all new statements, instead of just the statements that have changed
1736 by some nontrivial substitution, so even statements made
1737 foldable indirectly are updated. If this turns out to be
1738 expensive, copy_body can be told to watch for nontrivial
1739 changes. */
1740 if (id->statements_to_fold)
1741 pointer_set_insert (id->statements_to_fold, stmt);
1743 /* We're duplicating a CALL_EXPR. Find any corresponding
1744 callgraph edges and update or duplicate them. */
1745 if (is_gimple_call (stmt))
1747 struct cgraph_edge *edge;
1748 int flags;
1750 switch (id->transform_call_graph_edges)
1752 case CB_CGE_DUPLICATE:
1753 edge = cgraph_edge (id->src_node, orig_stmt);
1754 if (edge)
1756 int edge_freq = edge->frequency;
1757 int new_freq;
1758 struct cgraph_edge *old_edge = edge;
1759 edge = cgraph_clone_edge (edge, id->dst_node, stmt,
1760 gimple_uid (stmt),
1761 REG_BR_PROB_BASE, CGRAPH_FREQ_BASE,
1762 true);
1763 /* We could also just rescale the frequency, but
1764 doing so would introduce roundoff errors and make
1765 the verifier unhappy. */
1766 new_freq = compute_call_stmt_bb_frequency (id->dst_node->decl,
1767 copy_basic_block);
1769 /* Speculative calls consist of two edges - direct and indirect.
1770 Duplicate the whole thing and distribute frequencies accordingly. */
1771 if (edge->speculative)
1773 struct cgraph_edge *direct, *indirect;
1774 struct ipa_ref *ref;
1776 gcc_assert (!edge->indirect_unknown_callee);
1777 cgraph_speculative_call_info (old_edge, direct, indirect, ref);
1778 indirect = cgraph_clone_edge (indirect, id->dst_node, stmt,
1779 gimple_uid (stmt),
1780 REG_BR_PROB_BASE, CGRAPH_FREQ_BASE,
1781 true);
1782 if (old_edge->frequency + indirect->frequency)
1784 edge->frequency = MIN (RDIV ((gcov_type)new_freq * old_edge->frequency,
1785 (old_edge->frequency + indirect->frequency)),
1786 CGRAPH_FREQ_MAX);
1787 indirect->frequency = MIN (RDIV ((gcov_type)new_freq * indirect->frequency,
1788 (old_edge->frequency + indirect->frequency)),
1789 CGRAPH_FREQ_MAX);
1791 ipa_clone_ref (ref, id->dst_node, stmt);
1793 else
1795 edge->frequency = new_freq;
1796 if (dump_file
1797 && profile_status_for_fn (cfun) != PROFILE_ABSENT
1798 && (edge_freq > edge->frequency + 10
1799 || edge_freq < edge->frequency - 10))
1801 fprintf (dump_file, "Edge frequency estimated by "
1802 "cgraph %i diverge from inliner's estimate %i\n",
1803 edge_freq,
1804 edge->frequency);
1805 fprintf (dump_file,
1806 "Orig bb: %i, orig bb freq %i, new bb freq %i\n",
1807 bb->index,
1808 bb->frequency,
1809 copy_basic_block->frequency);
1813 break;
1815 case CB_CGE_MOVE_CLONES:
1816 cgraph_set_call_stmt_including_clones (id->dst_node,
1817 orig_stmt, stmt);
1818 edge = cgraph_edge (id->dst_node, stmt);
1819 break;
1821 case CB_CGE_MOVE:
1822 edge = cgraph_edge (id->dst_node, orig_stmt);
1823 if (edge)
1824 cgraph_set_call_stmt (edge, stmt);
1825 break;
1827 default:
1828 gcc_unreachable ();
1831 /* Constant propagation on arguments done during inlining
1832 may create a new direct call. Produce an edge for it. */
1833 if ((!edge
1834 || (edge->indirect_inlining_edge
1835 && id->transform_call_graph_edges == CB_CGE_MOVE_CLONES))
1836 && id->dst_node->definition
1837 && (fn = gimple_call_fndecl (stmt)) != NULL)
1839 struct cgraph_node *dest = cgraph_get_node (fn);
1841 /* We have missing edge in the callgraph. This can happen
1842 when previous inlining turned an indirect call into a
1843 direct call by constant propagating arguments or we are
1844 producing a dead clone (for further cloning). In all
1845 other cases we hit a bug (incorrect node sharing is the
1846 most common reason for missing edges). */
1847 gcc_assert (!dest->definition
1848 || dest->address_taken
1849 || !id->src_node->definition
1850 || !id->dst_node->definition);
1851 if (id->transform_call_graph_edges == CB_CGE_MOVE_CLONES)
1852 cgraph_create_edge_including_clones
1853 (id->dst_node, dest, orig_stmt, stmt, bb->count,
1854 compute_call_stmt_bb_frequency (id->dst_node->decl,
1855 copy_basic_block),
1856 CIF_ORIGINALLY_INDIRECT_CALL);
1857 else
1858 cgraph_create_edge (id->dst_node, dest, stmt,
1859 bb->count,
1860 compute_call_stmt_bb_frequency
1861 (id->dst_node->decl,
1862 copy_basic_block))->inline_failed
1863 = CIF_ORIGINALLY_INDIRECT_CALL;
1864 if (dump_file)
1866 fprintf (dump_file, "Created new direct edge to %s\n",
1867 dest->name ());
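/* Hedged example (hypothetical code, not part of this file): when inlining
     void apply (void (*fp) (int)) { fp (1); }
   at a call site apply (&bar), the inliner substitutes &bar for FP, so the
   copied statement becomes a direct call to bar.  No edge for that call
   existed in the callee's callgraph, which is exactly the "missing edge"
   case handled above by creating a fresh direct edge marked
   CIF_ORIGINALLY_INDIRECT_CALL.  */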
1871 flags = gimple_call_flags (stmt);
1872 if (flags & ECF_MAY_BE_ALLOCA)
1873 cfun->calls_alloca = true;
1874 if (flags & ECF_RETURNS_TWICE)
1875 cfun->calls_setjmp = true;
1878 maybe_duplicate_eh_stmt_fn (cfun, stmt, id->src_cfun, orig_stmt,
1879 id->eh_map, id->eh_lp_nr);
1881 if (gimple_in_ssa_p (cfun) && !is_gimple_debug (stmt))
1883 ssa_op_iter i;
1884 tree def;
1886 FOR_EACH_SSA_TREE_OPERAND (def, stmt, i, SSA_OP_DEF)
1887 if (TREE_CODE (def) == SSA_NAME)
1888 SSA_NAME_DEF_STMT (def) = stmt;
1891 gsi_next (&copy_gsi);
1893 while (!gsi_end_p (copy_gsi));
1895 copy_gsi = gsi_last_bb (copy_basic_block);
1898 return copy_basic_block;
1901 /* Inserting Single Entry Multiple Exit region in SSA form into code in SSA
1902 form is quite easy, since dominator relationship for old basic blocks does
1903 not change.
1905 There is however an exception where inlining might change the dominator
1906 relation across EH edges from basic blocks within inlined functions
1907 destined to landing pads in the function we inline into.
1909 The function fills in PHI_RESULTs of such PHI nodes if they refer
1910 to gimple regs. Otherwise, the function marks PHI_RESULT of such
1911 PHI nodes for renaming. For non-gimple regs, renaming is safe: the
1912 EH edges are abnormal and SSA_NAME_OCCURS_IN_ABNORMAL_PHI must be
1913 set, and this means that there will be no overlapping live ranges
1914 for the underlying symbol.
1916 This might change in the future if we allow redirecting of EH edges and
1917 we might want to change the way we build the CFG pre-inlining to include
1918 all the possible edges then. */
1919 static void
1920 update_ssa_across_abnormal_edges (basic_block bb, basic_block ret_bb,
1921 bool can_throw, bool nonlocal_goto)
1923 edge e;
1924 edge_iterator ei;
1926 FOR_EACH_EDGE (e, ei, bb->succs)
1927 if (!e->dest->aux
1928 || ((basic_block)e->dest->aux)->index == ENTRY_BLOCK)
1930 gimple phi;
1931 gimple_stmt_iterator si;
1933 if (!nonlocal_goto)
1934 gcc_assert (e->flags & EDGE_EH);
1936 if (!can_throw)
1937 gcc_assert (!(e->flags & EDGE_EH));
1939 for (si = gsi_start_phis (e->dest); !gsi_end_p (si); gsi_next (&si))
1941 edge re;
1943 phi = gsi_stmt (si);
1945 /* For abnormal goto/call edges the receiver can be the
1946 ENTRY_BLOCK. Do not assert this cannot happen. */
1948 gcc_assert ((e->flags & EDGE_EH)
1949 || SSA_NAME_OCCURS_IN_ABNORMAL_PHI (PHI_RESULT (phi)));
1951 re = find_edge (ret_bb, e->dest);
1952 gcc_checking_assert (re);
1953 gcc_assert ((re->flags & (EDGE_EH | EDGE_ABNORMAL))
1954 == (e->flags & (EDGE_EH | EDGE_ABNORMAL)));
1956 SET_USE (PHI_ARG_DEF_PTR_FROM_EDGE (phi, e),
1957 USE_FROM_PTR (PHI_ARG_DEF_PTR_FROM_EDGE (phi, re)));
1963 /* Copy edges from BB into its copy constructed earlier, scale profile
1964 accordingly. Edges will be taken care of later. Assume aux
1965 pointers point to the copies of each BB. Return true if any
1966 debug stmts are left after a statement that must end the basic block. */
1968 static bool
1969 copy_edges_for_bb (basic_block bb, gcov_type count_scale, basic_block ret_bb,
1970 bool can_make_abnormal_goto)
1972 basic_block new_bb = (basic_block) bb->aux;
1973 edge_iterator ei;
1974 edge old_edge;
1975 gimple_stmt_iterator si;
1976 int flags;
1977 bool need_debug_cleanup = false;
1979 /* Use the indices from the original blocks to create edges for the
1980 new ones. */
1981 FOR_EACH_EDGE (old_edge, ei, bb->succs)
1982 if (!(old_edge->flags & EDGE_EH))
1984 edge new_edge;
1986 flags = old_edge->flags;
1988 /* Return edges do get a FALLTHRU flag when they get inlined. */
1989 if (old_edge->dest->index == EXIT_BLOCK && !old_edge->flags
1990 && old_edge->dest->aux != EXIT_BLOCK_PTR_FOR_FN (cfun))
1991 flags |= EDGE_FALLTHRU;
1992 new_edge = make_edge (new_bb, (basic_block) old_edge->dest->aux, flags);
1993 new_edge->count = apply_scale (old_edge->count, count_scale);
1994 new_edge->probability = old_edge->probability;
1997 if (bb->index == ENTRY_BLOCK || bb->index == EXIT_BLOCK)
1998 return false;
2000 for (si = gsi_start_bb (new_bb); !gsi_end_p (si);)
2002 gimple copy_stmt;
2003 bool can_throw, nonlocal_goto;
2005 copy_stmt = gsi_stmt (si);
2006 if (!is_gimple_debug (copy_stmt))
2007 update_stmt (copy_stmt);
2009 /* Do this before the possible split_block. */
2010 gsi_next (&si);
2012 /* If this tree could throw an exception, there are two
2013 cases where we need to add abnormal edge(s): the
2014 tree wasn't in a region and there is a "current
2015 region" in the caller; or the original tree had
2016 EH edges. In both cases split the block after the tree,
2017 and add abnormal edge(s) as needed; we need both
2018 those from the callee and the caller.
2019 We check whether the copy can throw, because the const
2020 propagation can change an INDIRECT_REF which throws
2021 into a COMPONENT_REF which doesn't. If the copy
2022 can throw, the original could also throw. */
2023 can_throw = stmt_can_throw_internal (copy_stmt);
2024 nonlocal_goto = stmt_can_make_abnormal_goto (copy_stmt);
2026 if (can_throw || nonlocal_goto)
2028 if (!gsi_end_p (si))
2030 while (!gsi_end_p (si) && is_gimple_debug (gsi_stmt (si)))
2031 gsi_next (&si);
2032 if (gsi_end_p (si))
2033 need_debug_cleanup = true;
2035 if (!gsi_end_p (si))
2036 /* Note that bb's predecessor edges aren't necessarily
2037 right at this point; split_block doesn't care. */
2039 edge e = split_block (new_bb, copy_stmt);
2041 new_bb = e->dest;
2042 new_bb->aux = e->src->aux;
2043 si = gsi_start_bb (new_bb);
2047 if (gimple_code (copy_stmt) == GIMPLE_EH_DISPATCH)
2048 make_eh_dispatch_edges (copy_stmt);
2049 else if (can_throw)
2050 make_eh_edges (copy_stmt);
2052 /* If the call we inline cannot make an abnormal goto, do not add
2053 additional abnormal edges but only retain those already present
2054 in the original function body. */
2055 nonlocal_goto &= can_make_abnormal_goto;
2056 if (nonlocal_goto)
2057 make_abnormal_goto_edges (gimple_bb (copy_stmt), true);
2059 if ((can_throw || nonlocal_goto)
2060 && gimple_in_ssa_p (cfun))
2061 update_ssa_across_abnormal_edges (gimple_bb (copy_stmt), ret_bb,
2062 can_throw, nonlocal_goto);
2064 return need_debug_cleanup;
2067 /* Copy the PHIs. All blocks and edges are copied, some blocks
2068 were possibly split and new outgoing EH edges inserted.
2069 BB points to the block of the original function and AUX pointers link
2070 the original and newly copied blocks. */
2072 static void
2073 copy_phis_for_bb (basic_block bb, copy_body_data *id)
2075 basic_block const new_bb = (basic_block) bb->aux;
2076 edge_iterator ei;
2077 gimple phi;
2078 gimple_stmt_iterator si;
2079 edge new_edge;
2080 bool inserted = false;
2082 for (si = gsi_start_phis (bb); !gsi_end_p (si); gsi_next (&si))
2084 tree res, new_res;
2085 gimple new_phi;
2087 phi = gsi_stmt (si);
2088 res = PHI_RESULT (phi);
2089 new_res = res;
2090 if (!virtual_operand_p (res))
2092 walk_tree (&new_res, copy_tree_body_r, id, NULL);
2093 new_phi = create_phi_node (new_res, new_bb);
2094 FOR_EACH_EDGE (new_edge, ei, new_bb->preds)
2096 edge old_edge = find_edge ((basic_block) new_edge->src->aux, bb);
2097 tree arg;
2098 tree new_arg;
2099 edge_iterator ei2;
2100 location_t locus;
2102 /* When doing partial cloning, we allow PHIs on the entry block
2103 as long as all the arguments are the same. Find any input
2104 edge to see the argument to copy. */
2105 if (!old_edge)
2106 FOR_EACH_EDGE (old_edge, ei2, bb->preds)
2107 if (!old_edge->src->aux)
2108 break;
2110 arg = PHI_ARG_DEF_FROM_EDGE (phi, old_edge);
2111 new_arg = arg;
2112 walk_tree (&new_arg, copy_tree_body_r, id, NULL);
2113 gcc_assert (new_arg);
2114 /* With return slot optimization we can end up with
2115 non-gimple (foo *)&this->m, fix that here. */
2116 if (TREE_CODE (new_arg) != SSA_NAME
2117 && TREE_CODE (new_arg) != FUNCTION_DECL
2118 && !is_gimple_val (new_arg))
2120 gimple_seq stmts = NULL;
2121 new_arg = force_gimple_operand (new_arg, &stmts, true, NULL);
2122 gsi_insert_seq_on_edge (new_edge, stmts);
2123 inserted = true;
2125 locus = gimple_phi_arg_location_from_edge (phi, old_edge);
2126 if (LOCATION_BLOCK (locus))
2128 tree *n;
2129 n = (tree *) pointer_map_contains (id->decl_map,
2130 LOCATION_BLOCK (locus));
2131 gcc_assert (n);
2132 if (*n)
2133 locus = COMBINE_LOCATION_DATA (line_table, locus, *n);
2134 else
2135 locus = LOCATION_LOCUS (locus);
2137 else
2138 locus = LOCATION_LOCUS (locus);
2140 add_phi_arg (new_phi, new_arg, new_edge, locus);
2145 /* Commit the delayed edge insertions. */
2146 if (inserted)
2147 FOR_EACH_EDGE (new_edge, ei, new_bb->preds)
2148 gsi_commit_one_edge_insert (new_edge, NULL);
2152 /* Wrapper for remap_decl so it can be used as a callback. */
2154 static tree
2155 remap_decl_1 (tree decl, void *data)
2157 return remap_decl (decl, (copy_body_data *) data);
2160 /* Build struct function and associated datastructures for the new clone
2161 NEW_FNDECL to be built. CALLEE_FNDECL is the original. The function changes
2162 cfun to the function of new_fndecl (and current_function_decl too). */
2164 static void
2165 initialize_cfun (tree new_fndecl, tree callee_fndecl, gcov_type count)
2167 struct function *src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
2168 gcov_type count_scale;
2170 if (!DECL_ARGUMENTS (new_fndecl))
2171 DECL_ARGUMENTS (new_fndecl) = DECL_ARGUMENTS (callee_fndecl);
2172 if (!DECL_RESULT (new_fndecl))
2173 DECL_RESULT (new_fndecl) = DECL_RESULT (callee_fndecl);
2175 if (ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count)
2176 count_scale
2177 = GCOV_COMPUTE_SCALE (count,
2178 ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count);
2179 else
2180 count_scale = REG_BR_PROB_BASE;
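/* A worked example with made-up numbers: if the clone is created with
   COUNT = 50 while the source entry block count is 200, then
   count_scale = GCOV_COMPUTE_SCALE (50, 200), roughly REG_BR_PROB_BASE / 4,
   so the entry/exit counts copied below end up at about a quarter of the
   original profile counts.  */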
2182 /* Register specific tree functions. */
2183 gimple_register_cfg_hooks ();
2185 /* Get clean struct function. */
2186 push_struct_function (new_fndecl);
2188 /* We will rebuild these, so just sanity check that they are empty. */
2189 gcc_assert (VALUE_HISTOGRAMS (cfun) == NULL);
2190 gcc_assert (cfun->local_decls == NULL);
2191 gcc_assert (cfun->cfg == NULL);
2192 gcc_assert (cfun->decl == new_fndecl);
2194 /* Copy items we preserve during cloning. */
2195 cfun->static_chain_decl = src_cfun->static_chain_decl;
2196 cfun->nonlocal_goto_save_area = src_cfun->nonlocal_goto_save_area;
2197 cfun->function_end_locus = src_cfun->function_end_locus;
2198 cfun->curr_properties = src_cfun->curr_properties;
2199 cfun->last_verified = src_cfun->last_verified;
2200 cfun->va_list_gpr_size = src_cfun->va_list_gpr_size;
2201 cfun->va_list_fpr_size = src_cfun->va_list_fpr_size;
2202 cfun->has_nonlocal_label = src_cfun->has_nonlocal_label;
2203 cfun->stdarg = src_cfun->stdarg;
2204 cfun->after_inlining = src_cfun->after_inlining;
2205 cfun->can_throw_non_call_exceptions
2206 = src_cfun->can_throw_non_call_exceptions;
2207 cfun->can_delete_dead_exceptions = src_cfun->can_delete_dead_exceptions;
2208 cfun->returns_struct = src_cfun->returns_struct;
2209 cfun->returns_pcc_struct = src_cfun->returns_pcc_struct;
2211 init_empty_tree_cfg ();
2213 profile_status_for_fn (cfun) = profile_status_for_fn (src_cfun);
2214 ENTRY_BLOCK_PTR_FOR_FN (cfun)->count =
2215 (ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count * count_scale /
2216 REG_BR_PROB_BASE);
2217 ENTRY_BLOCK_PTR_FOR_FN (cfun)->frequency
2218 = ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->frequency;
2219 EXIT_BLOCK_PTR_FOR_FN (cfun)->count =
2220 (EXIT_BLOCK_PTR_FOR_FN (src_cfun)->count * count_scale /
2221 REG_BR_PROB_BASE);
2222 EXIT_BLOCK_PTR_FOR_FN (cfun)->frequency =
2223 EXIT_BLOCK_PTR_FOR_FN (src_cfun)->frequency;
2224 if (src_cfun->eh)
2225 init_eh_for_function ();
2227 if (src_cfun->gimple_df)
2229 init_tree_ssa (cfun);
2230 cfun->gimple_df->in_ssa_p = true;
2231 init_ssa_operands (cfun);
2235 /* Helper function for copy_cfg_body. Move debug stmts from the end
2236 of NEW_BB to the beginning of successor basic blocks when needed. If the
2237 successor has multiple predecessors, reset them, otherwise keep
2238 their value. */
2240 static void
2241 maybe_move_debug_stmts_to_successors (copy_body_data *id, basic_block new_bb)
2243 edge e;
2244 edge_iterator ei;
2245 gimple_stmt_iterator si = gsi_last_nondebug_bb (new_bb);
2247 if (gsi_end_p (si)
2248 || gsi_one_before_end_p (si)
2249 || !(stmt_can_throw_internal (gsi_stmt (si))
2250 || stmt_can_make_abnormal_goto (gsi_stmt (si))))
2251 return;
2253 FOR_EACH_EDGE (e, ei, new_bb->succs)
2255 gimple_stmt_iterator ssi = gsi_last_bb (new_bb);
2256 gimple_stmt_iterator dsi = gsi_after_labels (e->dest);
2257 while (is_gimple_debug (gsi_stmt (ssi)))
2259 gimple stmt = gsi_stmt (ssi), new_stmt;
2260 tree var;
2261 tree value;
2263 /* For the last edge move the debug stmts instead of copying
2264 them. */
2265 if (ei_one_before_end_p (ei))
2267 si = ssi;
2268 gsi_prev (&ssi);
2269 if (!single_pred_p (e->dest) && gimple_debug_bind_p (stmt))
2270 gimple_debug_bind_reset_value (stmt);
2271 gsi_remove (&si, false);
2272 gsi_insert_before (&dsi, stmt, GSI_SAME_STMT);
2273 continue;
2276 if (gimple_debug_bind_p (stmt))
2278 var = gimple_debug_bind_get_var (stmt);
2279 if (single_pred_p (e->dest))
2281 value = gimple_debug_bind_get_value (stmt);
2282 value = unshare_expr (value);
2284 else
2285 value = NULL_TREE;
2286 new_stmt = gimple_build_debug_bind (var, value, stmt);
2288 else if (gimple_debug_source_bind_p (stmt))
2290 var = gimple_debug_source_bind_get_var (stmt);
2291 value = gimple_debug_source_bind_get_value (stmt);
2292 new_stmt = gimple_build_debug_source_bind (var, value, stmt);
2294 else
2295 gcc_unreachable ();
2296 gsi_insert_before (&dsi, new_stmt, GSI_SAME_STMT);
2297 id->debug_stmts.safe_push (new_stmt);
2298 gsi_prev (&ssi);
2303 /* Make a copy of the sub-loops of SRC_PARENT and place them
2304 as siblings of DEST_PARENT. */
2306 static void
2307 copy_loops (copy_body_data *id,
2308 struct loop *dest_parent, struct loop *src_parent)
2310 struct loop *src_loop = src_parent->inner;
2311 while (src_loop)
2313 if (!id->blocks_to_copy
2314 || bitmap_bit_p (id->blocks_to_copy, src_loop->header->index))
2316 struct loop *dest_loop = alloc_loop ();
2318 /* Assign the new loop its header and latch and associate
2319 those with the new loop. */
2320 if (src_loop->header != NULL)
2322 dest_loop->header = (basic_block)src_loop->header->aux;
2323 dest_loop->header->loop_father = dest_loop;
2325 if (src_loop->latch != NULL)
2327 dest_loop->latch = (basic_block)src_loop->latch->aux;
2328 dest_loop->latch->loop_father = dest_loop;
2331 /* Copy loop meta-data. */
2332 copy_loop_info (src_loop, dest_loop);
2334 /* Finally place it into the loop array and the loop tree. */
2335 place_new_loop (cfun, dest_loop);
2336 flow_loop_tree_node_add (dest_parent, dest_loop);
2338 if (src_loop->simduid)
2340 dest_loop->simduid = remap_decl (src_loop->simduid, id);
2341 cfun->has_simduid_loops = true;
2343 if (src_loop->force_vect)
2345 dest_loop->force_vect = true;
2346 cfun->has_force_vect_loops = true;
2349 /* Recurse. */
2350 copy_loops (id, dest_loop, src_loop);
2352 src_loop = src_loop->next;
2356 /* Call cgraph_redirect_edge_call_stmt_to_callee on all calls in BB */
2358 void
2359 redirect_all_calls (copy_body_data * id, basic_block bb)
2361 gimple_stmt_iterator si;
2362 for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
2364 if (is_gimple_call (gsi_stmt (si)))
2366 struct cgraph_edge *edge = cgraph_edge (id->dst_node, gsi_stmt (si));
2367 if (edge)
2368 cgraph_redirect_edge_call_stmt_to_callee (edge);
2373 /* Convert estimated frequencies into counts for NODE, scaling COUNT
2374 with each bb's frequency. Used when NODE has a 0-weight entry
2375 but we are about to inline it into a non-zero count call bb.
2376 See the comments for handle_missing_profiles() in predict.c for
2377 when this can happen for COMDATs. */
2379 void
2380 freqs_to_counts (struct cgraph_node *node, gcov_type count)
2382 basic_block bb;
2383 edge_iterator ei;
2384 edge e;
2385 struct function *fn = DECL_STRUCT_FUNCTION (node->decl);
2387 FOR_ALL_BB_FN(bb, fn)
2389 bb->count = apply_scale (count,
2390 GCOV_COMPUTE_SCALE (bb->frequency, BB_FREQ_MAX));
2391 FOR_EACH_EDGE (e, ei, bb->succs)
2392 e->count = apply_probability (e->src->count, e->probability);
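/* Illustrative numbers (hypothetical): with COUNT = 1000, a block whose
   estimated frequency is BB_FREQ_MAX / 2 receives bb->count of about 500,
   and each outgoing edge then gets e->count scaled by its probability, so
   a 60% edge out of that block carries roughly 300.  */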
2396 /* Make a copy of the body of FN so that it can be inserted inline in
2397 another function. Walks FN via CFG, returns new fndecl. */
2399 static tree
2400 copy_cfg_body (copy_body_data * id, gcov_type count, int frequency_scale,
2401 basic_block entry_block_map, basic_block exit_block_map,
2402 basic_block new_entry)
2404 tree callee_fndecl = id->src_fn;
2405 /* Original cfun for the callee, doesn't change. */
2406 struct function *src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
2407 struct function *cfun_to_copy;
2408 basic_block bb;
2409 tree new_fndecl = NULL;
2410 bool need_debug_cleanup = false;
2411 gcov_type count_scale;
2412 int last;
2413 int incoming_frequency = 0;
2414 gcov_type incoming_count = 0;
2416 /* This can happen for COMDAT routines that end up with 0 counts
2417 despite being called (see the comments for handle_missing_profiles()
2418 in predict.c as to why). Apply counts to the blocks in the callee
2419 before inlining, using the guessed edge frequencies, so that we don't
2420 end up with a 0-count inline body which can confuse downstream
2421 optimizations such as function splitting. */
2422 if (!ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count && count)
2424 /* Apply the larger of the call bb count and the total incoming
2425 call edge count to the callee. */
2426 gcov_type in_count = 0;
2427 struct cgraph_edge *in_edge;
2428 for (in_edge = id->src_node->callers; in_edge;
2429 in_edge = in_edge->next_caller)
2430 in_count += in_edge->count;
2431 freqs_to_counts (id->src_node, count > in_count ? count : in_count);
2434 if (ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count)
2435 count_scale
2436 = GCOV_COMPUTE_SCALE (count,
2437 ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count);
2438 else
2439 count_scale = REG_BR_PROB_BASE;
2441 /* Register specific tree functions. */
2442 gimple_register_cfg_hooks ();
2444 /* If we are inlining just a region of the function, make sure to connect
2445 new entry to ENTRY_BLOCK_PTR_FOR_FN (cfun). Since new entry can be
2446 part of loop, we must compute frequency and probability of
2447 ENTRY_BLOCK_PTR_FOR_FN (cfun) based on the frequencies and
2448 probabilities of edges incoming from nonduplicated region. */
2449 if (new_entry)
2451 edge e;
2452 edge_iterator ei;
2454 FOR_EACH_EDGE (e, ei, new_entry->preds)
2455 if (!e->src->aux)
2457 incoming_frequency += EDGE_FREQUENCY (e);
2458 incoming_count += e->count;
2460 incoming_count = apply_scale (incoming_count, count_scale);
2461 incoming_frequency
2462 = apply_scale ((gcov_type)incoming_frequency, frequency_scale);
2463 ENTRY_BLOCK_PTR_FOR_FN (cfun)->count = incoming_count;
2464 ENTRY_BLOCK_PTR_FOR_FN (cfun)->frequency = incoming_frequency;
2467 /* Must have a CFG here at this point. */
2468 gcc_assert (ENTRY_BLOCK_PTR_FOR_FN
2469 (DECL_STRUCT_FUNCTION (callee_fndecl)));
2471 cfun_to_copy = id->src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
2473 ENTRY_BLOCK_PTR_FOR_FN (cfun_to_copy)->aux = entry_block_map;
2474 EXIT_BLOCK_PTR_FOR_FN (cfun_to_copy)->aux = exit_block_map;
2475 entry_block_map->aux = ENTRY_BLOCK_PTR_FOR_FN (cfun_to_copy);
2476 exit_block_map->aux = EXIT_BLOCK_PTR_FOR_FN (cfun_to_copy);
2478 /* Duplicate any exception-handling regions. */
2479 if (cfun->eh)
2480 id->eh_map = duplicate_eh_regions (cfun_to_copy, NULL, id->eh_lp_nr,
2481 remap_decl_1, id);
2483 /* Use aux pointers to map the original blocks to their copies. */
2484 FOR_EACH_BB_FN (bb, cfun_to_copy)
2485 if (!id->blocks_to_copy || bitmap_bit_p (id->blocks_to_copy, bb->index))
2487 basic_block new_bb = copy_bb (id, bb, frequency_scale, count_scale);
2488 bb->aux = new_bb;
2489 new_bb->aux = bb;
2490 new_bb->loop_father = entry_block_map->loop_father;
2493 last = last_basic_block_for_fn (cfun);
2495 /* Now that we've duplicated the blocks, duplicate their edges. */
2496 bool can_make_abnormal_goto
2497 = id->gimple_call && stmt_can_make_abnormal_goto (id->gimple_call);
2498 FOR_ALL_BB_FN (bb, cfun_to_copy)
2499 if (!id->blocks_to_copy
2500 || (bb->index > 0 && bitmap_bit_p (id->blocks_to_copy, bb->index)))
2501 need_debug_cleanup |= copy_edges_for_bb (bb, count_scale, exit_block_map,
2502 can_make_abnormal_goto);
2504 if (new_entry)
2506 edge e = make_edge (entry_block_map, (basic_block)new_entry->aux, EDGE_FALLTHRU);
2507 e->probability = REG_BR_PROB_BASE;
2508 e->count = incoming_count;
2511 /* Duplicate the loop tree, if available and wanted. */
2512 if (loops_for_fn (src_cfun) != NULL
2513 && current_loops != NULL)
2515 copy_loops (id, entry_block_map->loop_father,
2516 get_loop (src_cfun, 0));
2517 /* Defer to cfgcleanup to update loop-father fields of basic-blocks. */
2518 loops_state_set (LOOPS_NEED_FIXUP);
2521 /* If the loop tree in the source function needed fixup, mark the
2522 destination loop tree for fixup, too. */
2523 if (loops_for_fn (src_cfun)->state & LOOPS_NEED_FIXUP)
2524 loops_state_set (LOOPS_NEED_FIXUP);
2526 if (gimple_in_ssa_p (cfun))
2527 FOR_ALL_BB_FN (bb, cfun_to_copy)
2528 if (!id->blocks_to_copy
2529 || (bb->index > 0 && bitmap_bit_p (id->blocks_to_copy, bb->index)))
2530 copy_phis_for_bb (bb, id);
2532 FOR_ALL_BB_FN (bb, cfun_to_copy)
2533 if (bb->aux)
2535 if (need_debug_cleanup
2536 && bb->index != ENTRY_BLOCK
2537 && bb->index != EXIT_BLOCK)
2538 maybe_move_debug_stmts_to_successors (id, (basic_block) bb->aux);
2539 /* Update call edge destinations. This cannot be done before loop
2540 info is updated, because we may split basic blocks. */
2541 if (id->transform_call_graph_edges == CB_CGE_DUPLICATE)
2542 redirect_all_calls (id, (basic_block)bb->aux);
2543 ((basic_block)bb->aux)->aux = NULL;
2544 bb->aux = NULL;
2547 /* Zero out AUX fields of blocks newly created during EH edge
2548 insertion. */
2549 for (; last < last_basic_block_for_fn (cfun); last++)
2551 if (need_debug_cleanup)
2552 maybe_move_debug_stmts_to_successors (id,
2553 BASIC_BLOCK_FOR_FN (cfun, last));
2554 BASIC_BLOCK_FOR_FN (cfun, last)->aux = NULL;
2555 /* Update call edge destinations. This cannot be done before loop
2556 info is updated, because we may split basic blocks. */
2557 if (id->transform_call_graph_edges == CB_CGE_DUPLICATE)
2558 redirect_all_calls (id, BASIC_BLOCK_FOR_FN (cfun, last));
2560 entry_block_map->aux = NULL;
2561 exit_block_map->aux = NULL;
2563 if (id->eh_map)
2565 pointer_map_destroy (id->eh_map);
2566 id->eh_map = NULL;
2569 return new_fndecl;
2572 /* Copy the debug STMT using ID. We deal with these statements in a
2573 special way: if any variable in their VALUE expression wasn't
2574 remapped yet, we won't remap it, because that would get decl uids
2575 out of sync, causing codegen differences between -g and -g0. If
2576 this arises, we drop the VALUE expression altogether. */
2578 static void
2579 copy_debug_stmt (gimple stmt, copy_body_data *id)
2581 tree t, *n;
2582 struct walk_stmt_info wi;
2584 if (gimple_block (stmt))
2586 n = (tree *) pointer_map_contains (id->decl_map, gimple_block (stmt));
2587 gimple_set_block (stmt, n ? *n : id->block);
2590 /* Remap all the operands in COPY. */
2591 memset (&wi, 0, sizeof (wi));
2592 wi.info = id;
2594 processing_debug_stmt = 1;
2596 if (gimple_debug_source_bind_p (stmt))
2597 t = gimple_debug_source_bind_get_var (stmt);
2598 else
2599 t = gimple_debug_bind_get_var (stmt);
2601 if (TREE_CODE (t) == PARM_DECL && id->debug_map
2602 && (n = (tree *) pointer_map_contains (id->debug_map, t)))
2604 gcc_assert (TREE_CODE (*n) == VAR_DECL);
2605 t = *n;
2607 else if (TREE_CODE (t) == VAR_DECL
2608 && !is_global_var (t)
2609 && !pointer_map_contains (id->decl_map, t))
2610 /* T is a non-localized variable. */;
2611 else
2612 walk_tree (&t, remap_gimple_op_r, &wi, NULL);
2614 if (gimple_debug_bind_p (stmt))
2616 gimple_debug_bind_set_var (stmt, t);
2618 if (gimple_debug_bind_has_value_p (stmt))
2619 walk_tree (gimple_debug_bind_get_value_ptr (stmt),
2620 remap_gimple_op_r, &wi, NULL);
2622 /* Punt if any decl couldn't be remapped. */
2623 if (processing_debug_stmt < 0)
2624 gimple_debug_bind_reset_value (stmt);
2626 else if (gimple_debug_source_bind_p (stmt))
2628 gimple_debug_source_bind_set_var (stmt, t);
2629 walk_tree (gimple_debug_source_bind_get_value_ptr (stmt),
2630 remap_gimple_op_r, &wi, NULL);
2631 /* When inlining, if the source bind refers to one of the optimized-away
2632 parameters, change the source bind into a normal debug bind
2633 referring to the corresponding DEBUG_EXPR_DECL that should have
2634 been bound before the call stmt. */
2635 t = gimple_debug_source_bind_get_value (stmt);
2636 if (t != NULL_TREE
2637 && TREE_CODE (t) == PARM_DECL
2638 && id->gimple_call)
2640 vec<tree, va_gc> **debug_args = decl_debug_args_lookup (id->src_fn);
2641 unsigned int i;
2642 if (debug_args != NULL)
2644 for (i = 0; i < vec_safe_length (*debug_args); i += 2)
2645 if ((**debug_args)[i] == DECL_ORIGIN (t)
2646 && TREE_CODE ((**debug_args)[i + 1]) == DEBUG_EXPR_DECL)
2648 t = (**debug_args)[i + 1];
2649 stmt->subcode = GIMPLE_DEBUG_BIND;
2650 gimple_debug_bind_set_value (stmt, t);
2651 break;
2657 processing_debug_stmt = 0;
2659 update_stmt (stmt);
2662 /* Process deferred debug stmts. In order to give values better odds
2663 of being successfully remapped, we delay the processing of debug
2664 stmts until all other stmts that might require remapping are
2665 processed. */
2667 static void
2668 copy_debug_stmts (copy_body_data *id)
2670 size_t i;
2671 gimple stmt;
2673 if (!id->debug_stmts.exists ())
2674 return;
2676 FOR_EACH_VEC_ELT (id->debug_stmts, i, stmt)
2677 copy_debug_stmt (stmt, id);
2679 id->debug_stmts.release ();
2682 /* Make a copy of the body of SRC_FN so that it can be inserted inline in
2683 another function. */
2685 static tree
2686 copy_tree_body (copy_body_data *id)
2688 tree fndecl = id->src_fn;
2689 tree body = DECL_SAVED_TREE (fndecl);
2691 walk_tree (&body, copy_tree_body_r, id, NULL);
2693 return body;
2696 /* Make a copy of the body of FN so that it can be inserted inline in
2697 another function. */
2699 static tree
2700 copy_body (copy_body_data *id, gcov_type count, int frequency_scale,
2701 basic_block entry_block_map, basic_block exit_block_map,
2702 basic_block new_entry)
2704 tree fndecl = id->src_fn;
2705 tree body;
2707 /* If this body has a CFG, walk CFG and copy. */
2708 gcc_assert (ENTRY_BLOCK_PTR_FOR_FN (DECL_STRUCT_FUNCTION (fndecl)));
2709 body = copy_cfg_body (id, count, frequency_scale, entry_block_map, exit_block_map,
2710 new_entry);
2711 copy_debug_stmts (id);
2713 return body;
2716 /* Return true if VALUE is an ADDR_EXPR of an automatic variable
2717 defined in function FN, or of a data member thereof. */
2719 static bool
2720 self_inlining_addr_expr (tree value, tree fn)
2722 tree var;
2724 if (TREE_CODE (value) != ADDR_EXPR)
2725 return false;
2727 var = get_base_address (TREE_OPERAND (value, 0));
2729 return var && auto_var_in_fn_p (var, fn);
2732 /* Append to BB a debug annotation that binds VAR to VALUE, inheriting
2733 lexical block and line number information from base_stmt, if given,
2734 or from the last stmt of the block otherwise. */
2736 static gimple
2737 insert_init_debug_bind (copy_body_data *id,
2738 basic_block bb, tree var, tree value,
2739 gimple base_stmt)
2741 gimple note;
2742 gimple_stmt_iterator gsi;
2743 tree tracked_var;
2745 if (!gimple_in_ssa_p (id->src_cfun))
2746 return NULL;
2748 if (!MAY_HAVE_DEBUG_STMTS)
2749 return NULL;
2751 tracked_var = target_for_debug_bind (var);
2752 if (!tracked_var)
2753 return NULL;
2755 if (bb)
2757 gsi = gsi_last_bb (bb);
2758 if (!base_stmt && !gsi_end_p (gsi))
2759 base_stmt = gsi_stmt (gsi);
2762 note = gimple_build_debug_bind (tracked_var, value, base_stmt);
2764 if (bb)
2766 if (!gsi_end_p (gsi))
2767 gsi_insert_after (&gsi, note, GSI_SAME_STMT);
2768 else
2769 gsi_insert_before (&gsi, note, GSI_SAME_STMT);
2772 return note;
2775 static void
2776 insert_init_stmt (copy_body_data *id, basic_block bb, gimple init_stmt)
2778 /* If VAR represents a zero-sized variable, it's possible that the
2779 assignment statement may result in no gimple statements. */
2780 if (init_stmt)
2782 gimple_stmt_iterator si = gsi_last_bb (bb);
2784 /* We can end up with init statements that store to a non-register
2785 from a rhs with a conversion. Handle that here by forcing the
2786 rhs into a temporary. gimple_regimplify_operands is not
2787 prepared to do this for us. */
2788 if (!is_gimple_debug (init_stmt)
2789 && !is_gimple_reg (gimple_assign_lhs (init_stmt))
2790 && is_gimple_reg_type (TREE_TYPE (gimple_assign_lhs (init_stmt)))
2791 && gimple_assign_rhs_class (init_stmt) == GIMPLE_UNARY_RHS)
2793 tree rhs = build1 (gimple_assign_rhs_code (init_stmt),
2794 gimple_expr_type (init_stmt),
2795 gimple_assign_rhs1 (init_stmt));
2796 rhs = force_gimple_operand_gsi (&si, rhs, true, NULL_TREE, false,
2797 GSI_NEW_STMT);
2798 gimple_assign_set_rhs_code (init_stmt, TREE_CODE (rhs));
2799 gimple_assign_set_rhs1 (init_stmt, rhs);
2801 gsi_insert_after (&si, init_stmt, GSI_NEW_STMT);
2802 gimple_regimplify_operands (init_stmt, &si);
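/* Sketch of the fix-up above (hypothetical names and types): an init
   statement such as
     mem_dest = (float) arg_2;
   where mem_dest is not a gimple register is rewritten as
     tmp_3 = (float) arg_2;
     mem_dest = tmp_3;
   by forcing the converted rhs into a temporary before the store.  */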
2804 if (!is_gimple_debug (init_stmt) && MAY_HAVE_DEBUG_STMTS)
2806 tree def = gimple_assign_lhs (init_stmt);
2807 insert_init_debug_bind (id, bb, def, def, init_stmt);
2812 /* Initialize parameter P with VALUE. If needed, produce an init statement
2813 at the end of BB. When BB is NULL, we return the init statement to be
2814 output later. */
2815 static gimple
2816 setup_one_parameter (copy_body_data *id, tree p, tree value, tree fn,
2817 basic_block bb, tree *vars)
2819 gimple init_stmt = NULL;
2820 tree var;
2821 tree rhs = value;
2822 tree def = (gimple_in_ssa_p (cfun)
2823 ? ssa_default_def (id->src_cfun, p) : NULL);
2825 if (value
2826 && value != error_mark_node
2827 && !useless_type_conversion_p (TREE_TYPE (p), TREE_TYPE (value)))
2829 /* If we can match up types by promotion/demotion do so. */
2830 if (fold_convertible_p (TREE_TYPE (p), value))
2831 rhs = fold_convert (TREE_TYPE (p), value);
2832 else
2834 /* ??? For valid programs we should not end up here.
2835 Still if we end up with truly mismatched types here, fall back
2836 to using a VIEW_CONVERT_EXPR or a literal zero to not leak invalid
2837 GIMPLE to the following passes. */
2838 if (!is_gimple_reg_type (TREE_TYPE (value))
2839 || TYPE_SIZE (TREE_TYPE (p)) == TYPE_SIZE (TREE_TYPE (value)))
2840 rhs = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (p), value);
2841 else
2842 rhs = build_zero_cst (TREE_TYPE (p));
2846 /* Make an equivalent VAR_DECL. Note that we must NOT remap the type
2847 here since the type of this decl must be visible to the calling
2848 function. */
2849 var = copy_decl_to_var (p, id);
2851 /* Declare this new variable. */
2852 DECL_CHAIN (var) = *vars;
2853 *vars = var;
2855 /* Make gimplifier happy about this variable. */
2856 DECL_SEEN_IN_BIND_EXPR_P (var) = 1;
2858 /* If the parameter is never assigned to and has no SSA_NAMEs created,
2859 we would not need to create a new variable here at all, if it
2860 weren't for debug info. Still, we can just use the argument
2861 value. */
2862 if (TREE_READONLY (p)
2863 && !TREE_ADDRESSABLE (p)
2864 && value && !TREE_SIDE_EFFECTS (value)
2865 && !def)
2867 /* We may produce non-gimple trees by adding NOPs or introduce
2868 invalid sharing when the operand is not really constant.
2869 It is not a big deal to prohibit constant propagation here as
2870 we will constant propagate in the DOM1 pass anyway. */
2871 if (is_gimple_min_invariant (value)
2872 && useless_type_conversion_p (TREE_TYPE (p),
2873 TREE_TYPE (value))
2874 /* We have to be very careful about ADDR_EXPR. Make sure
2875 the base variable isn't a local variable of the inlined
2876 function, e.g., when doing recursive inlining, direct or
2877 mutually-recursive or whatever, which is why we don't
2878 just test whether fn == current_function_decl. */
2879 && ! self_inlining_addr_expr (value, fn))
2881 insert_decl_map (id, p, value);
2882 insert_debug_decl_map (id, p, var);
2883 return insert_init_debug_bind (id, bb, var, value, NULL);
2887 /* Register the VAR_DECL as the equivalent for the PARM_DECL;
2888 that way, when the PARM_DECL is encountered, it will be
2889 automatically replaced by the VAR_DECL. */
2890 insert_decl_map (id, p, var);
2892 /* Even if P was TREE_READONLY, the new VAR should not be.
2893 In the original code, we would have constructed a
2894 temporary, and then the function body would have never
2895 changed the value of P. However, now, we will be
2896 constructing VAR directly. The constructor body may
2897 change its value multiple times as it is being
2898 constructed. Therefore, it must not be TREE_READONLY;
2899 the back-end assumes that a TREE_READONLY variable is
2900 assigned to only once. */
2901 if (TYPE_NEEDS_CONSTRUCTING (TREE_TYPE (p)))
2902 TREE_READONLY (var) = 0;
2904 /* If there is no setup required and we are in SSA, take the easy route
2905 replacing all SSA names representing the function parameter by the
2906 SSA name passed to the function.
2908 We need to construct a map for the variable anyway as it might be used
2909 in different SSA names when the parameter is set in the function.
2911 Do replacement at -O0 for const arguments replaced by constant.
2912 This is important for builtin_constant_p and other constructs requiring
2913 a constant argument to be visible in the inlined function body. */
2914 if (gimple_in_ssa_p (cfun) && rhs && def && is_gimple_reg (p)
2915 && (optimize
2916 || (TREE_READONLY (p)
2917 && is_gimple_min_invariant (rhs)))
2918 && (TREE_CODE (rhs) == SSA_NAME
2919 || is_gimple_min_invariant (rhs))
2920 && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (def))
2922 insert_decl_map (id, def, rhs);
2923 return insert_init_debug_bind (id, bb, var, rhs, NULL);
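/* A minimal example of the shortcut above (assumed code, not from this
   file): when inlining
     static int inc (int x) { return x + 1; }
   at the call inc (5) with optimization enabled, the default definition of
   X is mapped directly to the constant 5 through the decl map, so no init
   statement is emitted and the inlined body can later fold to 6.  */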
2926 /* If the value of the argument is never used, don't care about initializing
2927 it. */
2928 if (optimize && gimple_in_ssa_p (cfun) && !def && is_gimple_reg (p))
2930 gcc_assert (!value || !TREE_SIDE_EFFECTS (value));
2931 return insert_init_debug_bind (id, bb, var, rhs, NULL);
2934 /* Initialize this VAR_DECL from the equivalent argument. Convert
2935 the argument to the proper type in case it was promoted. */
2936 if (value)
2938 if (rhs == error_mark_node)
2940 insert_decl_map (id, p, var);
2941 return insert_init_debug_bind (id, bb, var, rhs, NULL);
2944 STRIP_USELESS_TYPE_CONVERSION (rhs);
2946 /* If we are in SSA form properly remap the default definition
2947 or assign to a dummy SSA name if the parameter is unused and
2948 we are not optimizing. */
2949 if (gimple_in_ssa_p (cfun) && is_gimple_reg (p))
2951 if (def)
2953 def = remap_ssa_name (def, id);
2954 init_stmt = gimple_build_assign (def, rhs);
2955 SSA_NAME_IS_DEFAULT_DEF (def) = 0;
2956 set_ssa_default_def (cfun, var, NULL);
2958 else if (!optimize)
2960 def = make_ssa_name (var, NULL);
2961 init_stmt = gimple_build_assign (def, rhs);
2964 else
2965 init_stmt = gimple_build_assign (var, rhs);
2967 if (bb && init_stmt)
2968 insert_init_stmt (id, bb, init_stmt);
2970 return init_stmt;
2973 /* Generate code to initialize the parameters of the function at the
2974 top of the stack in ID from the GIMPLE_CALL STMT. */
2976 static void
2977 initialize_inlined_parameters (copy_body_data *id, gimple stmt,
2978 tree fn, basic_block bb)
2980 tree parms;
2981 size_t i;
2982 tree p;
2983 tree vars = NULL_TREE;
2984 tree static_chain = gimple_call_chain (stmt);
2986 /* Figure out what the parameters are. */
2987 parms = DECL_ARGUMENTS (fn);
2989 /* Loop through the parameter declarations, replacing each with an
2990 equivalent VAR_DECL, appropriately initialized. */
2991 for (p = parms, i = 0; p; p = DECL_CHAIN (p), i++)
2993 tree val;
2994 val = i < gimple_call_num_args (stmt) ? gimple_call_arg (stmt, i) : NULL;
2995 setup_one_parameter (id, p, val, fn, bb, &vars);
2997 /* After remapping parameters remap their types. This has to be done
2998 in a second loop over all parameters to appropriately remap
2999 variable sized arrays when the size is specified in a
3000 parameter following the array. */
3001 for (p = parms, i = 0; p; p = DECL_CHAIN (p), i++)
3003 tree *varp = (tree *) pointer_map_contains (id->decl_map, p);
3004 if (varp
3005 && TREE_CODE (*varp) == VAR_DECL)
3007 tree def = (gimple_in_ssa_p (cfun) && is_gimple_reg (p)
3008 ? ssa_default_def (id->src_cfun, p) : NULL);
3009 tree var = *varp;
3010 TREE_TYPE (var) = remap_type (TREE_TYPE (var), id);
3011 /* Also remap the default definition if it was remapped
3012 to the default definition of the parameter replacement
3013 by the parameter setup. */
3014 if (def)
3016 tree *defp = (tree *) pointer_map_contains (id->decl_map, def);
3017 if (defp
3018 && TREE_CODE (*defp) == SSA_NAME
3019 && SSA_NAME_VAR (*defp) == var)
3020 TREE_TYPE (*defp) = TREE_TYPE (var);
3025 /* Initialize the static chain. */
3026 p = DECL_STRUCT_FUNCTION (fn)->static_chain_decl;
3027 gcc_assert (fn != current_function_decl);
3028 if (p)
3030 /* No static chain? Seems like a bug in tree-nested.c. */
3031 gcc_assert (static_chain);
3033 setup_one_parameter (id, p, static_chain, fn, bb, &vars);
3036 declare_inline_vars (id->block, vars);
3040 /* Declare a return variable to replace the RESULT_DECL for the
3041 function we are calling. An appropriate DECL_STMT is returned.
3042 The USE_STMT is filled to contain a use of the declaration to
3043 indicate the return value of the function.
3045 RETURN_SLOT, if non-null, is the place where to store the result. It
3046 is set only for CALL_EXPR_RETURN_SLOT_OPT. MODIFY_DEST, if non-null,
3047 was the LHS of the MODIFY_EXPR to which this call is the RHS.
3049 The return value is a (possibly null) value that holds the result
3050 as seen by the caller. */
3052 static tree
3053 declare_return_variable (copy_body_data *id, tree return_slot, tree modify_dest,
3054 basic_block entry_bb)
3056 tree callee = id->src_fn;
3057 tree result = DECL_RESULT (callee);
3058 tree callee_type = TREE_TYPE (result);
3059 tree caller_type;
3060 tree var, use;
3062 /* Handle type-mismatches in the function declaration return type
3063 vs. the call expression. */
3064 if (modify_dest)
3065 caller_type = TREE_TYPE (modify_dest);
3066 else
3067 caller_type = TREE_TYPE (TREE_TYPE (callee));
3069 /* We don't need to do anything for functions that don't return anything. */
3070 if (VOID_TYPE_P (callee_type))
3071 return NULL_TREE;
3073 /* If there was a return slot, then the return value is the
3074 dereferenced address of that object. */
3075 if (return_slot)
3077 /* The front end shouldn't have used both return_slot and
3078 a modify expression. */
3079 gcc_assert (!modify_dest);
3080 if (DECL_BY_REFERENCE (result))
3082 tree return_slot_addr = build_fold_addr_expr (return_slot);
3083 STRIP_USELESS_TYPE_CONVERSION (return_slot_addr);
3085 /* We are going to construct *&return_slot and we can't do that
3086 for variables believed to be not addressable.
3088 FIXME: This check possibly can match, because values returned
3089 via return slot optimization are not believed to have address
3090 taken by alias analysis. */
3091 gcc_assert (TREE_CODE (return_slot) != SSA_NAME);
3092 var = return_slot_addr;
3094 else
3096 var = return_slot;
3097 gcc_assert (TREE_CODE (var) != SSA_NAME);
3098 TREE_ADDRESSABLE (var) |= TREE_ADDRESSABLE (result);
3100 if ((TREE_CODE (TREE_TYPE (result)) == COMPLEX_TYPE
3101 || TREE_CODE (TREE_TYPE (result)) == VECTOR_TYPE)
3102 && !DECL_GIMPLE_REG_P (result)
3103 && DECL_P (var))
3104 DECL_GIMPLE_REG_P (var) = 0;
3105 use = NULL;
3106 goto done;
3109 /* All types requiring non-trivial constructors should have been handled. */
3110 gcc_assert (!TREE_ADDRESSABLE (callee_type));
3112 /* Attempt to avoid creating a new temporary variable. */
3113 if (modify_dest
3114 && TREE_CODE (modify_dest) != SSA_NAME)
3116 bool use_it = false;
3118 /* We can't use MODIFY_DEST if there's type promotion involved. */
3119 if (!useless_type_conversion_p (callee_type, caller_type))
3120 use_it = false;
3122 /* ??? If we're assigning to a variable sized type, then we must
3123 reuse the destination variable, because we've no good way to
3124 create variable sized temporaries at this point. */
3125 else if (TREE_CODE (TYPE_SIZE_UNIT (caller_type)) != INTEGER_CST)
3126 use_it = true;
3128 /* If the callee cannot possibly modify MODIFY_DEST, then we can
3129 reuse it as the result of the call directly. Don't do this if
3130 it would promote MODIFY_DEST to addressable. */
3131 else if (TREE_ADDRESSABLE (result))
3132 use_it = false;
3133 else
3135 tree base_m = get_base_address (modify_dest);
3137 /* If the base isn't a decl, then it's a pointer, and we don't
3138 know where that's going to go. */
3139 if (!DECL_P (base_m))
3140 use_it = false;
3141 else if (is_global_var (base_m))
3142 use_it = false;
3143 else if ((TREE_CODE (TREE_TYPE (result)) == COMPLEX_TYPE
3144 || TREE_CODE (TREE_TYPE (result)) == VECTOR_TYPE)
3145 && !DECL_GIMPLE_REG_P (result)
3146 && DECL_GIMPLE_REG_P (base_m))
3147 use_it = false;
3148 else if (!TREE_ADDRESSABLE (base_m))
3149 use_it = true;
3152 if (use_it)
3154 var = modify_dest;
3155 use = NULL;
3156 goto done;
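/* Hedged illustration of the MODIFY_DEST reuse above: for a caller doing
     struct pt p = make_point ();
   where p is a non-addressable local, the inlined body's RESULT_DECL can be
   mapped straight onto p, so the callee's stores build the result in place
   instead of going through an extra temporary plus a copy.  */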
3160 gcc_assert (TREE_CODE (TYPE_SIZE_UNIT (callee_type)) == INTEGER_CST);
3162 var = copy_result_decl_to_var (result, id);
3163 DECL_SEEN_IN_BIND_EXPR_P (var) = 1;
3165 /* Do not have the rest of GCC warn about this variable as it should
3166 not be visible to the user. */
3167 TREE_NO_WARNING (var) = 1;
3169 declare_inline_vars (id->block, var);
3171 /* Build the use expr. If the return type of the function was
3172 promoted, convert it back to the expected type. */
3173 use = var;
3174 if (!useless_type_conversion_p (caller_type, TREE_TYPE (var)))
3176 /* If we can match up types by promotion/demotion do so. */
3177 if (fold_convertible_p (caller_type, var))
3178 use = fold_convert (caller_type, var);
3179 else
3181 /* ??? For valid programs we should not end up here.
3182 Still if we end up with truly mismatched types here, fall back
3183 to using a MEM_REF to not leak invalid GIMPLE to the following
3184 passes. */
3185 /* Prevent var from being written into SSA form. */
3186 if (TREE_CODE (TREE_TYPE (var)) == VECTOR_TYPE
3187 || TREE_CODE (TREE_TYPE (var)) == COMPLEX_TYPE)
3188 DECL_GIMPLE_REG_P (var) = false;
3189 else if (is_gimple_reg_type (TREE_TYPE (var)))
3190 TREE_ADDRESSABLE (var) = true;
3191 use = fold_build2 (MEM_REF, caller_type,
3192 build_fold_addr_expr (var),
3193 build_int_cst (ptr_type_node, 0));
3197 STRIP_USELESS_TYPE_CONVERSION (use);
3199 if (DECL_BY_REFERENCE (result))
3201 TREE_ADDRESSABLE (var) = 1;
3202 var = build_fold_addr_expr (var);
3205 done:
3206 /* Register the VAR_DECL as the equivalent for the RESULT_DECL; that
3207 way, when the RESULT_DECL is encountered, it will be
3208 automatically replaced by the VAR_DECL.
3210 When returning by reference, ensure that RESULT_DECL remaps to
3211 gimple_val. */
3212 if (DECL_BY_REFERENCE (result)
3213 && !is_gimple_val (var))
3215 tree temp = create_tmp_var (TREE_TYPE (result), "retvalptr");
3216 insert_decl_map (id, result, temp);
3217 /* When RESULT_DECL is in SSA form, we need to remap and initialize
3218 its default_def SSA_NAME. */
3219 if (gimple_in_ssa_p (id->src_cfun)
3220 && is_gimple_reg (result))
3222 temp = make_ssa_name (temp, NULL);
3223 insert_decl_map (id, ssa_default_def (id->src_cfun, result), temp);
3225 insert_init_stmt (id, entry_bb, gimple_build_assign (temp, var));
3227 else
3228 insert_decl_map (id, result, var);
3230 /* Remember this so we can ignore it in remap_decls. */
3231 id->retvar = var;
3233 return use;
3236 /* Callback through walk_tree. Determine if a DECL_INITIAL makes reference
3237 to a local label. */
3239 static tree
3240 has_label_address_in_static_1 (tree *nodep, int *walk_subtrees, void *fnp)
3242 tree node = *nodep;
3243 tree fn = (tree) fnp;
3245 if (TREE_CODE (node) == LABEL_DECL && DECL_CONTEXT (node) == fn)
3246 return node;
3248 if (TYPE_P (node))
3249 *walk_subtrees = 0;
3251 return NULL_TREE;
3254 /* Determine if the function can be copied. If so return NULL. If
3255 not return a string describing the reason for failure. */
3257 static const char *
3258 copy_forbidden (struct function *fun, tree fndecl)
3260 const char *reason = fun->cannot_be_copied_reason;
3261 tree decl;
3262 unsigned ix;
3264 /* Only examine the function once. */
3265 if (fun->cannot_be_copied_set)
3266 return reason;
3268 /* We cannot copy a function that receives a non-local goto
3269 because we cannot remap the destination label used in the
3270 function that is performing the non-local goto. */
3271 /* ??? Actually, this should be possible, if we work at it.
3272 No doubt there's just a handful of places that simply
3273 assume it doesn't happen and don't substitute properly. */
3274 if (fun->has_nonlocal_label)
3276 reason = G_("function %q+F can never be copied "
3277 "because it receives a non-local goto");
3278 goto fail;
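/* The loop below catches the other non-copyable shape: a static local whose
   initializer captures the address of a label in this function, e.g. the
   classic computed-goto dispatch table (hypothetical code):
     static void *dispatch[] = { &&op_add, &&op_sub };
   Duplicating the body cannot remap the label addresses already baked into
   the static initializer.  */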
3281 FOR_EACH_LOCAL_DECL (fun, ix, decl)
3282 if (TREE_CODE (decl) == VAR_DECL
3283 && TREE_STATIC (decl)
3284 && !DECL_EXTERNAL (decl)
3285 && DECL_INITIAL (decl)
3286 && walk_tree_without_duplicates (&DECL_INITIAL (decl),
3287 has_label_address_in_static_1,
3288 fndecl))
3290 reason = G_("function %q+F can never be copied because it saves "
3291 "address of local label in a static variable");
3292 goto fail;
3295 fail:
3296 fun->cannot_be_copied_reason = reason;
3297 fun->cannot_be_copied_set = true;
3298 return reason;
3302 static const char *inline_forbidden_reason;
3304 /* A callback for walk_gimple_seq to handle statements. Returns non-null
3305 iff a function cannot be inlined. Also sets the reason why. */
3307 static tree
3308 inline_forbidden_p_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
3309 struct walk_stmt_info *wip)
3311 tree fn = (tree) wip->info;
3312 tree t;
3313 gimple stmt = gsi_stmt (*gsi);
3315 switch (gimple_code (stmt))
3317 case GIMPLE_CALL:
3318 /* Refuse to inline an alloca call unless the user explicitly forced it, as
3319 this may change the program's memory overhead drastically when the
3320 function using alloca is called in a loop. In the GCC present in
3321 SPEC2000, inlining into schedule_block caused it to require 2GB of
3322 RAM instead of 256MB. Don't do so for alloca calls emitted for
3323 VLA objects as those can't cause unbounded growth (they're always
3324 wrapped inside stack_save/stack_restore regions). */
3325 if (gimple_alloca_call_p (stmt)
3326 && !gimple_call_alloca_for_var_p (stmt)
3327 && !lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn)))
3329 inline_forbidden_reason
3330 = G_("function %q+F can never be inlined because it uses "
3331 "alloca (override using the always_inline attribute)");
3332 *handled_ops_p = true;
3333 return fn;
3336 t = gimple_call_fndecl (stmt);
3337 if (t == NULL_TREE)
3338 break;
3340 /* We cannot inline functions that call setjmp. */
3341 if (setjmp_call_p (t))
3343 inline_forbidden_reason
3344 = G_("function %q+F can never be inlined because it uses setjmp");
3345 *handled_ops_p = true;
3346 return t;
3349 if (DECL_BUILT_IN_CLASS (t) == BUILT_IN_NORMAL)
3350 switch (DECL_FUNCTION_CODE (t))
3352 /* We cannot inline functions that take a variable number of
3353 arguments. */
3354 case BUILT_IN_VA_START:
3355 case BUILT_IN_NEXT_ARG:
3356 case BUILT_IN_VA_END:
3357 inline_forbidden_reason
3358 = G_("function %q+F can never be inlined because it "
3359 "uses variable argument lists");
3360 *handled_ops_p = true;
3361 return t;
3363 case BUILT_IN_LONGJMP:
3364 /* We can't inline functions that call __builtin_longjmp at
3365 all. The non-local goto machinery really requires the
3366 destination be in a different function. If we allow the
3367 function calling __builtin_longjmp to be inlined into the
3368 function calling __builtin_setjmp, Things will Go Awry. */
3369 inline_forbidden_reason
3370 = G_("function %q+F can never be inlined because "
3371 "it uses setjmp-longjmp exception handling");
3372 *handled_ops_p = true;
3373 return t;
3375 case BUILT_IN_NONLOCAL_GOTO:
3376 /* Similarly. */
3377 inline_forbidden_reason
3378 = G_("function %q+F can never be inlined because "
3379 "it uses non-local goto");
3380 *handled_ops_p = true;
3381 return t;
3383 case BUILT_IN_RETURN:
3384 case BUILT_IN_APPLY_ARGS:
3385 /* If a __builtin_apply_args caller would be inlined,
3386 it would be saving arguments of the function it has
3387 been inlined into. Similarly __builtin_return would
3388 return from the function the inline has been inlined into. */
3389 inline_forbidden_reason
3390 = G_("function %q+F can never be inlined because "
3391 "it uses __builtin_return or __builtin_apply_args");
3392 *handled_ops_p = true;
3393 return t;
3395 default:
3396 break;
3398 break;
3400 case GIMPLE_GOTO:
3401 t = gimple_goto_dest (stmt);
3403 /* We will not inline a function which uses computed goto. The
3404 addresses of its local labels, which may be tucked into
3405 global storage, are of course not constant across
3406 instantiations, which causes unexpected behavior. */
3407 if (TREE_CODE (t) != LABEL_DECL)
3409 inline_forbidden_reason
3410 = G_("function %q+F can never be inlined "
3411 "because it contains a computed goto");
3412 *handled_ops_p = true;
3413 return t;
3415 break;
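/* Sketch of what this case rejects (hypothetical code): a dispatch loop
   ending in
     goto *dispatch[opcode];
   is a GIMPLE_GOTO whose destination is not a LABEL_DECL, so the whole
   function is marked uninlinable rather than risking label addresses that
   are only meaningful in the original instantiation.  */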
3417 default:
3418 break;
3421 *handled_ops_p = false;
3422 return NULL_TREE;
3425 /* Return true if FNDECL is a function that cannot be inlined into
3426 another one. */
3428 static bool
3429 inline_forbidden_p (tree fndecl)
3431 struct function *fun = DECL_STRUCT_FUNCTION (fndecl);
3432 struct walk_stmt_info wi;
3433 struct pointer_set_t *visited_nodes;
3434 basic_block bb;
3435 bool forbidden_p = false;
3437 /* First check for shared reasons not to copy the code. */
3438 inline_forbidden_reason = copy_forbidden (fun, fndecl);
3439 if (inline_forbidden_reason != NULL)
3440 return true;
3442 /* Next, walk the statements of the function looking for
3443 constructs we can't handle, or that are non-optimal for inlining. */
3444 visited_nodes = pointer_set_create ();
3445 memset (&wi, 0, sizeof (wi));
3446 wi.info = (void *) fndecl;
3447 wi.pset = visited_nodes;
3449 FOR_EACH_BB_FN (bb, fun)
3451 gimple ret;
3452 gimple_seq seq = bb_seq (bb);
3453 ret = walk_gimple_seq (seq, inline_forbidden_p_stmt, NULL, &wi);
3454 forbidden_p = (ret != NULL);
3455 if (forbidden_p)
3456 break;
3459 pointer_set_destroy (visited_nodes);
3460 return forbidden_p;
3463 /* Return false if the function FNDECL cannot be inlined on account of its
3464 attributes, true otherwise. */
3465 static bool
3466 function_attribute_inlinable_p (const_tree fndecl)
3468 if (targetm.attribute_table)
3470 const_tree a;
3472 for (a = DECL_ATTRIBUTES (fndecl); a; a = TREE_CHAIN (a))
3474 const_tree name = TREE_PURPOSE (a);
3475 int i;
3477 for (i = 0; targetm.attribute_table[i].name != NULL; i++)
3478 if (is_attribute_p (targetm.attribute_table[i].name, name))
3479 return targetm.function_attribute_inlinable_p (fndecl);
3483 return true;
3486 /* Returns nonzero if FN is a function that does not have any
3487 fundamental inline blocking properties. */
3489 bool
3490 tree_inlinable_function_p (tree fn)
3492 bool inlinable = true;
3493 bool do_warning;
3494 tree always_inline;
3496 /* If we've already decided this function shouldn't be inlined,
3497 there's no need to check again. */
3498 if (DECL_UNINLINABLE (fn))
3499 return false;
3501 /* We only warn for functions declared `inline' by the user. */
3502 do_warning = (warn_inline
3503 && DECL_DECLARED_INLINE_P (fn)
3504 && !DECL_NO_INLINE_WARNING_P (fn)
3505 && !DECL_IN_SYSTEM_HEADER (fn));
3507 always_inline = lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn));
3509 if (flag_no_inline
3510 && always_inline == NULL)
3512 if (do_warning)
3513 warning (OPT_Winline, "function %q+F can never be inlined because it "
3514 "is suppressed using -fno-inline", fn);
3515 inlinable = false;
3518 else if (!function_attribute_inlinable_p (fn))
3520 if (do_warning)
3521 warning (OPT_Winline, "function %q+F can never be inlined because it "
3522 "uses attributes conflicting with inlining", fn);
3523 inlinable = false;
3526 else if (inline_forbidden_p (fn))
3528 /* See if we should warn about uninlinable functions. Previously,
3529 some of these warnings would be issued while trying to expand
3530 the function inline, but that would cause multiple warnings
3531 about functions that would for example call alloca. But since
3532 this is a property of the function, just one warning is enough.
3533 As a bonus we can now give more details about the reason why a
3534 function is not inlinable. */
3535 if (always_inline)
3536 error (inline_forbidden_reason, fn);
3537 else if (do_warning)
3538 warning (OPT_Winline, inline_forbidden_reason, fn);
3540 inlinable = false;
3543 /* Squirrel away the result so that we don't have to check again. */
3544 DECL_UNINLINABLE (fn) = !inlinable;
3546 return inlinable;
3549 /* Estimate the cost of a memory move. Use machine dependent
3550 word size and take possible memcpy call into account. */
3552 int
3553 estimate_move_cost (tree type)
3555 HOST_WIDE_INT size;
3557 gcc_assert (!VOID_TYPE_P (type));
3559 if (TREE_CODE (type) == VECTOR_TYPE)
3561 enum machine_mode inner = TYPE_MODE (TREE_TYPE (type));
3562 enum machine_mode simd
3563 = targetm.vectorize.preferred_simd_mode (inner);
3564 int simd_mode_size = GET_MODE_SIZE (simd);
3565 return ((GET_MODE_SIZE (TYPE_MODE (type)) + simd_mode_size - 1)
3566 / simd_mode_size);
3569 size = int_size_in_bytes (type);
3571 if (size < 0 || size > MOVE_MAX_PIECES * MOVE_RATIO (!optimize_size))
3572 /* Cost of a memcpy call, 3 arguments and the call. */
3573 return 4;
3574 else
3575 return ((size + MOVE_MAX_PIECES - 1) / MOVE_MAX_PIECES);
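/* Rough numbers, assuming a target with MOVE_MAX_PIECES == 8 and a
   large-enough MOVE_RATIO: copying a 24-byte struct is costed as
   (24 + 7) / 8 = 3 piecewise moves, while a struct past the
   MOVE_MAX_PIECES * MOVE_RATIO threshold is costed as a flat 4,
   i.e. a libcall to memcpy with its three arguments.  */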
3578 /* Returns cost of operation CODE, according to WEIGHTS */
3580 static int
3581 estimate_operator_cost (enum tree_code code, eni_weights *weights,
3582 tree op1 ATTRIBUTE_UNUSED, tree op2)
3584 switch (code)
3586 /* These are "free" conversions, or their presumed cost
3587 is folded into other operations. */
3588 case RANGE_EXPR:
3589 CASE_CONVERT:
3590 case COMPLEX_EXPR:
3591 case PAREN_EXPR:
3592 case VIEW_CONVERT_EXPR:
3593 return 0;
3595 /* Assign cost of 1 to usual operations.
3596 ??? We may consider mapping RTL costs to this. */
3597 case COND_EXPR:
3598 case VEC_COND_EXPR:
3599 case VEC_PERM_EXPR:
3601 case PLUS_EXPR:
3602 case POINTER_PLUS_EXPR:
3603 case MINUS_EXPR:
3604 case MULT_EXPR:
3605 case MULT_HIGHPART_EXPR:
3606 case FMA_EXPR:
3608 case ADDR_SPACE_CONVERT_EXPR:
3609 case FIXED_CONVERT_EXPR:
3610 case FIX_TRUNC_EXPR:
3612 case NEGATE_EXPR:
3613 case FLOAT_EXPR:
3614 case MIN_EXPR:
3615 case MAX_EXPR:
3616 case ABS_EXPR:
3618 case LSHIFT_EXPR:
3619 case RSHIFT_EXPR:
3620 case LROTATE_EXPR:
3621 case RROTATE_EXPR:
3622 case VEC_LSHIFT_EXPR:
3623 case VEC_RSHIFT_EXPR:
3625 case BIT_IOR_EXPR:
3626 case BIT_XOR_EXPR:
3627 case BIT_AND_EXPR:
3628 case BIT_NOT_EXPR:
3630 case TRUTH_ANDIF_EXPR:
3631 case TRUTH_ORIF_EXPR:
3632 case TRUTH_AND_EXPR:
3633 case TRUTH_OR_EXPR:
3634 case TRUTH_XOR_EXPR:
3635 case TRUTH_NOT_EXPR:
3637 case LT_EXPR:
3638 case LE_EXPR:
3639 case GT_EXPR:
3640 case GE_EXPR:
3641 case EQ_EXPR:
3642 case NE_EXPR:
3643 case ORDERED_EXPR:
3644 case UNORDERED_EXPR:
3646 case UNLT_EXPR:
3647 case UNLE_EXPR:
3648 case UNGT_EXPR:
3649 case UNGE_EXPR:
3650 case UNEQ_EXPR:
3651 case LTGT_EXPR:
3653 case CONJ_EXPR:
3655 case PREDECREMENT_EXPR:
3656 case PREINCREMENT_EXPR:
3657 case POSTDECREMENT_EXPR:
3658 case POSTINCREMENT_EXPR:
3660 case REALIGN_LOAD_EXPR:
3662 case REDUC_MAX_EXPR:
3663 case REDUC_MIN_EXPR:
3664 case REDUC_PLUS_EXPR:
3665 case WIDEN_SUM_EXPR:
3666 case WIDEN_MULT_EXPR:
3667 case DOT_PROD_EXPR:
3668 case WIDEN_MULT_PLUS_EXPR:
3669 case WIDEN_MULT_MINUS_EXPR:
3670 case WIDEN_LSHIFT_EXPR:
3672 case VEC_WIDEN_MULT_HI_EXPR:
3673 case VEC_WIDEN_MULT_LO_EXPR:
3674 case VEC_WIDEN_MULT_EVEN_EXPR:
3675 case VEC_WIDEN_MULT_ODD_EXPR:
3676 case VEC_UNPACK_HI_EXPR:
3677 case VEC_UNPACK_LO_EXPR:
3678 case VEC_UNPACK_FLOAT_HI_EXPR:
3679 case VEC_UNPACK_FLOAT_LO_EXPR:
3680 case VEC_PACK_TRUNC_EXPR:
3681 case VEC_PACK_SAT_EXPR:
3682 case VEC_PACK_FIX_TRUNC_EXPR:
3683 case VEC_WIDEN_LSHIFT_HI_EXPR:
3684 case VEC_WIDEN_LSHIFT_LO_EXPR:
3686 return 1;
3688 /* A few special cases of expensive operations.  This is useful
3689 to avoid inlining functions that have too many of these. */
3690 case TRUNC_DIV_EXPR:
3691 case CEIL_DIV_EXPR:
3692 case FLOOR_DIV_EXPR:
3693 case ROUND_DIV_EXPR:
3694 case EXACT_DIV_EXPR:
3695 case TRUNC_MOD_EXPR:
3696 case CEIL_MOD_EXPR:
3697 case FLOOR_MOD_EXPR:
3698 case ROUND_MOD_EXPR:
3699 case RDIV_EXPR:
3700 if (TREE_CODE (op2) != INTEGER_CST)
3701 return weights->div_mod_cost;
3702 return 1;
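
/* For example, "x / 4" (second operand an INTEGER_CST) is costed as 1,
   while "x / y" is costed at WEIGHTS->div_mod_cost, so functions full of
   general divisions and modulos look correspondingly more expensive to
   the inliner.  */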
3704 default:
3705 /* We expect a copy assignment with no operator. */
3706 gcc_assert (get_gimple_rhs_class (code) == GIMPLE_SINGLE_RHS);
3707 return 0;
3712 /* Estimate number of instructions that will be created by expanding
3713 the statements in the statement sequence STMTS.
3714 WEIGHTS contains weights attributed to various constructs. */
3716 static
3717 int estimate_num_insns_seq (gimple_seq stmts, eni_weights *weights)
3719 int cost;
3720 gimple_stmt_iterator gsi;
3722 cost = 0;
3723 for (gsi = gsi_start (stmts); !gsi_end_p (gsi); gsi_next (&gsi))
3724 cost += estimate_num_insns (gsi_stmt (gsi), weights);
3726 return cost;
3730 /* Estimate number of instructions that will be created by expanding STMT.
3731 WEIGHTS contains weights attributed to various constructs. */
3734 estimate_num_insns (gimple stmt, eni_weights *weights)
3736 unsigned cost, i;
3737 enum gimple_code code = gimple_code (stmt);
3738 tree lhs;
3739 tree rhs;
3741 switch (code)
3743 case GIMPLE_ASSIGN:
3744 /* Try to estimate the cost of assignments. We have three cases to
3745 deal with:
3746 1) Simple assignments to registers;
3747 2) Stores to things that must live in memory. This includes
3748 "normal" stores to scalars, but also assignments of large
3749 structures, or constructors of big arrays;
3751 Let us look at the first two cases, assuming we have "a = b + C":
3752 <GIMPLE_ASSIGN <var_decl "a">
3753 <plus_expr <var_decl "b"> <constant C>>
3754 If "a" is a GIMPLE register, the assignment to it is free on almost
3755 any target, because "a" usually ends up in a real register. Hence
3756 the only cost of this expression comes from the PLUS_EXPR, and we
3757 can ignore the GIMPLE_ASSIGN.
3758 If "a" is not a GIMPLE register, the assignment to "a" will most
3759 likely be a real store, so the cost of the GIMPLE_ASSIGN is the cost
3760 of moving something into "a", which we compute using the function
3761 estimate_move_cost. */
3762 if (gimple_clobber_p (stmt))
3763 return 0; /* ={v} {CLOBBER} stmt expands to nothing. */
3765 lhs = gimple_assign_lhs (stmt);
3766 rhs = gimple_assign_rhs1 (stmt);
3768 cost = 0;
3770 /* Account for the cost of moving to / from memory. */
3771 if (gimple_store_p (stmt))
3772 cost += estimate_move_cost (TREE_TYPE (lhs));
3773 if (gimple_assign_load_p (stmt))
3774 cost += estimate_move_cost (TREE_TYPE (rhs));
3776 cost += estimate_operator_cost (gimple_assign_rhs_code (stmt), weights,
3777 gimple_assign_rhs1 (stmt),
3778 get_gimple_rhs_class (gimple_assign_rhs_code (stmt))
3779 == GIMPLE_BINARY_RHS
3780 ? gimple_assign_rhs2 (stmt) : NULL);
3781 break;
3783 case GIMPLE_COND:
3784 cost = 1 + estimate_operator_cost (gimple_cond_code (stmt), weights,
3785 gimple_op (stmt, 0),
3786 gimple_op (stmt, 1));
3787 break;
3789 case GIMPLE_SWITCH:
3790 /* Take into account cost of the switch + guess 2 conditional jumps for
3791 each case label.
3793 TODO: once the switch expansion logic is sufficiently separated, we can
3794 do a better job of estimating the cost of the switch. */
3795 if (weights->time_based)
3796 cost = floor_log2 (gimple_switch_num_labels (stmt)) * 2;
3797 else
3798 cost = gimple_switch_num_labels (stmt) * 2;
3799 break;
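
/* E.g. for the GIMPLE_SWITCH cost above: a switch with 8 case labels is
   estimated at 8 * 2 = 16 insns for size but only floor_log2 (8) * 2 = 6
   for time, roughly modelling a balanced decision tree rather than a
   linear chain of comparisons.  */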
3801 case GIMPLE_CALL:
3803 tree decl;
3804 struct cgraph_node *node = NULL;
3806 /* Do not special case builtins where we see the body.
3807 This just confuses the inliner. */
3808 if (gimple_call_internal_p (stmt))
3809 return 0;
3810 else if (!(decl = gimple_call_fndecl (stmt))
3811 || !(node = cgraph_get_node (decl))
3812 || node->definition)
3814 /* For builtins that are likely expanded to nothing or
3815 inlined, do not account for operand costs. */
3816 else if (is_simple_builtin (decl))
3817 return 0;
3818 else if (is_inexpensive_builtin (decl))
3819 return weights->target_builtin_call_cost;
3820 else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
3822 /* We canonicalize x * x to pow (x, 2.0) with -ffast-math, so
3823 specialize the cheap expansion we do here.
3824 ??? This asks for a more general solution. */
3825 switch (DECL_FUNCTION_CODE (decl))
3827 case BUILT_IN_POW:
3828 case BUILT_IN_POWF:
3829 case BUILT_IN_POWL:
3830 if (TREE_CODE (gimple_call_arg (stmt, 1)) == REAL_CST
3831 && REAL_VALUES_EQUAL
3832 (TREE_REAL_CST (gimple_call_arg (stmt, 1)), dconst2))
3833 return estimate_operator_cost (MULT_EXPR, weights,
3834 gimple_call_arg (stmt, 0),
3835 gimple_call_arg (stmt, 0));
3836 break;
3838 default:
3839 break;
3843 cost = node ? weights->call_cost : weights->indirect_call_cost;
3844 if (gimple_call_lhs (stmt))
3845 cost += estimate_move_cost (TREE_TYPE (gimple_call_lhs (stmt)));
3846 for (i = 0; i < gimple_call_num_args (stmt); i++)
3848 tree arg = gimple_call_arg (stmt, i);
3849 cost += estimate_move_cost (TREE_TYPE (arg));
3851 break;
3854 case GIMPLE_RETURN:
3855 return weights->return_cost;
3857 case GIMPLE_GOTO:
3858 case GIMPLE_LABEL:
3859 case GIMPLE_NOP:
3860 case GIMPLE_PHI:
3861 case GIMPLE_PREDICT:
3862 case GIMPLE_DEBUG:
3863 return 0;
3865 case GIMPLE_ASM:
3867 int count = asm_str_count (gimple_asm_string (stmt));
3868 /* 1000 means infinity. This avoids overflows later
3869 with very long asm statements. */
3870 if (count > 1000)
3871 count = 1000;
3872 return count;
3875 case GIMPLE_RESX:
3876 /* This is either going to be an external function call with one
3877 argument, or two register copy statements plus a goto. */
3878 return 2;
3880 case GIMPLE_EH_DISPATCH:
3881 /* ??? This is going to turn into a switch statement. Ideally
3882 we'd have a look at the eh region and estimate the number of
3883 edges involved. */
3884 return 10;
3886 case GIMPLE_BIND:
3887 return estimate_num_insns_seq (gimple_bind_body (stmt), weights);
3889 case GIMPLE_EH_FILTER:
3890 return estimate_num_insns_seq (gimple_eh_filter_failure (stmt), weights);
3892 case GIMPLE_CATCH:
3893 return estimate_num_insns_seq (gimple_catch_handler (stmt), weights);
3895 case GIMPLE_TRY:
3896 return (estimate_num_insns_seq (gimple_try_eval (stmt), weights)
3897 + estimate_num_insns_seq (gimple_try_cleanup (stmt), weights));
3899 /* OpenMP directives are generally very expensive. */
3901 case GIMPLE_OMP_RETURN:
3902 case GIMPLE_OMP_SECTIONS_SWITCH:
3903 case GIMPLE_OMP_ATOMIC_STORE:
3904 case GIMPLE_OMP_CONTINUE:
3905 /* ...except these, which are cheap. */
3906 return 0;
3908 case GIMPLE_OMP_ATOMIC_LOAD:
3909 return weights->omp_cost;
3911 case GIMPLE_OMP_FOR:
3912 return (weights->omp_cost
3913 + estimate_num_insns_seq (gimple_omp_body (stmt), weights)
3914 + estimate_num_insns_seq (gimple_omp_for_pre_body (stmt), weights));
3916 case GIMPLE_OMP_PARALLEL:
3917 case GIMPLE_OMP_TASK:
3918 case GIMPLE_OMP_CRITICAL:
3919 case GIMPLE_OMP_MASTER:
3920 case GIMPLE_OMP_TASKGROUP:
3921 case GIMPLE_OMP_ORDERED:
3922 case GIMPLE_OMP_SECTION:
3923 case GIMPLE_OMP_SECTIONS:
3924 case GIMPLE_OMP_SINGLE:
3925 case GIMPLE_OMP_TARGET:
3926 case GIMPLE_OMP_TEAMS:
3927 return (weights->omp_cost
3928 + estimate_num_insns_seq (gimple_omp_body (stmt), weights));
3930 case GIMPLE_TRANSACTION:
3931 return (weights->tm_cost
3932 + estimate_num_insns_seq (gimple_transaction_body (stmt),
3933 weights));
3935 default:
3936 gcc_unreachable ();
3939 return cost;
3942 /* Estimate number of instructions that will be created by expanding
3943 function FNDECL. WEIGHTS contains weights attributed to various
3944 constructs. */
3947 estimate_num_insns_fn (tree fndecl, eni_weights *weights)
3949 struct function *my_function = DECL_STRUCT_FUNCTION (fndecl);
3950 gimple_stmt_iterator bsi;
3951 basic_block bb;
3952 int n = 0;
3954 gcc_assert (my_function && my_function->cfg);
3955 FOR_EACH_BB_FN (bb, my_function)
3957 for (bsi = gsi_start_bb (bb); !gsi_end_p (bsi); gsi_next (&bsi))
3958 n += estimate_num_insns (gsi_stmt (bsi), weights);
3961 return n;
3965 /* Initializes weights used by estimate_num_insns. */
3967 void
3968 init_inline_once (void)
3970 eni_size_weights.call_cost = 1;
3971 eni_size_weights.indirect_call_cost = 3;
3972 eni_size_weights.target_builtin_call_cost = 1;
3973 eni_size_weights.div_mod_cost = 1;
3974 eni_size_weights.omp_cost = 40;
3975 eni_size_weights.tm_cost = 10;
3976 eni_size_weights.time_based = false;
3977 eni_size_weights.return_cost = 1;
3979 /* Estimating the time for a call is difficult, since we have no idea what the
3980 called function does. In the current uses of eni_time_weights,
3981 underestimating the cost does less harm than overestimating it, so
3982 we choose a rather small value here. */
3983 eni_time_weights.call_cost = 10;
3984 eni_time_weights.indirect_call_cost = 15;
3985 eni_time_weights.target_builtin_call_cost = 1;
3986 eni_time_weights.div_mod_cost = 10;
3987 eni_time_weights.omp_cost = 40;
3988 eni_time_weights.tm_cost = 40;
3989 eni_time_weights.time_based = true;
3990 eni_time_weights.return_cost = 2;
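
/* To summarize the intent: eni_size_weights is used when estimating code
   growth and eni_time_weights when estimating execution time, so e.g. a
   direct call counts as 1 insn for size but 10 units for time, the body
   of the callee being unknown.  */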
3993 /* Estimate the number of instructions in a gimple_seq. */
3996 count_insns_seq (gimple_seq seq, eni_weights *weights)
3998 gimple_stmt_iterator gsi;
3999 int n = 0;
4000 for (gsi = gsi_start (seq); !gsi_end_p (gsi); gsi_next (&gsi))
4001 n += estimate_num_insns (gsi_stmt (gsi), weights);
4003 return n;
4007 /* Install new lexical TREE_BLOCK underneath 'current_block'. */
4009 static void
4010 prepend_lexical_block (tree current_block, tree new_block)
4012 BLOCK_CHAIN (new_block) = BLOCK_SUBBLOCKS (current_block);
4013 BLOCK_SUBBLOCKS (current_block) = new_block;
4014 BLOCK_SUPERCONTEXT (new_block) = current_block;
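
/* That is, NEW_BLOCK becomes the first subblock of CURRENT_BLOCK and the
   previous subblocks are chained behind it through BLOCK_CHAIN.  */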
4017 /* Add local variables from CALLEE to CALLER. */
4019 static inline void
4020 add_local_variables (struct function *callee, struct function *caller,
4021 copy_body_data *id)
4023 tree var;
4024 unsigned ix;
4026 FOR_EACH_LOCAL_DECL (callee, ix, var)
4027 if (!can_be_nonlocal (var, id))
4029 tree new_var = remap_decl (var, id);
4031 /* Remap debug-expressions. */
4032 if (TREE_CODE (new_var) == VAR_DECL
4033 && DECL_HAS_DEBUG_EXPR_P (var)
4034 && new_var != var)
4036 tree tem = DECL_DEBUG_EXPR (var);
4037 bool old_regimplify = id->regimplify;
4038 id->remapping_type_depth++;
4039 walk_tree (&tem, copy_tree_body_r, id, NULL);
4040 id->remapping_type_depth--;
4041 id->regimplify = old_regimplify;
4042 SET_DECL_DEBUG_EXPR (new_var, tem);
4043 DECL_HAS_DEBUG_EXPR_P (new_var) = 1;
4045 add_local_decl (caller, new_var);
4049 /* If STMT is a GIMPLE_CALL, replace it with its inline expansion. */
4051 static bool
4052 expand_call_inline (basic_block bb, gimple stmt, copy_body_data *id)
4054 tree use_retvar;
4055 tree fn;
4056 struct pointer_map_t *st, *dst;
4057 tree return_slot;
4058 tree modify_dest;
4059 location_t saved_location;
4060 struct cgraph_edge *cg_edge;
4061 cgraph_inline_failed_t reason;
4062 basic_block return_block;
4063 edge e;
4064 gimple_stmt_iterator gsi, stmt_gsi;
4065 bool successfully_inlined = FALSE;
4066 bool purge_dead_abnormal_edges;
4068 /* Set input_location here so we get the right instantiation context
4069 if we call instantiate_decl from inlinable_function_p. */
4070 /* FIXME: instantiate_decl isn't called by inlinable_function_p. */
4071 saved_location = input_location;
4072 input_location = gimple_location (stmt);
4074 /* From here on, we're only interested in CALL_EXPRs. */
4075 if (gimple_code (stmt) != GIMPLE_CALL)
4076 goto egress;
4078 cg_edge = cgraph_edge (id->dst_node, stmt);
4079 gcc_checking_assert (cg_edge);
4080 /* First, see if we can figure out what function is being called.
4081 If we cannot, then there is no hope of inlining the function. */
4082 if (cg_edge->indirect_unknown_callee)
4083 goto egress;
4084 fn = cg_edge->callee->decl;
4085 gcc_checking_assert (fn);
4087 /* If FN is a declaration of a function in a nested scope that was
4088 globally declared inline, we don't set its DECL_INITIAL.
4089 However, we can't blindly follow DECL_ABSTRACT_ORIGIN because the
4090 C++ front-end uses it for cdtors to refer to their internal
4091 declarations, that are not real functions. Fortunately those
4092 don't have trees to be saved, so we can tell by checking their
4093 gimple_body. */
4094 if (!DECL_INITIAL (fn)
4095 && DECL_ABSTRACT_ORIGIN (fn)
4096 && gimple_has_body_p (DECL_ABSTRACT_ORIGIN (fn)))
4097 fn = DECL_ABSTRACT_ORIGIN (fn);
4099 /* Don't try to inline functions that are not well-suited to inlining. */
4100 if (cg_edge->inline_failed)
4102 reason = cg_edge->inline_failed;
4103 /* If this call was originally indirect, we do not want to emit any
4104 inlining related warnings or sorry messages because there are no
4105 guarantees regarding those. */
4106 if (cg_edge->indirect_inlining_edge)
4107 goto egress;
4109 if (lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn))
4110 /* For extern inline functions that get redefined we always
4111 silently ignore the always_inline flag. Better behaviour would
4112 be to be able to keep both bodies and use the extern inline body
4113 for inlining, but we can't do that because frontends overwrite
4114 the body. */
4115 && !cg_edge->callee->local.redefined_extern_inline
4116 /* During early inline pass, report only when optimization is
4117 not turned on. */
4118 && (cgraph_global_info_ready
4119 || !optimize
4120 || cgraph_inline_failed_type (reason) == CIF_FINAL_ERROR)
4121 /* PR 20090218-1_0.c. Body can be provided by another module. */
4122 && (reason != CIF_BODY_NOT_AVAILABLE || !flag_generate_lto))
4124 error ("inlining failed in call to always_inline %q+F: %s", fn,
4125 cgraph_inline_failed_string (reason));
4126 error ("called from here");
4128 else if (warn_inline
4129 && DECL_DECLARED_INLINE_P (fn)
4130 && !DECL_NO_INLINE_WARNING_P (fn)
4131 && !DECL_IN_SYSTEM_HEADER (fn)
4132 && reason != CIF_UNSPECIFIED
4133 && !lookup_attribute ("noinline", DECL_ATTRIBUTES (fn))
4134 /* Do not warn about recursive calls that are not inlined. */
4135 && !cgraph_edge_recursive_p (cg_edge)
4136 /* Avoid warnings during early inline pass. */
4137 && cgraph_global_info_ready)
4139 warning (OPT_Winline, "inlining failed in call to %q+F: %s",
4140 fn, _(cgraph_inline_failed_string (reason)));
4141 warning (OPT_Winline, "called from here");
4143 goto egress;
4145 fn = cg_edge->callee->decl;
4146 cgraph_get_body (cg_edge->callee);
4148 #ifdef ENABLE_CHECKING
4149 if (cg_edge->callee->decl != id->dst_node->decl)
4150 verify_cgraph_node (cg_edge->callee);
4151 #endif
4153 /* We will be inlining this callee. */
4154 id->eh_lp_nr = lookup_stmt_eh_lp (stmt);
4156 /* Update the caller's EH personality. */
4157 if (DECL_FUNCTION_PERSONALITY (cg_edge->callee->decl))
4158 DECL_FUNCTION_PERSONALITY (cg_edge->caller->decl)
4159 = DECL_FUNCTION_PERSONALITY (cg_edge->callee->decl);
4161 /* Split the block holding the GIMPLE_CALL. */
4162 e = split_block (bb, stmt);
4163 bb = e->src;
4164 return_block = e->dest;
4165 remove_edge (e);
4167 /* split_block splits after the statement; work around this by
4168 moving the call into the second block manually. Not pretty,
4169 but seems easier than doing the CFG manipulation by hand
4170 when the GIMPLE_CALL is in the last statement of BB. */
4171 stmt_gsi = gsi_last_bb (bb);
4172 gsi_remove (&stmt_gsi, false);
4174 /* If the GIMPLE_CALL was in the last statement of BB, it may have
4175 been the source of abnormal edges. In this case, schedule
4176 the removal of dead abnormal edges. */
4177 gsi = gsi_start_bb (return_block);
4178 if (gsi_end_p (gsi))
4180 gsi_insert_after (&gsi, stmt, GSI_NEW_STMT);
4181 purge_dead_abnormal_edges = true;
4183 else
4185 gsi_insert_before (&gsi, stmt, GSI_NEW_STMT);
4186 purge_dead_abnormal_edges = false;
4189 stmt_gsi = gsi_start_bb (return_block);
4191 /* Build a block containing code to initialize the arguments, the
4192 actual inline expansion of the body, and a label for the return
4193 statements within the function to jump to. The type of the
4194 statement expression is the return type of the function call.
4195 ??? If the call does not have an associated block then we will
4196 remap all callee blocks to NULL, effectively dropping most of
4197 its debug information. This should only happen for calls to
4198 artificial decls inserted by the compiler itself. We need to
4199 either link the inlined blocks into the caller block tree or
4200 not refer to them in any way to not break GC for locations. */
4201 if (gimple_block (stmt))
4203 id->block = make_node (BLOCK);
4204 BLOCK_ABSTRACT_ORIGIN (id->block) = fn;
4205 BLOCK_SOURCE_LOCATION (id->block) = LOCATION_LOCUS (input_location);
4206 prepend_lexical_block (gimple_block (stmt), id->block);
4209 /* Local declarations will be replaced by their equivalents in this
4210 map. */
4211 st = id->decl_map;
4212 id->decl_map = pointer_map_create ();
4213 dst = id->debug_map;
4214 id->debug_map = NULL;
4216 /* Record the function we are about to inline. */
4217 id->src_fn = fn;
4218 id->src_node = cg_edge->callee;
4219 id->src_cfun = DECL_STRUCT_FUNCTION (fn);
4220 id->gimple_call = stmt;
4222 gcc_assert (!id->src_cfun->after_inlining);
4224 id->entry_bb = bb;
4225 if (lookup_attribute ("cold", DECL_ATTRIBUTES (fn)))
4227 gimple_stmt_iterator si = gsi_last_bb (bb);
4228 gsi_insert_after (&si, gimple_build_predict (PRED_COLD_FUNCTION,
4229 NOT_TAKEN),
4230 GSI_NEW_STMT);
4232 initialize_inlined_parameters (id, stmt, fn, bb);
4234 if (DECL_INITIAL (fn))
4236 if (gimple_block (stmt))
4238 tree *var;
4240 prepend_lexical_block (id->block,
4241 remap_blocks (DECL_INITIAL (fn), id));
4242 gcc_checking_assert (BLOCK_SUBBLOCKS (id->block)
4243 && (BLOCK_CHAIN (BLOCK_SUBBLOCKS (id->block))
4244 == NULL_TREE));
4245 /* Move vars for PARM_DECLs from the DECL_INITIAL block to id->block;
4246 otherwise, in DWARF, the DW_TAG_formal_parameter DIEs will not be children
4247 of DW_TAG_inlined_subroutine, but of a DW_TAG_lexical_block
4248 under it.  The parameters can then be evaluated in the debugger,
4249 but don't show up in backtraces. */
4250 for (var = &BLOCK_VARS (BLOCK_SUBBLOCKS (id->block)); *var; )
4251 if (TREE_CODE (DECL_ORIGIN (*var)) == PARM_DECL)
4253 tree v = *var;
4254 *var = TREE_CHAIN (v);
4255 TREE_CHAIN (v) = BLOCK_VARS (id->block);
4256 BLOCK_VARS (id->block) = v;
4258 else
4259 var = &TREE_CHAIN (*var);
4261 else
4262 remap_blocks_to_null (DECL_INITIAL (fn), id);
4265 /* Return statements in the function body will be replaced by jumps
4266 to the RET_LABEL. */
4267 gcc_assert (DECL_INITIAL (fn));
4268 gcc_assert (TREE_CODE (DECL_INITIAL (fn)) == BLOCK);
4270 /* Find the LHS to which the result of this call is assigned. */
4271 return_slot = NULL;
4272 if (gimple_call_lhs (stmt))
4274 modify_dest = gimple_call_lhs (stmt);
4276 /* The function which we are inlining might not return a value,
4277 in which case we should issue a warning that the function
4278 does not return a value. In that case the optimizers will
4279 see that the variable to which the value is assigned was not
4280 initialized. We do not want to issue a warning about that
4281 uninitialized variable. */
4282 if (DECL_P (modify_dest))
4283 TREE_NO_WARNING (modify_dest) = 1;
4285 if (gimple_call_return_slot_opt_p (stmt))
4287 return_slot = modify_dest;
4288 modify_dest = NULL;
4291 else
4292 modify_dest = NULL;
4294 /* If we are inlining a call to the C++ operator new, we don't want
4295 to use type based alias analysis on the return value. Otherwise
4296 we may get confused if the compiler sees that the inlined new
4297 function returns a pointer which was just deleted. See bug
4298 33407. */
4299 if (DECL_IS_OPERATOR_NEW (fn))
4301 return_slot = NULL;
4302 modify_dest = NULL;
4305 /* Declare the return variable for the function. */
4306 use_retvar = declare_return_variable (id, return_slot, modify_dest, bb);
4308 /* Add local vars in this inlined callee to caller. */
4309 add_local_variables (id->src_cfun, cfun, id);
4311 if (dump_file && (dump_flags & TDF_DETAILS))
4313 fprintf (dump_file, "Inlining ");
4314 print_generic_expr (dump_file, id->src_fn, 0);
4315 fprintf (dump_file, " to ");
4316 print_generic_expr (dump_file, id->dst_fn, 0);
4317 fprintf (dump_file, " with frequency %i\n", cg_edge->frequency);
4320 /* This is it. Duplicate the callee body. Assume callee is
4321 pre-gimplified. Note that we must not alter the caller
4322 function in any way before this point, as this CALL_EXPR may be
4323 a self-referential call; if we're calling ourselves, we need to
4324 duplicate our body before altering anything. */
4325 copy_body (id, bb->count,
4326 GCOV_COMPUTE_SCALE (cg_edge->frequency, CGRAPH_FREQ_BASE),
4327 bb, return_block, NULL);
4329 /* Reset the escaped solution. */
4330 if (cfun->gimple_df)
4331 pt_solution_reset (&cfun->gimple_df->escaped);
4333 /* Clean up. */
4334 if (id->debug_map)
4336 pointer_map_destroy (id->debug_map);
4337 id->debug_map = dst;
4339 pointer_map_destroy (id->decl_map);
4340 id->decl_map = st;
4342 /* Unlink the call's virtual operands before replacing it. */
4343 unlink_stmt_vdef (stmt);
4345 /* If the inlined function returns a result that we care about,
4346 substitute the GIMPLE_CALL with an assignment of the return
4347 variable to the LHS of the call. That is, if STMT was
4348 'a = foo (...)', substitute the call with 'a = USE_RETVAR'. */
4349 if (use_retvar && gimple_call_lhs (stmt))
4351 gimple old_stmt = stmt;
4352 stmt = gimple_build_assign (gimple_call_lhs (stmt), use_retvar);
4353 gsi_replace (&stmt_gsi, stmt, false);
4354 maybe_clean_or_replace_eh_stmt (old_stmt, stmt);
4356 else
4358 /* Handle the case of inlining a function with no return
4359 statement, which causes the return value to become undefined. */
4360 if (gimple_call_lhs (stmt)
4361 && TREE_CODE (gimple_call_lhs (stmt)) == SSA_NAME)
4363 tree name = gimple_call_lhs (stmt);
4364 tree var = SSA_NAME_VAR (name);
4365 tree def = ssa_default_def (cfun, var);
4367 if (def)
4369 /* If the variable is used undefined, make this name
4370 undefined via a move. */
4371 stmt = gimple_build_assign (gimple_call_lhs (stmt), def);
4372 gsi_replace (&stmt_gsi, stmt, true);
4374 else
4376 /* Otherwise make this variable undefined. */
4377 gsi_remove (&stmt_gsi, true);
4378 set_ssa_default_def (cfun, var, name);
4379 SSA_NAME_DEF_STMT (name) = gimple_build_nop ();
4382 else
4383 gsi_remove (&stmt_gsi, true);
4386 if (purge_dead_abnormal_edges)
4388 gimple_purge_dead_eh_edges (return_block);
4389 gimple_purge_dead_abnormal_call_edges (return_block);
4392 /* If the value of the new expression is ignored, that's OK. We
4393 don't warn about this for CALL_EXPRs, so we shouldn't warn about
4394 the equivalent inlined version either. */
4395 if (is_gimple_assign (stmt))
4397 gcc_assert (gimple_assign_single_p (stmt)
4398 || CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt)));
4399 TREE_USED (gimple_assign_rhs1 (stmt)) = 1;
4402 /* Output the inlining info for this abstract function, since it has been
4403 inlined. If we don't do this now, we can lose the information about the
4404 variables in the function when the blocks get blown away as soon as we
4405 remove the cgraph node. */
4406 if (gimple_block (stmt))
4407 (*debug_hooks->outlining_inline_function) (cg_edge->callee->decl);
4409 /* Update callgraph if needed. */
4410 cgraph_remove_node (cg_edge->callee);
4412 id->block = NULL_TREE;
4413 successfully_inlined = TRUE;
4415 egress:
4416 input_location = saved_location;
4417 return successfully_inlined;
4420 /* Expand call statements reachable from STMT_P.
4421 We can only have CALL_EXPRs as the "toplevel" tree code or nested
4422 in a MODIFY_EXPR. */
4424 static bool
4425 gimple_expand_calls_inline (basic_block bb, copy_body_data *id)
4427 gimple_stmt_iterator gsi;
4429 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
4431 gimple stmt = gsi_stmt (gsi);
4433 if (is_gimple_call (stmt)
4434 && !gimple_call_internal_p (stmt)
4435 && expand_call_inline (bb, stmt, id))
4436 return true;
4439 return false;
4443 /* Walk all basic blocks created after FIRST and try to fold every statement
4444 in the STATEMENTS pointer set. */
4446 static void
4447 fold_marked_statements (int first, struct pointer_set_t *statements)
4449 for (; first < n_basic_blocks_for_fn (cfun); first++)
4450 if (BASIC_BLOCK_FOR_FN (cfun, first))
4452 gimple_stmt_iterator gsi;
4454 for (gsi = gsi_start_bb (BASIC_BLOCK_FOR_FN (cfun, first));
4455 !gsi_end_p (gsi);
4456 gsi_next (&gsi))
4457 if (pointer_set_contains (statements, gsi_stmt (gsi)))
4459 gimple old_stmt = gsi_stmt (gsi);
4460 tree old_decl = is_gimple_call (old_stmt) ? gimple_call_fndecl (old_stmt) : 0;
4462 if (old_decl && DECL_BUILT_IN (old_decl))
4464 /* Folding builtins can create multiple instructions,
4465 so we need to look at all of them. */
4466 gimple_stmt_iterator i2 = gsi;
4467 gsi_prev (&i2);
4468 if (fold_stmt (&gsi))
4470 gimple new_stmt;
4471 /* If a builtin at the end of a bb folded into nothing,
4472 the following loop won't work. */
4473 if (gsi_end_p (gsi))
4475 cgraph_update_edges_for_call_stmt (old_stmt,
4476 old_decl, NULL);
4477 break;
4479 if (gsi_end_p (i2))
4480 i2 = gsi_start_bb (BASIC_BLOCK_FOR_FN (cfun, first));
4481 else
4482 gsi_next (&i2);
4483 while (1)
4485 new_stmt = gsi_stmt (i2);
4486 update_stmt (new_stmt);
4487 cgraph_update_edges_for_call_stmt (old_stmt, old_decl,
4488 new_stmt);
4490 if (new_stmt == gsi_stmt (gsi))
4492 /* It is okay to check only the very last
4493 of these statements.  If it is a throwing
4494 statement, nothing will change.  If it isn't,
4495 this can only remove EH edges; for that to be
4496 wrong, some intermediate statement would have to
4497 throw while the last one does not, and that
4498 would mean we'd have to split the block, which
4499 we can't do here, so we'd lose anyway.  And as
4500 builtins probably never throw, this all
4501 is moot anyway.  */
4502 if (maybe_clean_or_replace_eh_stmt (old_stmt,
4503 new_stmt))
4504 gimple_purge_dead_eh_edges (
4505 BASIC_BLOCK_FOR_FN (cfun, first));
4506 break;
4508 gsi_next (&i2);
4512 else if (fold_stmt (&gsi))
4514 /* Re-read the statement from GSI as fold_stmt() may
4515 have changed it. */
4516 gimple new_stmt = gsi_stmt (gsi);
4517 update_stmt (new_stmt);
4519 if (is_gimple_call (old_stmt)
4520 || is_gimple_call (new_stmt))
4521 cgraph_update_edges_for_call_stmt (old_stmt, old_decl,
4522 new_stmt);
4524 if (maybe_clean_or_replace_eh_stmt (old_stmt, new_stmt))
4525 gimple_purge_dead_eh_edges (BASIC_BLOCK_FOR_FN (cfun,
4526 first));
4532 /* Expand calls to inline functions in the body of FN. */
4534 unsigned int
4535 optimize_inline_calls (tree fn)
4537 copy_body_data id;
4538 basic_block bb;
4539 int last = n_basic_blocks_for_fn (cfun);
4540 bool inlined_p = false;
4542 /* Clear out ID. */
4543 memset (&id, 0, sizeof (id));
4545 id.src_node = id.dst_node = cgraph_get_node (fn);
4546 gcc_assert (id.dst_node->definition);
4547 id.dst_fn = fn;
4548 /* Or any functions that aren't finished yet. */
4549 if (current_function_decl)
4550 id.dst_fn = current_function_decl;
4552 id.copy_decl = copy_decl_maybe_to_var;
4553 id.transform_call_graph_edges = CB_CGE_DUPLICATE;
4554 id.transform_new_cfg = false;
4555 id.transform_return_to_modify = true;
4556 id.transform_parameter = true;
4557 id.transform_lang_insert_block = NULL;
4558 id.statements_to_fold = pointer_set_create ();
4560 push_gimplify_context ();
4562 /* We make no attempts to keep dominance info up-to-date. */
4563 free_dominance_info (CDI_DOMINATORS);
4564 free_dominance_info (CDI_POST_DOMINATORS);
4566 /* Register specific gimple functions. */
4567 gimple_register_cfg_hooks ();
4569 /* Reach the trees by walking over the CFG, and note the
4570 enclosing basic-blocks in the call edges. */
4571 /* We walk the blocks going forward, because inlined function bodies
4572 will split id->current_basic_block, and the new blocks will
4573 follow it; we'll trudge through them, processing their CALL_EXPRs
4574 along the way. */
4575 FOR_EACH_BB_FN (bb, cfun)
4576 inlined_p |= gimple_expand_calls_inline (bb, &id);
4578 pop_gimplify_context (NULL);
4580 #ifdef ENABLE_CHECKING
4582 struct cgraph_edge *e;
4584 verify_cgraph_node (id.dst_node);
4586 /* Double check that we inlined everything we are supposed to inline. */
4587 for (e = id.dst_node->callees; e; e = e->next_callee)
4588 gcc_assert (e->inline_failed);
4590 #endif
4592 /* Fold queued statements. */
4593 fold_marked_statements (last, id.statements_to_fold);
4594 pointer_set_destroy (id.statements_to_fold);
4596 gcc_assert (!id.debug_stmts.exists ());
4598 /* If we didn't inline into the function there is nothing to do. */
4599 if (!inlined_p)
4600 return 0;
4602 /* Renumber the lexical scoping (non-code) blocks consecutively. */
4603 number_blocks (fn);
4605 delete_unreachable_blocks_update_callgraph (&id);
4606 #ifdef ENABLE_CHECKING
4607 verify_cgraph_node (id.dst_node);
4608 #endif
4610 /* It would be nice to check SSA/CFG/statement consistency here, but it is
4611 not possible yet - the IPA passes might make various functions non-throwing
4612 without bothering to proactively update local EH info.  This is
4613 done later in the fixup_cfg pass, which also executes the verification. */
4614 return (TODO_update_ssa
4615 | TODO_cleanup_cfg
4616 | (gimple_in_ssa_p (cfun) ? TODO_remove_unused_locals : 0)
4617 | (gimple_in_ssa_p (cfun) ? TODO_update_address_taken : 0)
4618 | (profile_status_for_fn (cfun) != PROFILE_ABSENT
4619 ? TODO_rebuild_frequencies : 0));
4622 /* Passed to walk_tree. Copies the node pointed to, if appropriate. */
4624 tree
4625 copy_tree_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
4627 enum tree_code code = TREE_CODE (*tp);
4628 enum tree_code_class cl = TREE_CODE_CLASS (code);
4630 /* We make copies of most nodes. */
4631 if (IS_EXPR_CODE_CLASS (cl)
4632 || code == TREE_LIST
4633 || code == TREE_VEC
4634 || code == TYPE_DECL
4635 || code == OMP_CLAUSE)
4637 /* Because the chain gets clobbered when we make a copy, we save it
4638 here. */
4639 tree chain = NULL_TREE, new_tree;
4641 if (CODE_CONTAINS_STRUCT (code, TS_COMMON))
4642 chain = TREE_CHAIN (*tp);
4644 /* Copy the node. */
4645 new_tree = copy_node (*tp);
4647 *tp = new_tree;
4649 /* Now, restore the chain, if appropriate. That will cause
4650 walk_tree to walk into the chain as well. */
4651 if (code == PARM_DECL
4652 || code == TREE_LIST
4653 || code == OMP_CLAUSE)
4654 TREE_CHAIN (*tp) = chain;
4656 /* For now, we don't update BLOCKs when we make copies. So, we
4657 have to nullify all BIND_EXPRs. */
4658 if (TREE_CODE (*tp) == BIND_EXPR)
4659 BIND_EXPR_BLOCK (*tp) = NULL_TREE;
4661 else if (code == CONSTRUCTOR)
4663 /* CONSTRUCTOR nodes need special handling because
4664 we need to duplicate the vector of elements. */
4665 tree new_tree;
4667 new_tree = copy_node (*tp);
4668 CONSTRUCTOR_ELTS (new_tree) = vec_safe_copy (CONSTRUCTOR_ELTS (*tp));
4669 *tp = new_tree;
4671 else if (code == STATEMENT_LIST)
4672 /* We used to just abort on STATEMENT_LIST, but we can run into them
4673 with statement-expressions (c++/40975). */
4674 copy_statement_list (tp);
4675 else if (TREE_CODE_CLASS (code) == tcc_type)
4676 *walk_subtrees = 0;
4677 else if (TREE_CODE_CLASS (code) == tcc_declaration)
4678 *walk_subtrees = 0;
4679 else if (TREE_CODE_CLASS (code) == tcc_constant)
4680 *walk_subtrees = 0;
4681 return NULL_TREE;
4684 /* The SAVE_EXPR pointed to by TP is being copied. If ST contains
4685 information indicating to what new SAVE_EXPR this one should be mapped,
4686 use that one. Otherwise, create a new node and enter it in ST. FN is
4687 the function into which the copy will be placed. */
4689 static void
4690 remap_save_expr (tree *tp, void *st_, int *walk_subtrees)
4692 struct pointer_map_t *st = (struct pointer_map_t *) st_;
4693 tree *n;
4694 tree t;
4696 /* See if we already encountered this SAVE_EXPR. */
4697 n = (tree *) pointer_map_contains (st, *tp);
4699 /* If we didn't already remap this SAVE_EXPR, do so now. */
4700 if (!n)
4702 t = copy_node (*tp);
4704 /* Remember this SAVE_EXPR. */
4705 *pointer_map_insert (st, *tp) = t;
4706 /* Make sure we don't remap an already-remapped SAVE_EXPR. */
4707 *pointer_map_insert (st, t) = t;
4709 else
4711 /* We've already walked into this SAVE_EXPR; don't do it again. */
4712 *walk_subtrees = 0;
4713 t = *n;
4716 /* Replace this SAVE_EXPR with the copy. */
4717 *tp = t;
4720 /* Called via walk_gimple_seq. If *GSIP points to a GIMPLE_LABEL for a local
4721 label, copies the declaration and enters it in the splay_tree in DATA (which
4722 is really a 'copy_body_data *'). */
4724 static tree
4725 mark_local_labels_stmt (gimple_stmt_iterator *gsip,
4726 bool *handled_ops_p ATTRIBUTE_UNUSED,
4727 struct walk_stmt_info *wi)
4729 copy_body_data *id = (copy_body_data *) wi->info;
4730 gimple stmt = gsi_stmt (*gsip);
4732 if (gimple_code (stmt) == GIMPLE_LABEL)
4734 tree decl = gimple_label_label (stmt);
4736 /* Copy the decl and remember the copy. */
4737 insert_decl_map (id, decl, id->copy_decl (decl, id));
4740 return NULL_TREE;
4744 /* Called via walk_gimple_seq by copy_gimple_seq_and_replace_locals.
4745 Using the splay_tree pointed to by ST (which is really a `splay_tree'),
4746 remaps all local declarations to appropriate replacements in gimple
4747 operands. */
4749 static tree
4750 replace_locals_op (tree *tp, int *walk_subtrees, void *data)
4752 struct walk_stmt_info *wi = (struct walk_stmt_info*) data;
4753 copy_body_data *id = (copy_body_data *) wi->info;
4754 struct pointer_map_t *st = id->decl_map;
4755 tree *n;
4756 tree expr = *tp;
4758 /* Only a local declaration (variable or label). */
4759 if ((TREE_CODE (expr) == VAR_DECL
4760 && !TREE_STATIC (expr))
4761 || TREE_CODE (expr) == LABEL_DECL)
4763 /* Lookup the declaration. */
4764 n = (tree *) pointer_map_contains (st, expr);
4766 /* If it's there, remap it. */
4767 if (n)
4768 *tp = *n;
4769 *walk_subtrees = 0;
4771 else if (TREE_CODE (expr) == STATEMENT_LIST
4772 || TREE_CODE (expr) == BIND_EXPR
4773 || TREE_CODE (expr) == SAVE_EXPR)
4774 gcc_unreachable ();
4775 else if (TREE_CODE (expr) == TARGET_EXPR)
4777 /* Don't mess with a TARGET_EXPR that hasn't been expanded.
4778 It's OK for this to happen if it was part of a subtree that
4779 isn't immediately expanded, such as operand 2 of another
4780 TARGET_EXPR. */
4781 if (!TREE_OPERAND (expr, 1))
4783 TREE_OPERAND (expr, 1) = TREE_OPERAND (expr, 3);
4784 TREE_OPERAND (expr, 3) = NULL_TREE;
4788 /* Keep iterating. */
4789 return NULL_TREE;
4793 /* Called via walk_gimple_seq by copy_gimple_seq_and_replace_locals.
4794 Using the splay_tree pointed to by ST (which is really a `splay_tree'),
4795 remaps all local declarations to appropriate replacements in gimple
4796 statements. */
4798 static tree
4799 replace_locals_stmt (gimple_stmt_iterator *gsip,
4800 bool *handled_ops_p ATTRIBUTE_UNUSED,
4801 struct walk_stmt_info *wi)
4803 copy_body_data *id = (copy_body_data *) wi->info;
4804 gimple stmt = gsi_stmt (*gsip);
4806 if (gimple_code (stmt) == GIMPLE_BIND)
4808 tree block = gimple_bind_block (stmt);
4810 if (block)
4812 remap_block (&block, id);
4813 gimple_bind_set_block (stmt, block);
4816 /* This will remap a lot of the same decls again, but this should be
4817 harmless. */
4818 if (gimple_bind_vars (stmt))
4819 gimple_bind_set_vars (stmt, remap_decls (gimple_bind_vars (stmt),
4820 NULL, id));
4823 /* Keep iterating. */
4824 return NULL_TREE;
4828 /* Copies everything in SEQ and replaces variables and labels local to
4829 current_function_decl. */
4831 gimple_seq
4832 copy_gimple_seq_and_replace_locals (gimple_seq seq)
4834 copy_body_data id;
4835 struct walk_stmt_info wi;
4836 struct pointer_set_t *visited;
4837 gimple_seq copy;
4839 /* There's nothing to do for NULL_TREE. */
4840 if (seq == NULL)
4841 return seq;
4843 /* Set up ID. */
4844 memset (&id, 0, sizeof (id));
4845 id.src_fn = current_function_decl;
4846 id.dst_fn = current_function_decl;
4847 id.decl_map = pointer_map_create ();
4848 id.debug_map = NULL;
4850 id.copy_decl = copy_decl_no_change;
4851 id.transform_call_graph_edges = CB_CGE_DUPLICATE;
4852 id.transform_new_cfg = false;
4853 id.transform_return_to_modify = false;
4854 id.transform_parameter = false;
4855 id.transform_lang_insert_block = NULL;
4857 /* Walk the tree once to find local labels. */
4858 memset (&wi, 0, sizeof (wi));
4859 visited = pointer_set_create ();
4860 wi.info = &id;
4861 wi.pset = visited;
4862 walk_gimple_seq (seq, mark_local_labels_stmt, NULL, &wi);
4863 pointer_set_destroy (visited);
4865 copy = gimple_seq_copy (seq);
4867 /* Walk the copy, remapping decls. */
4868 memset (&wi, 0, sizeof (wi));
4869 wi.info = &id;
4870 walk_gimple_seq (copy, replace_locals_stmt, replace_locals_op, &wi);
4872 /* Clean up. */
4873 pointer_map_destroy (id.decl_map);
4874 if (id.debug_map)
4875 pointer_map_destroy (id.debug_map);
4877 return copy;
4881 /* Allow someone to determine if SEARCH is a child of TOP from gdb. */
4883 static tree
4884 debug_find_tree_1 (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED, void *data)
4886 if (*tp == data)
4887 return (tree) data;
4888 else
4889 return NULL;
4892 DEBUG_FUNCTION bool
4893 debug_find_tree (tree top, tree search)
4895 return walk_tree_without_duplicates (&top, debug_find_tree_1, search) != 0;
4899 /* Declare the variables created by the inliner. Add all the variables in
4900 VARS to BIND_EXPR. */
4902 static void
4903 declare_inline_vars (tree block, tree vars)
4905 tree t;
4906 for (t = vars; t; t = DECL_CHAIN (t))
4908 DECL_SEEN_IN_BIND_EXPR_P (t) = 1;
4909 gcc_assert (!TREE_STATIC (t) && !TREE_ASM_WRITTEN (t));
4910 add_local_decl (cfun, t);
4913 if (block)
4914 BLOCK_VARS (block) = chainon (BLOCK_VARS (block), vars);
4917 /* Copy NODE (which must be a DECL). The DECL originally was in the FROM_FN,
4918 but now it will be in the TO_FN. PARM_TO_VAR means enable PARM_DECL to
4919 VAR_DECL translation. */
4921 static tree
4922 copy_decl_for_dup_finish (copy_body_data *id, tree decl, tree copy)
4924 /* Don't generate debug information for the copy if we wouldn't have
4925 generated it for the original either. */
4926 DECL_ARTIFICIAL (copy) = DECL_ARTIFICIAL (decl);
4927 DECL_IGNORED_P (copy) = DECL_IGNORED_P (decl);
4929 /* Set the DECL_ABSTRACT_ORIGIN so the debugging routines know what
4930 declaration inspired this copy. */
4931 DECL_ABSTRACT_ORIGIN (copy) = DECL_ORIGIN (decl);
4933 /* The new variable/label has no RTL, yet. */
4934 if (CODE_CONTAINS_STRUCT (TREE_CODE (copy), TS_DECL_WRTL)
4935 && !TREE_STATIC (copy) && !DECL_EXTERNAL (copy))
4936 SET_DECL_RTL (copy, 0);
4938 /* These args would always appear unused, if not for this. */
4939 TREE_USED (copy) = 1;
4941 /* Set the context for the new declaration. */
4942 if (!DECL_CONTEXT (decl))
4943 /* Globals stay global. */
4945 else if (DECL_CONTEXT (decl) != id->src_fn)
4946 /* Things that weren't in the scope of the function we're inlining
4947 from aren't in the scope we're inlining to, either. */
4949 else if (TREE_STATIC (decl))
4950 /* Function-scoped static variables should stay in the original
4951 function. */
4953 else
4954 /* Ordinary automatic local variables are now in the scope of the
4955 new function. */
4956 DECL_CONTEXT (copy) = id->dst_fn;
4958 return copy;
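
/* Copy the PARM_DECL or RESULT_DECL DECL into a VAR_DECL of the same type,
   for use in the function we are copying into.  */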
4961 static tree
4962 copy_decl_to_var (tree decl, copy_body_data *id)
4964 tree copy, type;
4966 gcc_assert (TREE_CODE (decl) == PARM_DECL
4967 || TREE_CODE (decl) == RESULT_DECL);
4969 type = TREE_TYPE (decl);
4971 copy = build_decl (DECL_SOURCE_LOCATION (id->dst_fn),
4972 VAR_DECL, DECL_NAME (decl), type);
4973 if (DECL_PT_UID_SET_P (decl))
4974 SET_DECL_PT_UID (copy, DECL_PT_UID (decl));
4975 TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (decl);
4976 TREE_READONLY (copy) = TREE_READONLY (decl);
4977 TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (decl);
4978 DECL_GIMPLE_REG_P (copy) = DECL_GIMPLE_REG_P (decl);
4980 return copy_decl_for_dup_finish (id, decl, copy);
4983 /* Like copy_decl_to_var, but create a return slot object instead of a
4984 pointer variable for return by invisible reference. */
4986 static tree
4987 copy_result_decl_to_var (tree decl, copy_body_data *id)
4989 tree copy, type;
4991 gcc_assert (TREE_CODE (decl) == PARM_DECL
4992 || TREE_CODE (decl) == RESULT_DECL);
4994 type = TREE_TYPE (decl);
4995 if (DECL_BY_REFERENCE (decl))
4996 type = TREE_TYPE (type);
4998 copy = build_decl (DECL_SOURCE_LOCATION (id->dst_fn),
4999 VAR_DECL, DECL_NAME (decl), type);
5000 if (DECL_PT_UID_SET_P (decl))
5001 SET_DECL_PT_UID (copy, DECL_PT_UID (decl));
5002 TREE_READONLY (copy) = TREE_READONLY (decl);
5003 TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (decl);
5004 if (!DECL_BY_REFERENCE (decl))
5006 TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (decl);
5007 DECL_GIMPLE_REG_P (copy) = DECL_GIMPLE_REG_P (decl);
5010 return copy_decl_for_dup_finish (id, decl, copy);
5013 tree
5014 copy_decl_no_change (tree decl, copy_body_data *id)
5016 tree copy;
5018 copy = copy_node (decl);
5020 /* The COPY is not abstract; it will be generated in DST_FN. */
5021 DECL_ABSTRACT (copy) = 0;
5022 lang_hooks.dup_lang_specific_decl (copy);
5024 /* TREE_ADDRESSABLE isn't used to indicate that a label's address has
5025 been taken; it's for internal bookkeeping in expand_goto_internal. */
5026 if (TREE_CODE (copy) == LABEL_DECL)
5028 TREE_ADDRESSABLE (copy) = 0;
5029 LABEL_DECL_UID (copy) = -1;
5032 return copy_decl_for_dup_finish (id, decl, copy);
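
/* Copy DECL for the destination function: PARM_DECLs and RESULT_DECLs are
   turned into VAR_DECLs via copy_decl_to_var, everything else is copied
   unchanged.  */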
5035 static tree
5036 copy_decl_maybe_to_var (tree decl, copy_body_data *id)
5038 if (TREE_CODE (decl) == PARM_DECL || TREE_CODE (decl) == RESULT_DECL)
5039 return copy_decl_to_var (decl, id);
5040 else
5041 return copy_decl_no_change (decl, id);
5044 /* Return a copy of the function's argument tree. */
5045 static tree
5046 copy_arguments_for_versioning (tree orig_parm, copy_body_data * id,
5047 bitmap args_to_skip, tree *vars)
5049 tree arg, *parg;
5050 tree new_parm = NULL;
5051 int i = 0;
5053 parg = &new_parm;
5055 for (arg = orig_parm; arg; arg = DECL_CHAIN (arg), i++)
5056 if (!args_to_skip || !bitmap_bit_p (args_to_skip, i))
5058 tree new_tree = remap_decl (arg, id);
5059 if (TREE_CODE (new_tree) != PARM_DECL)
5060 new_tree = id->copy_decl (arg, id);
5061 lang_hooks.dup_lang_specific_decl (new_tree);
5062 *parg = new_tree;
5063 parg = &DECL_CHAIN (new_tree);
5065 else if (!pointer_map_contains (id->decl_map, arg))
5067 /* Make an equivalent VAR_DECL.  If the argument was used
5068 as a temporary variable later in the function, the uses will be
5069 replaced by this local variable. */
5070 tree var = copy_decl_to_var (arg, id);
5071 insert_decl_map (id, arg, var);
5072 /* Declare this new variable. */
5073 DECL_CHAIN (var) = *vars;
5074 *vars = var;
5076 return new_parm;
5079 /* Return a copy of the function's static chain. */
5080 static tree
5081 copy_static_chain (tree static_chain, copy_body_data * id)
5083 tree *chain_copy, *pvar;
5085 chain_copy = &static_chain;
5086 for (pvar = chain_copy; *pvar; pvar = &DECL_CHAIN (*pvar))
5088 tree new_tree = remap_decl (*pvar, id);
5089 lang_hooks.dup_lang_specific_decl (new_tree);
5090 DECL_CHAIN (new_tree) = DECL_CHAIN (*pvar);
5091 *pvar = new_tree;
5093 return static_chain;
5096 /* Return true if the function is allowed to be versioned.
5097 This is a guard for the versioning functionality. */
5099 bool
5100 tree_versionable_function_p (tree fndecl)
5102 return (!lookup_attribute ("noclone", DECL_ATTRIBUTES (fndecl))
5103 && copy_forbidden (DECL_STRUCT_FUNCTION (fndecl), fndecl) == NULL);
5106 /* Delete all unreachable basic blocks and update callgraph.
5107 Doing so is somewhat nontrivial because we need to update all clones and
5108 remove inline functions that become unreachable. */
5110 static bool
5111 delete_unreachable_blocks_update_callgraph (copy_body_data *id)
5113 bool changed = false;
5114 basic_block b, next_bb;
5116 find_unreachable_blocks ();
5118 /* Delete all unreachable basic blocks. */
5120 for (b = ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb; b
5121 != EXIT_BLOCK_PTR_FOR_FN (cfun); b = next_bb)
5123 next_bb = b->next_bb;
5125 if (!(b->flags & BB_REACHABLE))
5127 gimple_stmt_iterator bsi;
5129 for (bsi = gsi_start_bb (b); !gsi_end_p (bsi); gsi_next (&bsi))
5131 struct cgraph_edge *e;
5132 struct cgraph_node *node;
5134 ipa_remove_stmt_references (id->dst_node, gsi_stmt (bsi));
5136 if (gimple_code (gsi_stmt (bsi)) == GIMPLE_CALL
5137 &&(e = cgraph_edge (id->dst_node, gsi_stmt (bsi))) != NULL)
5139 if (!e->inline_failed)
5140 cgraph_remove_node_and_inline_clones (e->callee, id->dst_node);
5141 else
5142 cgraph_remove_edge (e);
5144 if (id->transform_call_graph_edges == CB_CGE_MOVE_CLONES
5145 && id->dst_node->clones)
5146 for (node = id->dst_node->clones; node != id->dst_node;)
5148 ipa_remove_stmt_references (node, gsi_stmt (bsi));
5149 if (gimple_code (gsi_stmt (bsi)) == GIMPLE_CALL
5150 && (e = cgraph_edge (node, gsi_stmt (bsi))) != NULL)
5152 if (!e->inline_failed)
5153 cgraph_remove_node_and_inline_clones (e->callee, id->dst_node);
5154 else
5155 cgraph_remove_edge (e);
5158 if (node->clones)
5159 node = node->clones;
5160 else if (node->next_sibling_clone)
5161 node = node->next_sibling_clone;
5162 else
5164 while (node != id->dst_node && !node->next_sibling_clone)
5165 node = node->clone_of;
5166 if (node != id->dst_node)
5167 node = node->next_sibling_clone;
5171 delete_basic_block (b);
5172 changed = true;
5176 return changed;
5179 /* Update clone info after duplication. */
5181 static void
5182 update_clone_info (copy_body_data * id)
5184 struct cgraph_node *node;
5185 if (!id->dst_node->clones)
5186 return;
5187 for (node = id->dst_node->clones; node != id->dst_node;)
5189 /* First update replace maps to match the new body. */
5190 if (node->clone.tree_map)
5192 unsigned int i;
5193 for (i = 0; i < vec_safe_length (node->clone.tree_map); i++)
5195 struct ipa_replace_map *replace_info;
5196 replace_info = (*node->clone.tree_map)[i];
5197 walk_tree (&replace_info->old_tree, copy_tree_body_r, id, NULL);
5198 walk_tree (&replace_info->new_tree, copy_tree_body_r, id, NULL);
5201 if (node->clones)
5202 node = node->clones;
5203 else if (node->next_sibling_clone)
5204 node = node->next_sibling_clone;
5205 else
5207 while (node != id->dst_node && !node->next_sibling_clone)
5208 node = node->clone_of;
5209 if (node != id->dst_node)
5210 node = node->next_sibling_clone;
5215 /* Create a copy of a function's tree.
5216 OLD_DECL and NEW_DECL are FUNCTION_DECL tree nodes
5217 of the original function and the new copied function
5218 respectively. In case we want to replace a DECL
5219 tree with another tree while duplicating the function's
5220 body, TREE_MAP represents the mapping between these
5221 trees. If UPDATE_CLONES is set, the call_stmt fields
5222 of edges of clones of the function will be updated.
5224 If non-NULL, ARGS_TO_SKIP determines which function parameters to remove
5225 from the new version.
5226 If SKIP_RETURN is true, the new version will return void.
5227 If non-NULL, BLOCKS_TO_COPY determines which basic blocks to copy.
5228 If non-NULL, NEW_ENTRY determines the new entry BB of the clone.
5230 void
5231 tree_function_versioning (tree old_decl, tree new_decl,
5232 vec<ipa_replace_map_p, va_gc> *tree_map,
5233 bool update_clones, bitmap args_to_skip,
5234 bool skip_return, bitmap blocks_to_copy,
5235 basic_block new_entry)
5237 struct cgraph_node *old_version_node;
5238 struct cgraph_node *new_version_node;
5239 copy_body_data id;
5240 tree p;
5241 unsigned i;
5242 struct ipa_replace_map *replace_info;
5243 basic_block old_entry_block, bb;
5244 auto_vec<gimple, 10> init_stmts;
5245 tree vars = NULL_TREE;
5247 gcc_assert (TREE_CODE (old_decl) == FUNCTION_DECL
5248 && TREE_CODE (new_decl) == FUNCTION_DECL);
5249 DECL_POSSIBLY_INLINED (old_decl) = 1;
5251 old_version_node = cgraph_get_node (old_decl);
5252 gcc_checking_assert (old_version_node);
5253 new_version_node = cgraph_get_node (new_decl);
5254 gcc_checking_assert (new_version_node);
5256 /* Copy over debug args. */
5257 if (DECL_HAS_DEBUG_ARGS_P (old_decl))
5259 vec<tree, va_gc> **new_debug_args, **old_debug_args;
5260 gcc_checking_assert (decl_debug_args_lookup (new_decl) == NULL);
5261 DECL_HAS_DEBUG_ARGS_P (new_decl) = 0;
5262 old_debug_args = decl_debug_args_lookup (old_decl);
5263 if (old_debug_args)
5265 new_debug_args = decl_debug_args_insert (new_decl);
5266 *new_debug_args = vec_safe_copy (*old_debug_args);
5270 /* Output the inlining info for this abstract function, since it has been
5271 inlined. If we don't do this now, we can lose the information about the
5272 variables in the function when the blocks get blown away as soon as we
5273 remove the cgraph node. */
5274 (*debug_hooks->outlining_inline_function) (old_decl);
5276 DECL_ARTIFICIAL (new_decl) = 1;
5277 DECL_ABSTRACT_ORIGIN (new_decl) = DECL_ORIGIN (old_decl);
5278 if (DECL_ORIGIN (old_decl) == old_decl)
5279 old_version_node->used_as_abstract_origin = true;
5280 DECL_FUNCTION_PERSONALITY (new_decl) = DECL_FUNCTION_PERSONALITY (old_decl);
5282 /* Prepare the data structures for the tree copy. */
5283 memset (&id, 0, sizeof (id));
5285 /* Create the set of statements to fold after the copy. */
5286 id.statements_to_fold = pointer_set_create ();
5288 id.decl_map = pointer_map_create ();
5289 id.debug_map = NULL;
5290 id.src_fn = old_decl;
5291 id.dst_fn = new_decl;
5292 id.src_node = old_version_node;
5293 id.dst_node = new_version_node;
5294 id.src_cfun = DECL_STRUCT_FUNCTION (old_decl);
5295 id.blocks_to_copy = blocks_to_copy;
5296 if (id.src_node->ipa_transforms_to_apply.exists ())
5298 vec<ipa_opt_pass> old_transforms_to_apply
5299 = id.dst_node->ipa_transforms_to_apply;
5300 unsigned int i;
5302 id.dst_node->ipa_transforms_to_apply
5303 = id.src_node->ipa_transforms_to_apply.copy ();
5304 for (i = 0; i < old_transforms_to_apply.length (); i++)
5305 id.dst_node->ipa_transforms_to_apply.safe_push (old_transforms_to_apply[i]);
5306 old_transforms_to_apply.release ();
5309 id.copy_decl = copy_decl_no_change;
5310 id.transform_call_graph_edges
5311 = update_clones ? CB_CGE_MOVE_CLONES : CB_CGE_MOVE;
5312 id.transform_new_cfg = true;
5313 id.transform_return_to_modify = false;
5314 id.transform_parameter = false;
5315 id.transform_lang_insert_block = NULL;
5317 old_entry_block = ENTRY_BLOCK_PTR_FOR_FN
5318 (DECL_STRUCT_FUNCTION (old_decl));
5319 DECL_RESULT (new_decl) = DECL_RESULT (old_decl);
5320 DECL_ARGUMENTS (new_decl) = DECL_ARGUMENTS (old_decl);
5321 initialize_cfun (new_decl, old_decl,
5322 old_entry_block->count);
5323 DECL_STRUCT_FUNCTION (new_decl)->gimple_df->ipa_pta
5324 = id.src_cfun->gimple_df->ipa_pta;
5326 /* Copy the function's static chain. */
5327 p = DECL_STRUCT_FUNCTION (old_decl)->static_chain_decl;
5328 if (p)
5329 DECL_STRUCT_FUNCTION (new_decl)->static_chain_decl =
5330 copy_static_chain (DECL_STRUCT_FUNCTION (old_decl)->static_chain_decl,
5331 &id);
5333 /* If there's a tree_map, prepare for substitution. */
5334 if (tree_map)
5335 for (i = 0; i < tree_map->length (); i++)
5337 gimple init;
5338 replace_info = (*tree_map)[i];
5339 if (replace_info->replace_p)
5341 if (!replace_info->old_tree)
5343 int i = replace_info->parm_num;
5344 tree parm;
5345 tree req_type;
5347 for (parm = DECL_ARGUMENTS (old_decl); i; parm = DECL_CHAIN (parm))
5348 i --;
5349 replace_info->old_tree = parm;
5350 req_type = TREE_TYPE (parm);
5351 if (!useless_type_conversion_p (req_type, TREE_TYPE (replace_info->new_tree)))
5353 if (fold_convertible_p (req_type, replace_info->new_tree))
5354 replace_info->new_tree = fold_build1 (NOP_EXPR, req_type, replace_info->new_tree);
5355 else if (TYPE_SIZE (req_type) == TYPE_SIZE (TREE_TYPE (replace_info->new_tree)))
5356 replace_info->new_tree = fold_build1 (VIEW_CONVERT_EXPR, req_type, replace_info->new_tree);
5357 else
5359 if (dump_file)
5361 fprintf (dump_file, " const ");
5362 print_generic_expr (dump_file, replace_info->new_tree, 0);
5363 fprintf (dump_file, " can't be converted to param ");
5364 print_generic_expr (dump_file, parm, 0);
5365 fprintf (dump_file, "\n");
5367 replace_info->old_tree = NULL;
5371 else
5372 gcc_assert (TREE_CODE (replace_info->old_tree) == PARM_DECL);
5373 if (replace_info->old_tree)
5375 init = setup_one_parameter (&id, replace_info->old_tree,
5376 replace_info->new_tree, id.src_fn,
5377 NULL,
5378 &vars);
5379 if (init)
5380 init_stmts.safe_push (init);
5384 /* Copy the function's arguments. */
5385 if (DECL_ARGUMENTS (old_decl) != NULL_TREE)
5386 DECL_ARGUMENTS (new_decl) =
5387 copy_arguments_for_versioning (DECL_ARGUMENTS (old_decl), &id,
5388 args_to_skip, &vars);
5390 DECL_INITIAL (new_decl) = remap_blocks (DECL_INITIAL (id.src_fn), &id);
5391 BLOCK_SUPERCONTEXT (DECL_INITIAL (new_decl)) = new_decl;
5393 declare_inline_vars (DECL_INITIAL (new_decl), vars);
5395 if (!vec_safe_is_empty (DECL_STRUCT_FUNCTION (old_decl)->local_decls))
5396 /* Add local vars. */
5397 add_local_variables (DECL_STRUCT_FUNCTION (old_decl), cfun, &id);
5399 if (DECL_RESULT (old_decl) == NULL_TREE)
5401 else if (skip_return && !VOID_TYPE_P (TREE_TYPE (DECL_RESULT (old_decl))))
5403 DECL_RESULT (new_decl)
5404 = build_decl (DECL_SOURCE_LOCATION (DECL_RESULT (old_decl)),
5405 RESULT_DECL, NULL_TREE, void_type_node);
5406 DECL_CONTEXT (DECL_RESULT (new_decl)) = new_decl;
5407 cfun->returns_struct = 0;
5408 cfun->returns_pcc_struct = 0;
5410 else
5412 tree old_name;
5413 DECL_RESULT (new_decl) = remap_decl (DECL_RESULT (old_decl), &id);
5414 lang_hooks.dup_lang_specific_decl (DECL_RESULT (new_decl));
5415 if (gimple_in_ssa_p (id.src_cfun)
5416 && DECL_BY_REFERENCE (DECL_RESULT (old_decl))
5417 && (old_name = ssa_default_def (id.src_cfun, DECL_RESULT (old_decl))))
5419 tree new_name = make_ssa_name (DECL_RESULT (new_decl), NULL);
5420 insert_decl_map (&id, old_name, new_name);
5421 SSA_NAME_DEF_STMT (new_name) = gimple_build_nop ();
5422 set_ssa_default_def (cfun, DECL_RESULT (new_decl), new_name);
5426 /* Set up the destination function's loop tree. */
5427 if (loops_for_fn (DECL_STRUCT_FUNCTION (old_decl)) != NULL)
5429 cfun->curr_properties &= ~PROP_loops;
5430 loop_optimizer_init (AVOID_CFG_MODIFICATIONS);
5431 cfun->curr_properties |= PROP_loops;
5434 /* Copy the function's body. */
5435 copy_body (&id, old_entry_block->count, REG_BR_PROB_BASE,
5436 ENTRY_BLOCK_PTR_FOR_FN (cfun), EXIT_BLOCK_PTR_FOR_FN (cfun),
5437 new_entry);
5439 /* Renumber the lexical scoping (non-code) blocks consecutively. */
5440 number_blocks (new_decl);
5442 /* We want to create the BB unconditionally, so that the addition of
5443 debug stmts doesn't affect BB count, which may in the end cause
5444 codegen differences. */
5445 bb = split_edge (single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
5446 while (init_stmts.length ())
5447 insert_init_stmt (&id, bb, init_stmts.pop ());
5448 update_clone_info (&id);
5450 /* Remap the nonlocal_goto_save_area, if any. */
5451 if (cfun->nonlocal_goto_save_area)
5453 struct walk_stmt_info wi;
5455 memset (&wi, 0, sizeof (wi));
5456 wi.info = &id;
5457 walk_tree (&cfun->nonlocal_goto_save_area, remap_gimple_op_r, &wi, NULL);
5460 /* Clean up. */
5461 pointer_map_destroy (id.decl_map);
5462 if (id.debug_map)
5463 pointer_map_destroy (id.debug_map);
5464 free_dominance_info (CDI_DOMINATORS);
5465 free_dominance_info (CDI_POST_DOMINATORS);
5467 fold_marked_statements (0, id.statements_to_fold);
5468 pointer_set_destroy (id.statements_to_fold);
5469 fold_cond_expr_cond ();
5470 delete_unreachable_blocks_update_callgraph (&id);
5471 if (id.dst_node->definition)
5472 cgraph_rebuild_references ();
5473 update_ssa (TODO_update_ssa);
5475 /* After partial cloning we need to rescale frequencies, so that they are
5476 within the proper range in the cloned function. */
5477 if (new_entry)
5479 struct cgraph_edge *e;
5480 rebuild_frequencies ();
5482 new_version_node->count = ENTRY_BLOCK_PTR_FOR_FN (cfun)->count;
5483 for (e = new_version_node->callees; e; e = e->next_callee)
5485 basic_block bb = gimple_bb (e->call_stmt);
5486 e->frequency = compute_call_stmt_bb_frequency (current_function_decl,
5487 bb);
5488 e->count = bb->count;
5490 for (e = new_version_node->indirect_calls; e; e = e->next_callee)
5492 basic_block bb = gimple_bb (e->call_stmt);
5493 e->frequency = compute_call_stmt_bb_frequency (current_function_decl,
5494 bb);
5495 e->count = bb->count;
5499 free_dominance_info (CDI_DOMINATORS);
5500 free_dominance_info (CDI_POST_DOMINATORS);
5502 gcc_assert (!id.debug_stmts.exists ());
5503 pop_cfun ();
5504 return;
5507 /* EXP is a CALL_EXPR present in a GENERIC expression tree.  Try to integrate
5508 the callee and return the inlined body on success. */
5510 tree
5511 maybe_inline_call_in_expr (tree exp)
5513 tree fn = get_callee_fndecl (exp);
5515 /* We can only try to inline "const" functions. */
5516 if (fn && TREE_READONLY (fn) && DECL_SAVED_TREE (fn))
5518 struct pointer_map_t *decl_map = pointer_map_create ();
5519 call_expr_arg_iterator iter;
5520 copy_body_data id;
5521 tree param, arg, t;
5523 /* Remap the parameters. */
5524 for (param = DECL_ARGUMENTS (fn), arg = first_call_expr_arg (exp, &iter);
5525 param;
5526 param = DECL_CHAIN (param), arg = next_call_expr_arg (&iter))
5527 *pointer_map_insert (decl_map, param) = arg;
5529 memset (&id, 0, sizeof (id));
5530 id.src_fn = fn;
5531 id.dst_fn = current_function_decl;
5532 id.src_cfun = DECL_STRUCT_FUNCTION (fn);
5533 id.decl_map = decl_map;
5535 id.copy_decl = copy_decl_no_change;
5536 id.transform_call_graph_edges = CB_CGE_DUPLICATE;
5537 id.transform_new_cfg = false;
5538 id.transform_return_to_modify = true;
5539 id.transform_parameter = true;
5540 id.transform_lang_insert_block = NULL;
5542 /* Make sure not to unshare trees behind the front-end's back
5543 since front-end specific mechanisms may rely on sharing. */
5544 id.regimplify = false;
5545 id.do_not_unshare = true;
5547 /* We're not inside any EH region. */
5548 id.eh_lp_nr = 0;
5550 t = copy_tree_body (&id);
5551 pointer_map_destroy (decl_map);
5553 /* We can only return something suitable for use in a GENERIC
5554 expression tree. */
5555 if (TREE_CODE (t) == MODIFY_EXPR)
5556 return TREE_OPERAND (t, 1);
5559 return NULL_TREE;
5562 /* Duplicate a type, fields and all. */
5564 tree
5565 build_duplicate_type (tree type)
5567 struct copy_body_data id;
5569 memset (&id, 0, sizeof (id));
5570 id.src_fn = current_function_decl;
5571 id.dst_fn = current_function_decl;
5572 id.src_cfun = cfun;
5573 id.decl_map = pointer_map_create ();
5574 id.debug_map = NULL;
5575 id.copy_decl = copy_decl_no_change;
5577 type = remap_type_1 (type, &id);
5579 pointer_map_destroy (id.decl_map);
5580 if (id.debug_map)
5581 pointer_map_destroy (id.debug_map);
5583 TYPE_CANONICAL (type) = type;
5585 return type;