/* Tree inlining.
   Copyright (C) 2001-2013 Free Software Foundation, Inc.
   Contributed by Alexandre Oliva <aoliva@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "tm.h"
25 #include "diagnostic-core.h"
26 #include "tree.h"
27 #include "tree-inline.h"
28 #include "flags.h"
29 #include "params.h"
30 #include "input.h"
31 #include "insn-config.h"
32 #include "hashtab.h"
33 #include "langhooks.h"
34 #include "basic-block.h"
35 #include "tree-iterator.h"
36 #include "cgraph.h"
37 #include "intl.h"
38 #include "tree-mudflap.h"
39 #include "tree-flow.h"
40 #include "function.h"
41 #include "tree-flow.h"
42 #include "tree-pretty-print.h"
43 #include "except.h"
44 #include "debug.h"
45 #include "pointer-set.h"
46 #include "ipa-prop.h"
47 #include "value-prof.h"
48 #include "tree-pass.h"
49 #include "target.h"
50 #include "cfgloop.h"
52 #include "rtl.h" /* FIXME: For asm_str_count. */
54 /* I'm not real happy about this, but we need to handle gimple and
55 non-gimple trees. */
56 #include "gimple.h"
/* Inlining, Cloning, Versioning, Parallelization

   Inlining: a function body is duplicated, but the PARM_DECLs are
   remapped into VAR_DECLs, and non-void RETURN_EXPRs become
   MODIFY_EXPRs that store to a dedicated returned-value variable.
   The duplicated eh_region info of the copy will later be appended
   to the info for the caller; the eh_region info in copied throwing
   statements and RESX statements are adjusted accordingly.

   Cloning: (only in C++) We have one body for a con/de/structor, and
   multiple function decls, each with a unique parameter list.
   Duplicate the body, using the given splay tree; some parameters
   will become constants (like 0 or 1).

   Versioning: a function body is duplicated and the result is a new
   function, rather than being inlined into the blocks of an existing
   function as with inlining.  Some parameters will become constants.

   Parallelization: a region of a function is duplicated resulting in
   a new function.  Variables may be replaced with complex expressions
   to enable shared variable semantics.

   All of these will simultaneously look up any callgraph edges.  If
   we're going to inline the duplicated function body, and the given
   function has some cloned callgraph nodes (one for each place this
   function will be inlined) those callgraph edges will be duplicated.
   If we're cloning the body, those callgraph edges will be
   updated to point into the new body.  (Note that the original
   callgraph node and edge list will not be altered.)

   See the CALL_EXPR handling case in copy_tree_body_r ().  */
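
/* As a rough sketch of the inlining transformation above: inlining

     int f (int x) { return x + 1; }

   at a call "y = f (3);" duplicates f's body, remaps the PARM_DECL x
   to a fresh local VAR_DECL initialized from the argument, and turns
   the RETURN_EXPR into an assignment to a returned-value variable
   that then feeds y.  */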
/* To Do:

   o In order to make inlining-on-trees work, we pessimized
     function-local static constants.  In particular, they are now
     always output, even when not addressed.  Fix this by treating
     function-local static constants just like global static
     constants; the back-end already knows not to output them if they
     are not needed.

   o Provide heuristics to clamp inlining of recursive template
     calls?  */
/* Weights that estimate_num_insns uses to estimate the size of the
   produced code.  */

eni_weights eni_size_weights;

/* Weights that estimate_num_insns uses to estimate the time necessary
   to execute the produced code.  */

eni_weights eni_time_weights;
/* Prototypes.  */

static tree declare_return_variable (copy_body_data *, tree, tree, basic_block);
static void remap_block (tree *, copy_body_data *);
static void copy_bind_expr (tree *, int *, copy_body_data *);
static void declare_inline_vars (tree, tree);
static void remap_save_expr (tree *, void *, int *);
static void prepend_lexical_block (tree current_block, tree new_block);
static tree copy_decl_to_var (tree, copy_body_data *);
static tree copy_result_decl_to_var (tree, copy_body_data *);
static tree copy_decl_maybe_to_var (tree, copy_body_data *);
static gimple remap_gimple_stmt (gimple, copy_body_data *);
static bool delete_unreachable_blocks_update_callgraph (copy_body_data *id);
/* Insert a tree->tree mapping for ID.  Although the name suggests
   that the trees should be variables, it is used for more than that.  */

void
insert_decl_map (copy_body_data *id, tree key, tree value)
{
  *pointer_map_insert (id->decl_map, key) = value;

  /* Always insert an identity map as well.  If we see this same new
     node again, we won't want to duplicate it a second time.  */
  if (key != value)
    *pointer_map_insert (id->decl_map, value) = value;
}
/* Insert a tree->tree mapping for ID.  This is only used for
   variables.  */

static void
insert_debug_decl_map (copy_body_data *id, tree key, tree value)
{
  if (!gimple_in_ssa_p (id->src_cfun))
    return;

  if (!MAY_HAVE_DEBUG_STMTS)
    return;

  if (!target_for_debug_bind (key))
    return;

  gcc_assert (TREE_CODE (key) == PARM_DECL);
  gcc_assert (TREE_CODE (value) == VAR_DECL);

  if (!id->debug_map)
    id->debug_map = pointer_map_create ();

  *pointer_map_insert (id->debug_map, key) = value;
}
/* If nonzero, we're remapping the contents of inlined debug
   statements.  If negative, an error has occurred, such as a
   reference to a variable that isn't available in the inlined
   context.  */
static int processing_debug_stmt = 0;
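
/* (remap_decl and remap_ssa_name below set this to -1 when, while
   remapping a debug statement, they cannot find a replacement for a
   referenced entity.)  */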
/* Construct new SSA name for old NAME.  ID is the inline context.  */

static tree
remap_ssa_name (tree name, copy_body_data *id)
{
  tree new_tree, var;
  tree *n;

  gcc_assert (TREE_CODE (name) == SSA_NAME);

  n = (tree *) pointer_map_contains (id->decl_map, name);
  if (n)
    return unshare_expr (*n);

  if (processing_debug_stmt)
    {
      if (SSA_NAME_IS_DEFAULT_DEF (name)
          && TREE_CODE (SSA_NAME_VAR (name)) == PARM_DECL
          && id->entry_bb == NULL
          && single_succ_p (ENTRY_BLOCK_PTR))
        {
          tree vexpr = make_node (DEBUG_EXPR_DECL);
          gimple def_temp;
          gimple_stmt_iterator gsi;
          tree val = SSA_NAME_VAR (name);

          n = (tree *) pointer_map_contains (id->decl_map, val);
          if (n != NULL)
            val = *n;
          if (TREE_CODE (val) != PARM_DECL)
            {
              processing_debug_stmt = -1;
              return name;
            }
          def_temp = gimple_build_debug_source_bind (vexpr, val, NULL);
          DECL_ARTIFICIAL (vexpr) = 1;
          TREE_TYPE (vexpr) = TREE_TYPE (name);
          DECL_MODE (vexpr) = DECL_MODE (SSA_NAME_VAR (name));
          gsi = gsi_after_labels (single_succ (ENTRY_BLOCK_PTR));
          gsi_insert_before (&gsi, def_temp, GSI_SAME_STMT);
          return vexpr;
        }

      processing_debug_stmt = -1;
      return name;
    }

  /* Remap anonymous SSA names or SSA names of anonymous decls.  */
  var = SSA_NAME_VAR (name);
  if (!var
      || (!SSA_NAME_IS_DEFAULT_DEF (name)
          && TREE_CODE (var) == VAR_DECL
          && !VAR_DECL_IS_VIRTUAL_OPERAND (var)
          && DECL_ARTIFICIAL (var)
          && DECL_IGNORED_P (var)
          && !DECL_NAME (var)))
    {
      struct ptr_info_def *pi;
      new_tree = make_ssa_name (remap_type (TREE_TYPE (name), id), NULL);
      if (!var && SSA_NAME_IDENTIFIER (name))
        SET_SSA_NAME_VAR_OR_IDENTIFIER (new_tree, SSA_NAME_IDENTIFIER (name));
      insert_decl_map (id, name, new_tree);
      SSA_NAME_OCCURS_IN_ABNORMAL_PHI (new_tree)
        = SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name);
      /* At least IPA points-to info can be directly transferred.  */
      if (id->src_cfun->gimple_df
          && id->src_cfun->gimple_df->ipa_pta
          && (pi = SSA_NAME_PTR_INFO (name))
          && !pi->pt.anything)
        {
          struct ptr_info_def *new_pi = get_ptr_info (new_tree);
          new_pi->pt = pi->pt;
        }
      return new_tree;
    }

  /* Do not set DEF_STMT yet as statement is not copied yet.  We do that
     in copy_bb.  */
  new_tree = remap_decl (var, id);

  /* We might've substituted a constant or another SSA_NAME for
     the variable.

     Replace the SSA name representing the RESULT_DECL by the variable
     during inlining: this saves us from the need to introduce a PHI node
     when the return value is only partly initialized.  */
  if ((TREE_CODE (new_tree) == VAR_DECL || TREE_CODE (new_tree) == PARM_DECL)
      && (!SSA_NAME_VAR (name)
          || TREE_CODE (SSA_NAME_VAR (name)) != RESULT_DECL
          || !id->transform_return_to_modify))
    {
      struct ptr_info_def *pi;
      new_tree = make_ssa_name (new_tree, NULL);
      insert_decl_map (id, name, new_tree);
      SSA_NAME_OCCURS_IN_ABNORMAL_PHI (new_tree)
        = SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name);
      /* At least IPA points-to info can be directly transferred.  */
      if (id->src_cfun->gimple_df
          && id->src_cfun->gimple_df->ipa_pta
          && (pi = SSA_NAME_PTR_INFO (name))
          && !pi->pt.anything)
        {
          struct ptr_info_def *new_pi = get_ptr_info (new_tree);
          new_pi->pt = pi->pt;
        }
      if (SSA_NAME_IS_DEFAULT_DEF (name))
        {
          /* Inlining a function that has an uninitialized variable might
             extend the variable's lifetime (the variable might get
             reused).  This causes an ICE when we end up extending the
             lifetime of an SSA name across an abnormal edge, and it also
             increases register pressure.

             We simply initialize all uninitialized vars to 0, except
             when we are inlining into the very first BB.  We could avoid
             this for all BBs that are not inside strongly connected
             regions of the CFG, but this is expensive to test.  */
          if (id->entry_bb
              && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name)
              && (!SSA_NAME_VAR (name)
                  || TREE_CODE (SSA_NAME_VAR (name)) != PARM_DECL)
              && (id->entry_bb != EDGE_SUCC (ENTRY_BLOCK_PTR, 0)->dest
                  || EDGE_COUNT (id->entry_bb->preds) != 1))
            {
              gimple_stmt_iterator gsi = gsi_last_bb (id->entry_bb);
              gimple init_stmt;
              tree zero = build_zero_cst (TREE_TYPE (new_tree));

              init_stmt = gimple_build_assign (new_tree, zero);
              gsi_insert_after (&gsi, init_stmt, GSI_NEW_STMT);
              SSA_NAME_IS_DEFAULT_DEF (new_tree) = 0;
            }
          else
            {
              SSA_NAME_DEF_STMT (new_tree) = gimple_build_nop ();
              set_ssa_default_def (cfun, SSA_NAME_VAR (new_tree), new_tree);
            }
        }
    }
  else
    insert_decl_map (id, name, new_tree);
  return new_tree;
}
/* Remap DECL during the copying of the BLOCK tree for the function.  */

tree
remap_decl (tree decl, copy_body_data *id)
{
  tree *n;

  /* We only remap local variables in the current function.  */

  /* See if we have remapped this declaration.  */

  n = (tree *) pointer_map_contains (id->decl_map, decl);

  if (!n && processing_debug_stmt)
    {
      processing_debug_stmt = -1;
      return decl;
    }

  /* If we didn't already have an equivalent for this declaration,
     create one now.  */
  if (!n)
    {
      /* Make a copy of the variable or label.  */
      tree t = id->copy_decl (decl, id);

      /* Remember it, so that if we encounter this local entity again
         we can reuse this copy.  Do this early because remap_type may
         need this decl for TYPE_STUB_DECL.  */
      insert_decl_map (id, decl, t);

      if (!DECL_P (t))
        return t;

      /* Remap types, if necessary.  */
      TREE_TYPE (t) = remap_type (TREE_TYPE (t), id);
      if (TREE_CODE (t) == TYPE_DECL)
        DECL_ORIGINAL_TYPE (t) = remap_type (DECL_ORIGINAL_TYPE (t), id);

      /* Remap sizes as necessary.  */
      walk_tree (&DECL_SIZE (t), copy_tree_body_r, id, NULL);
      walk_tree (&DECL_SIZE_UNIT (t), copy_tree_body_r, id, NULL);

      /* If fields, do likewise for offset and qualifier.  */
      if (TREE_CODE (t) == FIELD_DECL)
        {
          walk_tree (&DECL_FIELD_OFFSET (t), copy_tree_body_r, id, NULL);
          if (TREE_CODE (DECL_CONTEXT (t)) == QUAL_UNION_TYPE)
            walk_tree (&DECL_QUALIFIER (t), copy_tree_body_r, id, NULL);
        }

      return t;
    }

  if (id->do_not_unshare)
    return *n;
  else
    return unshare_expr (*n);
}
static tree
remap_type_1 (tree type, copy_body_data *id)
{
  tree new_tree, t;

  /* We do need a copy.  build and register it now.  If this is a pointer or
     reference type, remap the designated type and make a new pointer or
     reference type.  */
  if (TREE_CODE (type) == POINTER_TYPE)
    {
      new_tree = build_pointer_type_for_mode (remap_type (TREE_TYPE (type), id),
                                              TYPE_MODE (type),
                                              TYPE_REF_CAN_ALIAS_ALL (type));
      if (TYPE_ATTRIBUTES (type) || TYPE_QUALS (type))
        new_tree = build_type_attribute_qual_variant (new_tree,
                                                      TYPE_ATTRIBUTES (type),
                                                      TYPE_QUALS (type));
      insert_decl_map (id, type, new_tree);
      return new_tree;
    }
  else if (TREE_CODE (type) == REFERENCE_TYPE)
    {
      new_tree = build_reference_type_for_mode (remap_type (TREE_TYPE (type), id),
                                                TYPE_MODE (type),
                                                TYPE_REF_CAN_ALIAS_ALL (type));
      if (TYPE_ATTRIBUTES (type) || TYPE_QUALS (type))
        new_tree = build_type_attribute_qual_variant (new_tree,
                                                      TYPE_ATTRIBUTES (type),
                                                      TYPE_QUALS (type));
      insert_decl_map (id, type, new_tree);
      return new_tree;
    }
  else
    new_tree = copy_node (type);

  insert_decl_map (id, type, new_tree);

  /* This is a new type, not a copy of an old type.  Need to reassociate
     variants.  We can handle everything except the main variant lazily.  */
  t = TYPE_MAIN_VARIANT (type);
  if (type != t)
    {
      t = remap_type (t, id);
      TYPE_MAIN_VARIANT (new_tree) = t;
      TYPE_NEXT_VARIANT (new_tree) = TYPE_NEXT_VARIANT (t);
      TYPE_NEXT_VARIANT (t) = new_tree;
    }
  else
    {
      TYPE_MAIN_VARIANT (new_tree) = new_tree;
      TYPE_NEXT_VARIANT (new_tree) = NULL;
    }

  if (TYPE_STUB_DECL (type))
    TYPE_STUB_DECL (new_tree) = remap_decl (TYPE_STUB_DECL (type), id);

  /* Lazily create pointer and reference types.  */
  TYPE_POINTER_TO (new_tree) = NULL;
  TYPE_REFERENCE_TO (new_tree) = NULL;

  switch (TREE_CODE (new_tree))
    {
    case INTEGER_TYPE:
    case REAL_TYPE:
    case FIXED_POINT_TYPE:
    case ENUMERAL_TYPE:
    case BOOLEAN_TYPE:
      t = TYPE_MIN_VALUE (new_tree);
      if (t && TREE_CODE (t) != INTEGER_CST)
        walk_tree (&TYPE_MIN_VALUE (new_tree), copy_tree_body_r, id, NULL);

      t = TYPE_MAX_VALUE (new_tree);
      if (t && TREE_CODE (t) != INTEGER_CST)
        walk_tree (&TYPE_MAX_VALUE (new_tree), copy_tree_body_r, id, NULL);
      return new_tree;

    case FUNCTION_TYPE:
      TREE_TYPE (new_tree) = remap_type (TREE_TYPE (new_tree), id);
      walk_tree (&TYPE_ARG_TYPES (new_tree), copy_tree_body_r, id, NULL);
      return new_tree;

    case ARRAY_TYPE:
      TREE_TYPE (new_tree) = remap_type (TREE_TYPE (new_tree), id);
      TYPE_DOMAIN (new_tree) = remap_type (TYPE_DOMAIN (new_tree), id);
      break;

    case RECORD_TYPE:
    case UNION_TYPE:
    case QUAL_UNION_TYPE:
      {
        tree f, nf = NULL;

        for (f = TYPE_FIELDS (new_tree); f ; f = DECL_CHAIN (f))
          {
            t = remap_decl (f, id);
            DECL_CONTEXT (t) = new_tree;
            DECL_CHAIN (t) = nf;
            nf = t;
          }
        TYPE_FIELDS (new_tree) = nreverse (nf);
      }
      break;

    case OFFSET_TYPE:
    default:
      /* Shouldn't have been thought variable sized.  */
      gcc_unreachable ();
    }

  walk_tree (&TYPE_SIZE (new_tree), copy_tree_body_r, id, NULL);
  walk_tree (&TYPE_SIZE_UNIT (new_tree), copy_tree_body_r, id, NULL);

  return new_tree;
}
tree
remap_type (tree type, copy_body_data *id)
{
  tree *node;
  tree tmp;

  if (type == NULL)
    return type;

  /* See if we have remapped this type.  */
  node = (tree *) pointer_map_contains (id->decl_map, type);
  if (node)
    return *node;

  /* The type only needs remapping if it's variably modified.  */
  if (! variably_modified_type_p (type, id->src_fn))
    {
      insert_decl_map (id, type, type);
      return type;
    }

  id->remapping_type_depth++;
  tmp = remap_type_1 (type, id);
  id->remapping_type_depth--;

  return tmp;
}
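
/* Note that remap_type only deep-copies variably modified types.  A
   typical case is a VLA type such as int[n] in the source function:
   its size expressions refer to decls that are being remapped, so the
   type itself must be copied too.  Constant-sized types are simply
   entered into the map as their own identity.  */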
/* Decide if DECL can be put into BLOCK_NONLOCAL_VARs.  */

static bool
can_be_nonlocal (tree decl, copy_body_data *id)
{
  /* We cannot duplicate function decls.  */
  if (TREE_CODE (decl) == FUNCTION_DECL)
    return true;

  /* Local static vars must be non-local or we get multiple declaration
     problems.  */
  if (TREE_CODE (decl) == VAR_DECL
      && !auto_var_in_fn_p (decl, id->src_fn))
    return true;

  return false;
}
static tree
remap_decls (tree decls, vec<tree, va_gc> **nonlocalized_list,
             copy_body_data *id)
{
  tree old_var;
  tree new_decls = NULL_TREE;

  /* Remap its variables.  */
  for (old_var = decls; old_var; old_var = DECL_CHAIN (old_var))
    {
      tree new_var;

      if (can_be_nonlocal (old_var, id))
        {
          /* We need to add this variable to the local decls as otherwise
             nothing else will do so.  */
          if (TREE_CODE (old_var) == VAR_DECL
              && ! DECL_EXTERNAL (old_var))
            add_local_decl (cfun, old_var);
          if ((!optimize || debug_info_level > DINFO_LEVEL_TERSE)
              && !DECL_IGNORED_P (old_var)
              && nonlocalized_list)
            vec_safe_push (*nonlocalized_list, old_var);
          continue;
        }

      /* Remap the variable.  */
      new_var = remap_decl (old_var, id);

      /* If we didn't remap this variable, we can't mess with its
         TREE_CHAIN.  If we remapped this variable to the return slot, it's
         already declared somewhere else, so don't declare it here.  */

      if (new_var == id->retvar)
        ;
      else if (!new_var)
        {
          if ((!optimize || debug_info_level > DINFO_LEVEL_TERSE)
              && !DECL_IGNORED_P (old_var)
              && nonlocalized_list)
            vec_safe_push (*nonlocalized_list, old_var);
        }
      else
        {
          gcc_assert (DECL_P (new_var));
          DECL_CHAIN (new_var) = new_decls;
          new_decls = new_var;

          /* Also copy value-expressions.  */
          if (TREE_CODE (new_var) == VAR_DECL
              && DECL_HAS_VALUE_EXPR_P (new_var))
            {
              tree tem = DECL_VALUE_EXPR (new_var);
              bool old_regimplify = id->regimplify;
              id->remapping_type_depth++;
              walk_tree (&tem, copy_tree_body_r, id, NULL);
              id->remapping_type_depth--;
              id->regimplify = old_regimplify;
              SET_DECL_VALUE_EXPR (new_var, tem);
            }
        }
    }

  return nreverse (new_decls);
}
/* Copy the BLOCK to contain remapped versions of the variables
   therein.  And hook the new block into the block-tree.  */

static void
remap_block (tree *block, copy_body_data *id)
{
  tree old_block;
  tree new_block;

  /* Make the new block.  */
  old_block = *block;
  new_block = make_node (BLOCK);
  TREE_USED (new_block) = TREE_USED (old_block);
  BLOCK_ABSTRACT_ORIGIN (new_block) = old_block;
  BLOCK_SOURCE_LOCATION (new_block) = BLOCK_SOURCE_LOCATION (old_block);
  BLOCK_NONLOCALIZED_VARS (new_block)
    = vec_safe_copy (BLOCK_NONLOCALIZED_VARS (old_block));
  *block = new_block;

  /* Remap its variables.  */
  BLOCK_VARS (new_block) = remap_decls (BLOCK_VARS (old_block),
                                        &BLOCK_NONLOCALIZED_VARS (new_block),
                                        id);

  if (id->transform_lang_insert_block)
    id->transform_lang_insert_block (new_block);

  /* Remember the remapped block.  */
  insert_decl_map (id, old_block, new_block);
}
/* Copy the whole block tree and root it in id->block.  */
static tree
remap_blocks (tree block, copy_body_data *id)
{
  tree t;
  tree new_tree = block;

  if (!block)
    return NULL;

  remap_block (&new_tree, id);
  gcc_assert (new_tree != block);
  for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
    prepend_lexical_block (new_tree, remap_blocks (t, id));
  /* Blocks are in arbitrary order, but make things slightly prettier and do
     not swap order when producing a copy.  */
  BLOCK_SUBBLOCKS (new_tree) = blocks_nreverse (BLOCK_SUBBLOCKS (new_tree));
  return new_tree;
}
/* Remap the block tree rooted at BLOCK to nothing.  */
static void
remap_blocks_to_null (tree block, copy_body_data *id)
{
  tree t;
  insert_decl_map (id, block, NULL_TREE);
  for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
    remap_blocks_to_null (t, id);
}
static void
copy_statement_list (tree *tp)
{
  tree_stmt_iterator oi, ni;
  tree new_tree;

  new_tree = alloc_stmt_list ();
  ni = tsi_start (new_tree);
  oi = tsi_start (*tp);
  TREE_TYPE (new_tree) = TREE_TYPE (*tp);
  *tp = new_tree;

  for (; !tsi_end_p (oi); tsi_next (&oi))
    {
      tree stmt = tsi_stmt (oi);
      if (TREE_CODE (stmt) == STATEMENT_LIST)
        /* This copy is not redundant; tsi_link_after will smash this
           STATEMENT_LIST into the end of the one we're building, and we
           don't want to do that with the original.  */
        copy_statement_list (&stmt);
      tsi_link_after (&ni, stmt, TSI_CONTINUE_LINKING);
    }
}
static void
copy_bind_expr (tree *tp, int *walk_subtrees, copy_body_data *id)
{
  tree block = BIND_EXPR_BLOCK (*tp);
  /* Copy (and replace) the statement.  */
  copy_tree_r (tp, walk_subtrees, NULL);
  if (block)
    {
      remap_block (&block, id);
      BIND_EXPR_BLOCK (*tp) = block;
    }

  if (BIND_EXPR_VARS (*tp))
    /* This will remap a lot of the same decls again, but this should be
       harmless.  */
    BIND_EXPR_VARS (*tp) = remap_decls (BIND_EXPR_VARS (*tp), NULL, id);
}
/* Create a new gimple_seq by remapping all the statements in BODY
   using the inlining information in ID.  */

static gimple_seq
remap_gimple_seq (gimple_seq body, copy_body_data *id)
{
  gimple_stmt_iterator si;
  gimple_seq new_body = NULL;

  for (si = gsi_start (body); !gsi_end_p (si); gsi_next (&si))
    {
      gimple new_stmt = remap_gimple_stmt (gsi_stmt (si), id);
      gimple_seq_add_stmt (&new_body, new_stmt);
    }

  return new_body;
}
/* Copy a GIMPLE_BIND statement STMT, remapping all the symbols in its
   block using the mapping information in ID.  */

static gimple
copy_gimple_bind (gimple stmt, copy_body_data *id)
{
  gimple new_bind;
  tree new_block, new_vars;
  gimple_seq body, new_body;

  /* Copy the statement.  Note that we purposely don't use copy_stmt
     here because we need to remap statements as we copy.  */
  body = gimple_bind_body (stmt);
  new_body = remap_gimple_seq (body, id);

  new_block = gimple_bind_block (stmt);
  if (new_block)
    remap_block (&new_block, id);

  /* This will remap a lot of the same decls again, but this should be
     harmless.  */
  new_vars = gimple_bind_vars (stmt);
  if (new_vars)
    new_vars = remap_decls (new_vars, NULL, id);

  new_bind = gimple_build_bind (new_vars, new_body, new_block);

  return new_bind;
}
/* Remap the GIMPLE operand pointed to by *TP.  DATA is really a
   'struct walk_stmt_info *'.  DATA->INFO is a 'copy_body_data *'.
   WALK_SUBTREES is used to indicate to walk_gimple_op whether to keep
   recursing into the children nodes of *TP.  */

static tree
remap_gimple_op_r (tree *tp, int *walk_subtrees, void *data)
{
  struct walk_stmt_info *wi_p = (struct walk_stmt_info *) data;
  copy_body_data *id = (copy_body_data *) wi_p->info;
  tree fn = id->src_fn;

  if (TREE_CODE (*tp) == SSA_NAME)
    {
      *tp = remap_ssa_name (*tp, id);
      *walk_subtrees = 0;
      return NULL;
    }
  else if (auto_var_in_fn_p (*tp, fn))
    {
      /* Local variables and labels need to be replaced by equivalent
         variables.  We don't want to copy static variables; there's
         only one of those, no matter how many times we inline the
         containing function.  Similarly for globals from an outer
         function.  */
      tree new_decl;

      /* Remap the declaration.  */
      new_decl = remap_decl (*tp, id);
      gcc_assert (new_decl);
      /* Replace this variable with the copy.  */
      STRIP_TYPE_NOPS (new_decl);
      /* ???  The C++ frontend uses void * pointer zero to initialize
         any other type.  This confuses the middle-end type verification.
         As cloned bodies do not go through gimplification again the fixup
         there doesn't trigger.  */
      if (TREE_CODE (new_decl) == INTEGER_CST
          && !useless_type_conversion_p (TREE_TYPE (*tp), TREE_TYPE (new_decl)))
        new_decl = fold_convert (TREE_TYPE (*tp), new_decl);
      *tp = new_decl;
      *walk_subtrees = 0;
    }
  else if (TREE_CODE (*tp) == STATEMENT_LIST)
    gcc_unreachable ();
  else if (TREE_CODE (*tp) == SAVE_EXPR)
    gcc_unreachable ();
  else if (TREE_CODE (*tp) == LABEL_DECL
           && (!DECL_CONTEXT (*tp)
               || decl_function_context (*tp) == id->src_fn))
    /* These may need to be remapped for EH handling.  */
    *tp = remap_decl (*tp, id);
  else if (TREE_CODE (*tp) == FIELD_DECL)
    {
      /* If the enclosing record type is variably_modified_type_p, the field
         has already been remapped.  Otherwise, it need not be.  */
      tree *n = (tree *) pointer_map_contains (id->decl_map, *tp);
      if (n)
        *tp = *n;
      *walk_subtrees = 0;
    }
  else if (TYPE_P (*tp))
    /* Types may need remapping as well.  */
    *tp = remap_type (*tp, id);
  else if (CONSTANT_CLASS_P (*tp))
    {
      /* If this is a constant, we have to copy the node iff the type
         will be remapped.  copy_tree_r will not copy a constant.  */
      tree new_type = remap_type (TREE_TYPE (*tp), id);

      if (new_type == TREE_TYPE (*tp))
        *walk_subtrees = 0;

      else if (TREE_CODE (*tp) == INTEGER_CST)
        *tp = build_int_cst_wide (new_type, TREE_INT_CST_LOW (*tp),
                                  TREE_INT_CST_HIGH (*tp));
      else
        {
          *tp = copy_node (*tp);
          TREE_TYPE (*tp) = new_type;
        }
    }
  else
    {
      /* Otherwise, just copy the node.  Note that copy_tree_r already
         knows not to copy VAR_DECLs, etc., so this is safe.  */

      if (TREE_CODE (*tp) == MEM_REF)
        {
          tree ptr = TREE_OPERAND (*tp, 0);
          tree type = remap_type (TREE_TYPE (*tp), id);
          tree old = *tp;

          /* We need to re-canonicalize MEM_REFs from inline substitutions
             that can happen when a pointer argument is an ADDR_EXPR.
             Recurse here manually to allow that.  */
          walk_tree (&ptr, remap_gimple_op_r, data, NULL);
          *tp = fold_build2 (MEM_REF, type,
                             ptr, TREE_OPERAND (*tp, 1));
          TREE_THIS_NOTRAP (*tp) = TREE_THIS_NOTRAP (old);
          TREE_THIS_VOLATILE (*tp) = TREE_THIS_VOLATILE (old);
          TREE_SIDE_EFFECTS (*tp) = TREE_SIDE_EFFECTS (old);
          TREE_NO_WARNING (*tp) = TREE_NO_WARNING (old);
          *walk_subtrees = 0;
          return NULL;
        }

      /* Here is the "usual case".  Copy this tree node, and then
         tweak some special cases.  */
      copy_tree_r (tp, walk_subtrees, NULL);

      if (TREE_CODE (*tp) != OMP_CLAUSE)
        TREE_TYPE (*tp) = remap_type (TREE_TYPE (*tp), id);

      if (TREE_CODE (*tp) == TARGET_EXPR && TREE_OPERAND (*tp, 3))
        {
          /* The copied TARGET_EXPR has never been expanded, even if the
             original node was expanded already.  */
          TREE_OPERAND (*tp, 1) = TREE_OPERAND (*tp, 3);
          TREE_OPERAND (*tp, 3) = NULL_TREE;
        }
      else if (TREE_CODE (*tp) == ADDR_EXPR)
        {
          /* Variable substitution need not be simple.  In particular,
             the MEM_REF substitution above.  Make sure that
             TREE_CONSTANT and friends are up-to-date.  */
          int invariant = is_gimple_min_invariant (*tp);
          walk_tree (&TREE_OPERAND (*tp, 0), remap_gimple_op_r, data, NULL);
          recompute_tree_invariant_for_addr_expr (*tp);

          /* If this used to be invariant, but is not any longer,
             then regimplification is probably needed.  */
          if (invariant && !is_gimple_min_invariant (*tp))
            id->regimplify = true;

          *walk_subtrees = 0;
        }
    }

  /* Update the TREE_BLOCK for the cloned expr.  */
  if (EXPR_P (*tp))
    {
      tree new_block = id->remapping_type_depth == 0 ? id->block : NULL;
      tree old_block = TREE_BLOCK (*tp);
      if (old_block)
        {
          tree *n;
          n = (tree *) pointer_map_contains (id->decl_map,
                                             TREE_BLOCK (*tp));
          if (n)
            new_block = *n;
        }
      TREE_SET_BLOCK (*tp, new_block);
    }

  /* Keep iterating.  */
  return NULL_TREE;
}
/* Called from copy_body_id via walk_tree.  DATA is really a
   `copy_body_data *'.  */

tree
copy_tree_body_r (tree *tp, int *walk_subtrees, void *data)
{
  copy_body_data *id = (copy_body_data *) data;
  tree fn = id->src_fn;
  tree new_block;

  /* Begin by recognizing trees that we'll completely rewrite for the
     inlining context.  Our output for these trees is completely
     different from our input (e.g. RETURN_EXPR is deleted, and morphs
     into an edge).  Further down, we'll handle trees that get
     duplicated and/or tweaked.  */

  /* When requested, RETURN_EXPRs should be transformed to just the
     contained MODIFY_EXPR.  The branch semantics of the return will
     be handled elsewhere by manipulating the CFG rather than a statement.  */
  if (TREE_CODE (*tp) == RETURN_EXPR && id->transform_return_to_modify)
    {
      tree assignment = TREE_OPERAND (*tp, 0);

      /* If we're returning something, just turn that into an
         assignment into the equivalent of the original RESULT_DECL.
         If the "assignment" is just the result decl, the result
         decl has already been set (e.g. a recent "foo (&result_decl,
         ...)"); just toss the entire RETURN_EXPR.  */
      if (assignment && TREE_CODE (assignment) == MODIFY_EXPR)
        {
          /* Replace the RETURN_EXPR with (a copy of) the
             MODIFY_EXPR hanging underneath.  */
          *tp = copy_node (assignment);
        }
      else /* Else the RETURN_EXPR returns no value.  */
        {
          *tp = NULL;
          return (tree) (void *)1;
        }
    }
  else if (TREE_CODE (*tp) == SSA_NAME)
    {
      *tp = remap_ssa_name (*tp, id);
      *walk_subtrees = 0;
      return NULL;
    }

  /* Local variables and labels need to be replaced by equivalent
     variables.  We don't want to copy static variables; there's only
     one of those, no matter how many times we inline the containing
     function.  Similarly for globals from an outer function.  */
  else if (auto_var_in_fn_p (*tp, fn))
    {
      tree new_decl;

      /* Remap the declaration.  */
      new_decl = remap_decl (*tp, id);
      gcc_assert (new_decl);
      /* Replace this variable with the copy.  */
      STRIP_TYPE_NOPS (new_decl);
      *tp = new_decl;
      *walk_subtrees = 0;
    }
  else if (TREE_CODE (*tp) == STATEMENT_LIST)
    copy_statement_list (tp);
  else if (TREE_CODE (*tp) == SAVE_EXPR
           || TREE_CODE (*tp) == TARGET_EXPR)
    remap_save_expr (tp, id->decl_map, walk_subtrees);
  else if (TREE_CODE (*tp) == LABEL_DECL
           && (! DECL_CONTEXT (*tp)
               || decl_function_context (*tp) == id->src_fn))
    /* These may need to be remapped for EH handling.  */
    *tp = remap_decl (*tp, id);
  else if (TREE_CODE (*tp) == BIND_EXPR)
    copy_bind_expr (tp, walk_subtrees, id);
  /* Types may need remapping as well.  */
  else if (TYPE_P (*tp))
    *tp = remap_type (*tp, id);

  /* If this is a constant, we have to copy the node iff the type will be
     remapped.  copy_tree_r will not copy a constant.  */
  else if (CONSTANT_CLASS_P (*tp))
    {
      tree new_type = remap_type (TREE_TYPE (*tp), id);

      if (new_type == TREE_TYPE (*tp))
        *walk_subtrees = 0;

      else if (TREE_CODE (*tp) == INTEGER_CST)
        *tp = build_int_cst_wide (new_type, TREE_INT_CST_LOW (*tp),
                                  TREE_INT_CST_HIGH (*tp));
      else
        {
          *tp = copy_node (*tp);
          TREE_TYPE (*tp) = new_type;
        }
    }

  /* Otherwise, just copy the node.  Note that copy_tree_r already
     knows not to copy VAR_DECLs, etc., so this is safe.  */
  else
    {
      /* Here we handle trees that are not completely rewritten.
         First we detect some inlining-induced bogosities for
         discarding.  */
      if (TREE_CODE (*tp) == MODIFY_EXPR
          && TREE_OPERAND (*tp, 0) == TREE_OPERAND (*tp, 1)
          && (auto_var_in_fn_p (TREE_OPERAND (*tp, 0), fn)))
        {
          /* Some assignments VAR = VAR; don't generate any rtl code
             and thus don't count as variable modification.  Avoid
             keeping bogosities like 0 = 0.  */
          tree decl = TREE_OPERAND (*tp, 0), value;
          tree *n;

          n = (tree *) pointer_map_contains (id->decl_map, decl);
          if (n)
            {
              value = *n;
              STRIP_TYPE_NOPS (value);
              if (TREE_CONSTANT (value) || TREE_READONLY (value))
                {
                  *tp = build_empty_stmt (EXPR_LOCATION (*tp));
                  return copy_tree_body_r (tp, walk_subtrees, data);
                }
            }
        }
      else if (TREE_CODE (*tp) == INDIRECT_REF)
        {
          /* Get rid of *& from inline substitutions that can happen when a
             pointer argument is an ADDR_EXPR.  */
          tree decl = TREE_OPERAND (*tp, 0);
          tree *n;

          n = (tree *) pointer_map_contains (id->decl_map, decl);
          if (n)
            {
              tree new_tree;
              tree old;
              /* If we happen to get an ADDR_EXPR in n->value, strip
                 it manually here as we'll eventually get ADDR_EXPRs
                 which lie about their types pointed to.  In this case
                 build_fold_indirect_ref wouldn't strip the INDIRECT_REF,
                 but we absolutely rely on that.  As fold_indirect_ref
                 does other useful transformations, try that first, though.  */
              tree type = TREE_TYPE (TREE_TYPE (*n));
              if (id->do_not_unshare)
                new_tree = *n;
              else
                new_tree = unshare_expr (*n);
              old = *tp;
              *tp = gimple_fold_indirect_ref (new_tree);
              if (! *tp)
                {
                  if (TREE_CODE (new_tree) == ADDR_EXPR)
                    {
                      *tp = fold_indirect_ref_1 (EXPR_LOCATION (new_tree),
                                                 type, new_tree);
                      /* ???  We should either assert here or build
                         a VIEW_CONVERT_EXPR instead of blindly leaking
                         incompatible types to our IL.  */
                      if (! *tp)
                        *tp = TREE_OPERAND (new_tree, 0);
                    }
                  else
                    {
                      *tp = build1 (INDIRECT_REF, type, new_tree);
                      TREE_THIS_VOLATILE (*tp) = TREE_THIS_VOLATILE (old);
                      TREE_SIDE_EFFECTS (*tp) = TREE_SIDE_EFFECTS (old);
                      TREE_READONLY (*tp) = TREE_READONLY (old);
                      TREE_THIS_NOTRAP (*tp) = TREE_THIS_NOTRAP (old);
                    }
                }
              *walk_subtrees = 0;
              return NULL;
            }
        }
      else if (TREE_CODE (*tp) == MEM_REF)
        {
          tree ptr = TREE_OPERAND (*tp, 0);
          tree type = remap_type (TREE_TYPE (*tp), id);
          tree old = *tp;

          /* We need to re-canonicalize MEM_REFs from inline substitutions
             that can happen when a pointer argument is an ADDR_EXPR.
             Recurse here manually to allow that.  */
          walk_tree (&ptr, copy_tree_body_r, data, NULL);
          *tp = fold_build2 (MEM_REF, type,
                             ptr, TREE_OPERAND (*tp, 1));
          TREE_THIS_NOTRAP (*tp) = TREE_THIS_NOTRAP (old);
          TREE_THIS_VOLATILE (*tp) = TREE_THIS_VOLATILE (old);
          TREE_SIDE_EFFECTS (*tp) = TREE_SIDE_EFFECTS (old);
          TREE_NO_WARNING (*tp) = TREE_NO_WARNING (old);
          *walk_subtrees = 0;
          return NULL;
        }

      /* Here is the "usual case".  Copy this tree node, and then
         tweak some special cases.  */
      copy_tree_r (tp, walk_subtrees, NULL);

      /* If EXPR has block defined, map it to newly constructed block.
         When inlining we want EXPRs without block appear in the block
         of function call if we are not remapping a type.  */
      if (EXPR_P (*tp))
        {
          new_block = id->remapping_type_depth == 0 ? id->block : NULL;
          if (TREE_BLOCK (*tp))
            {
              tree *n;
              n = (tree *) pointer_map_contains (id->decl_map,
                                                 TREE_BLOCK (*tp));
              if (n)
                new_block = *n;
            }
          TREE_SET_BLOCK (*tp, new_block);
        }

      if (TREE_CODE (*tp) != OMP_CLAUSE)
        TREE_TYPE (*tp) = remap_type (TREE_TYPE (*tp), id);

      /* The copied TARGET_EXPR has never been expanded, even if the
         original node was expanded already.  */
      if (TREE_CODE (*tp) == TARGET_EXPR && TREE_OPERAND (*tp, 3))
        {
          TREE_OPERAND (*tp, 1) = TREE_OPERAND (*tp, 3);
          TREE_OPERAND (*tp, 3) = NULL_TREE;
        }

      /* Variable substitution need not be simple.  In particular, the
         INDIRECT_REF substitution above.  Make sure that TREE_CONSTANT
         and friends are up-to-date.  */
      else if (TREE_CODE (*tp) == ADDR_EXPR)
        {
          int invariant = is_gimple_min_invariant (*tp);
          walk_tree (&TREE_OPERAND (*tp, 0), copy_tree_body_r, id, NULL);

          /* Handle the case where we substituted an INDIRECT_REF
             into the operand of the ADDR_EXPR.  */
          if (TREE_CODE (TREE_OPERAND (*tp, 0)) == INDIRECT_REF)
            *tp = TREE_OPERAND (TREE_OPERAND (*tp, 0), 0);
          else
            recompute_tree_invariant_for_addr_expr (*tp);

          /* If this used to be invariant, but is not any longer,
             then regimplification is probably needed.  */
          if (invariant && !is_gimple_min_invariant (*tp))
            id->regimplify = true;

          *walk_subtrees = 0;
        }
    }

  /* Keep iterating.  */
  return NULL_TREE;
}
/* Helper for remap_gimple_stmt.  Given an EH region number for the
   source function, map that to the duplicate EH region number in
   the destination function.  */

static int
remap_eh_region_nr (int old_nr, copy_body_data *id)
{
  eh_region old_r, new_r;
  void **slot;

  old_r = get_eh_region_from_number_fn (id->src_cfun, old_nr);
  slot = pointer_map_contains (id->eh_map, old_r);
  new_r = (eh_region) *slot;

  return new_r->index;
}
/* Similar, but operate on INTEGER_CSTs.  */

static tree
remap_eh_region_tree_nr (tree old_t_nr, copy_body_data *id)
{
  int old_nr, new_nr;

  old_nr = tree_low_cst (old_t_nr, 0);
  new_nr = remap_eh_region_nr (old_nr, id);

  return build_int_cst (integer_type_node, new_nr);
}
/* Helper for copy_bb.  Remap statement STMT using the inlining
   information in ID.  Return the new statement copy.  */

static gimple
remap_gimple_stmt (gimple stmt, copy_body_data *id)
{
  gimple copy = NULL;
  struct walk_stmt_info wi;
  bool skip_first = false;

  /* Begin by recognizing trees that we'll completely rewrite for the
     inlining context.  Our output for these trees is completely
     different from our input (e.g. RETURN_EXPR is deleted, and morphs
     into an edge).  Further down, we'll handle trees that get
     duplicated and/or tweaked.  */

  /* When requested, GIMPLE_RETURNs should be transformed to just the
     contained GIMPLE_ASSIGN.  The branch semantics of the return will
     be handled elsewhere by manipulating the CFG rather than the
     statement.  */
  if (gimple_code (stmt) == GIMPLE_RETURN && id->transform_return_to_modify)
    {
      tree retval = gimple_return_retval (stmt);

      /* If we're returning something, just turn that into an
         assignment into the equivalent of the original RESULT_DECL.
         If RETVAL is just the result decl, the result decl has
         already been set (e.g. a recent "foo (&result_decl, ...)");
         just toss the entire GIMPLE_RETURN.  */
      if (retval
          && (TREE_CODE (retval) != RESULT_DECL
              && (TREE_CODE (retval) != SSA_NAME
                  || ! SSA_NAME_VAR (retval)
                  || TREE_CODE (SSA_NAME_VAR (retval)) != RESULT_DECL)))
        {
          copy = gimple_build_assign (id->retvar, retval);
          /* id->retvar is already substituted.  Skip it on later remapping.  */
          skip_first = true;
        }
      else
        return gimple_build_nop ();
    }
  else if (gimple_has_substatements (stmt))
    {
      gimple_seq s1, s2;

      /* When cloning bodies from the C++ front end, we will be handed bodies
         in High GIMPLE form.  Handle here all the High GIMPLE statements that
         have embedded statements.  */
      switch (gimple_code (stmt))
        {
        case GIMPLE_BIND:
          copy = copy_gimple_bind (stmt, id);
          break;

        case GIMPLE_CATCH:
          s1 = remap_gimple_seq (gimple_catch_handler (stmt), id);
          copy = gimple_build_catch (gimple_catch_types (stmt), s1);
          break;

        case GIMPLE_EH_FILTER:
          s1 = remap_gimple_seq (gimple_eh_filter_failure (stmt), id);
          copy = gimple_build_eh_filter (gimple_eh_filter_types (stmt), s1);
          break;

        case GIMPLE_TRY:
          s1 = remap_gimple_seq (gimple_try_eval (stmt), id);
          s2 = remap_gimple_seq (gimple_try_cleanup (stmt), id);
          copy = gimple_build_try (s1, s2, gimple_try_kind (stmt));
          break;

        case GIMPLE_WITH_CLEANUP_EXPR:
          s1 = remap_gimple_seq (gimple_wce_cleanup (stmt), id);
          copy = gimple_build_wce (s1);
          break;

        case GIMPLE_OMP_PARALLEL:
          s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
          copy = gimple_build_omp_parallel
                   (s1,
                    gimple_omp_parallel_clauses (stmt),
                    gimple_omp_parallel_child_fn (stmt),
                    gimple_omp_parallel_data_arg (stmt));
          break;

        case GIMPLE_OMP_TASK:
          s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
          copy = gimple_build_omp_task
                   (s1,
                    gimple_omp_task_clauses (stmt),
                    gimple_omp_task_child_fn (stmt),
                    gimple_omp_task_data_arg (stmt),
                    gimple_omp_task_copy_fn (stmt),
                    gimple_omp_task_arg_size (stmt),
                    gimple_omp_task_arg_align (stmt));
          break;

        case GIMPLE_OMP_FOR:
          s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
          s2 = remap_gimple_seq (gimple_omp_for_pre_body (stmt), id);
          copy = gimple_build_omp_for (s1, gimple_omp_for_clauses (stmt),
                                       gimple_omp_for_collapse (stmt), s2);
          {
            size_t i;
            for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
              {
                gimple_omp_for_set_index (copy, i,
                                          gimple_omp_for_index (stmt, i));
                gimple_omp_for_set_initial (copy, i,
                                            gimple_omp_for_initial (stmt, i));
                gimple_omp_for_set_final (copy, i,
                                          gimple_omp_for_final (stmt, i));
                gimple_omp_for_set_incr (copy, i,
                                         gimple_omp_for_incr (stmt, i));
                gimple_omp_for_set_cond (copy, i,
                                         gimple_omp_for_cond (stmt, i));
              }
          }
          break;

        case GIMPLE_OMP_MASTER:
          s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
          copy = gimple_build_omp_master (s1);
          break;

        case GIMPLE_OMP_ORDERED:
          s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
          copy = gimple_build_omp_ordered (s1);
          break;

        case GIMPLE_OMP_SECTION:
          s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
          copy = gimple_build_omp_section (s1);
          break;

        case GIMPLE_OMP_SECTIONS:
          s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
          copy = gimple_build_omp_sections
                   (s1, gimple_omp_sections_clauses (stmt));
          break;

        case GIMPLE_OMP_SINGLE:
          s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
          copy = gimple_build_omp_single
                   (s1, gimple_omp_single_clauses (stmt));
          break;

        case GIMPLE_OMP_CRITICAL:
          s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
          copy
            = gimple_build_omp_critical (s1, gimple_omp_critical_name (stmt));
          break;

        case GIMPLE_TRANSACTION:
          s1 = remap_gimple_seq (gimple_transaction_body (stmt), id);
          copy = gimple_build_transaction (s1, gimple_transaction_label (stmt));
          gimple_transaction_set_subcode (copy, gimple_transaction_subcode (stmt));
          break;

        default:
          gcc_unreachable ();
        }
    }
  else
    {
      if (gimple_assign_copy_p (stmt)
          && gimple_assign_lhs (stmt) == gimple_assign_rhs1 (stmt)
          && auto_var_in_fn_p (gimple_assign_lhs (stmt), id->src_fn))
        {
          /* Here we handle statements that are not completely rewritten.
             First we detect some inlining-induced bogosities for
             discarding.  */

          /* Some assignments VAR = VAR; don't generate any rtl code
             and thus don't count as variable modification.  Avoid
             keeping bogosities like 0 = 0.  */
          tree decl = gimple_assign_lhs (stmt), value;
          tree *n;

          n = (tree *) pointer_map_contains (id->decl_map, decl);
          if (n)
            {
              value = *n;
              STRIP_TYPE_NOPS (value);
              if (TREE_CONSTANT (value) || TREE_READONLY (value))
                return gimple_build_nop ();
            }
        }

      /* For *ptr_N ={v} {CLOBBER}, if ptr_N is an SSA_NAME defined
         in a block that we aren't copying during tree_function_versioning,
         just drop the clobber stmt.  */
      if (id->blocks_to_copy && gimple_clobber_p (stmt))
        {
          tree lhs = gimple_assign_lhs (stmt);
          if (TREE_CODE (lhs) == MEM_REF
              && TREE_CODE (TREE_OPERAND (lhs, 0)) == SSA_NAME)
            {
              gimple def_stmt = SSA_NAME_DEF_STMT (TREE_OPERAND (lhs, 0));
              if (gimple_bb (def_stmt)
                  && !bitmap_bit_p (id->blocks_to_copy,
                                    gimple_bb (def_stmt)->index))
                return gimple_build_nop ();
            }
        }

      if (gimple_debug_bind_p (stmt))
        {
          copy = gimple_build_debug_bind (gimple_debug_bind_get_var (stmt),
                                          gimple_debug_bind_get_value (stmt),
                                          stmt);
          id->debug_stmts.safe_push (copy);
          return copy;
        }
      if (gimple_debug_source_bind_p (stmt))
        {
          copy = gimple_build_debug_source_bind
                   (gimple_debug_source_bind_get_var (stmt),
                    gimple_debug_source_bind_get_value (stmt), stmt);
          id->debug_stmts.safe_push (copy);
          return copy;
        }

      /* Create a new deep copy of the statement.  */
      copy = gimple_copy (stmt);

      /* Remap the region numbers for __builtin_eh_{pointer,filter},
         RESX and EH_DISPATCH.  */
      if (id->eh_map)
        switch (gimple_code (copy))
          {
          case GIMPLE_CALL:
            {
              tree r, fndecl = gimple_call_fndecl (copy);
              if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
                switch (DECL_FUNCTION_CODE (fndecl))
                  {
                  case BUILT_IN_EH_COPY_VALUES:
                    r = gimple_call_arg (copy, 1);
                    r = remap_eh_region_tree_nr (r, id);
                    gimple_call_set_arg (copy, 1, r);
                    /* FALLTHRU */

                  case BUILT_IN_EH_POINTER:
                  case BUILT_IN_EH_FILTER:
                    r = gimple_call_arg (copy, 0);
                    r = remap_eh_region_tree_nr (r, id);
                    gimple_call_set_arg (copy, 0, r);
                    break;

                  default:
                    break;
                  }

              /* Reset alias info if we didn't apply measures to
                 keep it valid over inlining by setting DECL_PT_UID.  */
              if (!id->src_cfun->gimple_df
                  || !id->src_cfun->gimple_df->ipa_pta)
                gimple_call_reset_alias_info (copy);
            }
            break;

          case GIMPLE_RESX:
            {
              int r = gimple_resx_region (copy);
              r = remap_eh_region_nr (r, id);
              gimple_resx_set_region (copy, r);
            }
            break;

          case GIMPLE_EH_DISPATCH:
            {
              int r = gimple_eh_dispatch_region (copy);
              r = remap_eh_region_nr (r, id);
              gimple_eh_dispatch_set_region (copy, r);
            }
            break;

          default:
            break;
          }
    }

  /* If STMT has a block defined, map it to the newly constructed
     block.  */
  if (gimple_block (copy))
    {
      tree *n;
      n = (tree *) pointer_map_contains (id->decl_map, gimple_block (copy));
      gcc_assert (n);
      gimple_set_block (copy, *n);
    }

  if (gimple_debug_bind_p (copy) || gimple_debug_source_bind_p (copy))
    return copy;

  /* Remap all the operands in COPY.  */
  memset (&wi, 0, sizeof (wi));
  wi.info = id;
  if (skip_first)
    walk_tree (gimple_op_ptr (copy, 1), remap_gimple_op_r, &wi, NULL);
  else
    walk_gimple_op (copy, remap_gimple_op_r, &wi);

  /* Clear the copied virtual operands.  We are not remapping them here
     but are going to recreate them from scratch.  */
  if (gimple_has_mem_ops (copy))
    {
      gimple_set_vdef (copy, NULL_TREE);
      gimple_set_vuse (copy, NULL_TREE);
    }

  return copy;
}
/* Copy basic block, scale profile accordingly.  Edges will be taken care of
   later.  */

static basic_block
copy_bb (copy_body_data *id, basic_block bb, int frequency_scale,
         gcov_type count_scale)
{
  gimple_stmt_iterator gsi, copy_gsi, seq_gsi;
  basic_block copy_basic_block;
  tree decl;
  gcov_type freq;
  basic_block prev;

  /* Search for previous copied basic block.  */
  prev = bb->prev_bb;
  while (!prev->aux)
    prev = prev->prev_bb;

  /* create_basic_block() will append every new block to
     basic_block_info automatically.  */
  copy_basic_block = create_basic_block (NULL, (void *) 0,
                                         (basic_block) prev->aux);
  copy_basic_block->count = apply_scale (bb->count, count_scale);

  /* We are going to rebuild frequencies from scratch.  These values
     have just small importance to drive canonicalize_loop_headers.  */
  freq = apply_scale ((gcov_type)bb->frequency, frequency_scale);

  /* We recompute frequencies after inlining, so this is quite safe.  */
  if (freq > BB_FREQ_MAX)
    freq = BB_FREQ_MAX;
  copy_basic_block->frequency = freq;

  copy_gsi = gsi_start_bb (copy_basic_block);

  for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      gimple stmt = gsi_stmt (gsi);
      gimple orig_stmt = stmt;

      id->regimplify = false;
      stmt = remap_gimple_stmt (stmt, id);
      if (gimple_nop_p (stmt))
        continue;

      gimple_duplicate_stmt_histograms (cfun, stmt, id->src_cfun, orig_stmt);
      seq_gsi = copy_gsi;

      /* With return slot optimization we can end up with
         non-gimple (foo *)&this->m, fix that here.  */
      if (is_gimple_assign (stmt)
          && gimple_assign_rhs_code (stmt) == NOP_EXPR
          && !is_gimple_val (gimple_assign_rhs1 (stmt)))
        {
          tree new_rhs;
          new_rhs = force_gimple_operand_gsi (&seq_gsi,
                                              gimple_assign_rhs1 (stmt),
                                              true, NULL, false,
                                              GSI_CONTINUE_LINKING);
          gimple_assign_set_rhs1 (stmt, new_rhs);
          id->regimplify = false;
        }

      gsi_insert_after (&seq_gsi, stmt, GSI_NEW_STMT);

      if (id->regimplify)
        gimple_regimplify_operands (stmt, &seq_gsi);

      /* If copy_basic_block has been empty at the start of this iteration,
         call gsi_start_bb again to get at the newly added statements.  */
      if (gsi_end_p (copy_gsi))
        copy_gsi = gsi_start_bb (copy_basic_block);
      else
        gsi_next (&copy_gsi);

      /* Process the new statement.  The call to gimple_regimplify_operands
         possibly turned the statement into multiple statements, we
         need to process all of them.  */
      do
        {
          tree fn;

          stmt = gsi_stmt (copy_gsi);
          if (is_gimple_call (stmt)
              && gimple_call_va_arg_pack_p (stmt)
              && id->gimple_call)
            {
              /* __builtin_va_arg_pack () should be replaced by
                 all arguments corresponding to ... in the caller.  */
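              /* For instance, when a variadic function f (int x, ...)
                 that uses __builtin_va_arg_pack () is inlined at a call
                 f (a, 1, 2), the anonymous arguments 1 and 2 are the
                 ones appended here in place of the pack.  */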
              tree p;
              gimple new_call;
              vec<tree> argarray;
              size_t nargs = gimple_call_num_args (id->gimple_call);
              size_t n;

              for (p = DECL_ARGUMENTS (id->src_fn); p; p = DECL_CHAIN (p))
                nargs--;

              /* Create the new array of arguments.  */
              n = nargs + gimple_call_num_args (stmt);
              argarray.create (n);
              argarray.safe_grow_cleared (n);

              /* Copy all the arguments before '...'  */
              memcpy (argarray.address (),
                      gimple_call_arg_ptr (stmt, 0),
                      gimple_call_num_args (stmt) * sizeof (tree));

              /* Append the arguments passed in '...'  */
              memcpy (argarray.address () + gimple_call_num_args (stmt),
                      gimple_call_arg_ptr (id->gimple_call, 0)
                      + (gimple_call_num_args (id->gimple_call) - nargs),
                      nargs * sizeof (tree));

              new_call = gimple_build_call_vec (gimple_call_fn (stmt),
                                                argarray);

              argarray.release ();

              /* Copy all GIMPLE_CALL flags, location and block, except
                 GF_CALL_VA_ARG_PACK.  */
              gimple_call_copy_flags (new_call, stmt);
              gimple_call_set_va_arg_pack (new_call, false);
              gimple_set_location (new_call, gimple_location (stmt));
              gimple_set_block (new_call, gimple_block (stmt));
              gimple_call_set_lhs (new_call, gimple_call_lhs (stmt));

              gsi_replace (&copy_gsi, new_call, false);
              stmt = new_call;
            }
          else if (is_gimple_call (stmt)
                   && id->gimple_call
                   && (decl = gimple_call_fndecl (stmt))
                   && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
                   && DECL_FUNCTION_CODE (decl) == BUILT_IN_VA_ARG_PACK_LEN)
            {
              /* __builtin_va_arg_pack_len () should be replaced by
                 the number of anonymous arguments.  */
              size_t nargs = gimple_call_num_args (id->gimple_call);
              tree count, p;
              gimple new_stmt;

              for (p = DECL_ARGUMENTS (id->src_fn); p; p = DECL_CHAIN (p))
                nargs--;

              count = build_int_cst (integer_type_node, nargs);
              new_stmt = gimple_build_assign (gimple_call_lhs (stmt), count);
              gsi_replace (&copy_gsi, new_stmt, false);
              stmt = new_stmt;
            }

          /* Statements produced by inlining can be unfolded, especially
             when we constant propagated some operands.  We can't fold
             them right now for two reasons:
             1) folding requires SSA_NAME_DEF_STMTs to be correct
             2) we can't change function calls to builtins.
             So we just mark statements for later folding.  We mark
             all new statements, instead of just the statements that have
             changed by some nontrivial substitution, so that even
             statements made foldable indirectly are updated.  If this
             turns out to be expensive, copy_body can be told to watch
             for nontrivial changes.  */
          if (id->statements_to_fold)
            pointer_set_insert (id->statements_to_fold, stmt);

          /* We're duplicating a CALL_EXPR.  Find any corresponding
             callgraph edges and update or duplicate them.  */
          if (is_gimple_call (stmt))
            {
              struct cgraph_edge *edge;
              int flags;

              switch (id->transform_call_graph_edges)
                {
                case CB_CGE_DUPLICATE:
                  edge = cgraph_edge (id->src_node, orig_stmt);
                  if (edge)
                    {
                      int edge_freq = edge->frequency;
                      int new_freq;
                      struct cgraph_edge *old_edge = edge;
                      edge = cgraph_clone_edge (edge, id->dst_node, stmt,
                                                gimple_uid (stmt),
                                                REG_BR_PROB_BASE,
                                                CGRAPH_FREQ_BASE, true);
                      /* We could also just rescale the frequency, but
                         doing so would introduce roundoff errors and make
                         the verifier unhappy.  */
                      new_freq = compute_call_stmt_bb_frequency
                                   (id->dst_node->symbol.decl,
                                    copy_basic_block);

                      /* Speculative calls consist of two edges - direct and
                         indirect.  Duplicate the whole thing and distribute
                         frequencies accordingly.  */
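                      /* For example, if the original direct and indirect
                         edges had frequencies 30 and 10, NEW_FREQ is split
                         3:1 between the two cloned edges, each result
                         capped at CGRAPH_FREQ_MAX.  */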
                      if (edge->speculative)
                        {
                          struct cgraph_edge *direct, *indirect;
                          struct ipa_ref *ref;

                          gcc_assert (!edge->indirect_unknown_callee);
                          cgraph_speculative_call_info (old_edge, direct,
                                                        indirect, ref);
                          indirect = cgraph_clone_edge (indirect, id->dst_node,
                                                        stmt, gimple_uid (stmt),
                                                        REG_BR_PROB_BASE,
                                                        CGRAPH_FREQ_BASE, true);
                          if (old_edge->frequency + indirect->frequency)
                            {
                              edge->frequency
                                = MIN (RDIV ((gcov_type)new_freq
                                             * old_edge->frequency,
                                             (old_edge->frequency
                                              + indirect->frequency)),
                                       CGRAPH_FREQ_MAX);
                              indirect->frequency
                                = MIN (RDIV ((gcov_type)new_freq
                                             * indirect->frequency,
                                             (old_edge->frequency
                                              + indirect->frequency)),
                                       CGRAPH_FREQ_MAX);
                            }
                          ipa_clone_ref (ref, (symtab_node)id->dst_node, stmt);
                        }
                      else
                        {
                          edge->frequency = new_freq;
                          if (dump_file
                              && profile_status_for_function (cfun)
                                 != PROFILE_ABSENT
                              && (edge_freq > edge->frequency + 10
                                  || edge_freq < edge->frequency - 10))
                            {
                              fprintf (dump_file, "Edge frequency estimated by "
                                       "cgraph %i diverges from inliner's "
                                       "estimate %i\n",
                                       edge_freq,
                                       edge->frequency);
                              fprintf (dump_file,
                                       "Orig bb: %i, orig bb freq %i, new bb freq %i\n",
                                       bb->index,
                                       bb->frequency,
                                       copy_basic_block->frequency);
                            }
                        }
                    }
                  break;

                case CB_CGE_MOVE_CLONES:
                  cgraph_set_call_stmt_including_clones (id->dst_node,
                                                         orig_stmt, stmt);
                  edge = cgraph_edge (id->dst_node, stmt);
                  break;

                case CB_CGE_MOVE:
                  edge = cgraph_edge (id->dst_node, orig_stmt);
                  if (edge)
                    cgraph_set_call_stmt (edge, stmt);
                  break;

                default:
                  gcc_unreachable ();
                }

              /* Constant propagation on arguments done during inlining
                 may create new direct calls.  Produce an edge for them.  */
              if ((!edge
                   || (edge->indirect_inlining_edge
                       && id->transform_call_graph_edges == CB_CGE_MOVE_CLONES))
                  && id->dst_node->symbol.definition
                  && (fn = gimple_call_fndecl (stmt)) != NULL)
                {
                  struct cgraph_node *dest = cgraph_get_node (fn);

                  /* We have a missing edge in the callgraph.  This can
                     happen when previous inlining turned an indirect call
                     into a direct call by constant propagating arguments,
                     or when we are producing a dead clone (for further
                     cloning).  In all other cases we hit a bug (incorrect
                     node sharing is the most common reason for missing
                     edges).  */
                  gcc_assert (!dest->symbol.definition
                              || dest->symbol.address_taken
                              || !id->src_node->symbol.definition
                              || !id->dst_node->symbol.definition);
                  if (id->transform_call_graph_edges == CB_CGE_MOVE_CLONES)
                    cgraph_create_edge_including_clones
                      (id->dst_node, dest, orig_stmt, stmt, bb->count,
                       compute_call_stmt_bb_frequency (id->dst_node->symbol.decl,
                                                       copy_basic_block),
                       CIF_ORIGINALLY_INDIRECT_CALL);
                  else
                    cgraph_create_edge (id->dst_node, dest, stmt,
                                        bb->count,
                                        compute_call_stmt_bb_frequency
                                          (id->dst_node->symbol.decl,
                                           copy_basic_block))->inline_failed
                      = CIF_ORIGINALLY_INDIRECT_CALL;
                  if (dump_file)
                    {
                      fprintf (dump_file, "Created new direct edge to %s\n",
                               cgraph_node_name (dest));
                    }
                }

              flags = gimple_call_flags (stmt);
              if (flags & ECF_MAY_BE_ALLOCA)
                cfun->calls_alloca = true;
              if (flags & ECF_RETURNS_TWICE)
                cfun->calls_setjmp = true;
            }

          maybe_duplicate_eh_stmt_fn (cfun, stmt, id->src_cfun, orig_stmt,
                                      id->eh_map, id->eh_lp_nr);

          if (gimple_in_ssa_p (cfun) && !is_gimple_debug (stmt))
            {
              ssa_op_iter i;
              tree def;

              FOR_EACH_SSA_TREE_OPERAND (def, stmt, i, SSA_OP_DEF)
                if (TREE_CODE (def) == SSA_NAME)
                  SSA_NAME_DEF_STMT (def) = stmt;
            }

          gsi_next (&copy_gsi);
        }
      while (!gsi_end_p (copy_gsi));

      copy_gsi = gsi_last_bb (copy_basic_block);
    }

  return copy_basic_block;
}
1840 /* Inserting a Single Entry Multiple Exit region in SSA form into code in
1841 SSA form is quite easy, since the dominator relationship for the old
1842 basic blocks does not change.
1844 There is, however, an exception: inlining might change the dominator
1845 relation across EH edges from basic blocks within the inlined function
1846 to landing pads in the function we inline into.
1848 The function fills in PHI_RESULTs of such PHI nodes if they refer
1849 to gimple regs. Otherwise, it marks the PHI_RESULT of such
1850 PHI nodes for renaming. For non-gimple regs, renaming is safe: the
1851 EH edges are abnormal and SSA_NAME_OCCURS_IN_ABNORMAL_PHI must be
1852 set, which means that there will be no overlapping live ranges
1853 for the underlying symbol.
1855 This might change in the future if we allow redirecting of EH edges;
1856 we might then want to change the way we build the CFG pre-inlining
1857 to include all the possible edges. */
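/* Informal sketch of the situation handled below: an inlined
   statement that can throw gains an EH edge to a landing pad that
   already exists in the caller. That landing pad's PHI nodes need
   arguments for the new edge; these are copied from the argument on
   the pre-existing edge coming from RET_BB. */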
1858 static void
1859 update_ssa_across_abnormal_edges (basic_block bb, basic_block ret_bb,
1860 bool can_throw, bool nonlocal_goto)
1862 edge e;
1863 edge_iterator ei;
1865 FOR_EACH_EDGE (e, ei, bb->succs)
1866 if (!e->dest->aux
1867 || ((basic_block)e->dest->aux)->index == ENTRY_BLOCK)
1869 gimple phi;
1870 gimple_stmt_iterator si;
1872 if (!nonlocal_goto)
1873 gcc_assert (e->flags & EDGE_EH);
1875 if (!can_throw)
1876 gcc_assert (!(e->flags & EDGE_EH));
1878 for (si = gsi_start_phis (e->dest); !gsi_end_p (si); gsi_next (&si))
1880 edge re;
1882 phi = gsi_stmt (si);
1884 /* For abnormal goto/call edges the receiver can be the
1885 ENTRY_BLOCK. Do not assert this cannot happen. */
1887 gcc_assert ((e->flags & EDGE_EH)
1888 || SSA_NAME_OCCURS_IN_ABNORMAL_PHI (PHI_RESULT (phi)));
1890 re = find_edge (ret_bb, e->dest);
1891 gcc_checking_assert (re);
1892 gcc_assert ((re->flags & (EDGE_EH | EDGE_ABNORMAL))
1893 == (e->flags & (EDGE_EH | EDGE_ABNORMAL)));
1895 SET_USE (PHI_ARG_DEF_PTR_FROM_EDGE (phi, e),
1896 USE_FROM_PTR (PHI_ARG_DEF_PTR_FROM_EDGE (phi, re)));
1902 /* Copy edges from BB into its copy constructed earlier, scale profile
1903 accordingly. Edges will be taken care of later. Assume aux
1904 pointers point to the copies of each BB. Return true if any
1905 debug stmts are left after a statement that must end the basic block. */
1907 static bool
1908 copy_edges_for_bb (basic_block bb, gcov_type count_scale, basic_block ret_bb,
1909 bool can_make_abnormal_goto)
1911 basic_block new_bb = (basic_block) bb->aux;
1912 edge_iterator ei;
1913 edge old_edge;
1914 gimple_stmt_iterator si;
1915 int flags;
1916 bool need_debug_cleanup = false;
1918 /* Use the indices from the original blocks to create edges for the
1919 new ones. */
1920 FOR_EACH_EDGE (old_edge, ei, bb->succs)
1921 if (!(old_edge->flags & EDGE_EH))
1923 edge new_edge;
1925 flags = old_edge->flags;
1927 /* Return edges do get a FALLTHRU flag when they get inlined. */
1928 if (old_edge->dest->index == EXIT_BLOCK && !old_edge->flags
1929 && old_edge->dest->aux != EXIT_BLOCK_PTR)
1930 flags |= EDGE_FALLTHRU;
1931 new_edge = make_edge (new_bb, (basic_block) old_edge->dest->aux, flags);
1932 new_edge->count = apply_scale (old_edge->count, count_scale);
1933 new_edge->probability = old_edge->probability;
1936 if (bb->index == ENTRY_BLOCK || bb->index == EXIT_BLOCK)
1937 return false;
1939 for (si = gsi_start_bb (new_bb); !gsi_end_p (si);)
1941 gimple copy_stmt;
1942 bool can_throw, nonlocal_goto;
1944 copy_stmt = gsi_stmt (si);
1945 if (!is_gimple_debug (copy_stmt))
1946 update_stmt (copy_stmt);
1948 /* Do this before the possible split_block. */
1949 gsi_next (&si);
1951 /* If this tree could throw an exception, there are two
1952 cases where we need to add abnormal edge(s): the
1953 tree wasn't in a region and there is a "current
1954 region" in the caller; or the original tree had
1955 EH edges. In both cases split the block after the tree,
1956 and add abnormal edge(s) as needed; we need both
1957 those from the callee and the caller.
1958 We check whether the copy can throw, because the const
1959 propagation can change an INDIRECT_REF which throws
1960 into a COMPONENT_REF which doesn't. If the copy
1961 can throw, the original could also throw. */
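/* Informal example: propagating &s into p can turn "*p" (which may
   trap) into "s.f" (which cannot), so the copy may throw strictly
   less often than the original statement did. */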
1962 can_throw = stmt_can_throw_internal (copy_stmt);
1963 nonlocal_goto = stmt_can_make_abnormal_goto (copy_stmt);
1965 if (can_throw || nonlocal_goto)
1967 if (!gsi_end_p (si))
1969 while (!gsi_end_p (si) && is_gimple_debug (gsi_stmt (si)))
1970 gsi_next (&si);
1971 if (gsi_end_p (si))
1972 need_debug_cleanup = true;
1974 if (!gsi_end_p (si))
1975 /* Note that bb's predecessor edges aren't necessarily
1976 right at this point; split_block doesn't care. */
1978 edge e = split_block (new_bb, copy_stmt);
1980 new_bb = e->dest;
1981 new_bb->aux = e->src->aux;
1982 si = gsi_start_bb (new_bb);
1986 if (gimple_code (copy_stmt) == GIMPLE_EH_DISPATCH)
1987 make_eh_dispatch_edges (copy_stmt);
1988 else if (can_throw)
1989 make_eh_edges (copy_stmt);
1991 /* If the call we inline cannot make an abnormal goto, do not add
1992 additional abnormal edges; only retain those already present
1993 in the original function body. */
1994 nonlocal_goto &= can_make_abnormal_goto;
1995 if (nonlocal_goto)
1996 make_abnormal_goto_edges (gimple_bb (copy_stmt), true);
1998 if ((can_throw || nonlocal_goto)
1999 && gimple_in_ssa_p (cfun))
2000 update_ssa_across_abnormal_edges (gimple_bb (copy_stmt), ret_bb,
2001 can_throw, nonlocal_goto);
2003 return need_debug_cleanup;
2006 /* Copy the PHIs. All blocks and edges are copied, some blocks
2007 were possibly split and new outgoing EH edges inserted.
2008 BB points to the block of the original function and AUX pointers link
2009 the original and newly copied blocks. */
2011 static void
2012 copy_phis_for_bb (basic_block bb, copy_body_data *id)
2014 basic_block const new_bb = (basic_block) bb->aux;
2015 edge_iterator ei;
2016 gimple phi;
2017 gimple_stmt_iterator si;
2018 edge new_edge;
2019 bool inserted = false;
2021 for (si = gsi_start_phis (bb); !gsi_end_p (si); gsi_next (&si))
2023 tree res, new_res;
2024 gimple new_phi;
2026 phi = gsi_stmt (si);
2027 res = PHI_RESULT (phi);
2028 new_res = res;
2029 if (!virtual_operand_p (res))
2031 walk_tree (&new_res, copy_tree_body_r, id, NULL);
2032 new_phi = create_phi_node (new_res, new_bb);
2033 FOR_EACH_EDGE (new_edge, ei, new_bb->preds)
2035 edge old_edge = find_edge ((basic_block) new_edge->src->aux, bb);
2036 tree arg;
2037 tree new_arg;
2038 edge_iterator ei2;
2039 location_t locus;
2041 /* When doing partial cloning, we allow PHIs on the entry block
2042 as long as all the arguments are the same. Find any input
2043 edge to find the argument to copy. */
2044 if (!old_edge)
2045 FOR_EACH_EDGE (old_edge, ei2, bb->preds)
2046 if (!old_edge->src->aux)
2047 break;
2049 arg = PHI_ARG_DEF_FROM_EDGE (phi, old_edge);
2050 new_arg = arg;
2051 walk_tree (&new_arg, copy_tree_body_r, id, NULL);
2052 gcc_assert (new_arg);
2053 /* With return slot optimization we can end up with
2054 non-gimple (foo *)&this->m, fix that here. */
2055 if (TREE_CODE (new_arg) != SSA_NAME
2056 && TREE_CODE (new_arg) != FUNCTION_DECL
2057 && !is_gimple_val (new_arg))
2059 gimple_seq stmts = NULL;
2060 new_arg = force_gimple_operand (new_arg, &stmts, true, NULL);
2061 gsi_insert_seq_on_edge (new_edge, stmts);
2062 inserted = true;
2064 locus = gimple_phi_arg_location_from_edge (phi, old_edge);
2065 if (LOCATION_BLOCK (locus))
2067 tree *n;
2068 n = (tree *) pointer_map_contains (id->decl_map,
2069 LOCATION_BLOCK (locus));
2070 gcc_assert (n);
2071 locus = COMBINE_LOCATION_DATA (line_table, locus, *n);
2073 else
2074 locus = LOCATION_LOCUS (locus);
2076 add_phi_arg (new_phi, new_arg, new_edge, locus);
2081 /* Commit the delayed edge insertions. */
2082 if (inserted)
2083 FOR_EACH_EDGE (new_edge, ei, new_bb->preds)
2084 gsi_commit_one_edge_insert (new_edge, NULL);
2088 /* Wrapper for remap_decl so it can be used as a callback. */
2090 static tree
2091 remap_decl_1 (tree decl, void *data)
2093 return remap_decl (decl, (copy_body_data *) data);
2096 /* Build the struct function and associated data structures for the new
2097 clone NEW_FNDECL to be built. CALLEE_FNDECL is the original. The function
2098 changes cfun to the function of new_fndecl (and current_function_decl too). */
2100 static void
2101 initialize_cfun (tree new_fndecl, tree callee_fndecl, gcov_type count)
2103 struct function *src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
2104 gcov_type count_scale;
2106 if (!DECL_ARGUMENTS (new_fndecl))
2107 DECL_ARGUMENTS (new_fndecl) = DECL_ARGUMENTS (callee_fndecl);
2108 if (!DECL_RESULT (new_fndecl))
2109 DECL_RESULT (new_fndecl) = DECL_RESULT (callee_fndecl);
2111 if (ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count)
2112 count_scale
2113 = GCOV_COMPUTE_SCALE (count,
2114 ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count);
2115 else
2116 count_scale = REG_BR_PROB_BASE;
2118 /* Register specific tree functions. */
2119 gimple_register_cfg_hooks ();
2121 /* Get clean struct function. */
2122 push_struct_function (new_fndecl);
2124 /* We will rebuild these, so just sanity check that they are empty. */
2125 gcc_assert (VALUE_HISTOGRAMS (cfun) == NULL);
2126 gcc_assert (cfun->local_decls == NULL);
2127 gcc_assert (cfun->cfg == NULL);
2128 gcc_assert (cfun->decl == new_fndecl);
2130 /* Copy items we preserve during cloning. */
2131 cfun->static_chain_decl = src_cfun->static_chain_decl;
2132 cfun->nonlocal_goto_save_area = src_cfun->nonlocal_goto_save_area;
2133 cfun->function_end_locus = src_cfun->function_end_locus;
2134 cfun->curr_properties = src_cfun->curr_properties;
2135 cfun->last_verified = src_cfun->last_verified;
2136 cfun->va_list_gpr_size = src_cfun->va_list_gpr_size;
2137 cfun->va_list_fpr_size = src_cfun->va_list_fpr_size;
2138 cfun->has_nonlocal_label = src_cfun->has_nonlocal_label;
2139 cfun->stdarg = src_cfun->stdarg;
2140 cfun->after_inlining = src_cfun->after_inlining;
2141 cfun->can_throw_non_call_exceptions
2142 = src_cfun->can_throw_non_call_exceptions;
2143 cfun->can_delete_dead_exceptions = src_cfun->can_delete_dead_exceptions;
2144 cfun->returns_struct = src_cfun->returns_struct;
2145 cfun->returns_pcc_struct = src_cfun->returns_pcc_struct;
2147 init_empty_tree_cfg ();
2149 profile_status_for_function (cfun) = profile_status_for_function (src_cfun);
2150 ENTRY_BLOCK_PTR->count =
2151 (ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count * count_scale /
2152 REG_BR_PROB_BASE);
2153 ENTRY_BLOCK_PTR->frequency
2154 = ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->frequency;
2155 EXIT_BLOCK_PTR->count =
2156 (EXIT_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count * count_scale /
2157 REG_BR_PROB_BASE);
2158 EXIT_BLOCK_PTR->frequency =
2159 EXIT_BLOCK_PTR_FOR_FUNCTION (src_cfun)->frequency;
2160 if (src_cfun->eh)
2161 init_eh_for_function ();
2163 if (src_cfun->gimple_df)
2165 init_tree_ssa (cfun);
2166 cfun->gimple_df->in_ssa_p = true;
2167 init_ssa_operands (cfun);
2171 /* Helper function for copy_cfg_body. Move debug stmts from the end
2172 of NEW_BB to the beginning of successor basic blocks when needed. If a
2173 successor has multiple predecessors, reset the values of the moved debug
2174 stmts; otherwise keep them. */
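/* Rationale (informal): a statement that can throw or make an
   abnormal goto must end its basic block, so debug stmts that
   followed it in the original block would otherwise be left stranded
   after the block-ending statement in the copy. */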
2176 static void
2177 maybe_move_debug_stmts_to_successors (copy_body_data *id, basic_block new_bb)
2179 edge e;
2180 edge_iterator ei;
2181 gimple_stmt_iterator si = gsi_last_nondebug_bb (new_bb);
2183 if (gsi_end_p (si)
2184 || gsi_one_before_end_p (si)
2185 || !(stmt_can_throw_internal (gsi_stmt (si))
2186 || stmt_can_make_abnormal_goto (gsi_stmt (si))))
2187 return;
2189 FOR_EACH_EDGE (e, ei, new_bb->succs)
2191 gimple_stmt_iterator ssi = gsi_last_bb (new_bb);
2192 gimple_stmt_iterator dsi = gsi_after_labels (e->dest);
2193 while (is_gimple_debug (gsi_stmt (ssi)))
2195 gimple stmt = gsi_stmt (ssi), new_stmt;
2196 tree var;
2197 tree value;
2199 /* For the last edge move the debug stmts instead of copying
2200 them. */
2201 if (ei_one_before_end_p (ei))
2203 si = ssi;
2204 gsi_prev (&ssi);
2205 if (!single_pred_p (e->dest) && gimple_debug_bind_p (stmt))
2206 gimple_debug_bind_reset_value (stmt);
2207 gsi_remove (&si, false);
2208 gsi_insert_before (&dsi, stmt, GSI_SAME_STMT);
2209 continue;
2212 if (gimple_debug_bind_p (stmt))
2214 var = gimple_debug_bind_get_var (stmt);
2215 if (single_pred_p (e->dest))
2217 value = gimple_debug_bind_get_value (stmt);
2218 value = unshare_expr (value);
2220 else
2221 value = NULL_TREE;
2222 new_stmt = gimple_build_debug_bind (var, value, stmt);
2224 else if (gimple_debug_source_bind_p (stmt))
2226 var = gimple_debug_source_bind_get_var (stmt);
2227 value = gimple_debug_source_bind_get_value (stmt);
2228 new_stmt = gimple_build_debug_source_bind (var, value, stmt);
2230 else
2231 gcc_unreachable ();
2232 gsi_insert_before (&dsi, new_stmt, GSI_SAME_STMT);
2233 id->debug_stmts.safe_push (new_stmt);
2234 gsi_prev (&ssi);
2239 /* Make a copy of the sub-loops of SRC_PARENT and place them
2240 as siblings of DEST_PARENT. */
2242 static void
2243 copy_loops (bitmap blocks_to_copy,
2244 struct loop *dest_parent, struct loop *src_parent)
2246 struct loop *src_loop = src_parent->inner;
2247 while (src_loop)
2249 if (!blocks_to_copy
2250 || bitmap_bit_p (blocks_to_copy, src_loop->header->index))
2252 struct loop *dest_loop = alloc_loop ();
2254 /* Assign the new loop its header and latch and associate
2255 those with the new loop. */
2256 if (src_loop->header != NULL)
2258 dest_loop->header = (basic_block)src_loop->header->aux;
2259 dest_loop->header->loop_father = dest_loop;
2261 if (src_loop->latch != NULL)
2263 dest_loop->latch = (basic_block)src_loop->latch->aux;
2264 dest_loop->latch->loop_father = dest_loop;
2267 /* Copy loop meta-data. */
2268 copy_loop_info (src_loop, dest_loop);
2270 /* Finally place it into the loop array and the loop tree. */
2271 place_new_loop (cfun, dest_loop);
2272 flow_loop_tree_node_add (dest_parent, dest_loop);
2274 /* Recurse. */
2275 copy_loops (blocks_to_copy, dest_loop, src_loop);
2277 src_loop = src_loop->next;
2281 /* Call cgraph_redirect_edge_call_stmt_to_callee on all calls in BB. */
2283 void
2284 redirect_all_calls (copy_body_data * id, basic_block bb)
2286 gimple_stmt_iterator si;
2287 for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
2289 if (is_gimple_call (gsi_stmt (si)))
2291 struct cgraph_edge *edge = cgraph_edge (id->dst_node, gsi_stmt (si));
2292 if (edge)
2293 cgraph_redirect_edge_call_stmt_to_callee (edge);
2298 /* Make a copy of the body of FN so that it can be inserted inline in
2299 another function. Walks FN via CFG, returns new fndecl. */
2301 static tree
2302 copy_cfg_body (copy_body_data * id, gcov_type count, int frequency_scale,
2303 basic_block entry_block_map, basic_block exit_block_map,
2304 bitmap blocks_to_copy, basic_block new_entry)
2306 tree callee_fndecl = id->src_fn;
2307 /* Original cfun for the callee, doesn't change. */
2308 struct function *src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
2309 struct function *cfun_to_copy;
2310 basic_block bb;
2311 tree new_fndecl = NULL;
2312 bool need_debug_cleanup = false;
2313 gcov_type count_scale;
2314 int last;
2315 int incoming_frequency = 0;
2316 gcov_type incoming_count = 0;
2318 if (ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count)
2319 count_scale
2320 = GCOV_COMPUTE_SCALE (count,
2321 ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count);
2322 else
2323 count_scale = REG_BR_PROB_BASE;
2325 /* Register specific tree functions. */
2326 gimple_register_cfg_hooks ();
2328 /* If we are inlining just a region of the function, make sure to connect
2329 the new entry to ENTRY_BLOCK_PTR. Since the new entry can be part of a loop,
2330 we must compute the frequency and probability of ENTRY_BLOCK_PTR based on
2331 the frequencies and probabilities of edges incoming from the nonduplicated region. */
2332 if (new_entry)
2334 edge e;
2335 edge_iterator ei;
2337 FOR_EACH_EDGE (e, ei, new_entry->preds)
2338 if (!e->src->aux)
2340 incoming_frequency += EDGE_FREQUENCY (e);
2341 incoming_count += e->count;
2343 incoming_count = apply_scale (incoming_count, count_scale);
2344 incoming_frequency
2345 = apply_scale ((gcov_type)incoming_frequency, frequency_scale);
2346 ENTRY_BLOCK_PTR->count = incoming_count;
2347 ENTRY_BLOCK_PTR->frequency = incoming_frequency;
2350 /* Must have a CFG here at this point. */
2351 gcc_assert (ENTRY_BLOCK_PTR_FOR_FUNCTION
2352 (DECL_STRUCT_FUNCTION (callee_fndecl)));
2354 cfun_to_copy = id->src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
2356 ENTRY_BLOCK_PTR_FOR_FUNCTION (cfun_to_copy)->aux = entry_block_map;
2357 EXIT_BLOCK_PTR_FOR_FUNCTION (cfun_to_copy)->aux = exit_block_map;
2358 entry_block_map->aux = ENTRY_BLOCK_PTR_FOR_FUNCTION (cfun_to_copy);
2359 exit_block_map->aux = EXIT_BLOCK_PTR_FOR_FUNCTION (cfun_to_copy);
2361 /* Duplicate any exception-handling regions. */
2362 if (cfun->eh)
2363 id->eh_map = duplicate_eh_regions (cfun_to_copy, NULL, id->eh_lp_nr,
2364 remap_decl_1, id);
2366 /* Use aux pointers to map the original blocks to copy. */
2367 FOR_EACH_BB_FN (bb, cfun_to_copy)
2368 if (!blocks_to_copy || bitmap_bit_p (blocks_to_copy, bb->index))
2370 basic_block new_bb = copy_bb (id, bb, frequency_scale, count_scale);
2371 bb->aux = new_bb;
2372 new_bb->aux = bb;
2373 new_bb->loop_father = entry_block_map->loop_father;
2376 last = last_basic_block;
2378 /* Now that we've duplicated the blocks, duplicate their edges. */
2379 bool can_make_abnormal_goto
2380 = id->gimple_call && stmt_can_make_abnormal_goto (id->gimple_call);
2381 FOR_ALL_BB_FN (bb, cfun_to_copy)
2382 if (!blocks_to_copy
2383 || (bb->index > 0 && bitmap_bit_p (blocks_to_copy, bb->index)))
2384 need_debug_cleanup |= copy_edges_for_bb (bb, count_scale, exit_block_map,
2385 can_make_abnormal_goto);
2387 if (new_entry)
2389 edge e = make_edge (entry_block_map, (basic_block)new_entry->aux, EDGE_FALLTHRU);
2390 e->probability = REG_BR_PROB_BASE;
2391 e->count = incoming_count;
2394 /* Duplicate the loop tree, if available and wanted. */
2395 if (loops_for_fn (src_cfun) != NULL
2396 && current_loops != NULL)
2398 copy_loops (blocks_to_copy, entry_block_map->loop_father,
2399 get_loop (src_cfun, 0));
2400 /* Defer to cfgcleanup to update loop-father fields of basic-blocks. */
2401 loops_state_set (LOOPS_NEED_FIXUP);
2404 /* If the loop tree in the source function needed fixup, mark the
2405 destination loop tree for fixup, too. */
2406 if (loops_for_fn (src_cfun)->state & LOOPS_NEED_FIXUP)
2407 loops_state_set (LOOPS_NEED_FIXUP);
2409 if (gimple_in_ssa_p (cfun))
2410 FOR_ALL_BB_FN (bb, cfun_to_copy)
2411 if (!blocks_to_copy
2412 || (bb->index > 0 && bitmap_bit_p (blocks_to_copy, bb->index)))
2413 copy_phis_for_bb (bb, id);
2415 FOR_ALL_BB_FN (bb, cfun_to_copy)
2416 if (bb->aux)
2418 if (need_debug_cleanup
2419 && bb->index != ENTRY_BLOCK
2420 && bb->index != EXIT_BLOCK)
2421 maybe_move_debug_stmts_to_successors (id, (basic_block) bb->aux);
2422 /* Update call edge destinations. This cannot be done before loop
2423 info is updated, because we may split basic blocks. */
2424 if (id->transform_call_graph_edges == CB_CGE_DUPLICATE)
2425 redirect_all_calls (id, (basic_block)bb->aux);
2426 ((basic_block)bb->aux)->aux = NULL;
2427 bb->aux = NULL;
2430 /* Zero out AUX fields of the blocks newly created during EH edge
2431 insertion. */
2432 for (; last < last_basic_block; last++)
2434 if (need_debug_cleanup)
2435 maybe_move_debug_stmts_to_successors (id, BASIC_BLOCK (last));
2436 BASIC_BLOCK (last)->aux = NULL;
2437 /* Update call edge destinations. This cannot be done before loop
2438 info is updated, because we may split basic blocks. */
2439 if (id->transform_call_graph_edges == CB_CGE_DUPLICATE)
2440 redirect_all_calls (id, BASIC_BLOCK (last));
2442 entry_block_map->aux = NULL;
2443 exit_block_map->aux = NULL;
2445 if (id->eh_map)
2447 pointer_map_destroy (id->eh_map);
2448 id->eh_map = NULL;
2451 return new_fndecl;
2454 /* Copy the debug STMT using ID. We deal with these statements in a
2455 special way: if any variable in their VALUE expression wasn't
2456 remapped yet, we won't remap it, because that would get decl uids
2457 out of sync, causing codegen differences between -g and -g0. If
2458 this arises, we drop the VALUE expression altogether. */
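/* Note (informal): remapping a decl not seen before would create a
   copy with a fresh DECL_UID, and since debug stmts exist only at -g,
   every uid allocated afterwards would then differ between -g and
   -g0 compilations. */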
2460 static void
2461 copy_debug_stmt (gimple stmt, copy_body_data *id)
2463 tree t, *n;
2464 struct walk_stmt_info wi;
2466 if (gimple_block (stmt))
2468 n = (tree *) pointer_map_contains (id->decl_map, gimple_block (stmt));
2469 gimple_set_block (stmt, n ? *n : id->block);
2472 /* Remap all the operands in COPY. */
2473 memset (&wi, 0, sizeof (wi));
2474 wi.info = id;
2476 processing_debug_stmt = 1;
2478 if (gimple_debug_source_bind_p (stmt))
2479 t = gimple_debug_source_bind_get_var (stmt);
2480 else
2481 t = gimple_debug_bind_get_var (stmt);
2483 if (TREE_CODE (t) == PARM_DECL && id->debug_map
2484 && (n = (tree *) pointer_map_contains (id->debug_map, t)))
2486 gcc_assert (TREE_CODE (*n) == VAR_DECL);
2487 t = *n;
2489 else if (TREE_CODE (t) == VAR_DECL
2490 && !is_global_var (t)
2491 && !pointer_map_contains (id->decl_map, t))
2492 /* T is a non-localized variable. */;
2493 else
2494 walk_tree (&t, remap_gimple_op_r, &wi, NULL);
2496 if (gimple_debug_bind_p (stmt))
2498 gimple_debug_bind_set_var (stmt, t);
2500 if (gimple_debug_bind_has_value_p (stmt))
2501 walk_tree (gimple_debug_bind_get_value_ptr (stmt),
2502 remap_gimple_op_r, &wi, NULL);
2504 /* Punt if any decl couldn't be remapped. */
2505 if (processing_debug_stmt < 0)
2506 gimple_debug_bind_reset_value (stmt);
2508 else if (gimple_debug_source_bind_p (stmt))
2510 gimple_debug_source_bind_set_var (stmt, t);
2511 walk_tree (gimple_debug_source_bind_get_value_ptr (stmt),
2512 remap_gimple_op_r, &wi, NULL);
2513 /* When inlining, if the source bind refers to one of the optimized-away
2514 parameters, change the source bind into a normal debug bind
2515 referring to the corresponding DEBUG_EXPR_DECL that should have
2516 been bound before the call stmt. */
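/* Informal example: if parameter P of the inlined callee was
   optimized away, the caller bound a DEBUG_EXPR_DECL, say D#1, to the
   argument value just before the call; a source bind of VAR to P is
   rewritten below into a plain debug bind of VAR to D#1. */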
2517 t = gimple_debug_source_bind_get_value (stmt);
2518 if (t != NULL_TREE
2519 && TREE_CODE (t) == PARM_DECL
2520 && id->gimple_call)
2522 vec<tree, va_gc> **debug_args = decl_debug_args_lookup (id->src_fn);
2523 unsigned int i;
2524 if (debug_args != NULL)
2526 for (i = 0; i < vec_safe_length (*debug_args); i += 2)
2527 if ((**debug_args)[i] == DECL_ORIGIN (t)
2528 && TREE_CODE ((**debug_args)[i + 1]) == DEBUG_EXPR_DECL)
2530 t = (**debug_args)[i + 1];
2531 stmt->gsbase.subcode = GIMPLE_DEBUG_BIND;
2532 gimple_debug_bind_set_value (stmt, t);
2533 break;
2539 processing_debug_stmt = 0;
2541 update_stmt (stmt);
2544 /* Process deferred debug stmts. In order to give values better odds
2545 of being successfully remapped, we delay the processing of debug
2546 stmts until all other stmts that might require remapping are
2547 processed. */
2549 static void
2550 copy_debug_stmts (copy_body_data *id)
2552 size_t i;
2553 gimple stmt;
2555 if (!id->debug_stmts.exists ())
2556 return;
2558 FOR_EACH_VEC_ELT (id->debug_stmts, i, stmt)
2559 copy_debug_stmt (stmt, id);
2561 id->debug_stmts.release ();
2564 /* Make a copy of the body of SRC_FN so that it can be inserted inline in
2565 another function. */
2567 static tree
2568 copy_tree_body (copy_body_data *id)
2570 tree fndecl = id->src_fn;
2571 tree body = DECL_SAVED_TREE (fndecl);
2573 walk_tree (&body, copy_tree_body_r, id, NULL);
2575 return body;
2578 /* Make a copy of the body of FN so that it can be inserted inline in
2579 another function. */
2581 static tree
2582 copy_body (copy_body_data *id, gcov_type count, int frequency_scale,
2583 basic_block entry_block_map, basic_block exit_block_map,
2584 bitmap blocks_to_copy, basic_block new_entry)
2586 tree fndecl = id->src_fn;
2587 tree body;
2589 /* If this body has a CFG, walk CFG and copy. */
2590 gcc_assert (ENTRY_BLOCK_PTR_FOR_FUNCTION (DECL_STRUCT_FUNCTION (fndecl)));
2591 body = copy_cfg_body (id, count, frequency_scale, entry_block_map, exit_block_map,
2592 blocks_to_copy, new_entry);
2593 copy_debug_stmts (id);
2595 return body;
2598 /* Return true if VALUE is an ADDR_EXPR of an automatic variable
2599 defined in function FN, or of a data member thereof. */
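/* Used below to reject propagating, e.g., &local when FN is inlined
   into itself (directly or mutually recursively): "local" in the
   caller and "local" in the inlined body are distinct objects even
   though the ADDR_EXPR looks like an invariant. */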
2601 static bool
2602 self_inlining_addr_expr (tree value, tree fn)
2604 tree var;
2606 if (TREE_CODE (value) != ADDR_EXPR)
2607 return false;
2609 var = get_base_address (TREE_OPERAND (value, 0));
2611 return var && auto_var_in_fn_p (var, fn);
2614 /* Append to BB a debug annotation that binds VAR to VALUE, inheriting
2615 lexical block and line number information from base_stmt, if given,
2616 or from the last stmt of the block otherwise. */
2618 static gimple
2619 insert_init_debug_bind (copy_body_data *id,
2620 basic_block bb, tree var, tree value,
2621 gimple base_stmt)
2623 gimple note;
2624 gimple_stmt_iterator gsi;
2625 tree tracked_var;
2627 if (!gimple_in_ssa_p (id->src_cfun))
2628 return NULL;
2630 if (!MAY_HAVE_DEBUG_STMTS)
2631 return NULL;
2633 tracked_var = target_for_debug_bind (var);
2634 if (!tracked_var)
2635 return NULL;
2637 if (bb)
2639 gsi = gsi_last_bb (bb);
2640 if (!base_stmt && !gsi_end_p (gsi))
2641 base_stmt = gsi_stmt (gsi);
2644 note = gimple_build_debug_bind (tracked_var, value, base_stmt);
2646 if (bb)
2648 if (!gsi_end_p (gsi))
2649 gsi_insert_after (&gsi, note, GSI_SAME_STMT);
2650 else
2651 gsi_insert_before (&gsi, note, GSI_SAME_STMT);
2654 return note;
2657 static void
2658 insert_init_stmt (copy_body_data *id, basic_block bb, gimple init_stmt)
2660 /* If VAR represents a zero-sized variable, it's possible that the
2661 assignment statement may result in no gimple statements. */
2662 if (init_stmt)
2664 gimple_stmt_iterator si = gsi_last_bb (bb);
2666 /* We can end up with init statements that store to a non-register
2667 from a rhs with a conversion. Handle that here by forcing the
2668 rhs into a temporary. gimple_regimplify_operands is not
2669 prepared to do this for us. */
2670 if (!is_gimple_debug (init_stmt)
2671 && !is_gimple_reg (gimple_assign_lhs (init_stmt))
2672 && is_gimple_reg_type (TREE_TYPE (gimple_assign_lhs (init_stmt)))
2673 && gimple_assign_rhs_class (init_stmt) == GIMPLE_UNARY_RHS)
2675 tree rhs = build1 (gimple_assign_rhs_code (init_stmt),
2676 gimple_expr_type (init_stmt),
2677 gimple_assign_rhs1 (init_stmt));
2678 rhs = force_gimple_operand_gsi (&si, rhs, true, NULL_TREE, false,
2679 GSI_NEW_STMT);
2680 gimple_assign_set_rhs_code (init_stmt, TREE_CODE (rhs));
2681 gimple_assign_set_rhs1 (init_stmt, rhs);
2683 gsi_insert_after (&si, init_stmt, GSI_NEW_STMT);
2684 gimple_regimplify_operands (init_stmt, &si);
2686 if (!is_gimple_debug (init_stmt) && MAY_HAVE_DEBUG_STMTS)
2688 tree def = gimple_assign_lhs (init_stmt);
2689 insert_init_debug_bind (id, bb, def, def, init_stmt);
2694 /* Initialize parameter P with VALUE. If needed, produce an init statement
2695 at the end of BB. When BB is NULL, we return the init statement to be
2696 output later. */
2697 static gimple
2698 setup_one_parameter (copy_body_data *id, tree p, tree value, tree fn,
2699 basic_block bb, tree *vars)
2701 gimple init_stmt = NULL;
2702 tree var;
2703 tree rhs = value;
2704 tree def = (gimple_in_ssa_p (cfun)
2705 ? ssa_default_def (id->src_cfun, p) : NULL);
2707 if (value
2708 && value != error_mark_node
2709 && !useless_type_conversion_p (TREE_TYPE (p), TREE_TYPE (value)))
2711 /* If we can match up types by promotion/demotion do so. */
2712 if (fold_convertible_p (TREE_TYPE (p), value))
2713 rhs = fold_convert (TREE_TYPE (p), value);
2714 else
2716 /* ??? For valid programs we should not end up here.
2717 Still if we end up with truly mismatched types here, fall back
2718 to using a VIEW_CONVERT_EXPR or a literal zero to not leak invalid
2719 GIMPLE to the following passes. */
2720 if (!is_gimple_reg_type (TREE_TYPE (value))
2721 || TYPE_SIZE (TREE_TYPE (p)) == TYPE_SIZE (TREE_TYPE (value)))
2722 rhs = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (p), value);
2723 else
2724 rhs = build_zero_cst (TREE_TYPE (p));
2728 /* Make an equivalent VAR_DECL. Note that we must NOT remap the type
2729 here since the type of this decl must be visible to the calling
2730 function. */
2731 var = copy_decl_to_var (p, id);
2733 /* Declare this new variable. */
2734 DECL_CHAIN (var) = *vars;
2735 *vars = var;
2737 /* Make gimplifier happy about this variable. */
2738 DECL_SEEN_IN_BIND_EXPR_P (var) = 1;
2740 /* If the parameter is never assigned to and has no SSA_NAMEs created,
2741 we would not need to create a new variable here at all, if it
2742 weren't for debug info. Still, we can just use the argument
2743 value. */
2744 if (TREE_READONLY (p)
2745 && !TREE_ADDRESSABLE (p)
2746 && value && !TREE_SIDE_EFFECTS (value)
2747 && !def)
2749 /* We may produce non-gimple trees by adding NOPs or introduce
2750 invalid sharing when the operand is not really constant.
2751 It is not a big deal to prohibit constant propagation here, as
2752 we will constant propagate in the DOM1 pass anyway. */
2753 if (is_gimple_min_invariant (value)
2754 && useless_type_conversion_p (TREE_TYPE (p),
2755 TREE_TYPE (value))
2756 /* We have to be very careful about ADDR_EXPR. Make sure
2757 the base variable isn't a local variable of the inlined
2758 function, e.g., when doing recursive inlining, direct or
2759 mutually-recursive or whatever, which is why we don't
2760 just test whether fn == current_function_decl. */
2761 && ! self_inlining_addr_expr (value, fn))
2763 insert_decl_map (id, p, value);
2764 insert_debug_decl_map (id, p, var);
2765 return insert_init_debug_bind (id, bb, var, value, NULL);
2769 /* Register the VAR_DECL as the equivalent for the PARM_DECL;
2770 that way, when the PARM_DECL is encountered, it will be
2771 automatically replaced by the VAR_DECL. */
2772 insert_decl_map (id, p, var);
2774 /* Even if P was TREE_READONLY, the new VAR should not be.
2775 In the original code, we would have constructed a
2776 temporary, and then the function body would have never
2777 changed the value of P. However, now, we will be
2778 constructing VAR directly. The constructor body may
2779 change its value multiple times as it is being
2780 constructed. Therefore, it must not be TREE_READONLY;
2781 the back-end assumes that a TREE_READONLY variable is
2782 assigned to only once. */
2783 if (TYPE_NEEDS_CONSTRUCTING (TREE_TYPE (p)))
2784 TREE_READONLY (var) = 0;
2786 /* If there is no setup required and we are in SSA, take the easy route
2787 replacing all SSA names representing the function parameter by the
2788 SSA name passed to the function.
2790 We need to construct a map for the variable anyway, as it might be used
2791 in different SSA names when the parameter is set in the function.
2793 Do the replacement at -O0 for const arguments replaced by constants.
2794 This is important for builtin_constant_p and other constructs requiring
2795 a constant argument to be visible in the inlined function body. */
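/* Informal example of the -O0 case: when inlining f (5) where f's
   body tests __builtin_constant_p on its const parameter, mapping the
   parameter's default definition straight to the constant 5 keeps the
   constant visible in the inlined body. */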
2796 if (gimple_in_ssa_p (cfun) && rhs && def && is_gimple_reg (p)
2797 && (optimize
2798 || (TREE_READONLY (p)
2799 && is_gimple_min_invariant (rhs)))
2800 && (TREE_CODE (rhs) == SSA_NAME
2801 || is_gimple_min_invariant (rhs))
2802 && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (def))
2804 insert_decl_map (id, def, rhs);
2805 return insert_init_debug_bind (id, bb, var, rhs, NULL);
2808 /* If the value of the argument is never used, don't bother initializing
2809 it. */
2810 if (optimize && gimple_in_ssa_p (cfun) && !def && is_gimple_reg (p))
2812 gcc_assert (!value || !TREE_SIDE_EFFECTS (value));
2813 return insert_init_debug_bind (id, bb, var, rhs, NULL);
2816 /* Initialize this VAR_DECL from the equivalent argument. Convert
2817 the argument to the proper type in case it was promoted. */
2818 if (value)
2820 if (rhs == error_mark_node)
2822 insert_decl_map (id, p, var);
2823 return insert_init_debug_bind (id, bb, var, rhs, NULL);
2826 STRIP_USELESS_TYPE_CONVERSION (rhs);
2828 /* If we are in SSA form, properly remap the default definition,
2829 or assign to a dummy SSA name if the parameter is unused and
2830 we are not optimizing. */
2831 if (gimple_in_ssa_p (cfun) && is_gimple_reg (p))
2833 if (def)
2835 def = remap_ssa_name (def, id);
2836 init_stmt = gimple_build_assign (def, rhs);
2837 SSA_NAME_IS_DEFAULT_DEF (def) = 0;
2838 set_ssa_default_def (cfun, var, NULL);
2840 else if (!optimize)
2842 def = make_ssa_name (var, NULL);
2843 init_stmt = gimple_build_assign (def, rhs);
2846 else
2847 init_stmt = gimple_build_assign (var, rhs);
2849 if (bb && init_stmt)
2850 insert_init_stmt (id, bb, init_stmt);
2852 return init_stmt;
2855 /* Generate code to initialize the parameters of the function at the
2856 top of the stack in ID from the GIMPLE_CALL STMT. */
2858 static void
2859 initialize_inlined_parameters (copy_body_data *id, gimple stmt,
2860 tree fn, basic_block bb)
2862 tree parms;
2863 size_t i;
2864 tree p;
2865 tree vars = NULL_TREE;
2866 tree static_chain = gimple_call_chain (stmt);
2868 /* Figure out what the parameters are. */
2869 parms = DECL_ARGUMENTS (fn);
2871 /* Loop through the parameter declarations, replacing each with an
2872 equivalent VAR_DECL, appropriately initialized. */
2873 for (p = parms, i = 0; p; p = DECL_CHAIN (p), i++)
2875 tree val;
2876 val = i < gimple_call_num_args (stmt) ? gimple_call_arg (stmt, i) : NULL;
2877 setup_one_parameter (id, p, val, fn, bb, &vars);
2879 /* After remapping parameters remap their types. This has to be done
2880 in a second loop over all parameters to appropriately remap
2881 variable sized arrays when the size is specified in a
2882 parameter following the array. */
2883 for (p = parms, i = 0; p; p = DECL_CHAIN (p), i++)
2885 tree *varp = (tree *) pointer_map_contains (id->decl_map, p);
2886 if (varp
2887 && TREE_CODE (*varp) == VAR_DECL)
2889 tree def = (gimple_in_ssa_p (cfun) && is_gimple_reg (p)
2890 ? ssa_default_def (id->src_cfun, p) : NULL);
2891 tree var = *varp;
2892 TREE_TYPE (var) = remap_type (TREE_TYPE (var), id);
2893 /* Also remap the default definition if it was remapped
2894 to the default definition of the parameter replacement
2895 by the parameter setup. */
2896 if (def)
2898 tree *defp = (tree *) pointer_map_contains (id->decl_map, def);
2899 if (defp
2900 && TREE_CODE (*defp) == SSA_NAME
2901 && SSA_NAME_VAR (*defp) == var)
2902 TREE_TYPE (*defp) = TREE_TYPE (var);
2907 /* Initialize the static chain. */
2908 p = DECL_STRUCT_FUNCTION (fn)->static_chain_decl;
2909 gcc_assert (fn != current_function_decl);
2910 if (p)
2912 /* No static chain? Seems like a bug in tree-nested.c. */
2913 gcc_assert (static_chain);
2915 setup_one_parameter (id, p, static_chain, fn, bb, &vars);
2918 declare_inline_vars (id->block, vars);
2922 /* Declare a return variable to replace the RESULT_DECL for the
2923 function we are calling. An appropriate DECL_STMT is returned.
2924 The USE_STMT is filled to contain a use of the declaration to
2925 indicate the return value of the function.
2927 RETURN_SLOT, if non-null, is the place to store the result. It
2928 is set only for CALL_EXPR_RETURN_SLOT_OPT. MODIFY_DEST, if non-null,
2929 was the LHS of the MODIFY_EXPR to which this call is the RHS.
2931 The return value is a (possibly null) value that holds the result
2932 as seen by the caller. */
2934 static tree
2935 declare_return_variable (copy_body_data *id, tree return_slot, tree modify_dest,
2936 basic_block entry_bb)
2938 tree callee = id->src_fn;
2939 tree result = DECL_RESULT (callee);
2940 tree callee_type = TREE_TYPE (result);
2941 tree caller_type;
2942 tree var, use;
2944 /* Handle type-mismatches in the function declaration return type
2945 vs. the call expression. */
2946 if (modify_dest)
2947 caller_type = TREE_TYPE (modify_dest);
2948 else
2949 caller_type = TREE_TYPE (TREE_TYPE (callee));
2951 /* We don't need to do anything for functions that don't return anything. */
2952 if (VOID_TYPE_P (callee_type))
2953 return NULL_TREE;
2955 /* If there was a return slot, then the return value is the
2956 dereferenced address of that object. */
2957 if (return_slot)
2959 /* The front end shouldn't have used both return_slot and
2960 a modify expression. */
2961 gcc_assert (!modify_dest);
2962 if (DECL_BY_REFERENCE (result))
2964 tree return_slot_addr = build_fold_addr_expr (return_slot);
2965 STRIP_USELESS_TYPE_CONVERSION (return_slot_addr);
2967 /* We are going to construct *&return_slot and we can't do that
2968 for variables believed not to be addressable.
2970 FIXME: This check can possibly match, because values returned
2971 via return slot optimization are not believed to have their
2972 address taken by alias analysis. */
2973 gcc_assert (TREE_CODE (return_slot) != SSA_NAME);
2974 var = return_slot_addr;
2976 else
2978 var = return_slot;
2979 gcc_assert (TREE_CODE (var) != SSA_NAME);
2980 TREE_ADDRESSABLE (var) |= TREE_ADDRESSABLE (result);
2982 if ((TREE_CODE (TREE_TYPE (result)) == COMPLEX_TYPE
2983 || TREE_CODE (TREE_TYPE (result)) == VECTOR_TYPE)
2984 && !DECL_GIMPLE_REG_P (result)
2985 && DECL_P (var))
2986 DECL_GIMPLE_REG_P (var) = 0;
2987 use = NULL;
2988 goto done;
2991 /* All types requiring non-trivial constructors should have been handled. */
2992 gcc_assert (!TREE_ADDRESSABLE (callee_type));
2994 /* Attempt to avoid creating a new temporary variable. */
2995 if (modify_dest
2996 && TREE_CODE (modify_dest) != SSA_NAME)
2998 bool use_it = false;
3000 /* We can't use MODIFY_DEST if there's type promotion involved. */
3001 if (!useless_type_conversion_p (callee_type, caller_type))
3002 use_it = false;
3004 /* ??? If we're assigning to a variable sized type, then we must
3005 reuse the destination variable, because we've no good way to
3006 create variable sized temporaries at this point. */
3007 else if (TREE_CODE (TYPE_SIZE_UNIT (caller_type)) != INTEGER_CST)
3008 use_it = true;
3010 /* If the callee cannot possibly modify MODIFY_DEST, then we can
3011 reuse it as the result of the call directly. Don't do this if
3012 it would promote MODIFY_DEST to addressable. */
3013 else if (TREE_ADDRESSABLE (result))
3014 use_it = false;
3015 else
3017 tree base_m = get_base_address (modify_dest);
3019 /* If the base isn't a decl, then it's a pointer, and we don't
3020 know where that's going to go. */
3021 if (!DECL_P (base_m))
3022 use_it = false;
3023 else if (is_global_var (base_m))
3024 use_it = false;
3025 else if ((TREE_CODE (TREE_TYPE (result)) == COMPLEX_TYPE
3026 || TREE_CODE (TREE_TYPE (result)) == VECTOR_TYPE)
3027 && !DECL_GIMPLE_REG_P (result)
3028 && DECL_GIMPLE_REG_P (base_m))
3029 use_it = false;
3030 else if (!TREE_ADDRESSABLE (base_m))
3031 use_it = true;
3034 if (use_it)
3036 var = modify_dest;
3037 use = NULL;
3038 goto done;
3042 gcc_assert (TREE_CODE (TYPE_SIZE_UNIT (callee_type)) == INTEGER_CST);
3044 var = copy_result_decl_to_var (result, id);
3045 DECL_SEEN_IN_BIND_EXPR_P (var) = 1;
3047 /* Do not have the rest of GCC warn about this variable as it should
3048 not be visible to the user. */
3049 TREE_NO_WARNING (var) = 1;
3051 declare_inline_vars (id->block, var);
3053 /* Build the use expr. If the return type of the function was
3054 promoted, convert it back to the expected type. */
3055 use = var;
3056 if (!useless_type_conversion_p (caller_type, TREE_TYPE (var)))
3058 /* If we can match up types by promotion/demotion do so. */
3059 if (fold_convertible_p (caller_type, var))
3060 use = fold_convert (caller_type, var);
3061 else
3063 /* ??? For valid programs we should not end up here.
3064 Still if we end up with truly mismatched types here, fall back
3065 to using a MEM_REF to not leak invalid GIMPLE to the following
3066 passes. */
3067 /* Prevent var from being written into SSA form. */
3068 if (TREE_CODE (TREE_TYPE (var)) == VECTOR_TYPE
3069 || TREE_CODE (TREE_TYPE (var)) == COMPLEX_TYPE)
3070 DECL_GIMPLE_REG_P (var) = false;
3071 else if (is_gimple_reg_type (TREE_TYPE (var)))
3072 TREE_ADDRESSABLE (var) = true;
3073 use = fold_build2 (MEM_REF, caller_type,
3074 build_fold_addr_expr (var),
3075 build_int_cst (ptr_type_node, 0));
3079 STRIP_USELESS_TYPE_CONVERSION (use);
3081 if (DECL_BY_REFERENCE (result))
3083 TREE_ADDRESSABLE (var) = 1;
3084 var = build_fold_addr_expr (var);
3087 done:
3088 /* Register the VAR_DECL as the equivalent for the RESULT_DECL; that
3089 way, when the RESULT_DECL is encountered, it will be
3090 automatically replaced by the VAR_DECL.
3092 When returning by reference, ensure that RESULT_DECL remaps to
3093 gimple_val. */
3094 if (DECL_BY_REFERENCE (result)
3095 && !is_gimple_val (var))
3097 tree temp = create_tmp_var (TREE_TYPE (result), "retvalptr");
3098 insert_decl_map (id, result, temp);
3099 /* When RESULT_DECL is in SSA form, we need to remap and initialize
3100 its default_def SSA_NAME. */
3101 if (gimple_in_ssa_p (id->src_cfun)
3102 && is_gimple_reg (result))
3104 temp = make_ssa_name (temp, NULL);
3105 insert_decl_map (id, ssa_default_def (id->src_cfun, result), temp);
3107 insert_init_stmt (id, entry_bb, gimple_build_assign (temp, var));
3109 else
3110 insert_decl_map (id, result, var);
3112 /* Remember this so we can ignore it in remap_decls. */
3113 id->retvar = var;
3115 return use;
3118 /* Callback through walk_tree. Determine if a DECL_INITIAL makes reference
3119 to a local label. */
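/* The construct being detected looks roughly like:
     void f (void) { lab: ; static void *p = &&lab; ... }
   A copy of f could not remap the label address already stored in P. */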
3121 static tree
3122 has_label_address_in_static_1 (tree *nodep, int *walk_subtrees, void *fnp)
3124 tree node = *nodep;
3125 tree fn = (tree) fnp;
3127 if (TREE_CODE (node) == LABEL_DECL && DECL_CONTEXT (node) == fn)
3128 return node;
3130 if (TYPE_P (node))
3131 *walk_subtrees = 0;
3133 return NULL_TREE;
3136 /* Determine if the function can be copied. If so, return NULL. If
3137 not, return a string describing the reason for failure. */
3139 static const char *
3140 copy_forbidden (struct function *fun, tree fndecl)
3142 const char *reason = fun->cannot_be_copied_reason;
3143 tree decl;
3144 unsigned ix;
3146 /* Only examine the function once. */
3147 if (fun->cannot_be_copied_set)
3148 return reason;
3150 /* We cannot copy a function that receives a non-local goto
3151 because we cannot remap the destination label used in the
3152 function that is performing the non-local goto. */
3153 /* ??? Actually, this should be possible, if we work at it.
3154 No doubt there's just a handful of places that simply
3155 assume it doesn't happen and don't substitute properly. */
3156 if (fun->has_nonlocal_label)
3158 reason = G_("function %q+F can never be copied "
3159 "because it receives a non-local goto");
3160 goto fail;
3163 FOR_EACH_LOCAL_DECL (fun, ix, decl)
3164 if (TREE_CODE (decl) == VAR_DECL
3165 && TREE_STATIC (decl)
3166 && !DECL_EXTERNAL (decl)
3167 && DECL_INITIAL (decl)
3168 && walk_tree_without_duplicates (&DECL_INITIAL (decl),
3169 has_label_address_in_static_1,
3170 fndecl))
3172 reason = G_("function %q+F can never be copied because it saves "
3173 "address of local label in a static variable");
3174 goto fail;
3177 fail:
3178 fun->cannot_be_copied_reason = reason;
3179 fun->cannot_be_copied_set = true;
3180 return reason;
3184 static const char *inline_forbidden_reason;
3186 /* A callback for walk_gimple_seq to handle statements. Returns non-null
3187 iff a function cannot be inlined. Also sets the reason why. */
3189 static tree
3190 inline_forbidden_p_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
3191 struct walk_stmt_info *wip)
3193 tree fn = (tree) wip->info;
3194 tree t;
3195 gimple stmt = gsi_stmt (*gsi);
3197 switch (gimple_code (stmt))
3199 case GIMPLE_CALL:
3200 /* Refuse to inline an alloca call unless the user explicitly forced so,
3201 as this may change the program's memory overhead drastically when the
3202 function using alloca is called in a loop. In the GCC present in
3203 SPEC2000, inlining into schedule_block caused it to require 2GB of
3204 RAM instead of 256MB. Don't do so for alloca calls emitted for
3205 VLA objects, as those can't cause unbounded growth (they're always
3206 wrapped inside stack_save/stack_restore regions). */
3207 if (gimple_alloca_call_p (stmt)
3208 && !gimple_call_alloca_for_var_p (stmt)
3209 && !lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn)))
3211 inline_forbidden_reason
3212 = G_("function %q+F can never be inlined because it uses "
3213 "alloca (override using the always_inline attribute)");
3214 *handled_ops_p = true;
3215 return fn;
3218 t = gimple_call_fndecl (stmt);
3219 if (t == NULL_TREE)
3220 break;
3222 /* We cannot inline functions that call setjmp. */
3223 if (setjmp_call_p (t))
3225 inline_forbidden_reason
3226 = G_("function %q+F can never be inlined because it uses setjmp");
3227 *handled_ops_p = true;
3228 return t;
3231 if (DECL_BUILT_IN_CLASS (t) == BUILT_IN_NORMAL)
3232 switch (DECL_FUNCTION_CODE (t))
3234 /* We cannot inline functions that take a variable number of
3235 arguments. */
3236 case BUILT_IN_VA_START:
3237 case BUILT_IN_NEXT_ARG:
3238 case BUILT_IN_VA_END:
3239 inline_forbidden_reason
3240 = G_("function %q+F can never be inlined because it "
3241 "uses variable argument lists");
3242 *handled_ops_p = true;
3243 return t;
3245 case BUILT_IN_LONGJMP:
3246 /* We can't inline functions that call __builtin_longjmp at
3247 all. The non-local goto machinery really requires the
3248 destination be in a different function. If we allow the
3249 function calling __builtin_longjmp to be inlined into the
3250 function calling __builtin_setjmp, Things will Go Awry. */
3251 inline_forbidden_reason
3252 = G_("function %q+F can never be inlined because "
3253 "it uses setjmp-longjmp exception handling");
3254 *handled_ops_p = true;
3255 return t;
3257 case BUILT_IN_NONLOCAL_GOTO:
3258 /* Similarly. */
3259 inline_forbidden_reason
3260 = G_("function %q+F can never be inlined because "
3261 "it uses non-local goto");
3262 *handled_ops_p = true;
3263 return t;
3265 case BUILT_IN_RETURN:
3266 case BUILT_IN_APPLY_ARGS:
3267 /* If a __builtin_apply_args caller would be inlined,
3268 it would be saving arguments of the function it has
3269 been inlined into. Similarly __builtin_return would
3270 return from the function the inline has been inlined into. */
3271 inline_forbidden_reason
3272 = G_("function %q+F can never be inlined because "
3273 "it uses __builtin_return or __builtin_apply_args");
3274 *handled_ops_p = true;
3275 return t;
3277 default:
3278 break;
3280 break;
3282 case GIMPLE_GOTO:
3283 t = gimple_goto_dest (stmt);
3285 /* We will not inline a function which uses computed goto. The
3286 addresses of its local labels, which may be tucked into
3287 global storage, are of course not constant across
3288 instantiations, which causes unexpected behavior. */
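/* A computed goto is, e.g., "goto *dispatch[i];", where the entries
   of DISPATCH were taken with the GNU "&&label" extension. */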
3289 if (TREE_CODE (t) != LABEL_DECL)
3291 inline_forbidden_reason
3292 = G_("function %q+F can never be inlined "
3293 "because it contains a computed goto");
3294 *handled_ops_p = true;
3295 return t;
3297 break;
3299 default:
3300 break;
3303 *handled_ops_p = false;
3304 return NULL_TREE;
3307 /* Return true if FNDECL is a function that cannot be inlined into
3308 another one. */
3310 static bool
3311 inline_forbidden_p (tree fndecl)
3313 struct function *fun = DECL_STRUCT_FUNCTION (fndecl);
3314 struct walk_stmt_info wi;
3315 struct pointer_set_t *visited_nodes;
3316 basic_block bb;
3317 bool forbidden_p = false;
3319 /* First check for shared reasons not to copy the code. */
3320 inline_forbidden_reason = copy_forbidden (fun, fndecl);
3321 if (inline_forbidden_reason != NULL)
3322 return true;
3324 /* Next, walk the statements of the function looking for
3325 constructs we can't handle, or that are non-optimal for inlining. */
3326 visited_nodes = pointer_set_create ();
3327 memset (&wi, 0, sizeof (wi));
3328 wi.info = (void *) fndecl;
3329 wi.pset = visited_nodes;
3331 FOR_EACH_BB_FN (bb, fun)
3333 gimple ret;
3334 gimple_seq seq = bb_seq (bb);
3335 ret = walk_gimple_seq (seq, inline_forbidden_p_stmt, NULL, &wi);
3336 forbidden_p = (ret != NULL);
3337 if (forbidden_p)
3338 break;
3341 pointer_set_destroy (visited_nodes);
3342 return forbidden_p;
3345 /* Return false if the function FNDECL cannot be inlined on account of its
3346 attributes, true otherwise. */
3347 static bool
3348 function_attribute_inlinable_p (const_tree fndecl)
3350 if (targetm.attribute_table)
3352 const_tree a;
3354 for (a = DECL_ATTRIBUTES (fndecl); a; a = TREE_CHAIN (a))
3356 const_tree name = TREE_PURPOSE (a);
3357 int i;
3359 for (i = 0; targetm.attribute_table[i].name != NULL; i++)
3360 if (is_attribute_p (targetm.attribute_table[i].name, name))
3361 return targetm.function_attribute_inlinable_p (fndecl);
3365 return true;
3368 /* Returns nonzero if FN is a function that does not have any
3369 fundamental inline blocking properties. */
3371 bool
3372 tree_inlinable_function_p (tree fn)
3374 bool inlinable = true;
3375 bool do_warning;
3376 tree always_inline;
3378 /* If we've already decided this function shouldn't be inlined,
3379 there's no need to check again. */
3380 if (DECL_UNINLINABLE (fn))
3381 return false;
3383 /* We only warn for functions declared `inline' by the user. */
3384 do_warning = (warn_inline
3385 && DECL_DECLARED_INLINE_P (fn)
3386 && !DECL_NO_INLINE_WARNING_P (fn)
3387 && !DECL_IN_SYSTEM_HEADER (fn));
3389 always_inline = lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn));
3391 if (flag_no_inline
3392 && always_inline == NULL)
3394 if (do_warning)
3395 warning (OPT_Winline, "function %q+F can never be inlined because it "
3396 "is suppressed using -fno-inline", fn);
3397 inlinable = false;
3400 else if (!function_attribute_inlinable_p (fn))
3402 if (do_warning)
3403 warning (OPT_Winline, "function %q+F can never be inlined because it "
3404 "uses attributes conflicting with inlining", fn);
3405 inlinable = false;
3408 else if (inline_forbidden_p (fn))
3410 /* See if we should warn about uninlinable functions. Previously,
3411 some of these warnings would be issued while trying to expand
3412 the function inline, but that would cause multiple warnings
3413 about functions that would for example call alloca. But since
3414 this is a property of the function, just one warning is enough.
3415 As a bonus we can now give more details about the reason why a
3416 function is not inlinable. */
3417 if (always_inline)
3418 error (inline_forbidden_reason, fn);
3419 else if (do_warning)
3420 warning (OPT_Winline, inline_forbidden_reason, fn);
3422 inlinable = false;
3425 /* Squirrel away the result so that we don't have to check again. */
3426 DECL_UNINLINABLE (fn) = !inlinable;
3428 return inlinable;
3431 /* Estimate the cost of a memory move. Use machine dependent
3432 word size and take possible memcpy call into account. */
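/* Worked example (assuming MOVE_MAX_PIECES == 8 and a large enough
   MOVE_RATIO): a 24-byte struct costs (24 + 8 - 1) / 8 == 3 move
   units, while anything above the move-by-pieces limit is costed as
   a memcpy call, i.e. a flat 4. */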
3434 static int
3435 estimate_move_cost (tree type)
3437 HOST_WIDE_INT size;
3439 gcc_assert (!VOID_TYPE_P (type));
3441 if (TREE_CODE (type) == VECTOR_TYPE)
3443 enum machine_mode inner = TYPE_MODE (TREE_TYPE (type));
3444 enum machine_mode simd
3445 = targetm.vectorize.preferred_simd_mode (inner);
3446 int simd_mode_size = GET_MODE_SIZE (simd);
3447 return ((GET_MODE_SIZE (TYPE_MODE (type)) + simd_mode_size - 1)
3448 / simd_mode_size);
3451 size = int_size_in_bytes (type);
3453 if (size < 0 || size > MOVE_MAX_PIECES * MOVE_RATIO (!optimize_size))
3454 /* Cost of a memcpy call, 3 arguments and the call. */
3455 return 4;
3456 else
3457 return ((size + MOVE_MAX_PIECES - 1) / MOVE_MAX_PIECES);
3460 /* Returns cost of operation CODE, according to WEIGHTS */
3462 static int
3463 estimate_operator_cost (enum tree_code code, eni_weights *weights,
3464 tree op1 ATTRIBUTE_UNUSED, tree op2)
3466 switch (code)
3468 /* These are "free" conversions, or their presumed cost
3469 is folded into other operations. */
3470 case RANGE_EXPR:
3471 CASE_CONVERT:
3472 case COMPLEX_EXPR:
3473 case PAREN_EXPR:
3474 case VIEW_CONVERT_EXPR:
3475 return 0;
3477 /* Assign cost of 1 to usual operations.
3478 ??? We may consider mapping RTL costs to this. */
3479 case COND_EXPR:
3480 case VEC_COND_EXPR:
3481 case VEC_PERM_EXPR:
3483 case PLUS_EXPR:
3484 case POINTER_PLUS_EXPR:
3485 case MINUS_EXPR:
3486 case MULT_EXPR:
3487 case MULT_HIGHPART_EXPR:
3488 case FMA_EXPR:
3490 case ADDR_SPACE_CONVERT_EXPR:
3491 case FIXED_CONVERT_EXPR:
3492 case FIX_TRUNC_EXPR:
3494 case NEGATE_EXPR:
3495 case FLOAT_EXPR:
3496 case MIN_EXPR:
3497 case MAX_EXPR:
3498 case ABS_EXPR:
3500 case LSHIFT_EXPR:
3501 case RSHIFT_EXPR:
3502 case LROTATE_EXPR:
3503 case RROTATE_EXPR:
3504 case VEC_LSHIFT_EXPR:
3505 case VEC_RSHIFT_EXPR:
3507 case BIT_IOR_EXPR:
3508 case BIT_XOR_EXPR:
3509 case BIT_AND_EXPR:
3510 case BIT_NOT_EXPR:
3512 case TRUTH_ANDIF_EXPR:
3513 case TRUTH_ORIF_EXPR:
3514 case TRUTH_AND_EXPR:
3515 case TRUTH_OR_EXPR:
3516 case TRUTH_XOR_EXPR:
3517 case TRUTH_NOT_EXPR:
3519 case LT_EXPR:
3520 case LE_EXPR:
3521 case GT_EXPR:
3522 case GE_EXPR:
3523 case EQ_EXPR:
3524 case NE_EXPR:
3525 case ORDERED_EXPR:
3526 case UNORDERED_EXPR:
3528 case UNLT_EXPR:
3529 case UNLE_EXPR:
3530 case UNGT_EXPR:
3531 case UNGE_EXPR:
3532 case UNEQ_EXPR:
3533 case LTGT_EXPR:
3535 case CONJ_EXPR:
3537 case PREDECREMENT_EXPR:
3538 case PREINCREMENT_EXPR:
3539 case POSTDECREMENT_EXPR:
3540 case POSTINCREMENT_EXPR:
3542 case REALIGN_LOAD_EXPR:
3544 case REDUC_MAX_EXPR:
3545 case REDUC_MIN_EXPR:
3546 case REDUC_PLUS_EXPR:
3547 case WIDEN_SUM_EXPR:
3548 case WIDEN_MULT_EXPR:
3549 case DOT_PROD_EXPR:
3550 case WIDEN_MULT_PLUS_EXPR:
3551 case WIDEN_MULT_MINUS_EXPR:
3552 case WIDEN_LSHIFT_EXPR:
3554 case VEC_WIDEN_MULT_HI_EXPR:
3555 case VEC_WIDEN_MULT_LO_EXPR:
3556 case VEC_WIDEN_MULT_EVEN_EXPR:
3557 case VEC_WIDEN_MULT_ODD_EXPR:
3558 case VEC_UNPACK_HI_EXPR:
3559 case VEC_UNPACK_LO_EXPR:
3560 case VEC_UNPACK_FLOAT_HI_EXPR:
3561 case VEC_UNPACK_FLOAT_LO_EXPR:
3562 case VEC_PACK_TRUNC_EXPR:
3563 case VEC_PACK_SAT_EXPR:
3564 case VEC_PACK_FIX_TRUNC_EXPR:
3565 case VEC_WIDEN_LSHIFT_HI_EXPR:
3566 case VEC_WIDEN_LSHIFT_LO_EXPR:
3568 return 1;
3570 /* A few special cases of expensive operations. This is useful
3571 to avoid inlining functions having too many of these. */
3572 case TRUNC_DIV_EXPR:
3573 case CEIL_DIV_EXPR:
3574 case FLOOR_DIV_EXPR:
3575 case ROUND_DIV_EXPR:
3576 case EXACT_DIV_EXPR:
3577 case TRUNC_MOD_EXPR:
3578 case CEIL_MOD_EXPR:
3579 case FLOOR_MOD_EXPR:
3580 case ROUND_MOD_EXPR:
3581 case RDIV_EXPR:
3582 if (TREE_CODE (op2) != INTEGER_CST)
3583 return weights->div_mod_cost;
3584 return 1;
3586 default:
3587 /* We expect a copy assignment with no operator. */
3588 gcc_assert (get_gimple_rhs_class (code) == GIMPLE_SINGLE_RHS);
3589 return 0;
3594 /* Estimate number of instructions that will be created by expanding
3595 the statements in the statement sequence STMTS.
3596 WEIGHTS contains weights attributed to various constructs. */
3598 static
3599 int estimate_num_insns_seq (gimple_seq stmts, eni_weights *weights)
3601 int cost;
3602 gimple_stmt_iterator gsi;
3604 cost = 0;
3605 for (gsi = gsi_start (stmts); !gsi_end_p (gsi); gsi_next (&gsi))
3606 cost += estimate_num_insns (gsi_stmt (gsi), weights);
3608 return cost;
3612 /* Estimate number of instructions that will be created by expanding STMT.
3613 WEIGHTS contains weights attributed to various constructs. */
3615 int
3616 estimate_num_insns (gimple stmt, eni_weights *weights)
3618 unsigned cost, i;
3619 enum gimple_code code = gimple_code (stmt);
3620 tree lhs;
3621 tree rhs;
3623 switch (code)
3625 case GIMPLE_ASSIGN:
3626 /* Try to estimate the cost of assignments. We have two cases to
3627 deal with:
3628 1) Simple assignments to registers;
3629 2) Stores to things that must live in memory. This includes
3630 "normal" stores to scalars, but also assignments of large
3631 structures, or constructors of big arrays;
3633 Let us look at these two cases, assuming we have "a = b + C":
3634 <GIMPLE_ASSIGN <var_decl "a">
3635 <plus_expr <var_decl "b"> <constant C>>
3636 If "a" is a GIMPLE register, the assignment to it is free on almost
3637 any target, because "a" usually ends up in a real register. Hence
3638 the only cost of this expression comes from the PLUS_EXPR, and we
3639 can ignore the GIMPLE_ASSIGN.
3640 If "a" is not a GIMPLE register, the assignment to "a" will most
3641 likely be a real store, so the cost of the GIMPLE_ASSIGN is the cost
3642 of moving something into "a", which we compute using the function
3643 estimate_move_cost. */
3644 if (gimple_clobber_p (stmt))
3645 return 0; /* ={v} {CLOBBER} stmt expands to nothing. */
3647 lhs = gimple_assign_lhs (stmt);
3648 rhs = gimple_assign_rhs1 (stmt);
3650 cost = 0;
3652 /* Account for the cost of moving to / from memory. */
3653 if (gimple_store_p (stmt))
3654 cost += estimate_move_cost (TREE_TYPE (lhs));
3655 if (gimple_assign_load_p (stmt))
3656 cost += estimate_move_cost (TREE_TYPE (rhs));
3658 cost += estimate_operator_cost (gimple_assign_rhs_code (stmt), weights,
3659 gimple_assign_rhs1 (stmt),
3660 get_gimple_rhs_class (gimple_assign_rhs_code (stmt))
3661 == GIMPLE_BINARY_RHS
3662 ? gimple_assign_rhs2 (stmt) : NULL);
3663 break;
3665 case GIMPLE_COND:
3666 cost = 1 + estimate_operator_cost (gimple_cond_code (stmt), weights,
3667 gimple_op (stmt, 0),
3668 gimple_op (stmt, 1));
3669 break;
3671 case GIMPLE_SWITCH:
3672 /* Take into account the cost of the switch + guess 2 conditional jumps for
3673 each case label.
3675 TODO: once the switch expansion logic is sufficiently separated, we can
3676 do a better job of estimating the cost of the switch. */
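/* Worked example: a switch with 8 case labels is costed as
   floor_log2 (8) * 2 == 6 when estimating time (a balanced decision
   tree), but as 8 * 2 == 16 when estimating size. */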
3677 if (weights->time_based)
3678 cost = floor_log2 (gimple_switch_num_labels (stmt)) * 2;
3679 else
3680 cost = gimple_switch_num_labels (stmt) * 2;
3681 break;
3683 case GIMPLE_CALL:
3685 tree decl = gimple_call_fndecl (stmt);
3686 struct cgraph_node *node = NULL;
3688 /* Do not special case builtins where we see the body.
3689 This just confuses the inliner. */
3690 if (!decl || !(node = cgraph_get_node (decl)) || node->symbol.definition)
3691 ;
3692 /* For builtins that are likely expanded to nothing or
3693 inlined, do not account operand costs. */
3694 else if (is_simple_builtin (decl))
3695 return 0;
3696 else if (is_inexpensive_builtin (decl))
3697 return weights->target_builtin_call_cost;
3698 else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
3700 /* We canonicalize x * x to pow (x, 2.0) with -ffast-math, so
3701 specialize the cheap expansion we do here.
3702 ??? This asks for a more general solution. */
3703 switch (DECL_FUNCTION_CODE (decl))
3705 case BUILT_IN_POW:
3706 case BUILT_IN_POWF:
3707 case BUILT_IN_POWL:
3708 if (TREE_CODE (gimple_call_arg (stmt, 1)) == REAL_CST
3709 && REAL_VALUES_EQUAL
3710 (TREE_REAL_CST (gimple_call_arg (stmt, 1)), dconst2))
3711 return estimate_operator_cost (MULT_EXPR, weights,
3712 gimple_call_arg (stmt, 0),
3713 gimple_call_arg (stmt, 0));
3714 break;
3716 default:
3717 break;
3721 cost = node ? weights->call_cost : weights->indirect_call_cost;
3722 if (gimple_call_lhs (stmt))
3723 cost += estimate_move_cost (TREE_TYPE (gimple_call_lhs (stmt)));
3724 for (i = 0; i < gimple_call_num_args (stmt); i++)
3726 tree arg = gimple_call_arg (stmt, i);
3727 cost += estimate_move_cost (TREE_TYPE (arg));
3729 break;
3732 case GIMPLE_RETURN:
3733 return weights->return_cost;
3735 case GIMPLE_GOTO:
3736 case GIMPLE_LABEL:
3737 case GIMPLE_NOP:
3738 case GIMPLE_PHI:
3739 case GIMPLE_PREDICT:
3740 case GIMPLE_DEBUG:
3741 return 0;
3743 case GIMPLE_ASM:
3744 return asm_str_count (gimple_asm_string (stmt));
3746 case GIMPLE_RESX:
3747 /* This is either going to be an external function call with one
3748 argument, or two register copy statements plus a goto. */
3749 return 2;
3751 case GIMPLE_EH_DISPATCH:
3752 /* ??? This is going to turn into a switch statement. Ideally
3753 we'd have a look at the eh region and estimate the number of
3754 edges involved. */
3755 return 10;
3757 case GIMPLE_BIND:
3758 return estimate_num_insns_seq (gimple_bind_body (stmt), weights);
3760 case GIMPLE_EH_FILTER:
3761 return estimate_num_insns_seq (gimple_eh_filter_failure (stmt), weights);
3763 case GIMPLE_CATCH:
3764 return estimate_num_insns_seq (gimple_catch_handler (stmt), weights);
3766 case GIMPLE_TRY:
3767 return (estimate_num_insns_seq (gimple_try_eval (stmt), weights)
3768 + estimate_num_insns_seq (gimple_try_cleanup (stmt), weights));
3770 /* OpenMP directives are generally very expensive. */
3772 case GIMPLE_OMP_RETURN:
3773 case GIMPLE_OMP_SECTIONS_SWITCH:
3774 case GIMPLE_OMP_ATOMIC_STORE:
3775 case GIMPLE_OMP_CONTINUE:
3776 /* ...except these, which are cheap. */
3777 return 0;
3779 case GIMPLE_OMP_ATOMIC_LOAD:
3780 return weights->omp_cost;
3782 case GIMPLE_OMP_FOR:
3783 return (weights->omp_cost
3784 + estimate_num_insns_seq (gimple_omp_body (stmt), weights)
3785 + estimate_num_insns_seq (gimple_omp_for_pre_body (stmt), weights));
3787 case GIMPLE_OMP_PARALLEL:
3788 case GIMPLE_OMP_TASK:
3789 case GIMPLE_OMP_CRITICAL:
3790 case GIMPLE_OMP_MASTER:
3791 case GIMPLE_OMP_ORDERED:
3792 case GIMPLE_OMP_SECTION:
3793 case GIMPLE_OMP_SECTIONS:
3794 case GIMPLE_OMP_SINGLE:
3795 return (weights->omp_cost
3796 + estimate_num_insns_seq (gimple_omp_body (stmt), weights));
3798 case GIMPLE_TRANSACTION:
3799 return (weights->tm_cost
3800 + estimate_num_insns_seq (gimple_transaction_body (stmt),
3801 weights));
3803 default:
3804 gcc_unreachable ();
3807 return cost;
3810 /* Estimate number of instructions that will be created by expanding
3811 function FNDECL. WEIGHTS contains weights attributed to various
3812 constructs. */
3814 int
3815 estimate_num_insns_fn (tree fndecl, eni_weights *weights)
3817 struct function *my_function = DECL_STRUCT_FUNCTION (fndecl);
3818 gimple_stmt_iterator bsi;
3819 basic_block bb;
3820 int n = 0;
3822 gcc_assert (my_function && my_function->cfg);
3823 FOR_EACH_BB_FN (bb, my_function)
3825 for (bsi = gsi_start_bb (bb); !gsi_end_p (bsi); gsi_next (&bsi))
3826 n += estimate_num_insns (gsi_stmt (bsi), weights);
3829 return n;
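/* Illustrative usage (a sketch, not from the original source): the
   whole-function size and time estimates that inlining decisions are
   based on can be obtained as

     int size = estimate_num_insns_fn (fndecl, &eni_size_weights);
     int time = estimate_num_insns_fn (fndecl, &eni_time_weights);

   FNDECL must already have a CFG, per the assert above.  */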
3833 /* Initializes weights used by estimate_num_insns. */
3835 void
3836 init_inline_once (void)
3838 eni_size_weights.call_cost = 1;
3839 eni_size_weights.indirect_call_cost = 3;
3840 eni_size_weights.target_builtin_call_cost = 1;
3841 eni_size_weights.div_mod_cost = 1;
3842 eni_size_weights.omp_cost = 40;
3843 eni_size_weights.tm_cost = 10;
3844 eni_size_weights.time_based = false;
3845 eni_size_weights.return_cost = 1;
3847 /* Estimating time for call is difficult, since we have no idea what the
3848 called function does. In the current uses of eni_time_weights,
3849 underestimating the cost does less harm than overestimating it, so
3850 we choose a rather small value here. */
3851 eni_time_weights.call_cost = 10;
3852 eni_time_weights.indirect_call_cost = 15;
3853 eni_time_weights.target_builtin_call_cost = 1;
3854 eni_time_weights.div_mod_cost = 10;
3855 eni_time_weights.omp_cost = 40;
3856 eni_time_weights.tm_cost = 40;
3857 eni_time_weights.time_based = true;
3858 eni_time_weights.return_cost = 2;
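/* For example: with these weights a direct call is charged 1 unit of
   size but 10 units of time (plus, in either case, the move costs of
   its LHS and arguments), and an indirect call 3 versus 15; the time
   spent in the unknown callee is deliberately underestimated, per the
   comment above.  */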
3861 /* Estimate the number of instructions in a gimple_seq. */
3863 int
3864 count_insns_seq (gimple_seq seq, eni_weights *weights)
3866 gimple_stmt_iterator gsi;
3867 int n = 0;
3868 for (gsi = gsi_start (seq); !gsi_end_p (gsi); gsi_next (&gsi))
3869 n += estimate_num_insns (gsi_stmt (gsi), weights);
3871 return n;
3875 /* Install new lexical TREE_BLOCK underneath 'current_block'. */
3877 static void
3878 prepend_lexical_block (tree current_block, tree new_block)
3880 BLOCK_CHAIN (new_block) = BLOCK_SUBBLOCKS (current_block);
3881 BLOCK_SUBBLOCKS (current_block) = new_block;
3882 BLOCK_SUPERCONTEXT (new_block) = current_block;
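/* For example: if CURRENT_BLOCK's subblock chain is B1 -> B2, then
   after prepend_lexical_block (current_block, new_block) the chain is
   NEW_BLOCK -> B1 -> B2 and BLOCK_SUPERCONTEXT (new_block) is
   CURRENT_BLOCK.  */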
3885 /* Add local variables from CALLEE to CALLER. */
3887 static inline void
3888 add_local_variables (struct function *callee, struct function *caller,
3889 copy_body_data *id)
3891 tree var;
3892 unsigned ix;
3894 FOR_EACH_LOCAL_DECL (callee, ix, var)
3895 if (!can_be_nonlocal (var, id))
3897 tree new_var = remap_decl (var, id);
3899 /* Remap debug-expressions. */
3900 if (TREE_CODE (new_var) == VAR_DECL
3901 && DECL_HAS_DEBUG_EXPR_P (var)
3902 && new_var != var)
3904 tree tem = DECL_DEBUG_EXPR (var);
3905 bool old_regimplify = id->regimplify;
3906 id->remapping_type_depth++;
3907 walk_tree (&tem, copy_tree_body_r, id, NULL);
3908 id->remapping_type_depth--;
3909 id->regimplify = old_regimplify;
3910 SET_DECL_DEBUG_EXPR (new_var, tem);
3911 DECL_HAS_DEBUG_EXPR_P (new_var) = 1;
3913 add_local_decl (caller, new_var);
3917 /* If STMT is a GIMPLE_CALL, replace it with its inline expansion. */
3919 static bool
3920 expand_call_inline (basic_block bb, gimple stmt, copy_body_data *id)
3922 tree use_retvar;
3923 tree fn;
3924 struct pointer_map_t *st, *dst;
3925 tree return_slot;
3926 tree modify_dest;
3927 location_t saved_location;
3928 struct cgraph_edge *cg_edge;
3929 cgraph_inline_failed_t reason;
3930 basic_block return_block;
3931 edge e;
3932 gimple_stmt_iterator gsi, stmt_gsi;
3933 bool successfully_inlined = FALSE;
3934 bool purge_dead_abnormal_edges;
3936 /* Set input_location here so we get the right instantiation context
3937 if we call instantiate_decl from inlinable_function_p. */
3938 /* FIXME: instantiate_decl isn't called by inlinable_function_p. */
3939 saved_location = input_location;
3940 input_location = gimple_location (stmt);
3942 /* From here on, we're only interested in CALL_EXPRs. */
3943 if (gimple_code (stmt) != GIMPLE_CALL)
3944 goto egress;
3946 cg_edge = cgraph_edge (id->dst_node, stmt);
3947 gcc_checking_assert (cg_edge);
3948 /* First, see if we can figure out what function is being called.
3949 If we cannot, then there is no hope of inlining the function. */
3950 if (cg_edge->indirect_unknown_callee)
3951 goto egress;
3952 fn = cg_edge->callee->symbol.decl;
3953 gcc_checking_assert (fn);
3955 /* If FN is a declaration of a function in a nested scope that was
3956 globally declared inline, we don't set its DECL_INITIAL.
3957 However, we can't blindly follow DECL_ABSTRACT_ORIGIN because the
3958 C++ front-end uses it for cdtors to refer to their internal
3959 declarations, which are not real functions. Fortunately those
3960 don't have trees to be saved, so we can tell by checking their
3961 gimple_body. */
3962 if (!DECL_INITIAL (fn)
3963 && DECL_ABSTRACT_ORIGIN (fn)
3964 && gimple_has_body_p (DECL_ABSTRACT_ORIGIN (fn)))
3965 fn = DECL_ABSTRACT_ORIGIN (fn);
3967 /* Don't try to inline functions that are not well-suited to inlining. */
3968 if (cg_edge->inline_failed)
3970 reason = cg_edge->inline_failed;
3971 /* If this call was originally indirect, we do not want to emit any
3972 inlining related warnings or sorry messages because there are no
3973 guarantees regarding those. */
3974 if (cg_edge->indirect_inlining_edge)
3975 goto egress;
3977 if (lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn))
3978 /* For extern inline functions that get redefined we always
3979 silently ignore the always_inline flag. Better behaviour would
3980 be to be able to keep both bodies and use extern inline body
3981 for inlining, but we can't do that because frontends overwrite
3982 the body. */
3983 && !cg_edge->callee->local.redefined_extern_inline
3984 /* During early inline pass, report only when optimization is
3985 not turned on. */
3986 && (cgraph_global_info_ready
3987 || !optimize)
3988 /* PR 20090218-1_0.c. Body can be provided by another module. */
3989 && (reason != CIF_BODY_NOT_AVAILABLE || !flag_generate_lto))
3991 error ("inlining failed in call to always_inline %q+F: %s", fn,
3992 cgraph_inline_failed_string (reason));
3993 error ("called from here");
3995 else if (warn_inline
3996 && DECL_DECLARED_INLINE_P (fn)
3997 && !DECL_NO_INLINE_WARNING_P (fn)
3998 && !DECL_IN_SYSTEM_HEADER (fn)
3999 && reason != CIF_UNSPECIFIED
4000 && !lookup_attribute ("noinline", DECL_ATTRIBUTES (fn))
4001 /* Do not warn about not inlined recursive calls. */
4002 && !cgraph_edge_recursive_p (cg_edge)
4003 /* Avoid warnings during early inline pass. */
4004 && cgraph_global_info_ready)
4006 warning (OPT_Winline, "inlining failed in call to %q+F: %s",
4007 fn, _(cgraph_inline_failed_string (reason)));
4008 warning (OPT_Winline, "called from here");
4010 goto egress;
4012 fn = cg_edge->callee->symbol.decl;
4013 cgraph_get_body (cg_edge->callee);
4015 #ifdef ENABLE_CHECKING
4016 if (cg_edge->callee->symbol.decl != id->dst_node->symbol.decl)
4017 verify_cgraph_node (cg_edge->callee);
4018 #endif
4020 /* We will be inlining this callee. */
4021 id->eh_lp_nr = lookup_stmt_eh_lp (stmt);
4023 /* Update the caller's EH personality. */
4024 if (DECL_FUNCTION_PERSONALITY (cg_edge->callee->symbol.decl))
4025 DECL_FUNCTION_PERSONALITY (cg_edge->caller->symbol.decl)
4026 = DECL_FUNCTION_PERSONALITY (cg_edge->callee->symbol.decl);
4028 /* Split the block holding the GIMPLE_CALL. */
4029 e = split_block (bb, stmt);
4030 bb = e->src;
4031 return_block = e->dest;
4032 remove_edge (e);
4034 /* split_block splits after the statement; work around this by
4035 moving the call into the second block manually. Not pretty,
4036 but seems easier than doing the CFG manipulation by hand
4037 when the GIMPLE_CALL is in the last statement of BB. */
4038 stmt_gsi = gsi_last_bb (bb);
4039 gsi_remove (&stmt_gsi, false);
4041 /* If the GIMPLE_CALL was in the last statement of BB, it may have
4042 been the source of abnormal edges. In this case, schedule
4043 the removal of dead abnormal edges. */
4044 gsi = gsi_start_bb (return_block);
4045 if (gsi_end_p (gsi))
4047 gsi_insert_after (&gsi, stmt, GSI_NEW_STMT);
4048 purge_dead_abnormal_edges = true;
4050 else
4052 gsi_insert_before (&gsi, stmt, GSI_NEW_STMT);
4053 purge_dead_abnormal_edges = false;
4056 stmt_gsi = gsi_start_bb (return_block);
4058 /* Build a block containing code to initialize the arguments, the
4059 actual inline expansion of the body, and a label for the return
4060 statements within the function to jump to. The type of the
4061 statement expression is the return type of the function call.
4062 ??? If the call does not have an associated block then we will
4063 remap all callee blocks to NULL, effectively dropping most of
4064 its debug information. This should only happen for calls to
4065 artificial decls inserted by the compiler itself. We need to
4066 either link the inlined blocks into the caller block tree or
4067 not refer to them in any way to not break GC for locations. */
4068 if (gimple_block (stmt))
4070 id->block = make_node (BLOCK);
4071 BLOCK_ABSTRACT_ORIGIN (id->block) = fn;
4072 BLOCK_SOURCE_LOCATION (id->block) = LOCATION_LOCUS (input_location);
4073 prepend_lexical_block (gimple_block (stmt), id->block);
4076 /* Local declarations will be replaced by their equivalents in this
4077 map. */
4078 st = id->decl_map;
4079 id->decl_map = pointer_map_create ();
4080 dst = id->debug_map;
4081 id->debug_map = NULL;
4083 /* Record the function we are about to inline. */
4084 id->src_fn = fn;
4085 id->src_node = cg_edge->callee;
4086 id->src_cfun = DECL_STRUCT_FUNCTION (fn);
4087 id->gimple_call = stmt;
4089 gcc_assert (!id->src_cfun->after_inlining);
4091 id->entry_bb = bb;
4092 if (lookup_attribute ("cold", DECL_ATTRIBUTES (fn)))
4094 gimple_stmt_iterator si = gsi_last_bb (bb);
4095 gsi_insert_after (&si, gimple_build_predict (PRED_COLD_FUNCTION,
4096 NOT_TAKEN),
4097 GSI_NEW_STMT);
4099 initialize_inlined_parameters (id, stmt, fn, bb);
4101 if (DECL_INITIAL (fn))
4103 if (gimple_block (stmt))
4105 tree *var;
4107 prepend_lexical_block (id->block,
4108 remap_blocks (DECL_INITIAL (fn), id));
4109 gcc_checking_assert (BLOCK_SUBBLOCKS (id->block)
4110 && (BLOCK_CHAIN (BLOCK_SUBBLOCKS (id->block))
4111 == NULL_TREE));
4112 /* Move vars for PARM_DECLs from DECL_INITIAL block to id->block,
4113 otherwise in DWARF the DW_TAG_formal_parameter entries will not be
4114 children of DW_TAG_inlined_subroutine, but of a DW_TAG_lexical_block
4115 under it. The parameters can then be evaluated in the debugger,
4116 but don't show up in backtraces. */
4117 for (var = &BLOCK_VARS (BLOCK_SUBBLOCKS (id->block)); *var; )
4118 if (TREE_CODE (DECL_ORIGIN (*var)) == PARM_DECL)
4120 tree v = *var;
4121 *var = TREE_CHAIN (v);
4122 TREE_CHAIN (v) = BLOCK_VARS (id->block);
4123 BLOCK_VARS (id->block) = v;
4125 else
4126 var = &TREE_CHAIN (*var);
4128 else
4129 remap_blocks_to_null (DECL_INITIAL (fn), id);
4132 /* Return statements in the function body will be replaced by jumps
4133 to the RET_LABEL. */
4134 gcc_assert (DECL_INITIAL (fn));
4135 gcc_assert (TREE_CODE (DECL_INITIAL (fn)) == BLOCK);
4137 /* Find the LHS to which the result of this call is assigned. */
4138 return_slot = NULL;
4139 if (gimple_call_lhs (stmt))
4141 modify_dest = gimple_call_lhs (stmt);
4143 /* The function which we are inlining might not return a value,
4144 in which case we should issue a warning that the function
4145 does not return a value. In that case the optimizers will
4146 see that the variable to which the value is assigned was not
4147 initialized. We do not want to issue a warning about that
4148 uninitialized variable. */
4149 if (DECL_P (modify_dest))
4150 TREE_NO_WARNING (modify_dest) = 1;
4152 if (gimple_call_return_slot_opt_p (stmt))
4154 return_slot = modify_dest;
4155 modify_dest = NULL;
4158 else
4159 modify_dest = NULL;
4161 /* If we are inlining a call to the C++ operator new, we don't want
4162 to use type based alias analysis on the return value. Otherwise
4163 we may get confused if the compiler sees that the inlined new
4164 function returns a pointer which was just deleted. See bug
4165 33407. */
4166 if (DECL_IS_OPERATOR_NEW (fn))
4168 return_slot = NULL;
4169 modify_dest = NULL;
4172 /* Declare the return variable for the function. */
4173 use_retvar = declare_return_variable (id, return_slot, modify_dest, bb);
4175 /* Add local vars in this inlined callee to caller. */
4176 add_local_variables (id->src_cfun, cfun, id);
4178 if (dump_file && (dump_flags & TDF_DETAILS))
4180 fprintf (dump_file, "Inlining ");
4181 print_generic_expr (dump_file, id->src_fn, 0);
4182 fprintf (dump_file, " to ");
4183 print_generic_expr (dump_file, id->dst_fn, 0);
4184 fprintf (dump_file, " with frequency %i\n", cg_edge->frequency);
4187 /* This is it. Duplicate the callee body. Assume callee is
4188 pre-gimplified. Note that we must not alter the caller
4189 function in any way before this point, as this CALL_EXPR may be
4190 a self-referential call; if we're calling ourselves, we need to
4191 duplicate our body before altering anything. */
4192 copy_body (id, bb->count,
4193 GCOV_COMPUTE_SCALE (cg_edge->frequency, CGRAPH_FREQ_BASE),
4194 bb, return_block, NULL, NULL);
4196 /* Reset the escaped solution. */
4197 if (cfun->gimple_df)
4198 pt_solution_reset (&cfun->gimple_df->escaped);
4200 /* Clean up. */
4201 if (id->debug_map)
4203 pointer_map_destroy (id->debug_map);
4204 id->debug_map = dst;
4206 pointer_map_destroy (id->decl_map);
4207 id->decl_map = st;
4209 /* Unlink the call's virtual operands before replacing it. */
4210 unlink_stmt_vdef (stmt);
4212 /* If the inlined function returns a result that we care about,
4213 substitute the GIMPLE_CALL with an assignment of the return
4214 variable to the LHS of the call. That is, if STMT was
4215 'a = foo (...)', substitute the call with 'a = USE_RETVAR'. */
4216 if (use_retvar && gimple_call_lhs (stmt))
4218 gimple old_stmt = stmt;
4219 stmt = gimple_build_assign (gimple_call_lhs (stmt), use_retvar);
4220 gsi_replace (&stmt_gsi, stmt, false);
4221 maybe_clean_or_replace_eh_stmt (old_stmt, stmt);
4223 else
4225 /* Handle the case of inlining a function with no return
4226 statement, which causes the return value to become undefined. */
4227 if (gimple_call_lhs (stmt)
4228 && TREE_CODE (gimple_call_lhs (stmt)) == SSA_NAME)
4230 tree name = gimple_call_lhs (stmt);
4231 tree var = SSA_NAME_VAR (name);
4232 tree def = ssa_default_def (cfun, var);
4234 if (def)
4236 /* If the variable is used undefined, make this name
4237 undefined via a move. */
4238 stmt = gimple_build_assign (gimple_call_lhs (stmt), def);
4239 gsi_replace (&stmt_gsi, stmt, true);
4241 else
4243 /* Otherwise make this variable undefined. */
4244 gsi_remove (&stmt_gsi, true);
4245 set_ssa_default_def (cfun, var, name);
4246 SSA_NAME_DEF_STMT (name) = gimple_build_nop ();
4249 else
4250 gsi_remove (&stmt_gsi, true);
4253 if (purge_dead_abnormal_edges)
4255 gimple_purge_dead_eh_edges (return_block);
4256 gimple_purge_dead_abnormal_call_edges (return_block);
4259 /* If the value of the new expression is ignored, that's OK. We
4260 don't warn about this for CALL_EXPRs, so we shouldn't warn about
4261 the equivalent inlined version either. */
4262 if (is_gimple_assign (stmt))
4264 gcc_assert (gimple_assign_single_p (stmt)
4265 || CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt)));
4266 TREE_USED (gimple_assign_rhs1 (stmt)) = 1;
4269 /* Output the inlining info for this abstract function, since it has been
4270 inlined. If we don't do this now, we can lose the information about the
4271 variables in the function when the blocks get blown away as soon as we
4272 remove the cgraph node. */
4273 if (gimple_block (stmt))
4274 (*debug_hooks->outlining_inline_function) (cg_edge->callee->symbol.decl);
4276 /* Update callgraph if needed. */
4277 cgraph_remove_node (cg_edge->callee);
4279 id->block = NULL_TREE;
4280 successfully_inlined = TRUE;
4282 egress:
4283 input_location = saved_location;
4284 return successfully_inlined;
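/* For example (an illustrative sketch, not from the original source):
   inlining "static int inc (int x) { return x + 1; }" into the call
   statement "a_1 = inc (b_2);" conceptually rewrites the caller's
   GIMPLE into

     x_3 = b_2;            initialize_inlined_parameters
     retval_4 = x_3 + 1;   copied body; the return became a store to
                           the variable from declare_return_variable
     a_1 = retval_4;       the GIMPLE_CALL replaced per the
                           "a = USE_RETVAR" substitution above  */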
4287 /* Expand call statements reachable from STMT_P.
4288 We can only have CALL_EXPRs as the "toplevel" tree code or nested
4289 in a MODIFY_EXPR. */
4291 static bool
4292 gimple_expand_calls_inline (basic_block bb, copy_body_data *id)
4294 gimple_stmt_iterator gsi;
4296 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
4298 gimple stmt = gsi_stmt (gsi);
4300 if (is_gimple_call (stmt)
4301 && expand_call_inline (bb, stmt, id))
4302 return true;
4305 return false;
4309 /* Walk all basic blocks created after FIRST and try to fold every statement
4310 in the STATEMENTS pointer set. */
4312 static void
4313 fold_marked_statements (int first, struct pointer_set_t *statements)
4315 for (; first < n_basic_blocks; first++)
4316 if (BASIC_BLOCK (first))
4318 gimple_stmt_iterator gsi;
4320 for (gsi = gsi_start_bb (BASIC_BLOCK (first));
4321 !gsi_end_p (gsi);
4322 gsi_next (&gsi))
4323 if (pointer_set_contains (statements, gsi_stmt (gsi)))
4325 gimple old_stmt = gsi_stmt (gsi);
4326 tree old_decl = is_gimple_call (old_stmt) ? gimple_call_fndecl (old_stmt) : 0;
4328 if (old_decl && DECL_BUILT_IN (old_decl))
4330 /* Folding builtins can create multiple instructions;
4331 we need to look at all of them. */
4332 gimple_stmt_iterator i2 = gsi;
4333 gsi_prev (&i2);
4334 if (fold_stmt (&gsi))
4336 gimple new_stmt;
4337 /* If a builtin at the end of a bb folded into nothing,
4338 the following loop won't work. */
4339 if (gsi_end_p (gsi))
4341 cgraph_update_edges_for_call_stmt (old_stmt,
4342 old_decl, NULL);
4343 break;
4345 if (gsi_end_p (i2))
4346 i2 = gsi_start_bb (BASIC_BLOCK (first));
4347 else
4348 gsi_next (&i2);
4349 while (1)
4351 new_stmt = gsi_stmt (i2);
4352 update_stmt (new_stmt);
4353 cgraph_update_edges_for_call_stmt (old_stmt, old_decl,
4354 new_stmt);
4356 if (new_stmt == gsi_stmt (gsi))
4358 /* It is okay to check only the very last of
4359 these statements. If it is a throwing
4360 statement nothing will change. If it isn't,
4361 this can remove EH edges. The only way that
4362 could be wrong is if some intermediate stmts
4363 throw but the last one doesn't; then we'd
4364 have to split the block, which we can't do
4365 here and we'd lose anyway. And as builtins
4366 probably never throw, this all
4367 is moot anyway. */
4368 if (maybe_clean_or_replace_eh_stmt (old_stmt,
4369 new_stmt))
4370 gimple_purge_dead_eh_edges (BASIC_BLOCK (first));
4371 break;
4373 gsi_next (&i2);
4377 else if (fold_stmt (&gsi))
4379 /* Re-read the statement from GSI as fold_stmt() may
4380 have changed it. */
4381 gimple new_stmt = gsi_stmt (gsi);
4382 update_stmt (new_stmt);
4384 if (is_gimple_call (old_stmt)
4385 || is_gimple_call (new_stmt))
4386 cgraph_update_edges_for_call_stmt (old_stmt, old_decl,
4387 new_stmt);
4389 if (maybe_clean_or_replace_eh_stmt (old_stmt, new_stmt))
4390 gimple_purge_dead_eh_edges (BASIC_BLOCK (first));
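/* For example (illustrative only): if inlining exposed a constant
   argument, a statement recorded in STATEMENTS such as

     n_5 = strlen ("abc");

   is folded here to

     n_5 = 3;

   and cgraph_update_edges_for_call_stmt removes the call graph edge
   for the vanished call.  */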
4396 /* Return true if BB has at least one abnormal outgoing edge. */
4398 static inline bool
4399 has_abnormal_outgoing_edge_p (basic_block bb)
4401 edge e;
4402 edge_iterator ei;
4404 FOR_EACH_EDGE (e, ei, bb->succs)
4405 if (e->flags & EDGE_ABNORMAL)
4406 return true;
4408 return false;
4411 /* Expand calls to inline functions in the body of FN. */
4413 unsigned int
4414 optimize_inline_calls (tree fn)
4416 copy_body_data id;
4417 basic_block bb;
4418 int last = n_basic_blocks;
4419 struct gimplify_ctx gctx;
4420 bool inlined_p = false;
4422 /* Clear out ID. */
4423 memset (&id, 0, sizeof (id));
4425 id.src_node = id.dst_node = cgraph_get_node (fn);
4426 gcc_assert (id.dst_node->symbol.definition);
4427 id.dst_fn = fn;
4428 /* Or any functions that aren't finished yet. */
4429 if (current_function_decl)
4430 id.dst_fn = current_function_decl;
4432 id.copy_decl = copy_decl_maybe_to_var;
4433 id.transform_call_graph_edges = CB_CGE_DUPLICATE;
4434 id.transform_new_cfg = false;
4435 id.transform_return_to_modify = true;
4436 id.transform_lang_insert_block = NULL;
4437 id.statements_to_fold = pointer_set_create ();
4439 push_gimplify_context (&gctx);
4441 /* We make no attempts to keep dominance info up-to-date. */
4442 free_dominance_info (CDI_DOMINATORS);
4443 free_dominance_info (CDI_POST_DOMINATORS);
4445 /* Register specific gimple functions. */
4446 gimple_register_cfg_hooks ();
4448 /* Reach the trees by walking over the CFG, and note the
4449 enclosing basic-blocks in the call edges. */
4450 /* We walk the blocks going forward, because inlined function bodies
4451 will split id->current_basic_block, and the new blocks will
4452 follow it; we'll trudge through them, processing their CALL_EXPRs
4453 along the way. */
4454 FOR_EACH_BB (bb)
4455 inlined_p |= gimple_expand_calls_inline (bb, &id);
4457 pop_gimplify_context (NULL);
4459 #ifdef ENABLE_CHECKING
4461 struct cgraph_edge *e;
4463 verify_cgraph_node (id.dst_node);
4465 /* Double check that we inlined everything we are supposed to inline. */
4466 for (e = id.dst_node->callees; e; e = e->next_callee)
4467 gcc_assert (e->inline_failed);
4469 #endif
4471 /* Fold queued statements. */
4472 fold_marked_statements (last, id.statements_to_fold);
4473 pointer_set_destroy (id.statements_to_fold);
4475 gcc_assert (!id.debug_stmts.exists ());
4477 /* If we didn't inline into the function, there is nothing to do. */
4478 if (!inlined_p)
4479 return 0;
4481 /* Renumber the lexical scoping (non-code) blocks consecutively. */
4482 number_blocks (fn);
4484 delete_unreachable_blocks_update_callgraph (&id);
4485 #ifdef ENABLE_CHECKING
4486 verify_cgraph_node (id.dst_node);
4487 #endif
4489 /* It would be nice to check SSA/CFG/statement consistency here, but it is
4490 not possible yet - the IPA passes might make various functions not
4491 throw, and they don't care to proactively update local EH info. This is
4492 done later in the fixup_cfg pass, which also executes the verification. */
4493 return (TODO_update_ssa
4494 | TODO_cleanup_cfg
4495 | (gimple_in_ssa_p (cfun) ? TODO_remove_unused_locals : 0)
4496 | (gimple_in_ssa_p (cfun) ? TODO_update_address_taken : 0)
4497 | (profile_status != PROFILE_ABSENT ? TODO_rebuild_frequencies : 0));
4500 /* Passed to walk_tree. Copies the node pointed to, if appropriate. */
4502 tree
4503 copy_tree_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
4505 enum tree_code code = TREE_CODE (*tp);
4506 enum tree_code_class cl = TREE_CODE_CLASS (code);
4508 /* We make copies of most nodes. */
4509 if (IS_EXPR_CODE_CLASS (cl)
4510 || code == TREE_LIST
4511 || code == TREE_VEC
4512 || code == TYPE_DECL
4513 || code == OMP_CLAUSE)
4515 /* Because the chain gets clobbered when we make a copy, we save it
4516 here. */
4517 tree chain = NULL_TREE, new_tree;
4519 if (CODE_CONTAINS_STRUCT (code, TS_COMMON))
4520 chain = TREE_CHAIN (*tp);
4522 /* Copy the node. */
4523 new_tree = copy_node (*tp);
4525 /* Propagate mudflap marked-ness. */
4526 if (flag_mudflap && mf_marked_p (*tp))
4527 mf_mark (new_tree);
4529 *tp = new_tree;
4531 /* Now, restore the chain, if appropriate. That will cause
4532 walk_tree to walk into the chain as well. */
4533 if (code == PARM_DECL
4534 || code == TREE_LIST
4535 || code == OMP_CLAUSE)
4536 TREE_CHAIN (*tp) = chain;
4538 /* For now, we don't update BLOCKs when we make copies. So, we
4539 have to nullify all BIND_EXPRs. */
4540 if (TREE_CODE (*tp) == BIND_EXPR)
4541 BIND_EXPR_BLOCK (*tp) = NULL_TREE;
4543 else if (code == CONSTRUCTOR)
4545 /* CONSTRUCTOR nodes need special handling because
4546 we need to duplicate the vector of elements. */
4547 tree new_tree;
4549 new_tree = copy_node (*tp);
4551 /* Propagate mudflap marked-ness. */
4552 if (flag_mudflap && mf_marked_p (*tp))
4553 mf_mark (new_tree);
4555 CONSTRUCTOR_ELTS (new_tree) = vec_safe_copy (CONSTRUCTOR_ELTS (*tp));
4556 *tp = new_tree;
4558 else if (code == STATEMENT_LIST)
4559 /* We used to just abort on STATEMENT_LIST, but we can run into them
4560 with statement-expressions (c++/40975). */
4561 copy_statement_list (tp);
4562 else if (TREE_CODE_CLASS (code) == tcc_type)
4563 *walk_subtrees = 0;
4564 else if (TREE_CODE_CLASS (code) == tcc_declaration)
4565 *walk_subtrees = 0;
4566 else if (TREE_CODE_CLASS (code) == tcc_constant)
4567 *walk_subtrees = 0;
4568 return NULL_TREE;
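/* Illustrative usage (a sketch, not from the original source): paired
   with walk_tree, this makes a deep copy of a GENERIC expression:

     tree copy = expr;
     walk_tree (&copy, copy_tree_r, NULL, NULL);

   Types, declarations and constants are shared rather than copied,
   since *WALK_SUBTREES is cleared for them above.  */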
4571 /* The SAVE_EXPR pointed to by TP is being copied. If ST contains
4572 information indicating to what new SAVE_EXPR this one should be mapped,
4573 use that one. Otherwise, create a new node and enter it in ST. FN is
4574 the function into which the copy will be placed. */
4576 static void
4577 remap_save_expr (tree *tp, void *st_, int *walk_subtrees)
4579 struct pointer_map_t *st = (struct pointer_map_t *) st_;
4580 tree *n;
4581 tree t;
4583 /* See if we already encountered this SAVE_EXPR. */
4584 n = (tree *) pointer_map_contains (st, *tp);
4586 /* If we didn't already remap this SAVE_EXPR, do so now. */
4587 if (!n)
4589 t = copy_node (*tp);
4591 /* Remember this SAVE_EXPR. */
4592 *pointer_map_insert (st, *tp) = t;
4593 /* Make sure we don't remap an already-remapped SAVE_EXPR. */
4594 *pointer_map_insert (st, t) = t;
4596 else
4598 /* We've already walked into this SAVE_EXPR; don't do it again. */
4599 *walk_subtrees = 0;
4600 t = *n;
4603 /* Replace this SAVE_EXPR with the copy. */
4604 *tp = t;
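/* For example: if SAVE_EXPR <x + y> occurs twice in the tree being
   copied, the first visit makes one copy and records it in ST; the
   second visit reuses that copy and stops walking, so the duplicated
   tree still evaluates x + y exactly once.  */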
4607 /* Called via walk_gimple_seq. If *GSIP points to a GIMPLE_LABEL for a local
4608 label, copies the declaration and enters it in the splay_tree in DATA (which
4609 is really a 'copy_body_data *'). */
4611 static tree
4612 mark_local_labels_stmt (gimple_stmt_iterator *gsip,
4613 bool *handled_ops_p ATTRIBUTE_UNUSED,
4614 struct walk_stmt_info *wi)
4616 copy_body_data *id = (copy_body_data *) wi->info;
4617 gimple stmt = gsi_stmt (*gsip);
4619 if (gimple_code (stmt) == GIMPLE_LABEL)
4621 tree decl = gimple_label_label (stmt);
4623 /* Copy the decl and remember the copy. */
4624 insert_decl_map (id, decl, id->copy_decl (decl, id));
4627 return NULL_TREE;
4631 /* Called via walk_gimple_seq by copy_gimple_seq_and_replace_local.
4632 Using the pointer map ID->decl_map,
4633 remaps all local declarations to appropriate replacements in gimple
4634 operands. */
4636 static tree
4637 replace_locals_op (tree *tp, int *walk_subtrees, void *data)
4639 struct walk_stmt_info *wi = (struct walk_stmt_info*) data;
4640 copy_body_data *id = (copy_body_data *) wi->info;
4641 struct pointer_map_t *st = id->decl_map;
4642 tree *n;
4643 tree expr = *tp;
4645 /* Only a local declaration (variable or label). */
4646 if ((TREE_CODE (expr) == VAR_DECL
4647 && !TREE_STATIC (expr))
4648 || TREE_CODE (expr) == LABEL_DECL)
4650 /* Lookup the declaration. */
4651 n = (tree *) pointer_map_contains (st, expr);
4653 /* If it's there, remap it. */
4654 if (n)
4655 *tp = *n;
4656 *walk_subtrees = 0;
4658 else if (TREE_CODE (expr) == STATEMENT_LIST
4659 || TREE_CODE (expr) == BIND_EXPR
4660 || TREE_CODE (expr) == SAVE_EXPR)
4661 gcc_unreachable ();
4662 else if (TREE_CODE (expr) == TARGET_EXPR)
4664 /* Don't mess with a TARGET_EXPR that hasn't been expanded.
4665 It's OK for this to happen if it was part of a subtree that
4666 isn't immediately expanded, such as operand 2 of another
4667 TARGET_EXPR. */
4668 if (!TREE_OPERAND (expr, 1))
4670 TREE_OPERAND (expr, 1) = TREE_OPERAND (expr, 3);
4671 TREE_OPERAND (expr, 3) = NULL_TREE;
4675 /* Keep iterating. */
4676 return NULL_TREE;
4680 /* Called via walk_gimple_seq by copy_gimple_seq_and_replace_local.
4681 Using the pointer map ID->decl_map,
4682 remaps all local declarations to appropriate replacements in gimple
4683 statements. */
4685 static tree
4686 replace_locals_stmt (gimple_stmt_iterator *gsip,
4687 bool *handled_ops_p ATTRIBUTE_UNUSED,
4688 struct walk_stmt_info *wi)
4690 copy_body_data *id = (copy_body_data *) wi->info;
4691 gimple stmt = gsi_stmt (*gsip);
4693 if (gimple_code (stmt) == GIMPLE_BIND)
4695 tree block = gimple_bind_block (stmt);
4697 if (block)
4699 remap_block (&block, id);
4700 gimple_bind_set_block (stmt, block);
4703 /* This will remap a lot of the same decls again, but this should be
4704 harmless. */
4705 if (gimple_bind_vars (stmt))
4706 gimple_bind_set_vars (stmt, remap_decls (gimple_bind_vars (stmt),
4707 NULL, id));
4710 /* Keep iterating. */
4711 return NULL_TREE;
4715 /* Copies everything in SEQ and replaces variables and labels local to
4716 current_function_decl. */
4718 gimple_seq
4719 copy_gimple_seq_and_replace_locals (gimple_seq seq)
4721 copy_body_data id;
4722 struct walk_stmt_info wi;
4723 struct pointer_set_t *visited;
4724 gimple_seq copy;
4726 /* There's nothing to do for an empty (NULL) sequence. */
4727 if (seq == NULL)
4728 return seq;
4730 /* Set up ID. */
4731 memset (&id, 0, sizeof (id));
4732 id.src_fn = current_function_decl;
4733 id.dst_fn = current_function_decl;
4734 id.decl_map = pointer_map_create ();
4735 id.debug_map = NULL;
4737 id.copy_decl = copy_decl_no_change;
4738 id.transform_call_graph_edges = CB_CGE_DUPLICATE;
4739 id.transform_new_cfg = false;
4740 id.transform_return_to_modify = false;
4741 id.transform_lang_insert_block = NULL;
4743 /* Walk the tree once to find local labels. */
4744 memset (&wi, 0, sizeof (wi));
4745 visited = pointer_set_create ();
4746 wi.info = &id;
4747 wi.pset = visited;
4748 walk_gimple_seq (seq, mark_local_labels_stmt, NULL, &wi);
4749 pointer_set_destroy (visited);
4751 copy = gimple_seq_copy (seq);
4753 /* Walk the copy, remapping decls. */
4754 memset (&wi, 0, sizeof (wi));
4755 wi.info = &id;
4756 walk_gimple_seq (copy, replace_locals_stmt, replace_locals_op, &wi);
4758 /* Clean up. */
4759 pointer_map_destroy (id.decl_map);
4760 if (id.debug_map)
4761 pointer_map_destroy (id.debug_map);
4763 return copy;
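/* Illustrative usage (a sketch, not from the original source): to
   emit the same statement sequence in two places without the copies
   sharing local variables or labels:

     gimple_seq second = copy_gimple_seq_and_replace_locals (seq);

   SEQ itself is left untouched; only the copy is remapped.  */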
4767 /* Allow someone to determine if SEARCH is a child of TOP from gdb. */
4769 static tree
4770 debug_find_tree_1 (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED, void *data)
4772 if (*tp == data)
4773 return (tree) data;
4774 else
4775 return NULL;
4778 DEBUG_FUNCTION bool
4779 debug_find_tree (tree top, tree search)
4781 return walk_tree_without_duplicates (&top, debug_find_tree_1, search) != 0;
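/* Illustrative usage, from a debugger session:

     (gdb) call debug_find_tree (top, search)

   evaluates to true iff SEARCH occurs somewhere underneath TOP.  */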
4785 /* Declare the variables created by the inliner. Add all the variables in
4786 VARS to BLOCK. */
4788 static void
4789 declare_inline_vars (tree block, tree vars)
4791 tree t;
4792 for (t = vars; t; t = DECL_CHAIN (t))
4794 DECL_SEEN_IN_BIND_EXPR_P (t) = 1;
4795 gcc_assert (!TREE_STATIC (t) && !TREE_ASM_WRITTEN (t));
4796 add_local_decl (cfun, t);
4799 if (block)
4800 BLOCK_VARS (block) = chainon (BLOCK_VARS (block), vars);
4803 /* Finish up COPY, a duplicate of DECL. DECL originally lived in the
4804 function ID->src_fn, but COPY will live in ID->dst_fn. Fix up the
4805 copy's debug info, RTL and DECL_CONTEXT accordingly. */
4807 static tree
4808 copy_decl_for_dup_finish (copy_body_data *id, tree decl, tree copy)
4810 /* Don't generate debug information for the copy if we wouldn't have
4811 generated it for the original either. */
4812 DECL_ARTIFICIAL (copy) = DECL_ARTIFICIAL (decl);
4813 DECL_IGNORED_P (copy) = DECL_IGNORED_P (decl);
4815 /* Set the DECL_ABSTRACT_ORIGIN so the debugging routines know what
4816 declaration inspired this copy. */
4817 DECL_ABSTRACT_ORIGIN (copy) = DECL_ORIGIN (decl);
4819 /* The new variable/label has no RTL, yet. */
4820 if (CODE_CONTAINS_STRUCT (TREE_CODE (copy), TS_DECL_WRTL)
4821 && !TREE_STATIC (copy) && !DECL_EXTERNAL (copy))
4822 SET_DECL_RTL (copy, 0);
4824 /* These args would always appear unused, if not for this. */
4825 TREE_USED (copy) = 1;
4827 /* Set the context for the new declaration. */
4828 if (!DECL_CONTEXT (decl))
4829 /* Globals stay global. */
4830 ;
4831 else if (DECL_CONTEXT (decl) != id->src_fn)
4832 /* Things that weren't in the scope of the function we're inlining
4833 from aren't in the scope we're inlining to, either. */
4834 ;
4835 else if (TREE_STATIC (decl))
4836 /* Function-scoped static variables should stay in the original
4837 function. */
4838 ;
4839 else
4840 /* Ordinary automatic local variables are now in the scope of the
4841 new function. */
4842 DECL_CONTEXT (copy) = id->dst_fn;
4844 return copy;
4847 static tree
4848 copy_decl_to_var (tree decl, copy_body_data *id)
4850 tree copy, type;
4852 gcc_assert (TREE_CODE (decl) == PARM_DECL
4853 || TREE_CODE (decl) == RESULT_DECL);
4855 type = TREE_TYPE (decl);
4857 copy = build_decl (DECL_SOURCE_LOCATION (id->dst_fn),
4858 VAR_DECL, DECL_NAME (decl), type);
4859 if (DECL_PT_UID_SET_P (decl))
4860 SET_DECL_PT_UID (copy, DECL_PT_UID (decl));
4861 TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (decl);
4862 TREE_READONLY (copy) = TREE_READONLY (decl);
4863 TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (decl);
4864 DECL_GIMPLE_REG_P (copy) = DECL_GIMPLE_REG_P (decl);
4866 return copy_decl_for_dup_finish (id, decl, copy);
4869 /* Like copy_decl_to_var, but create a return slot object instead of a
4870 pointer variable for return by invisible reference. */
4872 static tree
4873 copy_result_decl_to_var (tree decl, copy_body_data *id)
4875 tree copy, type;
4877 gcc_assert (TREE_CODE (decl) == PARM_DECL
4878 || TREE_CODE (decl) == RESULT_DECL);
4880 type = TREE_TYPE (decl);
4881 if (DECL_BY_REFERENCE (decl))
4882 type = TREE_TYPE (type);
4884 copy = build_decl (DECL_SOURCE_LOCATION (id->dst_fn),
4885 VAR_DECL, DECL_NAME (decl), type);
4886 if (DECL_PT_UID_SET_P (decl))
4887 SET_DECL_PT_UID (copy, DECL_PT_UID (decl));
4888 TREE_READONLY (copy) = TREE_READONLY (decl);
4889 TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (decl);
4890 if (!DECL_BY_REFERENCE (decl))
4892 TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (decl);
4893 DECL_GIMPLE_REG_P (copy) = DECL_GIMPLE_REG_P (decl);
4896 return copy_decl_for_dup_finish (id, decl, copy);
4899 tree
4900 copy_decl_no_change (tree decl, copy_body_data *id)
4902 tree copy;
4904 copy = copy_node (decl);
4906 /* The COPY is not abstract; it will be generated in DST_FN. */
4907 DECL_ABSTRACT (copy) = 0;
4908 lang_hooks.dup_lang_specific_decl (copy);
4910 /* TREE_ADDRESSABLE isn't used to indicate that a label's address has
4911 been taken; it's for internal bookkeeping in expand_goto_internal. */
4912 if (TREE_CODE (copy) == LABEL_DECL)
4914 TREE_ADDRESSABLE (copy) = 0;
4915 LABEL_DECL_UID (copy) = -1;
4918 return copy_decl_for_dup_finish (id, decl, copy);
4921 static tree
4922 copy_decl_maybe_to_var (tree decl, copy_body_data *id)
4924 if (TREE_CODE (decl) == PARM_DECL || TREE_CODE (decl) == RESULT_DECL)
4925 return copy_decl_to_var (decl, id);
4926 else
4927 return copy_decl_no_change (decl, id);
4930 /* Return a copy of the function's argument tree. */
4931 static tree
4932 copy_arguments_for_versioning (tree orig_parm, copy_body_data * id,
4933 bitmap args_to_skip, tree *vars)
4935 tree arg, *parg;
4936 tree new_parm = NULL;
4937 int i = 0;
4939 parg = &new_parm;
4941 for (arg = orig_parm; arg; arg = DECL_CHAIN (arg), i++)
4942 if (!args_to_skip || !bitmap_bit_p (args_to_skip, i))
4944 tree new_tree = remap_decl (arg, id);
4945 if (TREE_CODE (new_tree) != PARM_DECL)
4946 new_tree = id->copy_decl (arg, id);
4947 lang_hooks.dup_lang_specific_decl (new_tree);
4948 *parg = new_tree;
4949 parg = &DECL_CHAIN (new_tree);
4951 else if (!pointer_map_contains (id->decl_map, arg))
4953 /* Make an equivalent VAR_DECL. If the argument was used
4954 as a temporary variable later in the function, the uses will be
4955 replaced by the local variable. */
4956 tree var = copy_decl_to_var (arg, id);
4957 insert_decl_map (id, arg, var);
4958 /* Declare this new variable. */
4959 DECL_CHAIN (var) = *vars;
4960 *vars = var;
4962 return new_parm;
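/* For example: versioning "int f (int a, int b)" with bit 0 set in
   ARGS_TO_SKIP yields the parameter list "(int b)"; "a" is remapped
   to an equivalent VAR_DECL chained onto *VARS in case the body
   still refers to it.  */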
4965 /* Return a copy of the function's static chain. */
4966 static tree
4967 copy_static_chain (tree static_chain, copy_body_data * id)
4969 tree *chain_copy, *pvar;
4971 chain_copy = &static_chain;
4972 for (pvar = chain_copy; *pvar; pvar = &DECL_CHAIN (*pvar))
4974 tree new_tree = remap_decl (*pvar, id);
4975 lang_hooks.dup_lang_specific_decl (new_tree);
4976 DECL_CHAIN (new_tree) = DECL_CHAIN (*pvar);
4977 *pvar = new_tree;
4979 return static_chain;
4982 /* Return true if the function is allowed to be versioned.
4983 This is a guard for the versioning functionality. */
4985 bool
4986 tree_versionable_function_p (tree fndecl)
4988 return (!lookup_attribute ("noclone", DECL_ATTRIBUTES (fndecl))
4989 && copy_forbidden (DECL_STRUCT_FUNCTION (fndecl), fndecl) == NULL);
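/* For example, a function declared

     int f (int) __attribute__ ((noclone));

   is never versioned, nor is one for which copy_forbidden finds a
   reason, e.g. a function that receives a non-local goto.  */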
4992 /* Delete all unreachable basic blocks and update callgraph.
4993 Doing so is somewhat nontrivial because we need to update all clones and
4994 remove inline functions that become unreachable. */
4996 static bool
4997 delete_unreachable_blocks_update_callgraph (copy_body_data *id)
4999 bool changed = false;
5000 basic_block b, next_bb;
5002 find_unreachable_blocks ();
5004 /* Delete all unreachable basic blocks. */
5006 for (b = ENTRY_BLOCK_PTR->next_bb; b != EXIT_BLOCK_PTR; b = next_bb)
5008 next_bb = b->next_bb;
5010 if (!(b->flags & BB_REACHABLE))
5012 gimple_stmt_iterator bsi;
5014 for (bsi = gsi_start_bb (b); !gsi_end_p (bsi); gsi_next (&bsi))
5016 struct cgraph_edge *e;
5017 struct cgraph_node *node;
5019 ipa_remove_stmt_references ((symtab_node)id->dst_node, gsi_stmt (bsi));
5021 if (gimple_code (gsi_stmt (bsi)) == GIMPLE_CALL
5022 && (e = cgraph_edge (id->dst_node, gsi_stmt (bsi))) != NULL)
5024 if (!e->inline_failed)
5025 cgraph_remove_node_and_inline_clones (e->callee, id->dst_node);
5026 else
5027 cgraph_remove_edge (e);
5029 if (id->transform_call_graph_edges == CB_CGE_MOVE_CLONES
5030 && id->dst_node->clones)
5031 for (node = id->dst_node->clones; node != id->dst_node;)
5033 ipa_remove_stmt_references ((symtab_node)node, gsi_stmt (bsi));
5034 if (gimple_code (gsi_stmt (bsi)) == GIMPLE_CALL
5035 && (e = cgraph_edge (node, gsi_stmt (bsi))) != NULL)
5037 if (!e->inline_failed)
5038 cgraph_remove_node_and_inline_clones (e->callee, id->dst_node);
5039 else
5040 cgraph_remove_edge (e);
5043 if (node->clones)
5044 node = node->clones;
5045 else if (node->next_sibling_clone)
5046 node = node->next_sibling_clone;
5047 else
5049 while (node != id->dst_node && !node->next_sibling_clone)
5050 node = node->clone_of;
5051 if (node != id->dst_node)
5052 node = node->next_sibling_clone;
5056 delete_basic_block (b);
5057 changed = true;
5061 return changed;
5064 /* Update clone info after duplication. */
5066 static void
5067 update_clone_info (copy_body_data * id)
5069 struct cgraph_node *node;
5070 if (!id->dst_node->clones)
5071 return;
5072 for (node = id->dst_node->clones; node != id->dst_node;)
5074 /* First update replace maps to match the new body. */
5075 if (node->clone.tree_map)
5077 unsigned int i;
5078 for (i = 0; i < vec_safe_length (node->clone.tree_map); i++)
5080 struct ipa_replace_map *replace_info;
5081 replace_info = (*node->clone.tree_map)[i];
5082 walk_tree (&replace_info->old_tree, copy_tree_body_r, id, NULL);
5083 walk_tree (&replace_info->new_tree, copy_tree_body_r, id, NULL);
5086 if (node->clones)
5087 node = node->clones;
5088 else if (node->next_sibling_clone)
5089 node = node->next_sibling_clone;
5090 else
5092 while (node != id->dst_node && !node->next_sibling_clone)
5093 node = node->clone_of;
5094 if (node != id->dst_node)
5095 node = node->next_sibling_clone;
5100 /* Create a copy of a function's tree.
5101 OLD_DECL and NEW_DECL are FUNCTION_DECL tree nodes
5102 of the original function and the new copied function
5103 respectively. In case we want to replace a DECL
5104 tree with another tree while duplicating the function's
5105 body, TREE_MAP represents the mapping between these
5106 trees. If UPDATE_CLONES is set, the call_stmt fields
5107 of edges of clones of the function will be updated.
5109 If non-NULL, ARGS_TO_SKIP determines which function parameters to remove
5110 from the new version.
5111 If SKIP_RETURN is true, the new version will return void.
5112 If non-NULL, BLOCKS_TO_COPY determines which basic blocks to copy.
5113 If non-NULL, NEW_ENTRY determines the new entry BB of the clone. */
5115 void
5116 tree_function_versioning (tree old_decl, tree new_decl,
5117 vec<ipa_replace_map_p, va_gc> *tree_map,
5118 bool update_clones, bitmap args_to_skip,
5119 bool skip_return, bitmap blocks_to_copy,
5120 basic_block new_entry)
5122 struct cgraph_node *old_version_node;
5123 struct cgraph_node *new_version_node;
5124 copy_body_data id;
5125 tree p;
5126 unsigned i;
5127 struct ipa_replace_map *replace_info;
5128 basic_block old_entry_block, bb;
5129 vec<gimple> init_stmts;
5130 init_stmts.create (10);
5131 tree vars = NULL_TREE;
5133 gcc_assert (TREE_CODE (old_decl) == FUNCTION_DECL
5134 && TREE_CODE (new_decl) == FUNCTION_DECL);
5135 DECL_POSSIBLY_INLINED (old_decl) = 1;
5137 old_version_node = cgraph_get_node (old_decl);
5138 gcc_checking_assert (old_version_node);
5139 new_version_node = cgraph_get_node (new_decl);
5140 gcc_checking_assert (new_version_node);
5142 /* Copy over debug args. */
5143 if (DECL_HAS_DEBUG_ARGS_P (old_decl))
5145 vec<tree, va_gc> **new_debug_args, **old_debug_args;
5146 gcc_checking_assert (decl_debug_args_lookup (new_decl) == NULL);
5147 DECL_HAS_DEBUG_ARGS_P (new_decl) = 0;
5148 old_debug_args = decl_debug_args_lookup (old_decl);
5149 if (old_debug_args)
5151 new_debug_args = decl_debug_args_insert (new_decl);
5152 *new_debug_args = vec_safe_copy (*old_debug_args);
5156 /* Output the inlining info for this abstract function, since it has been
5157 inlined. If we don't do this now, we can lose the information about the
5158 variables in the function when the blocks get blown away as soon as we
5159 remove the cgraph node. */
5160 (*debug_hooks->outlining_inline_function) (old_decl);
5162 DECL_ARTIFICIAL (new_decl) = 1;
5163 DECL_ABSTRACT_ORIGIN (new_decl) = DECL_ORIGIN (old_decl);
5164 if (DECL_ORIGIN (old_decl) == old_decl)
5165 old_version_node->used_as_abstract_origin = true;
5166 DECL_FUNCTION_PERSONALITY (new_decl) = DECL_FUNCTION_PERSONALITY (old_decl);
5168 /* Prepare the data structures for the tree copy. */
5169 memset (&id, 0, sizeof (id));
5171 /* Generate a new name for the new version. */
5172 id.statements_to_fold = pointer_set_create ();
5174 id.decl_map = pointer_map_create ();
5175 id.debug_map = NULL;
5176 id.src_fn = old_decl;
5177 id.dst_fn = new_decl;
5178 id.src_node = old_version_node;
5179 id.dst_node = new_version_node;
5180 id.src_cfun = DECL_STRUCT_FUNCTION (old_decl);
5181 id.blocks_to_copy = blocks_to_copy;
5182 if (id.src_node->ipa_transforms_to_apply.exists ())
5184 vec<ipa_opt_pass> old_transforms_to_apply
5185 = id.dst_node->ipa_transforms_to_apply;
5186 unsigned int i;
5188 id.dst_node->ipa_transforms_to_apply
5189 = id.src_node->ipa_transforms_to_apply.copy ();
5190 for (i = 0; i < old_transforms_to_apply.length (); i++)
5191 id.dst_node->ipa_transforms_to_apply.safe_push (old_transforms_to_apply[i]);
5192 old_transforms_to_apply.release ();
5195 id.copy_decl = copy_decl_no_change;
5196 id.transform_call_graph_edges
5197 = update_clones ? CB_CGE_MOVE_CLONES : CB_CGE_MOVE;
5198 id.transform_new_cfg = true;
5199 id.transform_return_to_modify = false;
5200 id.transform_lang_insert_block = NULL;
5202 old_entry_block = ENTRY_BLOCK_PTR_FOR_FUNCTION
5203 (DECL_STRUCT_FUNCTION (old_decl));
5204 DECL_RESULT (new_decl) = DECL_RESULT (old_decl);
5205 DECL_ARGUMENTS (new_decl) = DECL_ARGUMENTS (old_decl);
5206 initialize_cfun (new_decl, old_decl,
5207 old_entry_block->count);
5208 DECL_STRUCT_FUNCTION (new_decl)->gimple_df->ipa_pta
5209 = id.src_cfun->gimple_df->ipa_pta;
5211 /* Copy the function's static chain. */
5212 p = DECL_STRUCT_FUNCTION (old_decl)->static_chain_decl;
5213 if (p)
5214 DECL_STRUCT_FUNCTION (new_decl)->static_chain_decl =
5215 copy_static_chain (DECL_STRUCT_FUNCTION (old_decl)->static_chain_decl,
5216 &id);
5218 /* If there's a tree_map, prepare for substitution. */
5219 if (tree_map)
5220 for (i = 0; i < tree_map->length (); i++)
5222 gimple init;
5223 replace_info = (*tree_map)[i];
5224 if (replace_info->replace_p)
5226 if (!replace_info->old_tree)
5228 int i = replace_info->parm_num;
5229 tree parm;
5230 tree req_type;
5232 for (parm = DECL_ARGUMENTS (old_decl); i; parm = DECL_CHAIN (parm))
5233 i --;
5234 replace_info->old_tree = parm;
5235 req_type = TREE_TYPE (parm);
5236 if (!useless_type_conversion_p (req_type, TREE_TYPE (replace_info->new_tree)))
5238 if (fold_convertible_p (req_type, replace_info->new_tree))
5239 replace_info->new_tree = fold_build1 (NOP_EXPR, req_type, replace_info->new_tree);
5240 else if (TYPE_SIZE (req_type) == TYPE_SIZE (TREE_TYPE (replace_info->new_tree)))
5241 replace_info->new_tree = fold_build1 (VIEW_CONVERT_EXPR, req_type, replace_info->new_tree);
5242 else
5244 if (dump_file)
5246 fprintf (dump_file, " const ");
5247 print_generic_expr (dump_file, replace_info->new_tree, 0);
5248 fprintf (dump_file, " can't be converted to param ");
5249 print_generic_expr (dump_file, parm, 0);
5250 fprintf (dump_file, "\n");
5252 replace_info->old_tree = NULL;
5256 else
5257 gcc_assert (TREE_CODE (replace_info->old_tree) == PARM_DECL);
5258 if (replace_info->old_tree)
5260 init = setup_one_parameter (&id, replace_info->old_tree,
5261 replace_info->new_tree, id.src_fn,
5262 NULL,
5263 &vars);
5264 if (init)
5265 init_stmts.safe_push (init);
5269 /* Copy the function's arguments. */
5270 if (DECL_ARGUMENTS (old_decl) != NULL_TREE)
5271 DECL_ARGUMENTS (new_decl) =
5272 copy_arguments_for_versioning (DECL_ARGUMENTS (old_decl), &id,
5273 args_to_skip, &vars);
5275 DECL_INITIAL (new_decl) = remap_blocks (DECL_INITIAL (id.src_fn), &id);
5276 BLOCK_SUPERCONTEXT (DECL_INITIAL (new_decl)) = new_decl;
5278 declare_inline_vars (DECL_INITIAL (new_decl), vars);
5280 if (!vec_safe_is_empty (DECL_STRUCT_FUNCTION (old_decl)->local_decls))
5281 /* Add local vars. */
5282 add_local_variables (DECL_STRUCT_FUNCTION (old_decl), cfun, &id);
5284 if (DECL_RESULT (old_decl) == NULL_TREE)
5285 ;
5286 else if (skip_return && !VOID_TYPE_P (TREE_TYPE (DECL_RESULT (old_decl))))
5288 DECL_RESULT (new_decl)
5289 = build_decl (DECL_SOURCE_LOCATION (DECL_RESULT (old_decl)),
5290 RESULT_DECL, NULL_TREE, void_type_node);
5291 DECL_CONTEXT (DECL_RESULT (new_decl)) = new_decl;
5292 cfun->returns_struct = 0;
5293 cfun->returns_pcc_struct = 0;
5295 else
5297 tree old_name;
5298 DECL_RESULT (new_decl) = remap_decl (DECL_RESULT (old_decl), &id);
5299 lang_hooks.dup_lang_specific_decl (DECL_RESULT (new_decl));
5300 if (gimple_in_ssa_p (id.src_cfun)
5301 && DECL_BY_REFERENCE (DECL_RESULT (old_decl))
5302 && (old_name = ssa_default_def (id.src_cfun, DECL_RESULT (old_decl))))
5304 tree new_name = make_ssa_name (DECL_RESULT (new_decl), NULL);
5305 insert_decl_map (&id, old_name, new_name);
5306 SSA_NAME_DEF_STMT (new_name) = gimple_build_nop ();
5307 set_ssa_default_def (cfun, DECL_RESULT (new_decl), new_name);
5311 /* Set up the destination function's loop tree. */
5312 if (loops_for_fn (DECL_STRUCT_FUNCTION (old_decl)) != NULL)
5314 cfun->curr_properties &= ~PROP_loops;
5315 loop_optimizer_init (AVOID_CFG_MODIFICATIONS);
5316 cfun->curr_properties |= PROP_loops;
5319 /* Copy the function's body. */
5320 copy_body (&id, old_entry_block->count, REG_BR_PROB_BASE,
5321 ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR, blocks_to_copy, new_entry);
5323 /* Renumber the lexical scoping (non-code) blocks consecutively. */
5324 number_blocks (new_decl);
5326 /* We want to create the BB unconditionally, so that the addition of
5327 debug stmts doesn't affect BB count, which may in the end cause
5328 codegen differences. */
5329 bb = split_edge (single_succ_edge (ENTRY_BLOCK_PTR));
5330 while (init_stmts.length ())
5331 insert_init_stmt (&id, bb, init_stmts.pop ());
5332 update_clone_info (&id);
5334 /* Remap the nonlocal_goto_save_area, if any. */
5335 if (cfun->nonlocal_goto_save_area)
5337 struct walk_stmt_info wi;
5339 memset (&wi, 0, sizeof (wi));
5340 wi.info = &id;
5341 walk_tree (&cfun->nonlocal_goto_save_area, remap_gimple_op_r, &wi, NULL);
5344 /* Clean up. */
5345 pointer_map_destroy (id.decl_map);
5346 if (id.debug_map)
5347 pointer_map_destroy (id.debug_map);
5348 free_dominance_info (CDI_DOMINATORS);
5349 free_dominance_info (CDI_POST_DOMINATORS);
5351 fold_marked_statements (0, id.statements_to_fold);
5352 pointer_set_destroy (id.statements_to_fold);
5353 fold_cond_expr_cond ();
5354 delete_unreachable_blocks_update_callgraph (&id);
5355 if (id.dst_node->symbol.definition)
5356 cgraph_rebuild_references ();
5357 update_ssa (TODO_update_ssa);
5359 /* After partial cloning we need to rescale frequencies, so they are
5360 within proper range in the cloned function. */
5361 if (new_entry)
5363 struct cgraph_edge *e;
5364 rebuild_frequencies ();
5366 new_version_node->count = ENTRY_BLOCK_PTR->count;
5367 for (e = new_version_node->callees; e; e = e->next_callee)
5369 basic_block bb = gimple_bb (e->call_stmt);
5370 e->frequency = compute_call_stmt_bb_frequency (current_function_decl,
5371 bb);
5372 e->count = bb->count;
5374 for (e = new_version_node->indirect_calls; e; e = e->next_callee)
5376 basic_block bb = gimple_bb (e->call_stmt);
5377 e->frequency = compute_call_stmt_bb_frequency (current_function_decl,
5378 bb);
5379 e->count = bb->count;
5383 free_dominance_info (CDI_DOMINATORS);
5384 free_dominance_info (CDI_POST_DOMINATORS);
5386 gcc_assert (!id.debug_stmts.exists ());
5387 init_stmts.release ();
5388 pop_cfun ();
5389 return;
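/* Illustrative usage (a sketch, not from the original source; the
   allocator and field names follow this revision's ipa-prop
   conventions): an IPA pass wanting a clone of FN whose second
   parameter is replaced by the constant 4 could build a replace map
   along these lines:

     struct ipa_replace_map *map = ggc_alloc_ipa_replace_map ();
     map->old_tree = NULL_TREE;  // looked up from parm_num, as above
     map->parm_num = 1;          // zero-based: the second parameter
     map->new_tree = build_int_cst (integer_type_node, 4);
     map->replace_p = true;
     map->ref_p = false;
     vec_safe_push (tree_map, map);
     tree_function_versioning (fn, new_fn, tree_map,
                               false, NULL, false, NULL, NULL);
*/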
5392 /* EXP is a CALL_EXPR present in a GENERIC expression tree. Try to integrate
5393 the callee and return the inlined body on success. */
5395 tree
5396 maybe_inline_call_in_expr (tree exp)
5398 tree fn = get_callee_fndecl (exp);
5400 /* We can only try to inline "const" functions. */
5401 if (fn && TREE_READONLY (fn) && DECL_SAVED_TREE (fn))
5403 struct pointer_map_t *decl_map = pointer_map_create ();
5404 call_expr_arg_iterator iter;
5405 copy_body_data id;
5406 tree param, arg, t;
5408 /* Remap the parameters. */
5409 for (param = DECL_ARGUMENTS (fn), arg = first_call_expr_arg (exp, &iter);
5410 param;
5411 param = DECL_CHAIN (param), arg = next_call_expr_arg (&iter))
5412 *pointer_map_insert (decl_map, param) = arg;
5414 memset (&id, 0, sizeof (id));
5415 id.src_fn = fn;
5416 id.dst_fn = current_function_decl;
5417 id.src_cfun = DECL_STRUCT_FUNCTION (fn);
5418 id.decl_map = decl_map;
5420 id.copy_decl = copy_decl_no_change;
5421 id.transform_call_graph_edges = CB_CGE_DUPLICATE;
5422 id.transform_new_cfg = false;
5423 id.transform_return_to_modify = true;
5424 id.transform_lang_insert_block = NULL;
5426 /* Make sure not to unshare trees behind the front-end's back
5427 since front-end specific mechanisms may rely on sharing. */
5428 id.regimplify = false;
5429 id.do_not_unshare = true;
5431 /* We're not inside any EH region. */
5432 id.eh_lp_nr = 0;
5434 t = copy_tree_body (&id);
5435 pointer_map_destroy (decl_map);
5437 /* We can only return something suitable for use in a GENERIC
5438 expression tree. */
5439 if (TREE_CODE (t) == MODIFY_EXPR)
5440 return TREE_OPERAND (t, 1);
5443 return NULL_TREE;
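/* For example (illustrative only): given

     static int sq (int x) __attribute__ ((const));

   with its body still available in DECL_SAVED_TREE, a GENERIC call
   "sq (3)" can be replaced here by the copied body "x * x" with "x"
   remapped to 3 through DECL_MAP, because the copied body reduces to
   a MODIFY_EXPR whose right-hand side is returned.  */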
5446 /* Duplicate a type, fields and all. */
5448 tree
5449 build_duplicate_type (tree type)
5451 struct copy_body_data id;
5453 memset (&id, 0, sizeof (id));
5454 id.src_fn = current_function_decl;
5455 id.dst_fn = current_function_decl;
5456 id.src_cfun = cfun;
5457 id.decl_map = pointer_map_create ();
5458 id.debug_map = NULL;
5459 id.copy_decl = copy_decl_no_change;
5461 type = remap_type_1 (type, &id);
5463 pointer_map_destroy (id.decl_map);
5464 if (id.debug_map)
5465 pointer_map_destroy (id.debug_map);
5467 TYPE_CANONICAL (type) = type;
5469 return type;
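/* Illustrative usage (a sketch, not from the original source): a
   caller that needs to edit the FIELD_DECLs of a RECORD_TYPE without
   disturbing the original can work on

     tree copy = build_duplicate_type (orig_type);

   The copy is made its own TYPE_CANONICAL above, so canonical-type
   comparisons treat it as a distinct type.  */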