Update count_scale for AutoFDO to prevent over-scale.
official-gcc.git: gcc-4_8/gcc/tree-inline.c

/* Tree inlining.
   Copyright (C) 2001-2013 Free Software Foundation, Inc.
   Contributed by Alexandre Oliva <aoliva@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "diagnostic-core.h"
#include "tree.h"
#include "tree-inline.h"
#include "flags.h"
#include "params.h"
#include "input.h"
#include "insn-config.h"
#include "hashtab.h"
#include "langhooks.h"
#include "basic-block.h"
#include "tree-iterator.h"
#include "cgraph.h"
#include "intl.h"
#include "tree-mudflap.h"
#include "tree-flow.h"
#include "function.h"
#include "tree-pretty-print.h"
#include "except.h"
#include "debug.h"
#include "pointer-set.h"
#include "ipa-prop.h"
#include "value-prof.h"
#include "tree-pass.h"
#include "target.h"
#include "l-ipo.h"

#include "rtl.h"  /* FIXME: For asm_str_count.  */

/* I'm not real happy about this, but we need to handle gimple and
   non-gimple trees.  */
#include "gimple.h"

/* Inlining, Cloning, Versioning, Parallelization

   Inlining: a function body is duplicated, but the PARM_DECLs are
   remapped into VAR_DECLs, and non-void RETURN_EXPRs become
   MODIFY_EXPRs that store to a dedicated returned-value variable.
   The duplicated eh_region info of the copy will later be appended
   to the info for the caller; the eh_region info in copied throwing
   statements and RESX statements are adjusted accordingly.

   Cloning: (only in C++) We have one body for a con/de/structor, and
   multiple function decls, each with a unique parameter list.
   Duplicate the body, using the given splay tree; some parameters
   will become constants (like 0 or 1).

   Versioning: a function body is duplicated and the result is a new
   function, rather than being inserted into blocks of an existing
   function as with inlining.  Some parameters will become constants.

   Parallelization: a region of a function is duplicated, resulting in
   a new function.  Variables may be replaced with complex expressions
   to enable shared variable semantics.

   All of these will simultaneously look up any callgraph edges.  If
   we're going to inline the duplicated function body, and the given
   function has some cloned callgraph nodes (one for each place this
   function will be inlined) those callgraph edges will be duplicated.
   If we're cloning the body, those callgraph edges will be
   updated to point into the new body.  (Note that the original
   callgraph node and edge list will not be altered.)

   See the CALL_EXPR handling case in copy_tree_body_r ().  */
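
/* For example (a hypothetical illustration, not code from this file):
   inlining "int inc (int x) { return x + 1; }" into a caller
   "y = inc (3);" duplicates the body, turns the PARM_DECL x into a
   local VAR_DECL initialized to 3, and rewrites the return into an
   assignment to a returned-value variable that feeds the use of y.  */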

/* To Do:

   o In order to make inlining-on-trees work, we pessimized
     function-local static constants.  In particular, they are now
     always output, even when not addressed.  Fix this by treating
     function-local static constants just like global static
     constants; the back-end already knows not to output them if they
     are not needed.

   o Provide heuristics to clamp inlining of recursive template
     calls?  */


/* Weights that estimate_num_insns uses to estimate the size of the
   produced code.  */

eni_weights eni_size_weights;

/* Weights that estimate_num_insns uses to estimate the time necessary
   to execute the produced code.  */

eni_weights eni_time_weights;

/* Prototypes.  */

static tree declare_return_variable (copy_body_data *, tree, tree, basic_block);
static void remap_block (tree *, copy_body_data *);
static void copy_bind_expr (tree *, int *, copy_body_data *);
static tree mark_local_for_remap_r (tree *, int *, void *);
static void unsave_expr_1 (tree);
static tree unsave_r (tree *, int *, void *);
static void declare_inline_vars (tree, tree);
static void remap_save_expr (tree *, void *, int *);
static void prepend_lexical_block (tree current_block, tree new_block);
static tree copy_decl_to_var (tree, copy_body_data *);
static tree copy_result_decl_to_var (tree, copy_body_data *);
static tree copy_decl_maybe_to_var (tree, copy_body_data *);
static gimple remap_gimple_stmt (gimple, copy_body_data *);
static bool delete_unreachable_blocks_update_callgraph (copy_body_data *id);

/* Insert a tree->tree mapping for ID.  Although the name suggests
   that the trees should be variables, the map is used for more than
   that.  */

void
insert_decl_map (copy_body_data *id, tree key, tree value)
{
  *pointer_map_insert (id->decl_map, key) = value;

  /* Always insert an identity map as well.  If we see this same new
     node again, we won't want to duplicate it a second time.  */
  if (key != value)
    *pointer_map_insert (id->decl_map, value) = value;
}

/* Insert a tree->tree mapping for ID.  This is only used for
   variables.  */

static void
insert_debug_decl_map (copy_body_data *id, tree key, tree value)
{
  if (!gimple_in_ssa_p (id->src_cfun))
    return;

  if (!MAY_HAVE_DEBUG_STMTS)
    return;

  if (!target_for_debug_bind (key))
    return;

  gcc_assert (TREE_CODE (key) == PARM_DECL);
  gcc_assert (TREE_CODE (value) == VAR_DECL);

  if (!id->debug_map)
    id->debug_map = pointer_map_create ();

  *pointer_map_insert (id->debug_map, key) = value;
}

/* If nonzero, we're remapping the contents of inlined debug
   statements.  If negative, an error has occurred, such as a
   reference to a variable that isn't available in the inlined
   context.  */
static int processing_debug_stmt = 0;

/* Construct new SSA name for old NAME.  ID is the inline context.  */

static tree
remap_ssa_name (tree name, copy_body_data *id)
{
  tree new_tree, var;
  tree *n;

  gcc_assert (TREE_CODE (name) == SSA_NAME);

  n = (tree *) pointer_map_contains (id->decl_map, name);
  if (n)
    return unshare_expr (*n);
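
  /* When remapping the operands of a debug stmt, a default def of a
     PARM_DECL can be re-expressed through a DEBUG_EXPR_DECL source
     bind; any other unmapped name makes this debug stmt
     unrepresentable, which the code below records by setting
     processing_debug_stmt to -1.  */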
  if (processing_debug_stmt)
    {
      if (SSA_NAME_IS_DEFAULT_DEF (name)
          && TREE_CODE (SSA_NAME_VAR (name)) == PARM_DECL
          && id->entry_bb == NULL
          && single_succ_p (ENTRY_BLOCK_PTR))
        {
          tree vexpr = make_node (DEBUG_EXPR_DECL);
          gimple def_temp;
          gimple_stmt_iterator gsi;
          tree val = SSA_NAME_VAR (name);

          n = (tree *) pointer_map_contains (id->decl_map, val);
          if (n != NULL)
            val = *n;
          if (TREE_CODE (val) != PARM_DECL)
            {
              processing_debug_stmt = -1;
              return name;
            }
          def_temp = gimple_build_debug_source_bind (vexpr, val, NULL);
          DECL_ARTIFICIAL (vexpr) = 1;
          TREE_TYPE (vexpr) = TREE_TYPE (name);
          DECL_MODE (vexpr) = DECL_MODE (SSA_NAME_VAR (name));
          gsi = gsi_after_labels (single_succ (ENTRY_BLOCK_PTR));
          gsi_insert_before (&gsi, def_temp, GSI_SAME_STMT);
          return vexpr;
        }

      processing_debug_stmt = -1;
      return name;
    }

  /* Remap anonymous SSA names or SSA names of anonymous decls.  */
  var = SSA_NAME_VAR (name);
  if (!var
      || (!SSA_NAME_IS_DEFAULT_DEF (name)
          && TREE_CODE (var) == VAR_DECL
          && !VAR_DECL_IS_VIRTUAL_OPERAND (var)
          && DECL_ARTIFICIAL (var)
          && DECL_IGNORED_P (var)
          && !DECL_NAME (var)))
    {
      struct ptr_info_def *pi;
      new_tree = make_ssa_name (remap_type (TREE_TYPE (name), id), NULL);
      if (!var && SSA_NAME_IDENTIFIER (name))
        SET_SSA_NAME_VAR_OR_IDENTIFIER (new_tree, SSA_NAME_IDENTIFIER (name));
      insert_decl_map (id, name, new_tree);
      SSA_NAME_OCCURS_IN_ABNORMAL_PHI (new_tree)
        = SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name);
      /* At least IPA points-to info can be directly transferred.  */
      if (id->src_cfun->gimple_df
          && id->src_cfun->gimple_df->ipa_pta
          && (pi = SSA_NAME_PTR_INFO (name))
          && !pi->pt.anything)
        {
          struct ptr_info_def *new_pi = get_ptr_info (new_tree);
          new_pi->pt = pi->pt;
        }
      return new_tree;
    }

  /* Do not set DEF_STMT yet as statement is not copied yet.  We do that
     in copy_bb.  */
  new_tree = remap_decl (var, id);

  /* We might've substituted a constant or another SSA_NAME for
     the variable.

     Replace the SSA name representing RESULT_DECL by the variable during
     inlining: this saves us from the need to introduce a PHI node when
     the return value is only partly initialized.  */
  if ((TREE_CODE (new_tree) == VAR_DECL || TREE_CODE (new_tree) == PARM_DECL)
      && (!SSA_NAME_VAR (name)
          || TREE_CODE (SSA_NAME_VAR (name)) != RESULT_DECL
          || !id->transform_return_to_modify))
    {
      struct ptr_info_def *pi;
      new_tree = make_ssa_name (new_tree, NULL);
      insert_decl_map (id, name, new_tree);
      SSA_NAME_OCCURS_IN_ABNORMAL_PHI (new_tree)
        = SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name);
      /* At least IPA points-to info can be directly transferred.  */
      if (id->src_cfun->gimple_df
          && id->src_cfun->gimple_df->ipa_pta
          && (pi = SSA_NAME_PTR_INFO (name))
          && !pi->pt.anything)
        {
          struct ptr_info_def *new_pi = get_ptr_info (new_tree);
          new_pi->pt = pi->pt;
        }
      if (SSA_NAME_IS_DEFAULT_DEF (name))
        {
          /* By inlining a function having an uninitialized variable, we
             might extend its lifetime (the variable might get reused).
             This causes an ICE in the case we end up extending the
             lifetime of an SSA name across an abnormal edge, and it also
             increases register pressure.

             We simply initialize all uninitialized vars by 0 except
             for the case where we are inlining into the very first BB.
             We can avoid this for all BBs that are not inside strongly
             connected regions of the CFG, but this is expensive to test.  */
          if (id->entry_bb
              && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name)
              && (!SSA_NAME_VAR (name)
                  || TREE_CODE (SSA_NAME_VAR (name)) != PARM_DECL)
              && (id->entry_bb != EDGE_SUCC (ENTRY_BLOCK_PTR, 0)->dest
                  || EDGE_COUNT (id->entry_bb->preds) != 1))
            {
              gimple_stmt_iterator gsi = gsi_last_bb (id->entry_bb);
              gimple init_stmt;
              tree zero = build_zero_cst (TREE_TYPE (new_tree));

              init_stmt = gimple_build_assign (new_tree, zero);
              gsi_insert_after (&gsi, init_stmt, GSI_NEW_STMT);
              SSA_NAME_IS_DEFAULT_DEF (new_tree) = 0;
            }
          else
            {
              SSA_NAME_DEF_STMT (new_tree) = gimple_build_nop ();
              set_ssa_default_def (cfun, SSA_NAME_VAR (new_tree), new_tree);
            }
        }
    }
  else
    insert_decl_map (id, name, new_tree);
  return new_tree;
}

/* Remap DECL during the copying of the BLOCK tree for the function.  */

tree
remap_decl (tree decl, copy_body_data *id)
{
  tree *n;

  /* We only remap local variables in the current function.  */

  /* See if we have remapped this declaration.  */

  n = (tree *) pointer_map_contains (id->decl_map, decl);

  if (!n && processing_debug_stmt)
    {
      processing_debug_stmt = -1;
      return decl;
    }

  /* If we didn't already have an equivalent for this declaration,
     create one now.  */
  if (!n)
    {
      /* Make a copy of the variable or label.  */
      tree t = id->copy_decl (decl, id);

      /* Remember it, so that if we encounter this local entity again
         we can reuse this copy.  Do this early because remap_type may
         need this decl for TYPE_STUB_DECL.  */
      insert_decl_map (id, decl, t);

      if (!DECL_P (t))
        return t;

      /* Remap types, if necessary.  */
      TREE_TYPE (t) = remap_type (TREE_TYPE (t), id);
      if (TREE_CODE (t) == TYPE_DECL)
        DECL_ORIGINAL_TYPE (t) = remap_type (DECL_ORIGINAL_TYPE (t), id);

      /* Remap sizes as necessary.  */
      walk_tree (&DECL_SIZE (t), copy_tree_body_r, id, NULL);
      walk_tree (&DECL_SIZE_UNIT (t), copy_tree_body_r, id, NULL);

      /* If fields, do likewise for offset and qualifier.  */
      if (TREE_CODE (t) == FIELD_DECL)
        {
          walk_tree (&DECL_FIELD_OFFSET (t), copy_tree_body_r, id, NULL);
          if (TREE_CODE (DECL_CONTEXT (t)) == QUAL_UNION_TYPE)
            walk_tree (&DECL_QUALIFIER (t), copy_tree_body_r, id, NULL);
        }

      return t;
    }

  if (id->do_not_unshare)
    return *n;
  else
    return unshare_expr (*n);
}
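
/* Helper for remap_type.  TYPE is known to need remapping, i.e. it is
   variably modified in ID->src_fn; build, register, and return the
   remapped copy.  */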
static tree
remap_type_1 (tree type, copy_body_data *id)
{
  tree new_tree, t;

  /* We do need a copy.  Build and register it now.  If this is a pointer or
     reference type, remap the designated type and make a new pointer or
     reference type.  */
  if (TREE_CODE (type) == POINTER_TYPE)
    {
      new_tree = build_pointer_type_for_mode (remap_type (TREE_TYPE (type), id),
                                              TYPE_MODE (type),
                                              TYPE_REF_CAN_ALIAS_ALL (type));
      if (TYPE_ATTRIBUTES (type) || TYPE_QUALS (type))
        new_tree = build_type_attribute_qual_variant (new_tree,
                                                      TYPE_ATTRIBUTES (type),
                                                      TYPE_QUALS (type));
      insert_decl_map (id, type, new_tree);
      return new_tree;
    }
  else if (TREE_CODE (type) == REFERENCE_TYPE)
    {
      new_tree = build_reference_type_for_mode (remap_type (TREE_TYPE (type), id),
                                                TYPE_MODE (type),
                                                TYPE_REF_CAN_ALIAS_ALL (type));
      if (TYPE_ATTRIBUTES (type) || TYPE_QUALS (type))
        new_tree = build_type_attribute_qual_variant (new_tree,
                                                      TYPE_ATTRIBUTES (type),
                                                      TYPE_QUALS (type));
      insert_decl_map (id, type, new_tree);
      return new_tree;
    }
  else
    new_tree = copy_node (type);

  insert_decl_map (id, type, new_tree);

  /* This is a new type, not a copy of an old type.  Need to reassociate
     variants.  We can handle everything except the main variant lazily.  */
  t = TYPE_MAIN_VARIANT (type);
  if (type != t)
    {
      t = remap_type (t, id);
      TYPE_MAIN_VARIANT (new_tree) = t;
      TYPE_NEXT_VARIANT (new_tree) = TYPE_NEXT_VARIANT (t);
      TYPE_NEXT_VARIANT (t) = new_tree;
    }
  else
    {
      TYPE_MAIN_VARIANT (new_tree) = new_tree;
      TYPE_NEXT_VARIANT (new_tree) = NULL;
    }

  if (TYPE_STUB_DECL (type))
    TYPE_STUB_DECL (new_tree) = remap_decl (TYPE_STUB_DECL (type), id);

  /* Lazily create pointer and reference types.  */
  TYPE_POINTER_TO (new_tree) = NULL;
  TYPE_REFERENCE_TO (new_tree) = NULL;

  switch (TREE_CODE (new_tree))
    {
    case INTEGER_TYPE:
    case REAL_TYPE:
    case FIXED_POINT_TYPE:
    case ENUMERAL_TYPE:
    case BOOLEAN_TYPE:
      t = TYPE_MIN_VALUE (new_tree);
      if (t && TREE_CODE (t) != INTEGER_CST)
        walk_tree (&TYPE_MIN_VALUE (new_tree), copy_tree_body_r, id, NULL);

      t = TYPE_MAX_VALUE (new_tree);
      if (t && TREE_CODE (t) != INTEGER_CST)
        walk_tree (&TYPE_MAX_VALUE (new_tree), copy_tree_body_r, id, NULL);
      return new_tree;

    case FUNCTION_TYPE:
      TREE_TYPE (new_tree) = remap_type (TREE_TYPE (new_tree), id);
      walk_tree (&TYPE_ARG_TYPES (new_tree), copy_tree_body_r, id, NULL);
      return new_tree;

    case ARRAY_TYPE:
      TREE_TYPE (new_tree) = remap_type (TREE_TYPE (new_tree), id);
      TYPE_DOMAIN (new_tree) = remap_type (TYPE_DOMAIN (new_tree), id);
      break;

    case RECORD_TYPE:
    case UNION_TYPE:
    case QUAL_UNION_TYPE:
      {
        tree f, nf = NULL;

        for (f = TYPE_FIELDS (new_tree); f ; f = DECL_CHAIN (f))
          {
            t = remap_decl (f, id);
            DECL_CONTEXT (t) = new_tree;
            DECL_CHAIN (t) = nf;
            nf = t;
          }
        TYPE_FIELDS (new_tree) = nreverse (nf);
      }
      break;

    case OFFSET_TYPE:
    default:
      /* Shouldn't have been thought variable sized.  */
      gcc_unreachable ();
    }

  walk_tree (&TYPE_SIZE (new_tree), copy_tree_body_r, id, NULL);
  walk_tree (&TYPE_SIZE_UNIT (new_tree), copy_tree_body_r, id, NULL);

  return new_tree;
}
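
/* Remap TYPE through ID's decl map.  Types that are not variably
   modified are mapped to themselves; variably modified types get a
   fresh copy via remap_type_1.  */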
tree
remap_type (tree type, copy_body_data *id)
{
  tree *node;
  tree tmp;

  if (type == NULL)
    return type;

  /* See if we have remapped this type.  */
  node = (tree *) pointer_map_contains (id->decl_map, type);
  if (node)
    return *node;

  /* The type only needs remapping if it's variably modified.  */
  if (! variably_modified_type_p (type, id->src_fn))
    {
      insert_decl_map (id, type, type);
      return type;
    }

  id->remapping_type_depth++;
  tmp = remap_type_1 (type, id);
  id->remapping_type_depth--;

  return tmp;
}

/* Decide if DECL can be put into BLOCK_NONLOCAL_VARs.  */

static bool
can_be_nonlocal (tree decl, copy_body_data *id)
{
  /* We cannot duplicate function decls.  */
  if (TREE_CODE (decl) == FUNCTION_DECL)
    return true;

  /* Local static vars must be non-local or we get multiple declaration
     problems.  */
  if (TREE_CODE (decl) == VAR_DECL
      && !auto_var_in_fn_p (decl, id->src_fn))
    return true;

  return false;
}
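
/* Remap the declaration chain DECLS using the information in ID.
   Declarations that stay nonlocal are pushed onto NONLOCALIZED_LIST
   instead of being remapped.  Return the new declaration chain.  */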
static tree
remap_decls (tree decls, vec<tree, va_gc> **nonlocalized_list,
             copy_body_data *id)
{
  tree old_var;
  tree new_decls = NULL_TREE;

  /* Remap its variables.  */
  for (old_var = decls; old_var; old_var = DECL_CHAIN (old_var))
    {
      tree new_var;

      if (can_be_nonlocal (old_var, id))
        {
          /* We need to add this variable to the local decls as otherwise
             nothing else will do so.  */
          if (TREE_CODE (old_var) == VAR_DECL
              && ! DECL_EXTERNAL (old_var))
            add_local_decl (cfun, old_var);
          if ((!optimize || debug_info_level > DINFO_LEVEL_TERSE)
              && !DECL_IGNORED_P (old_var)
              && nonlocalized_list)
            vec_safe_push (*nonlocalized_list, old_var);
          continue;
        }

      /* Remap the variable.  */
      new_var = remap_decl (old_var, id);

      /* If we didn't remap this variable, we can't mess with its
         TREE_CHAIN.  If we remapped this variable to the return slot, it's
         already declared somewhere else, so don't declare it here.  */

      if (new_var == id->retvar)
        ;
      else if (!new_var)
        {
          if ((!optimize || debug_info_level > DINFO_LEVEL_TERSE)
              && !DECL_IGNORED_P (old_var)
              && nonlocalized_list)
            vec_safe_push (*nonlocalized_list, old_var);
        }
      else
        {
          gcc_assert (DECL_P (new_var));
          DECL_CHAIN (new_var) = new_decls;
          new_decls = new_var;

          /* Also copy value-expressions.  */
          if (TREE_CODE (new_var) == VAR_DECL
              && DECL_HAS_VALUE_EXPR_P (new_var))
            {
              tree tem = DECL_VALUE_EXPR (new_var);
              bool old_regimplify = id->regimplify;
              id->remapping_type_depth++;
              walk_tree (&tem, copy_tree_body_r, id, NULL);
              id->remapping_type_depth--;
              id->regimplify = old_regimplify;
              SET_DECL_VALUE_EXPR (new_var, tem);
            }
        }
    }

  return nreverse (new_decls);
}

/* Copy the BLOCK to contain remapped versions of the variables
   therein.  And hook the new block into the block-tree.  */

static void
remap_block (tree *block, copy_body_data *id)
{
  tree old_block;
  tree new_block;

  /* Make the new block.  */
  old_block = *block;
  new_block = make_node (BLOCK);
  TREE_USED (new_block) = TREE_USED (old_block);
  BLOCK_ABSTRACT_ORIGIN (new_block) = old_block;
  BLOCK_SOURCE_LOCATION (new_block) = BLOCK_SOURCE_LOCATION (old_block);
  BLOCK_NONLOCALIZED_VARS (new_block)
    = vec_safe_copy (BLOCK_NONLOCALIZED_VARS (old_block));
  *block = new_block;

  /* Remap its variables.  */
  BLOCK_VARS (new_block) = remap_decls (BLOCK_VARS (old_block),
                                        &BLOCK_NONLOCALIZED_VARS (new_block),
                                        id);

  if (id->transform_lang_insert_block)
    id->transform_lang_insert_block (new_block);

  /* Remember the remapped block.  */
  insert_decl_map (id, old_block, new_block);
}

/* Copy the whole block tree and root it in id->block.  */
static tree
remap_blocks (tree block, copy_body_data *id)
{
  tree t;
  tree new_tree = block;

  if (!block)
    return NULL;

  remap_block (&new_tree, id);
  gcc_assert (new_tree != block);
  for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
    prepend_lexical_block (new_tree, remap_blocks (t, id));
  /* Blocks are in arbitrary order, but make things slightly prettier and do
     not swap order when producing a copy.  */
  BLOCK_SUBBLOCKS (new_tree) = blocks_nreverse (BLOCK_SUBBLOCKS (new_tree));
  return new_tree;
}

/* Remap the block tree rooted at BLOCK to nothing.  */
static void
remap_blocks_to_null (tree block, copy_body_data *id)
{
  tree t;
  insert_decl_map (id, block, NULL_TREE);
  for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
    remap_blocks_to_null (t, id);
}
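
/* Replace *TP with a deep copy of the STATEMENT_LIST it points to,
   recursing into nested statement lists.  */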
static void
copy_statement_list (tree *tp)
{
  tree_stmt_iterator oi, ni;
  tree new_tree;

  new_tree = alloc_stmt_list ();
  ni = tsi_start (new_tree);
  oi = tsi_start (*tp);
  TREE_TYPE (new_tree) = TREE_TYPE (*tp);
  *tp = new_tree;

  for (; !tsi_end_p (oi); tsi_next (&oi))
    {
      tree stmt = tsi_stmt (oi);
      if (TREE_CODE (stmt) == STATEMENT_LIST)
        /* This copy is not redundant; tsi_link_after will smash this
           STATEMENT_LIST into the end of the one we're building, and we
           don't want to do that with the original.  */
        copy_statement_list (&stmt);
      tsi_link_after (&ni, stmt, TSI_CONTINUE_LINKING);
    }
}
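
/* Copy the BIND_EXPR pointed to by *TP, remapping its block and its
   local variables via ID.  */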
static void
copy_bind_expr (tree *tp, int *walk_subtrees, copy_body_data *id)
{
  tree block = BIND_EXPR_BLOCK (*tp);
  /* Copy (and replace) the statement.  */
  copy_tree_r (tp, walk_subtrees, NULL);
  if (block)
    {
      remap_block (&block, id);
      BIND_EXPR_BLOCK (*tp) = block;
    }

  if (BIND_EXPR_VARS (*tp))
    /* This will remap a lot of the same decls again, but this should be
       harmless.  */
    BIND_EXPR_VARS (*tp) = remap_decls (BIND_EXPR_VARS (*tp), NULL, id);
}


/* Create a new gimple_seq by remapping all the statements in BODY
   using the inlining information in ID.  */

static gimple_seq
remap_gimple_seq (gimple_seq body, copy_body_data *id)
{
  gimple_stmt_iterator si;
  gimple_seq new_body = NULL;

  for (si = gsi_start (body); !gsi_end_p (si); gsi_next (&si))
    {
      gimple new_stmt = remap_gimple_stmt (gsi_stmt (si), id);
      gimple_seq_add_stmt (&new_body, new_stmt);
    }

  return new_body;
}


/* Copy a GIMPLE_BIND statement STMT, remapping all the symbols in its
   block using the mapping information in ID.  */

static gimple
copy_gimple_bind (gimple stmt, copy_body_data *id)
{
  gimple new_bind;
  tree new_block, new_vars;
  gimple_seq body, new_body;

  /* Copy the statement.  Note that we purposely don't use copy_stmt
     here because we need to remap statements as we copy.  */
  body = gimple_bind_body (stmt);
  new_body = remap_gimple_seq (body, id);

  new_block = gimple_bind_block (stmt);
  if (new_block)
    remap_block (&new_block, id);

  /* This will remap a lot of the same decls again, but this should be
     harmless.  */
  new_vars = gimple_bind_vars (stmt);
  if (new_vars)
    new_vars = remap_decls (new_vars, NULL, id);

  new_bind = gimple_build_bind (new_vars, new_body, new_block);

  return new_bind;
}

/* Remap the GIMPLE operand pointed to by *TP.  DATA is really a
   'struct walk_stmt_info *'.  DATA->INFO is a 'copy_body_data *'.
   WALK_SUBTREES is used to tell walk_gimple_op whether to keep
   recursing into the children nodes of *TP.  */

static tree
remap_gimple_op_r (tree *tp, int *walk_subtrees, void *data)
{
  struct walk_stmt_info *wi_p = (struct walk_stmt_info *) data;
  copy_body_data *id = (copy_body_data *) wi_p->info;
  tree fn = id->src_fn;

  if (TREE_CODE (*tp) == SSA_NAME)
    {
      *tp = remap_ssa_name (*tp, id);
      *walk_subtrees = 0;
      return NULL;
    }
  else if (auto_var_in_fn_p (*tp, fn))
    {
      /* Local variables and labels need to be replaced by equivalent
         variables.  We don't want to copy static variables; there's
         only one of those, no matter how many times we inline the
         containing function.  Similarly for globals from an outer
         function.  */
      tree new_decl;

      /* Remap the declaration.  */
      new_decl = remap_decl (*tp, id);
      gcc_assert (new_decl);
      /* Replace this variable with the copy.  */
      STRIP_TYPE_NOPS (new_decl);
      /* ??? The C++ frontend uses void * pointer zero to initialize
         any other type.  This confuses the middle-end type verification.
         As cloned bodies do not go through gimplification again the fixup
         there doesn't trigger.  */
      if (TREE_CODE (new_decl) == INTEGER_CST
          && !useless_type_conversion_p (TREE_TYPE (*tp), TREE_TYPE (new_decl)))
        new_decl = fold_convert (TREE_TYPE (*tp), new_decl);
      *tp = new_decl;
      *walk_subtrees = 0;
    }
  else if (TREE_CODE (*tp) == STATEMENT_LIST)
    gcc_unreachable ();
  else if (TREE_CODE (*tp) == SAVE_EXPR)
    gcc_unreachable ();
  else if (TREE_CODE (*tp) == LABEL_DECL
           && (!DECL_CONTEXT (*tp)
               || decl_function_context (*tp) == id->src_fn))
    /* These may need to be remapped for EH handling.  */
    *tp = remap_decl (*tp, id);
  else if (TREE_CODE (*tp) == FIELD_DECL)
    {
      /* If the enclosing record type is variably_modified_type_p, the field
         has already been remapped.  Otherwise, it need not be.  */
      tree *n = (tree *) pointer_map_contains (id->decl_map, *tp);
      if (n)
        *tp = *n;
      *walk_subtrees = 0;
    }
  else if (TYPE_P (*tp))
    /* Types may need remapping as well.  */
    *tp = remap_type (*tp, id);
  else if (CONSTANT_CLASS_P (*tp))
    {
      /* If this is a constant, we have to copy the node iff the type
         will be remapped.  copy_tree_r will not copy a constant.  */
      tree new_type = remap_type (TREE_TYPE (*tp), id);

      if (new_type == TREE_TYPE (*tp))
        *walk_subtrees = 0;

      else if (TREE_CODE (*tp) == INTEGER_CST)
        *tp = build_int_cst_wide (new_type, TREE_INT_CST_LOW (*tp),
                                  TREE_INT_CST_HIGH (*tp));
      else
        {
          *tp = copy_node (*tp);
          TREE_TYPE (*tp) = new_type;
        }
    }
  else
    {
      /* Otherwise, just copy the node.  Note that copy_tree_r already
         knows not to copy VAR_DECLs, etc., so this is safe.  */

      if (TREE_CODE (*tp) == MEM_REF)
        {
          tree ptr = TREE_OPERAND (*tp, 0);
          tree type = remap_type (TREE_TYPE (*tp), id);
          tree old = *tp;

          /* We need to re-canonicalize MEM_REFs from inline substitutions
             that can happen when a pointer argument is an ADDR_EXPR.
             Recurse here manually to allow that.  */
          walk_tree (&ptr, remap_gimple_op_r, data, NULL);
          *tp = fold_build2 (MEM_REF, type,
                             ptr, TREE_OPERAND (*tp, 1));
          TREE_THIS_NOTRAP (*tp) = TREE_THIS_NOTRAP (old);
          TREE_THIS_VOLATILE (*tp) = TREE_THIS_VOLATILE (old);
          TREE_SIDE_EFFECTS (*tp) = TREE_SIDE_EFFECTS (old);
          TREE_NO_WARNING (*tp) = TREE_NO_WARNING (old);
          *walk_subtrees = 0;
          return NULL;
        }

      /* Here is the "usual case".  Copy this tree node, and then
         tweak some special cases.  */
      copy_tree_r (tp, walk_subtrees, NULL);

      if (TREE_CODE (*tp) != OMP_CLAUSE)
        TREE_TYPE (*tp) = remap_type (TREE_TYPE (*tp), id);

      if (TREE_CODE (*tp) == TARGET_EXPR && TREE_OPERAND (*tp, 3))
        {
          /* The copied TARGET_EXPR has never been expanded, even if the
             original node was expanded already.  */
          TREE_OPERAND (*tp, 1) = TREE_OPERAND (*tp, 3);
          TREE_OPERAND (*tp, 3) = NULL_TREE;
        }
      else if (TREE_CODE (*tp) == ADDR_EXPR)
        {
          /* Variable substitution need not be simple.  In particular,
             the MEM_REF substitution above.  Make sure that
             TREE_CONSTANT and friends are up-to-date.  */
          int invariant = is_gimple_min_invariant (*tp);
          walk_tree (&TREE_OPERAND (*tp, 0), remap_gimple_op_r, data, NULL);
          recompute_tree_invariant_for_addr_expr (*tp);

          /* If this used to be invariant, but is not any longer,
             then regimplification is probably needed.  */
          if (invariant && !is_gimple_min_invariant (*tp))
            id->regimplify = true;

          *walk_subtrees = 0;
        }
    }

  /* Update the TREE_BLOCK for the cloned expr.  */
  if (EXPR_P (*tp))
    {
      tree new_block = id->remapping_type_depth == 0 ? id->block : NULL;
      tree old_block = TREE_BLOCK (*tp);
      if (old_block)
        {
          tree *n;
          n = (tree *) pointer_map_contains (id->decl_map,
                                             TREE_BLOCK (*tp));
          if (n)
            new_block = *n;
        }
      TREE_SET_BLOCK (*tp, new_block);
    }

  /* Keep iterating.  */
  return NULL_TREE;
}

/* Called via walk_tree to copy a tree body.  DATA is really a
   `copy_body_data *'.  */

tree
copy_tree_body_r (tree *tp, int *walk_subtrees, void *data)
{
  copy_body_data *id = (copy_body_data *) data;
  tree fn = id->src_fn;
  tree new_block;

  /* Begin by recognizing trees that we'll completely rewrite for the
     inlining context.  Our output for these trees is completely
     different from our input (e.g. RETURN_EXPR is deleted and morphs
     into an edge).  Further down, we'll handle trees that get
     duplicated and/or tweaked.  */

  /* When requested, RETURN_EXPRs should be transformed to just the
     contained MODIFY_EXPR.  The branch semantics of the return will
     be handled elsewhere by manipulating the CFG rather than a statement.  */
  if (TREE_CODE (*tp) == RETURN_EXPR && id->transform_return_to_modify)
    {
      tree assignment = TREE_OPERAND (*tp, 0);

      /* If we're returning something, just turn that into an
         assignment into the equivalent of the original RESULT_DECL.
         If the "assignment" is just the result decl, the result
         decl has already been set (e.g. a recent "foo (&result_decl,
         ...)"); just toss the entire RETURN_EXPR.  */
      if (assignment && TREE_CODE (assignment) == MODIFY_EXPR)
        {
          /* Replace the RETURN_EXPR with (a copy of) the
             MODIFY_EXPR hanging underneath.  */
          *tp = copy_node (assignment);
        }
      else /* Else the RETURN_EXPR returns no value.  */
        {
          *tp = NULL;
          return (tree) (void *)1;
        }
    }
  else if (TREE_CODE (*tp) == SSA_NAME)
    {
      *tp = remap_ssa_name (*tp, id);
      *walk_subtrees = 0;
      return NULL;
    }

  /* Local variables and labels need to be replaced by equivalent
     variables.  We don't want to copy static variables; there's only
     one of those, no matter how many times we inline the containing
     function.  Similarly for globals from an outer function.  */
  else if (auto_var_in_fn_p (*tp, fn))
    {
      tree new_decl;

      /* Remap the declaration.  */
      new_decl = remap_decl (*tp, id);
      gcc_assert (new_decl);
      /* Replace this variable with the copy.  */
      STRIP_TYPE_NOPS (new_decl);
      *tp = new_decl;
      *walk_subtrees = 0;
    }
  else if (TREE_CODE (*tp) == STATEMENT_LIST)
    copy_statement_list (tp);
  else if (TREE_CODE (*tp) == SAVE_EXPR
           || TREE_CODE (*tp) == TARGET_EXPR)
    remap_save_expr (tp, id->decl_map, walk_subtrees);
  else if (TREE_CODE (*tp) == LABEL_DECL
           && (! DECL_CONTEXT (*tp)
               || decl_function_context (*tp) == id->src_fn))
    /* These may need to be remapped for EH handling.  */
    *tp = remap_decl (*tp, id);
  else if (TREE_CODE (*tp) == BIND_EXPR)
    copy_bind_expr (tp, walk_subtrees, id);
  /* Types may need remapping as well.  */
  else if (TYPE_P (*tp))
    *tp = remap_type (*tp, id);

  /* If this is a constant, we have to copy the node iff the type will be
     remapped.  copy_tree_r will not copy a constant.  */
  else if (CONSTANT_CLASS_P (*tp))
    {
      tree new_type = remap_type (TREE_TYPE (*tp), id);

      if (new_type == TREE_TYPE (*tp))
        *walk_subtrees = 0;

      else if (TREE_CODE (*tp) == INTEGER_CST)
        *tp = build_int_cst_wide (new_type, TREE_INT_CST_LOW (*tp),
                                  TREE_INT_CST_HIGH (*tp));
      else
        {
          *tp = copy_node (*tp);
          TREE_TYPE (*tp) = new_type;
        }
    }

  /* Otherwise, just copy the node.  Note that copy_tree_r already
     knows not to copy VAR_DECLs, etc., so this is safe.  */
  else
    {
      /* Here we handle trees that are not completely rewritten.
         First we detect some inlining-induced bogosities for
         discarding.  */
      if (TREE_CODE (*tp) == MODIFY_EXPR
          && TREE_OPERAND (*tp, 0) == TREE_OPERAND (*tp, 1)
          && (auto_var_in_fn_p (TREE_OPERAND (*tp, 0), fn)))
        {
          /* Some assignments VAR = VAR; don't generate any rtl code
             and thus don't count as variable modification.  Avoid
             keeping bogosities like 0 = 0.  */
          tree decl = TREE_OPERAND (*tp, 0), value;
          tree *n;

          n = (tree *) pointer_map_contains (id->decl_map, decl);
          if (n)
            {
              value = *n;
              STRIP_TYPE_NOPS (value);
              if (TREE_CONSTANT (value) || TREE_READONLY (value))
                {
                  *tp = build_empty_stmt (EXPR_LOCATION (*tp));
                  return copy_tree_body_r (tp, walk_subtrees, data);
                }
            }
        }
      else if (TREE_CODE (*tp) == INDIRECT_REF)
        {
          /* Get rid of *& from inline substitutions that can happen when a
             pointer argument is an ADDR_EXPR.  */
          tree decl = TREE_OPERAND (*tp, 0);
          tree *n;

          n = (tree *) pointer_map_contains (id->decl_map, decl);
          if (n)
            {
              tree new_tree;
              tree old;
              /* If we happen to get an ADDR_EXPR in n->value, strip
                 it manually here as we'll eventually get ADDR_EXPRs
                 which lie about their types pointed to.  In this case
                 build_fold_indirect_ref wouldn't strip the INDIRECT_REF,
                 but we absolutely rely on that.  As fold_indirect_ref
                 does other useful transformations, try that first, though.  */
              tree type = TREE_TYPE (TREE_TYPE (*n));
              if (id->do_not_unshare)
                new_tree = *n;
              else
                new_tree = unshare_expr (*n);
              old = *tp;
              *tp = gimple_fold_indirect_ref (new_tree);
              if (! *tp)
                {
                  if (TREE_CODE (new_tree) == ADDR_EXPR)
                    {
                      *tp = fold_indirect_ref_1 (EXPR_LOCATION (new_tree),
                                                 type, new_tree);
                      /* ??? We should either assert here or build
                         a VIEW_CONVERT_EXPR instead of blindly leaking
                         incompatible types to our IL.  */
                      if (! *tp)
                        *tp = TREE_OPERAND (new_tree, 0);
                    }
                  else
                    {
                      *tp = build1 (INDIRECT_REF, type, new_tree);
                      TREE_THIS_VOLATILE (*tp) = TREE_THIS_VOLATILE (old);
                      TREE_SIDE_EFFECTS (*tp) = TREE_SIDE_EFFECTS (old);
                      TREE_READONLY (*tp) = TREE_READONLY (old);
                      TREE_THIS_NOTRAP (*tp) = TREE_THIS_NOTRAP (old);
                    }
                }
              *walk_subtrees = 0;
              return NULL;
            }
        }
      else if (TREE_CODE (*tp) == MEM_REF)
        {
          /* We need to re-canonicalize MEM_REFs from inline substitutions
             that can happen when a pointer argument is an ADDR_EXPR.  */
          tree decl = TREE_OPERAND (*tp, 0);
          tree *n;

          n = (tree *) pointer_map_contains (id->decl_map, decl);
          if (n)
            {
              tree old = *tp;
              *tp = fold_build2 (MEM_REF, TREE_TYPE (*tp),
                                 unshare_expr (*n), TREE_OPERAND (*tp, 1));
              TREE_THIS_VOLATILE (*tp) = TREE_THIS_VOLATILE (old);
              TREE_NO_WARNING (*tp) = TREE_NO_WARNING (old);
              *walk_subtrees = 0;
              return NULL;
            }
        }

      /* Here is the "usual case".  Copy this tree node, and then
         tweak some special cases.  */
      copy_tree_r (tp, walk_subtrees, NULL);

      /* If EXPR has a block defined, map it to the newly constructed block.
         When inlining we want EXPRs without a block to appear in the block
         of the function call if we are not remapping a type.  */
      if (EXPR_P (*tp))
        {
          new_block = id->remapping_type_depth == 0 ? id->block : NULL;
          if (TREE_BLOCK (*tp))
            {
              tree *n;
              n = (tree *) pointer_map_contains (id->decl_map,
                                                 TREE_BLOCK (*tp));
              if (n)
                new_block = *n;
            }
          TREE_SET_BLOCK (*tp, new_block);
        }

      if (TREE_CODE (*tp) != OMP_CLAUSE)
        TREE_TYPE (*tp) = remap_type (TREE_TYPE (*tp), id);

      /* The copied TARGET_EXPR has never been expanded, even if the
         original node was expanded already.  */
      if (TREE_CODE (*tp) == TARGET_EXPR && TREE_OPERAND (*tp, 3))
        {
          TREE_OPERAND (*tp, 1) = TREE_OPERAND (*tp, 3);
          TREE_OPERAND (*tp, 3) = NULL_TREE;
        }

      /* Variable substitution need not be simple.  In particular, the
         INDIRECT_REF substitution above.  Make sure that TREE_CONSTANT
         and friends are up-to-date.  */
      else if (TREE_CODE (*tp) == ADDR_EXPR)
        {
          int invariant = is_gimple_min_invariant (*tp);
          walk_tree (&TREE_OPERAND (*tp, 0), copy_tree_body_r, id, NULL);

          /* Handle the case where we substituted an INDIRECT_REF
             into the operand of the ADDR_EXPR.  */
          if (TREE_CODE (TREE_OPERAND (*tp, 0)) == INDIRECT_REF)
            *tp = TREE_OPERAND (TREE_OPERAND (*tp, 0), 0);
          else
            recompute_tree_invariant_for_addr_expr (*tp);

          /* If this used to be invariant, but is not any longer,
             then regimplification is probably needed.  */
          if (invariant && !is_gimple_min_invariant (*tp))
            id->regimplify = true;

          *walk_subtrees = 0;
        }
    }

  /* Keep iterating.  */
  return NULL_TREE;
}

/* Helper for remap_gimple_stmt.  Given an EH region number for the
   source function, map that to the duplicate EH region number in
   the destination function.  */

static int
remap_eh_region_nr (int old_nr, copy_body_data *id)
{
  eh_region old_r, new_r;
  void **slot;

  old_r = get_eh_region_from_number_fn (id->src_cfun, old_nr);
  slot = pointer_map_contains (id->eh_map, old_r);
  new_r = (eh_region) *slot;

  return new_r->index;
}

/* Similar, but operate on INTEGER_CSTs.  */

static tree
remap_eh_region_tree_nr (tree old_t_nr, copy_body_data *id)
{
  int old_nr, new_nr;

  old_nr = tree_low_cst (old_t_nr, 0);
  new_nr = remap_eh_region_nr (old_nr, id);

  return build_int_cst (integer_type_node, new_nr);
}

/* Helper for copy_bb.  Remap statement STMT using the inlining
   information in ID.  Return the new statement copy.  */

static gimple
remap_gimple_stmt (gimple stmt, copy_body_data *id)
{
  gimple copy = NULL;
  struct walk_stmt_info wi;
  bool skip_first = false;

  /* Begin by recognizing trees that we'll completely rewrite for the
     inlining context.  Our output for these trees is completely
     different from our input (e.g. RETURN_EXPR is deleted and morphs
     into an edge).  Further down, we'll handle trees that get
     duplicated and/or tweaked.  */

  /* When requested, GIMPLE_RETURNs should be transformed to just the
     contained GIMPLE_ASSIGN.  The branch semantics of the return will
     be handled elsewhere by manipulating the CFG rather than the
     statement.  */
  if (gimple_code (stmt) == GIMPLE_RETURN && id->transform_return_to_modify)
    {
      tree retval = gimple_return_retval (stmt);

      /* If we're returning something, just turn that into an
         assignment into the equivalent of the original RESULT_DECL.
         If RETVAL is just the result decl, the result decl has
         already been set (e.g. a recent "foo (&result_decl, ...)");
         just toss the entire GIMPLE_RETURN.  */
      if (retval
          && (TREE_CODE (retval) != RESULT_DECL
              && (TREE_CODE (retval) != SSA_NAME
                  || ! SSA_NAME_VAR (retval)
                  || TREE_CODE (SSA_NAME_VAR (retval)) != RESULT_DECL)))
        {
          copy = gimple_build_assign (id->retvar, retval);
          /* id->retvar is already substituted.  Skip it on later remapping.  */
          skip_first = true;
        }
      else
        return gimple_build_nop ();
    }
  else if (gimple_has_substatements (stmt))
    {
      gimple_seq s1, s2;

      /* When cloning bodies from the C++ front end, we will be handed bodies
         in High GIMPLE form.  Handle here all the High GIMPLE statements that
         have embedded statements.  */
      switch (gimple_code (stmt))
        {
        case GIMPLE_BIND:
          copy = copy_gimple_bind (stmt, id);
          break;

        case GIMPLE_CATCH:
          s1 = remap_gimple_seq (gimple_catch_handler (stmt), id);
          copy = gimple_build_catch (gimple_catch_types (stmt), s1);
          break;

        case GIMPLE_EH_FILTER:
          s1 = remap_gimple_seq (gimple_eh_filter_failure (stmt), id);
          copy = gimple_build_eh_filter (gimple_eh_filter_types (stmt), s1);
          break;

        case GIMPLE_TRY:
          s1 = remap_gimple_seq (gimple_try_eval (stmt), id);
          s2 = remap_gimple_seq (gimple_try_cleanup (stmt), id);
          copy = gimple_build_try (s1, s2, gimple_try_kind (stmt));
          break;

        case GIMPLE_WITH_CLEANUP_EXPR:
          s1 = remap_gimple_seq (gimple_wce_cleanup (stmt), id);
          copy = gimple_build_wce (s1);
          break;

        case GIMPLE_OMP_PARALLEL:
          s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
          copy = gimple_build_omp_parallel
                   (s1,
                    gimple_omp_parallel_clauses (stmt),
                    gimple_omp_parallel_child_fn (stmt),
                    gimple_omp_parallel_data_arg (stmt));
          break;

        case GIMPLE_OMP_TASK:
          s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
          copy = gimple_build_omp_task
                   (s1,
                    gimple_omp_task_clauses (stmt),
                    gimple_omp_task_child_fn (stmt),
                    gimple_omp_task_data_arg (stmt),
                    gimple_omp_task_copy_fn (stmt),
                    gimple_omp_task_arg_size (stmt),
                    gimple_omp_task_arg_align (stmt));
          break;

        case GIMPLE_OMP_FOR:
          s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
          s2 = remap_gimple_seq (gimple_omp_for_pre_body (stmt), id);
          copy = gimple_build_omp_for (s1, gimple_omp_for_clauses (stmt),
                                       gimple_omp_for_collapse (stmt), s2);
          {
            size_t i;
            for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
              {
                gimple_omp_for_set_index (copy, i,
                                          gimple_omp_for_index (stmt, i));
                gimple_omp_for_set_initial (copy, i,
                                            gimple_omp_for_initial (stmt, i));
                gimple_omp_for_set_final (copy, i,
                                          gimple_omp_for_final (stmt, i));
                gimple_omp_for_set_incr (copy, i,
                                         gimple_omp_for_incr (stmt, i));
                gimple_omp_for_set_cond (copy, i,
                                         gimple_omp_for_cond (stmt, i));
              }
          }
          break;

        case GIMPLE_OMP_MASTER:
          s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
          copy = gimple_build_omp_master (s1);
          break;

        case GIMPLE_OMP_ORDERED:
          s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
          copy = gimple_build_omp_ordered (s1);
          break;

        case GIMPLE_OMP_SECTION:
          s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
          copy = gimple_build_omp_section (s1);
          break;

        case GIMPLE_OMP_SECTIONS:
          s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
          copy = gimple_build_omp_sections
                   (s1, gimple_omp_sections_clauses (stmt));
          break;

        case GIMPLE_OMP_SINGLE:
          s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
          copy = gimple_build_omp_single
                   (s1, gimple_omp_single_clauses (stmt));
          break;

        case GIMPLE_OMP_CRITICAL:
          s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
          copy
            = gimple_build_omp_critical (s1, gimple_omp_critical_name (stmt));
          break;

        case GIMPLE_TRANSACTION:
          s1 = remap_gimple_seq (gimple_transaction_body (stmt), id);
          copy = gimple_build_transaction (s1, gimple_transaction_label (stmt));
          gimple_transaction_set_subcode (copy, gimple_transaction_subcode (stmt));
          break;

        default:
          gcc_unreachable ();
        }
    }
  else
    {
      if (gimple_assign_copy_p (stmt)
          && gimple_assign_lhs (stmt) == gimple_assign_rhs1 (stmt)
          && auto_var_in_fn_p (gimple_assign_lhs (stmt), id->src_fn))
        {
          /* Here we handle statements that are not completely rewritten.
             First we detect some inlining-induced bogosities for
             discarding.  */

          /* Some assignments VAR = VAR; don't generate any rtl code
             and thus don't count as variable modification.  Avoid
             keeping bogosities like 0 = 0.  */
          tree decl = gimple_assign_lhs (stmt), value;
          tree *n;

          n = (tree *) pointer_map_contains (id->decl_map, decl);
          if (n)
            {
              value = *n;
              STRIP_TYPE_NOPS (value);
              if (TREE_CONSTANT (value) || TREE_READONLY (value))
                return gimple_build_nop ();
            }
        }

      if (gimple_debug_bind_p (stmt))
        {
          copy = gimple_build_debug_bind (gimple_debug_bind_get_var (stmt),
                                          gimple_debug_bind_get_value (stmt),
                                          stmt);
          id->debug_stmts.safe_push (copy);
          return copy;
        }
      if (gimple_debug_source_bind_p (stmt))
        {
          copy = gimple_build_debug_source_bind
                   (gimple_debug_source_bind_get_var (stmt),
                    gimple_debug_source_bind_get_value (stmt), stmt);
          id->debug_stmts.safe_push (copy);
          return copy;
        }

      /* Create a new deep copy of the statement.  */
      copy = gimple_copy (stmt);
    }

  /* Remap the region numbers for __builtin_eh_{pointer,filter},
     RESX and EH_DISPATCH.  */
  if (id->eh_map)
    switch (gimple_code (copy))
      {
      case GIMPLE_CALL:
        {
          tree r, fndecl = gimple_call_fndecl (copy);
          if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
            switch (DECL_FUNCTION_CODE (fndecl))
              {
              case BUILT_IN_EH_COPY_VALUES:
                r = gimple_call_arg (copy, 1);
                r = remap_eh_region_tree_nr (r, id);
                gimple_call_set_arg (copy, 1, r);
                /* FALLTHRU */

              case BUILT_IN_EH_POINTER:
              case BUILT_IN_EH_FILTER:
                r = gimple_call_arg (copy, 0);
                r = remap_eh_region_tree_nr (r, id);
                gimple_call_set_arg (copy, 0, r);
                break;

              default:
                break;
              }

          /* Reset alias info if we didn't apply measures to
             keep it valid over inlining by setting DECL_PT_UID.  */
          if (!id->src_cfun->gimple_df
              || !id->src_cfun->gimple_df->ipa_pta)
            gimple_call_reset_alias_info (copy);
        }
        break;

      case GIMPLE_RESX:
        {
          int r = gimple_resx_region (copy);
          r = remap_eh_region_nr (r, id);
          gimple_resx_set_region (copy, r);
        }
        break;

      case GIMPLE_EH_DISPATCH:
        {
          int r = gimple_eh_dispatch_region (copy);
          r = remap_eh_region_nr (r, id);
          gimple_eh_dispatch_set_region (copy, r);
        }
        break;

      default:
        break;
      }

  /* If STMT has a block defined, map it to the newly constructed
     block.  */
  if (gimple_block (copy))
    {
      tree *n;
      n = (tree *) pointer_map_contains (id->decl_map, gimple_block (copy));
      gcc_assert (n);
      gimple_set_block (copy, *n);
    }

  if (gimple_debug_bind_p (copy) || gimple_debug_source_bind_p (copy))
    return copy;

  /* Remap all the operands in COPY.  */
  memset (&wi, 0, sizeof (wi));
  wi.info = id;
  if (skip_first)
    walk_tree (gimple_op_ptr (copy, 1), remap_gimple_op_r, &wi, NULL);
  else
    walk_gimple_op (copy, remap_gimple_op_r, &wi);

  /* Clear the copied virtual operands.  We are not remapping them here
     but are going to recreate them from scratch.  */
  if (gimple_has_mem_ops (copy))
    {
      gimple_set_vdef (copy, NULL_TREE);
      gimple_set_vuse (copy, NULL_TREE);
    }

  return copy;
}

/* Copy a basic block, scaling its profile counts accordingly.  Edges
   will be taken care of later.  */

static basic_block
copy_bb (copy_body_data *id, basic_block bb, int frequency_scale,
         gcov_type count_scale)
{
  gimple_stmt_iterator gsi, copy_gsi, seq_gsi;
  basic_block copy_basic_block;
  tree decl;
  gcov_type freq;
  basic_block prev;

  /* Search for the previous copied basic block.  */
  prev = bb->prev_bb;
  while (!prev->aux)
    prev = prev->prev_bb;

  /* create_basic_block () will append every new block to
     basic_block_info automatically.  */
  copy_basic_block = create_basic_block (NULL, (void *) 0,
                                         (basic_block) prev->aux);
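  /* Scale the profile count in a double first: with large AutoFDO
     count_scale values the gcov_type product bb->count * count_scale
     could otherwise overflow before the division by REG_BR_PROB_BASE.  */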
  copy_basic_block->count = (double)bb->count * count_scale / REG_BR_PROB_BASE;

  /* We are going to rebuild frequencies from scratch.  These values
     are of only minor importance for driving canonicalize_loop_headers.  */
  freq = ((gcov_type)bb->frequency * frequency_scale / REG_BR_PROB_BASE);

  /* We recompute frequencies after inlining, so this is quite safe.  */
  if (freq > BB_FREQ_MAX)
    freq = BB_FREQ_MAX;
  copy_basic_block->frequency = freq;

  copy_gsi = gsi_start_bb (copy_basic_block);

  for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      gimple stmt = gsi_stmt (gsi);
      gimple orig_stmt = stmt;

      id->regimplify = false;
      stmt = remap_gimple_stmt (stmt, id);
      if (gimple_nop_p (stmt))
        continue;

      gimple_duplicate_stmt_histograms (cfun, stmt, id->src_cfun, orig_stmt);
      seq_gsi = copy_gsi;

      /* With return slot optimization we can end up with
         non-gimple (foo *)&this->m, fix that here.  */
      if (is_gimple_assign (stmt)
          && gimple_assign_rhs_code (stmt) == NOP_EXPR
          && !is_gimple_val (gimple_assign_rhs1 (stmt)))
        {
          tree new_rhs;
          new_rhs = force_gimple_operand_gsi (&seq_gsi,
                                              gimple_assign_rhs1 (stmt),
                                              true, NULL, false,
                                              GSI_CONTINUE_LINKING);
          gimple_assign_set_rhs1 (stmt, new_rhs);
          id->regimplify = false;
        }

      gsi_insert_after (&seq_gsi, stmt, GSI_NEW_STMT);

      if (id->regimplify)
        gimple_regimplify_operands (stmt, &seq_gsi);

      /* If copy_basic_block has been empty at the start of this iteration,
         call gsi_start_bb again to get at the newly added statements.  */
      if (gsi_end_p (copy_gsi))
        copy_gsi = gsi_start_bb (copy_basic_block);
      else
        gsi_next (&copy_gsi);

      /* Process the new statement.  The call to gimple_regimplify_operands
         possibly turned the statement into multiple statements, we
         need to process all of them.  */
      do
        {
          tree fn;

          stmt = gsi_stmt (copy_gsi);
          if (is_gimple_call (stmt)
              && gimple_call_va_arg_pack_p (stmt)
              && id->gimple_call)
            {
              /* __builtin_va_arg_pack () should be replaced by
                 all arguments corresponding to ... in the caller.  */
              tree p;
              gimple new_call;
              vec<tree> argarray;
              size_t nargs = gimple_call_num_args (id->gimple_call);
              size_t n;

              for (p = DECL_ARGUMENTS (id->src_fn); p; p = DECL_CHAIN (p))
                nargs--;

              /* Create the new array of arguments.  */
              n = nargs + gimple_call_num_args (stmt);
              argarray.create (n);
              argarray.safe_grow_cleared (n);

              /* Copy all the arguments before '...'  */
              memcpy (argarray.address (),
                      gimple_call_arg_ptr (stmt, 0),
                      gimple_call_num_args (stmt) * sizeof (tree));

              /* Append the arguments passed in '...'  */
              memcpy (argarray.address () + gimple_call_num_args (stmt),
                      gimple_call_arg_ptr (id->gimple_call, 0)
                      + (gimple_call_num_args (id->gimple_call) - nargs),
                      nargs * sizeof (tree));

              new_call = gimple_build_call_vec (gimple_call_fn (stmt),
                                                argarray);

              argarray.release ();

              /* Copy all GIMPLE_CALL flags, location and block, except
                 GF_CALL_VA_ARG_PACK.  */
              gimple_call_copy_flags (new_call, stmt);
              gimple_call_set_va_arg_pack (new_call, false);
              gimple_set_location (new_call, gimple_location (stmt));
              gimple_set_block (new_call, gimple_block (stmt));
              gimple_call_set_lhs (new_call, gimple_call_lhs (stmt));

              gsi_replace (&copy_gsi, new_call, false);
              stmt = new_call;
            }
          else if (is_gimple_call (stmt)
                   && id->gimple_call
                   && (decl = gimple_call_fndecl (stmt))
                   && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
                   && DECL_FUNCTION_CODE (decl) == BUILT_IN_VA_ARG_PACK_LEN)
            {
              /* __builtin_va_arg_pack_len () should be replaced by
                 the number of anonymous arguments.  */
              size_t nargs = gimple_call_num_args (id->gimple_call);
              tree count, p;
              gimple new_stmt;

              for (p = DECL_ARGUMENTS (id->src_fn); p; p = DECL_CHAIN (p))
                nargs--;

              count = build_int_cst (integer_type_node, nargs);
              new_stmt = gimple_build_assign (gimple_call_lhs (stmt), count);
              gsi_replace (&copy_gsi, new_stmt, false);
              stmt = new_stmt;
            }

          /* Statements produced by inlining can be unfolded, especially
             when we constant propagated some operands.  We can't fold
             them right now for two reasons:
             1) folding requires SSA_NAME_DEF_STMTs to be correct
             2) we can't change function calls to builtins.
             So we just mark the statement for later folding.  We mark
             all new statements, instead of just the statements that have
             changed by some nontrivial substitution, so that even
             statements made foldable indirectly are updated.  If this
             turns out to be expensive, copy_body can be told to watch
             for nontrivial changes.  */
          if (id->statements_to_fold)
            pointer_set_insert (id->statements_to_fold, stmt);

          /* We're duplicating a CALL_EXPR.  Find any corresponding
             callgraph edges and update or duplicate them.  */
          if (is_gimple_call (stmt))
            {
              struct cgraph_edge *edge;
              int flags;

              switch (id->transform_call_graph_edges)
                {
                case CB_CGE_DUPLICATE:
                  edge = cgraph_edge (id->src_node, orig_stmt);
                  if (edge)
                    {
                      int edge_freq = edge->frequency;
                      edge = cgraph_clone_edge (edge, id->dst_node, stmt,
                                                gimple_uid (stmt),
                                                REG_BR_PROB_BASE, CGRAPH_FREQ_BASE,
                                                true);
                      /* We could also just rescale the frequency, but
                         doing so would introduce roundoff errors and make
                         verifier unhappy.  */
                      edge->frequency
                        = compute_call_stmt_bb_frequency (id->dst_node->symbol.decl,
                                                          copy_basic_block);
                      if (dump_file
                          && profile_status_for_function (cfun) != PROFILE_ABSENT
                          && (edge_freq > edge->frequency + 10
                              || edge_freq < edge->frequency - 10))
                        {
                          fprintf (dump_file, "Edge frequency estimated by "
                                   "cgraph %i diverges from inliner's estimate %i\n",
                                   edge_freq,
                                   edge->frequency);
                          fprintf (dump_file,
                                   "Orig bb: %i, orig bb freq %i, new bb freq %i\n",
                                   bb->index,
                                   bb->frequency,
                                   copy_basic_block->frequency);
                        }
                      stmt = cgraph_redirect_edge_call_stmt_to_callee (edge);
                    }
                  break;

                case CB_CGE_MOVE_CLONES:
                  cgraph_set_call_stmt_including_clones (id->dst_node,
                                                         orig_stmt, stmt);
                  edge = cgraph_edge (id->dst_node, stmt);
                  break;

                case CB_CGE_MOVE:
                  edge = cgraph_edge (id->dst_node, orig_stmt);
                  if (edge)
                    cgraph_set_call_stmt (edge, stmt);
                  break;

                default:
                  gcc_unreachable ();
                }

              /* Constant propagation on arguments done during inlining
                 may create a new direct call.  Produce an edge for it.  */
              if ((!edge
                   || (edge->indirect_inlining_edge
                       && id->transform_call_graph_edges == CB_CGE_MOVE_CLONES))
                  && id->dst_node->analyzed
                  && (fn = gimple_call_fndecl (stmt)) != NULL)
                {
                  struct cgraph_node *dest = cgraph_get_node (fn);

                  /* We have a missing edge in the callgraph.  This can happen
                     when previous inlining turned an indirect call into a
                     direct call by constant propagating arguments or we are
                     producing a dead clone (for further cloning).  In all
                     other cases we hit a bug (incorrect node sharing is the
                     most common reason for missing edges).  */
                  gcc_assert (!dest->analyzed
                              || dest->symbol.address_taken
                              || !id->src_node->analyzed
                              || !id->dst_node->analyzed);
                  if (id->transform_call_graph_edges == CB_CGE_MOVE_CLONES)
                    cgraph_create_edge_including_clones
                      (id->dst_node, dest, orig_stmt, stmt, bb->count,
                       compute_call_stmt_bb_frequency (id->dst_node->symbol.decl,
                                                       copy_basic_block),
                       CIF_ORIGINALLY_INDIRECT_CALL);
                  else
                    cgraph_create_edge (id->dst_node, dest, stmt,
                                        bb->count,
                                        compute_call_stmt_bb_frequency
                                          (id->dst_node->symbol.decl,
                                           copy_basic_block))->inline_failed
                      = CIF_ORIGINALLY_INDIRECT_CALL;
                  if (dump_file)
                    {
                      fprintf (dump_file, "Created new direct edge to %s\n",
                               cgraph_node_name (dest));
                    }
                }

              flags = gimple_call_flags (stmt);
              if (flags & ECF_MAY_BE_ALLOCA)
                cfun->calls_alloca = true;
              if (flags & ECF_RETURNS_TWICE)
                cfun->calls_setjmp = true;
            }

          maybe_duplicate_eh_stmt_fn (cfun, stmt, id->src_cfun, orig_stmt,
                                      id->eh_map, id->eh_lp_nr);

          if (gimple_in_ssa_p (cfun) && !is_gimple_debug (stmt))
            {
              ssa_op_iter i;
              tree def;

              FOR_EACH_SSA_TREE_OPERAND (def, stmt, i, SSA_OP_DEF)
                if (TREE_CODE (def) == SSA_NAME)
                  SSA_NAME_DEF_STMT (def) = stmt;
            }

          gsi_next (&copy_gsi);
        }
      while (!gsi_end_p (copy_gsi));

      copy_gsi = gsi_last_bb (copy_basic_block);
    }

  return copy_basic_block;
}
1798 /* Inserting Single Entry Multiple Exit region in SSA form into code in SSA
1799 form is quite easy, since dominator relationship for old basic blocks does
1800 not change.
1802 There is however exception where inlining might change dominator relation
1803 across EH edges from basic block within inlined functions destinating
1804 to landing pads in function we inline into.
1806 The function fills in PHI_RESULTs of such PHI nodes if they refer
1807 to gimple regs. Otherwise, the function mark PHI_RESULT of such
1808 PHI nodes for renaming. For non-gimple regs, renaming is safe: the
1809 EH edges are abnormal and SSA_NAME_OCCURS_IN_ABNORMAL_PHI must be
1810 set, and this means that there will be no overlapping live ranges
1811 for the underlying symbol.
1813 This might change in future if we allow redirecting of EH edges and
1814 we might want to change way build CFG pre-inlining to include
1815 all the possible edges then. */
1816 static void
1817 update_ssa_across_abnormal_edges (basic_block bb, basic_block ret_bb,
1818 bool can_throw, bool nonlocal_goto)
1820 edge e;
1821 edge_iterator ei;
1823 FOR_EACH_EDGE (e, ei, bb->succs)
1824 if (!e->dest->aux
1825 || ((basic_block)e->dest->aux)->index == ENTRY_BLOCK)
1827 gimple phi;
1828 gimple_stmt_iterator si;
1830 if (!nonlocal_goto)
1831 gcc_assert (e->flags & EDGE_EH);
1833 if (!can_throw)
1834 gcc_assert (!(e->flags & EDGE_EH));
1836 for (si = gsi_start_phis (e->dest); !gsi_end_p (si); gsi_next (&si))
1838 edge re;
1840 phi = gsi_stmt (si);
1842 /* There shouldn't be any PHI nodes in the ENTRY_BLOCK. */
1843 gcc_assert (!e->dest->aux);
1845 gcc_assert ((e->flags & EDGE_EH)
1846 || SSA_NAME_OCCURS_IN_ABNORMAL_PHI (PHI_RESULT (phi)));
1848 if (virtual_operand_p (PHI_RESULT (phi)))
1850 mark_virtual_operands_for_renaming (cfun);
1851 continue;
1854 re = find_edge (ret_bb, e->dest);
1855 gcc_assert (re);
1856 gcc_assert ((re->flags & (EDGE_EH | EDGE_ABNORMAL))
1857 == (e->flags & (EDGE_EH | EDGE_ABNORMAL)));
1859 SET_USE (PHI_ARG_DEF_PTR_FROM_EDGE (phi, e),
1860 USE_FROM_PTR (PHI_ARG_DEF_PTR_FROM_EDGE (phi, re)));
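/* A concrete sketch (hypothetical SSA names): if the landing pad
   e->dest has x_3 = PHI <x_5(re), ...>, where re is the edge from
   ret_bb, the new abnormal edge e simply receives the same argument
   x_5, since both edges enter the pad from the inlined region.  */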
1866 /* Copy edges from BB into its copy constructed earlier, scaling the profile
1867 accordingly. PHI nodes are taken care of later. Assume the aux
1868 pointers point to the copies of each BB. Return true if any
1869 debug stmts are left after a statement that must end the basic block. */
1871 static bool
1872 copy_edges_for_bb (basic_block bb, gcov_type count_scale, basic_block ret_bb)
1874 basic_block new_bb = (basic_block) bb->aux;
1875 edge_iterator ei;
1876 edge old_edge;
1877 gimple_stmt_iterator si;
1878 int flags;
1879 bool need_debug_cleanup = false;
1881 /* Use the indices from the original blocks to create edges for the
1882 new ones. */
1883 FOR_EACH_EDGE (old_edge, ei, bb->succs)
1884 if (!(old_edge->flags & EDGE_EH))
1886 edge new_edge;
1888 flags = old_edge->flags;
1889 flags &= (~EDGE_ANNOTATED);
1891 /* Return edges do get a FALLTHRU flag when they get inlined. */
1892 if (old_edge->dest->index == EXIT_BLOCK && !flags
1893 && old_edge->dest->aux != EXIT_BLOCK_PTR)
1894 flags |= EDGE_FALLTHRU;
1895 new_edge = make_edge (new_bb, (basic_block) old_edge->dest->aux, flags);
1896 new_edge->count
1897 = old_edge->count * (double)count_scale / REG_BR_PROB_BASE;
1898 new_edge->probability = old_edge->probability;
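/* Illustrative numbers: with REG_BR_PROB_BASE of 10000 and a
   count_scale of 5000 (the callee runs half as often from this call
   site as its profiled entry count), an old edge count of 600
   becomes 600 * 5000 / 10000 = 300 on the copy.  */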
1901 if (bb->index == ENTRY_BLOCK || bb->index == EXIT_BLOCK)
1902 return false;
1904 for (si = gsi_start_bb (new_bb); !gsi_end_p (si);)
1906 gimple copy_stmt;
1907 bool can_throw, nonlocal_goto;
1909 copy_stmt = gsi_stmt (si);
1910 if (!is_gimple_debug (copy_stmt))
1911 update_stmt (copy_stmt);
1913 /* Do this before the possible split_block. */
1914 gsi_next (&si);
1916 /* If this tree could throw an exception, there are two
1917 cases where we need to add abnormal edge(s): the
1918 tree wasn't in a region and there is a "current
1919 region" in the caller; or the original tree had
1920 EH edges. In both cases split the block after the tree,
1921 and add abnormal edge(s) as needed; we need both
1922 those from the callee and the caller.
1923 We check whether the copy can throw, because the const
1924 propagation can change an INDIRECT_REF which throws
1925 into a COMPONENT_REF which doesn't. If the copy
1926 can throw, the original could also throw. */
1927 can_throw = stmt_can_throw_internal (copy_stmt);
1928 nonlocal_goto = stmt_can_make_abnormal_goto (copy_stmt);
1930 if (can_throw || nonlocal_goto)
1932 if (!gsi_end_p (si))
1934 while (!gsi_end_p (si) && is_gimple_debug (gsi_stmt (si)))
1935 gsi_next (&si);
1936 if (gsi_end_p (si))
1937 need_debug_cleanup = true;
1939 if (!gsi_end_p (si))
1940 /* Note that bb's predecessor edges aren't necessarily
1941 right at this point; split_block doesn't care. */
1943 edge e = split_block (new_bb, copy_stmt);
1945 new_bb = e->dest;
1946 new_bb->aux = e->src->aux;
1947 si = gsi_start_bb (new_bb);
1951 if (gimple_code (copy_stmt) == GIMPLE_EH_DISPATCH)
1952 make_eh_dispatch_edges (copy_stmt);
1953 else if (can_throw)
1954 make_eh_edges (copy_stmt);
1956 if (nonlocal_goto)
1957 make_abnormal_goto_edges (gimple_bb (copy_stmt), true);
1959 if ((can_throw || nonlocal_goto)
1960 && gimple_in_ssa_p (cfun))
1961 update_ssa_across_abnormal_edges (gimple_bb (copy_stmt), ret_bb,
1962 can_throw, nonlocal_goto);
1964 return need_debug_cleanup;
1967 /* Copy the PHIs. All blocks and edges have been copied, some blocks
1968 were possibly split and new outgoing EH edges inserted.
1969 BB points to the block of the original function and the AUX pointers
1970 link the original and newly copied blocks. */
1972 static void
1973 copy_phis_for_bb (basic_block bb, copy_body_data *id)
1975 basic_block const new_bb = (basic_block) bb->aux;
1976 edge_iterator ei;
1977 gimple phi;
1978 gimple_stmt_iterator si;
1979 edge new_edge;
1980 bool inserted = false;
1982 for (si = gsi_start_phis (bb); !gsi_end_p (si); gsi_next (&si))
1984 tree res, new_res;
1985 gimple new_phi;
1987 phi = gsi_stmt (si);
1988 res = PHI_RESULT (phi);
1989 new_res = res;
1990 if (!virtual_operand_p (res))
1992 walk_tree (&new_res, copy_tree_body_r, id, NULL);
1993 new_phi = create_phi_node (new_res, new_bb);
1994 FOR_EACH_EDGE (new_edge, ei, new_bb->preds)
1996 edge old_edge = find_edge ((basic_block) new_edge->src->aux, bb);
1997 tree arg;
1998 tree new_arg;
1999 edge_iterator ei2;
2000 location_t locus;
2002 /* When doing partial cloning, we allow PHIs on the entry block
2003 as long as all the arguments are the same. Find any incoming
2004 edge to get the argument to copy. */
2005 if (!old_edge)
2006 FOR_EACH_EDGE (old_edge, ei2, bb->preds)
2007 if (!old_edge->src->aux)
2008 break;
2010 arg = PHI_ARG_DEF_FROM_EDGE (phi, old_edge);
2011 new_arg = arg;
2012 walk_tree (&new_arg, copy_tree_body_r, id, NULL);
2013 gcc_assert (new_arg);
2014 /* With return slot optimization we can end up with
2015 non-gimple (foo *)&this->m, fix that here. */
2016 if (TREE_CODE (new_arg) != SSA_NAME
2017 && TREE_CODE (new_arg) != FUNCTION_DECL
2018 && !is_gimple_val (new_arg))
2020 gimple_seq stmts = NULL;
2021 new_arg = force_gimple_operand (new_arg, &stmts, true, NULL);
2022 gsi_insert_seq_on_edge (new_edge, stmts);
2023 inserted = true;
2025 locus = gimple_phi_arg_location_from_edge (phi, old_edge);
2026 if (LOCATION_BLOCK (locus))
2028 tree *n;
2029 n = (tree *) pointer_map_contains (id->decl_map,
2030 LOCATION_BLOCK (locus));
2031 gcc_assert (n);
2032 if (*n)
2033 locus = COMBINE_LOCATION_DATA (line_table, locus, *n);
2034 else
2035 locus = LOCATION_LOCUS (locus);
2037 else
2038 locus = LOCATION_LOCUS (locus);
2040 add_phi_arg (new_phi, new_arg, new_edge, locus);
2045 /* Commit the delayed edge insertions. */
2046 if (inserted)
2047 FOR_EACH_EDGE (new_edge, ei, new_bb->preds)
2048 gsi_commit_one_edge_insert (new_edge, NULL);
2052 /* Wrapper for remap_decl so it can be used as a callback. */
2054 static tree
2055 remap_decl_1 (tree decl, void *data)
2057 return remap_decl (decl, (copy_body_data *) data);
2060 /* Build the struct function and associated data structures for the new
2061 clone NEW_FNDECL to be built. CALLEE_FNDECL is the original. This function
2062 changes cfun to the function of NEW_FNDECL (and current_function_decl too). */
2064 static void
2065 initialize_cfun (tree new_fndecl, tree callee_fndecl, gcov_type count)
2067 struct function *src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
2068 gcov_type count_scale;
2070 if (ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count)
2071 count_scale = (REG_BR_PROB_BASE * (double)count
2072 / ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count);
2073 else
2074 count_scale = REG_BR_PROB_BASE;
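/* For example (made-up numbers): if the clone is expected to be
   entered COUNT = 500 times and the source entry block count is
   1000, count_scale is half of REG_BR_PROB_BASE, and the entry and
   exit counts copied below are halved accordingly.  */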
2076 /* Register specific tree functions. */
2077 gimple_register_cfg_hooks ();
2079 /* Get clean struct function. */
2080 push_struct_function (new_fndecl);
2082 /* We will rebuild these, so just sanity check that they are empty. */
2083 gcc_assert (VALUE_HISTOGRAMS (cfun) == NULL);
2084 gcc_assert (cfun->local_decls == NULL);
2085 gcc_assert (cfun->cfg == NULL);
2086 gcc_assert (cfun->decl == new_fndecl);
2088 /* Copy items we preserve during cloning. */
2089 cfun->static_chain_decl = src_cfun->static_chain_decl;
2090 cfun->nonlocal_goto_save_area = src_cfun->nonlocal_goto_save_area;
2091 cfun->function_end_locus = src_cfun->function_end_locus;
2092 cfun->curr_properties = src_cfun->curr_properties & ~PROP_loops;
2093 cfun->last_verified = src_cfun->last_verified;
2094 cfun->va_list_gpr_size = src_cfun->va_list_gpr_size;
2095 cfun->va_list_fpr_size = src_cfun->va_list_fpr_size;
2096 cfun->has_nonlocal_label = src_cfun->has_nonlocal_label;
2097 cfun->stdarg = src_cfun->stdarg;
2098 cfun->after_inlining = src_cfun->after_inlining;
2099 cfun->can_throw_non_call_exceptions
2100 = src_cfun->can_throw_non_call_exceptions;
2101 cfun->can_delete_dead_exceptions = src_cfun->can_delete_dead_exceptions;
2102 cfun->returns_struct = src_cfun->returns_struct;
2103 cfun->returns_pcc_struct = src_cfun->returns_pcc_struct;
2104 cfun->module_id = src_cfun->module_id;
2106 init_empty_tree_cfg ();
2108 profile_status_for_function (cfun) = profile_status_for_function (src_cfun);
2109 ENTRY_BLOCK_PTR->count =
2110 (ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count * (double)count_scale /
2111 REG_BR_PROB_BASE);
2112 ENTRY_BLOCK_PTR->frequency
2113 = ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->frequency;
2114 EXIT_BLOCK_PTR->count =
2115 (EXIT_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count * (double)count_scale /
2116 REG_BR_PROB_BASE);
2117 EXIT_BLOCK_PTR->frequency =
2118 EXIT_BLOCK_PTR_FOR_FUNCTION (src_cfun)->frequency;
2119 if (src_cfun->eh)
2120 init_eh_for_function ();
2122 if (src_cfun->gimple_df)
2124 init_tree_ssa (cfun);
2125 cfun->gimple_df->in_ssa_p = true;
2126 init_ssa_operands (cfun);
2130 /* Helper function for copy_cfg_body. Move debug stmts from the end
2131 of NEW_BB to the beginning of successor basic blocks when needed. If a
2132 successor has multiple predecessors, reset the debug stmts' values;
2133 otherwise keep them. */
2135 static void
2136 maybe_move_debug_stmts_to_successors (copy_body_data *id, basic_block new_bb)
2138 edge e;
2139 edge_iterator ei;
2140 gimple_stmt_iterator si = gsi_last_nondebug_bb (new_bb);
2142 if (gsi_end_p (si)
2143 || gsi_one_before_end_p (si)
2144 || !(stmt_can_throw_internal (gsi_stmt (si))
2145 || stmt_can_make_abnormal_goto (gsi_stmt (si))))
2146 return;
2148 FOR_EACH_EDGE (e, ei, new_bb->succs)
2150 gimple_stmt_iterator ssi = gsi_last_bb (new_bb);
2151 gimple_stmt_iterator dsi = gsi_after_labels (e->dest);
2152 while (is_gimple_debug (gsi_stmt (ssi)))
2154 gimple stmt = gsi_stmt (ssi), new_stmt;
2155 tree var;
2156 tree value;
2158 /* For the last edge move the debug stmts instead of copying
2159 them. */
2160 if (ei_one_before_end_p (ei))
2162 si = ssi;
2163 gsi_prev (&ssi);
2164 if (!single_pred_p (e->dest) && gimple_debug_bind_p (stmt))
2165 gimple_debug_bind_reset_value (stmt);
2166 gsi_remove (&si, false);
2167 gsi_insert_before (&dsi, stmt, GSI_SAME_STMT);
2168 continue;
2171 if (gimple_debug_bind_p (stmt))
2173 var = gimple_debug_bind_get_var (stmt);
2174 if (single_pred_p (e->dest))
2176 value = gimple_debug_bind_get_value (stmt);
2177 value = unshare_expr (value);
2179 else
2180 value = NULL_TREE;
2181 new_stmt = gimple_build_debug_bind (var, value, stmt);
2183 else if (gimple_debug_source_bind_p (stmt))
2185 var = gimple_debug_source_bind_get_var (stmt);
2186 value = gimple_debug_source_bind_get_value (stmt);
2187 new_stmt = gimple_build_debug_source_bind (var, value, stmt);
2189 else
2190 gcc_unreachable ();
2191 gsi_insert_before (&dsi, new_stmt, GSI_SAME_STMT);
2192 id->debug_stmts.safe_push (new_stmt);
2193 gsi_prev (&ssi);
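/* A sketch of the effect (hypothetical stmts): if NEW_BB ends in a
   throwing call followed by # DEBUG d => v_1, the bind is copied to
   the start of each successor -- with its value reset when that
   successor has other predecessors -- and is moved rather than
   copied on the last edge.  */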
2198 /* Convert estimated frequencies into counts for NODE, scaling COUNT
2199 with each bb's frequency. Used when NODE has a 0-weight entry
2200 but we are about to inline it into a non-zero count call bb.
2201 See the comments for handle_missing_profiles() in predict.c for
2202 when this can happen for COMDATs. */
2204 void
2205 freqs_to_counts (struct cgraph_node *node, gcov_type count)
2207 basic_block bb;
2208 edge_iterator ei;
2209 edge e;
2210 struct function *fn = DECL_STRUCT_FUNCTION (node->symbol.decl);
2212 FOR_ALL_BB_FN(bb, fn)
2214 bb->count = apply_scale (count,
2215 GCOV_COMPUTE_SCALE (bb->frequency, BB_FREQ_MAX));
2216 FOR_EACH_EDGE (e, ei, bb->succs)
2217 e->count = apply_probability (e->src->count, e->probability);
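/* In effect bb->count = COUNT * bb->frequency / BB_FREQ_MAX. E.g.,
   with COUNT = 1000, a block whose guessed frequency is a quarter of
   BB_FREQ_MAX receives a count of 250 (illustrative numbers).  */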
2221 /* Make a copy of the body of FN so that it can be inserted inline in
2222 another function. Walks FN via CFG, returns new fndecl. */
2224 static tree
2225 copy_cfg_body (copy_body_data * id, gcov_type count, int frequency_scale,
2226 basic_block entry_block_map, basic_block exit_block_map,
2227 bitmap blocks_to_copy, basic_block new_entry)
2229 tree callee_fndecl = id->src_fn;
2230 /* Original cfun for the callee, doesn't change. */
2231 struct function *src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
2232 struct function *cfun_to_copy;
2233 basic_block bb;
2234 tree new_fndecl = NULL;
2235 bool need_debug_cleanup = false;
2236 gcov_type count_scale;
2237 int last;
2238 int incoming_frequency = 0;
2239 gcov_type incoming_count = 0;
2241 /* This can happen for COMDAT routines that end up with 0 counts
2242 despite being called (see the comments for handle_missing_profiles()
2243 in predict.c as to why). Apply counts to the blocks in the callee
2244 before inlining, using the guessed edge frequencies, so that we don't
2245 end up with a 0-count inline body which can confuse downstream
2246 optimizations such as function splitting. */
2247 if (!ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count && count)
2249 /* Apply the larger of the call bb count and the total incoming
2250 call edge count to the callee. */
2251 gcov_type in_count = 0;
2252 struct cgraph_edge *in_edge;
2253 for (in_edge = id->src_node->callers; in_edge;
2254 in_edge = in_edge->next_caller)
2255 in_count += in_edge->count;
2256 freqs_to_counts (id->src_node, count > in_count ? count : in_count);
2259 if (ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count)
2260 count_scale = (REG_BR_PROB_BASE * (double)count
2261 / ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count);
2262 else
2263 count_scale = REG_BR_PROB_BASE;
2265 if (flag_auto_profile && count_scale > REG_BR_PROB_BASE)
2266 count_scale = REG_BR_PROB_BASE;
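/* AutoFDO counts are derived from samples, so the call count here can
   legitimately exceed the callee's recorded entry count; without this
   clamp count_scale could exceed REG_BR_PROB_BASE and the inlined
   body would be scaled above its real weight. E.g., count = 2000
   against an entry count of 1000 would otherwise double every copied
   count (illustrative numbers).  */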
2268 /* Register specific tree functions. */
2269 gimple_register_cfg_hooks ();
2271 /* If we are inlining just a region of the function, make sure to connect
2272 the new entry to ENTRY_BLOCK_PTR. Since the new entry can be part of a loop,
2273 we must compute the frequency and probability of ENTRY_BLOCK_PTR based on
2274 the frequencies and probabilities of edges incoming from the nonduplicated region. */
2275 if (new_entry)
2277 edge e;
2278 edge_iterator ei;
2280 FOR_EACH_EDGE (e, ei, new_entry->preds)
2281 if (!e->src->aux)
2283 incoming_frequency += EDGE_FREQUENCY (e);
2284 incoming_count += e->count;
2286 incoming_count = incoming_count * count_scale / REG_BR_PROB_BASE;
2287 incoming_frequency
2288 = incoming_frequency * frequency_scale / REG_BR_PROB_BASE;
2289 ENTRY_BLOCK_PTR->count = incoming_count;
2290 ENTRY_BLOCK_PTR->frequency = incoming_frequency;
2293 /* Must have a CFG here at this point. */
2294 gcc_assert (ENTRY_BLOCK_PTR_FOR_FUNCTION
2295 (DECL_STRUCT_FUNCTION (callee_fndecl)));
2297 cfun_to_copy = id->src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
2299 ENTRY_BLOCK_PTR_FOR_FUNCTION (cfun_to_copy)->aux = entry_block_map;
2300 EXIT_BLOCK_PTR_FOR_FUNCTION (cfun_to_copy)->aux = exit_block_map;
2301 entry_block_map->aux = ENTRY_BLOCK_PTR_FOR_FUNCTION (cfun_to_copy);
2302 exit_block_map->aux = EXIT_BLOCK_PTR_FOR_FUNCTION (cfun_to_copy);
2304 /* Duplicate any exception-handling regions. */
2305 if (cfun->eh)
2306 id->eh_map = duplicate_eh_regions (cfun_to_copy, NULL, id->eh_lp_nr,
2307 remap_decl_1, id);
2309 /* Use the aux pointers to map the original blocks to their copies. */
2310 FOR_EACH_BB_FN (bb, cfun_to_copy)
2311 if (!blocks_to_copy || bitmap_bit_p (blocks_to_copy, bb->index))
2313 basic_block new_bb = copy_bb (id, bb, frequency_scale, count_scale);
2314 bb->aux = new_bb;
2315 new_bb->aux = bb;
2318 last = last_basic_block;
2320 /* Now that we've duplicated the blocks, duplicate their edges. */
2321 FOR_ALL_BB_FN (bb, cfun_to_copy)
2322 if (!blocks_to_copy
2323 || (bb->index > 0 && bitmap_bit_p (blocks_to_copy, bb->index)))
2324 need_debug_cleanup |= copy_edges_for_bb (bb, count_scale, exit_block_map);
2326 if (new_entry)
2328 edge e = make_edge (entry_block_map, (basic_block)new_entry->aux, EDGE_FALLTHRU);
2329 e->probability = REG_BR_PROB_BASE;
2330 e->count = incoming_count;
2333 if (gimple_in_ssa_p (cfun))
2334 FOR_ALL_BB_FN (bb, cfun_to_copy)
2335 if (!blocks_to_copy
2336 || (bb->index > 0 && bitmap_bit_p (blocks_to_copy, bb->index)))
2337 copy_phis_for_bb (bb, id);
2339 FOR_ALL_BB_FN (bb, cfun_to_copy)
2340 if (bb->aux)
2342 if (need_debug_cleanup
2343 && bb->index != ENTRY_BLOCK
2344 && bb->index != EXIT_BLOCK)
2345 maybe_move_debug_stmts_to_successors (id, (basic_block) bb->aux);
2346 ((basic_block)bb->aux)->aux = NULL;
2347 bb->aux = NULL;
2350 /* Zero out the AUX fields of blocks newly created during EH edge
2351 insertion. */
2352 for (; last < last_basic_block; last++)
2354 if (need_debug_cleanup)
2355 maybe_move_debug_stmts_to_successors (id, BASIC_BLOCK (last));
2356 BASIC_BLOCK (last)->aux = NULL;
2358 entry_block_map->aux = NULL;
2359 exit_block_map->aux = NULL;
2361 if (id->eh_map)
2363 pointer_map_destroy (id->eh_map);
2364 id->eh_map = NULL;
2367 return new_fndecl;
2370 /* Copy the debug STMT using ID. We deal with these statements in a
2371 special way: if any variable in their VALUE expression wasn't
2372 remapped yet, we won't remap it, because that would get decl uids
2373 out of sync, causing codegen differences between -g and -g0. If
2374 this arises, we drop the VALUE expression altogether. */
2376 static void
2377 copy_debug_stmt (gimple stmt, copy_body_data *id)
2379 tree t, *n;
2380 struct walk_stmt_info wi;
2382 if (gimple_block (stmt))
2384 n = (tree *) pointer_map_contains (id->decl_map, gimple_block (stmt));
2385 gimple_set_block (stmt, n ? *n : id->block);
2388 /* Remap all the operands in COPY. */
2389 memset (&wi, 0, sizeof (wi));
2390 wi.info = id;
2392 processing_debug_stmt = 1;
2394 if (gimple_debug_source_bind_p (stmt))
2395 t = gimple_debug_source_bind_get_var (stmt);
2396 else
2397 t = gimple_debug_bind_get_var (stmt);
2399 if (TREE_CODE (t) == PARM_DECL && id->debug_map
2400 && (n = (tree *) pointer_map_contains (id->debug_map, t)))
2402 gcc_assert (TREE_CODE (*n) == VAR_DECL);
2403 t = *n;
2405 else if (TREE_CODE (t) == VAR_DECL
2406 && !is_global_var (t)
2407 && !pointer_map_contains (id->decl_map, t))
2408 /* T is a non-localized variable. */;
2409 else
2410 walk_tree (&t, remap_gimple_op_r, &wi, NULL);
2412 if (gimple_debug_bind_p (stmt))
2414 gimple_debug_bind_set_var (stmt, t);
2416 if (gimple_debug_bind_has_value_p (stmt))
2418 tree v = gimple_debug_bind_get_value (stmt);
2419 if (TREE_CODE (v) == ADDR_EXPR)
2420 v = TREE_OPERAND (v, 0);
2422 /* The global var may be deleted.  */
2423 if (L_IPO_COMP_MODE &&
2424 ((TREE_CODE (v) != VAR_DECL)
2425 || is_global_var (v)))
2426 processing_debug_stmt = -1;
2427 else
2428 walk_tree (gimple_debug_bind_get_value_ptr (stmt),
2429 remap_gimple_op_r, &wi, NULL);
2432 /* Punt if any decl couldn't be remapped. */
2433 if (processing_debug_stmt < 0)
2434 gimple_debug_bind_reset_value (stmt);
2436 else if (gimple_debug_source_bind_p (stmt))
2438 gimple_debug_source_bind_set_var (stmt, t);
2439 walk_tree (gimple_debug_source_bind_get_value_ptr (stmt),
2440 remap_gimple_op_r, &wi, NULL);
2441 /* When inlining, if the source bind refers to one of the
2442 optimized-away parameters, change the source bind into a normal
2443 debug bind referring to the corresponding DEBUG_EXPR_DECL that
2444 should have been bound before the call stmt. */
2445 t = gimple_debug_source_bind_get_value (stmt);
2446 if (t != NULL_TREE
2447 && TREE_CODE (t) == PARM_DECL
2448 && id->gimple_call)
2450 vec<tree, va_gc> **debug_args = decl_debug_args_lookup (id->src_fn);
2451 unsigned int i;
2452 if (debug_args != NULL)
2454 for (i = 0; i < vec_safe_length (*debug_args); i += 2)
2455 if ((**debug_args)[i] == DECL_ORIGIN (t)
2456 && TREE_CODE ((**debug_args)[i + 1]) == DEBUG_EXPR_DECL)
2458 t = (**debug_args)[i + 1];
2459 stmt->gsbase.subcode = GIMPLE_DEBUG_BIND;
2460 gimple_debug_bind_set_value (stmt, t);
2461 break;
2467 processing_debug_stmt = 0;
2469 update_stmt (stmt);
2472 /* Process deferred debug stmts. In order to give values better odds
2473 of being successfully remapped, we delay the processing of debug
2474 stmts until all other stmts that might require remapping are
2475 processed. */
2477 static void
2478 copy_debug_stmts (copy_body_data *id)
2480 size_t i;
2481 gimple stmt;
2483 if (!id->debug_stmts.exists ())
2484 return;
2486 FOR_EACH_VEC_ELT (id->debug_stmts, i, stmt)
2487 copy_debug_stmt (stmt, id);
2489 id->debug_stmts.release ();
2492 /* Make a copy of the body of SRC_FN so that it can be inserted inline in
2493 another function. */
2495 static tree
2496 copy_tree_body (copy_body_data *id)
2498 tree fndecl = id->src_fn;
2499 tree body = DECL_SAVED_TREE (fndecl);
2501 walk_tree (&body, copy_tree_body_r, id, NULL);
2503 return body;
2506 /* Make a copy of the body of FN so that it can be inserted inline in
2507 another function. */
2509 static tree
2510 copy_body (copy_body_data *id, gcov_type count, int frequency_scale,
2511 basic_block entry_block_map, basic_block exit_block_map,
2512 bitmap blocks_to_copy, basic_block new_entry)
2514 tree fndecl = id->src_fn;
2515 tree body;
2517 /* If this body has a CFG, walk CFG and copy. */
2518 gcc_assert (ENTRY_BLOCK_PTR_FOR_FUNCTION (DECL_STRUCT_FUNCTION (fndecl)));
2519 body = copy_cfg_body (id, count, frequency_scale, entry_block_map, exit_block_map,
2520 blocks_to_copy, new_entry);
2521 copy_debug_stmts (id);
2523 return body;
2526 /* Return true if VALUE is an ADDR_EXPR of an automatic variable
2527 defined in function FN, or of a data member thereof. */
2529 static bool
2530 self_inlining_addr_expr (tree value, tree fn)
2532 tree var;
2534 if (TREE_CODE (value) != ADDR_EXPR)
2535 return false;
2537 var = get_base_address (TREE_OPERAND (value, 0));
2539 return var && auto_var_in_fn_p (var, fn);
2542 /* Append to BB a debug annotation that binds VAR to VALUE, inheriting
2543 lexical block and line number information from base_stmt, if given,
2544 or from the last stmt of the block otherwise. */
2546 static gimple
2547 insert_init_debug_bind (copy_body_data *id,
2548 basic_block bb, tree var, tree value,
2549 gimple base_stmt)
2551 gimple note;
2552 gimple_stmt_iterator gsi;
2553 tree tracked_var;
2555 if (!gimple_in_ssa_p (id->src_cfun))
2556 return NULL;
2558 if (!MAY_HAVE_DEBUG_STMTS)
2559 return NULL;
2561 tracked_var = target_for_debug_bind (var);
2562 if (!tracked_var)
2563 return NULL;
2565 if (bb)
2567 gsi = gsi_last_bb (bb);
2568 if (!base_stmt && !gsi_end_p (gsi))
2569 base_stmt = gsi_stmt (gsi);
2572 note = gimple_build_debug_bind (tracked_var, value, base_stmt);
2574 if (bb)
2576 if (!gsi_end_p (gsi))
2577 gsi_insert_after (&gsi, note, GSI_SAME_STMT);
2578 else
2579 gsi_insert_before (&gsi, note, GSI_SAME_STMT);
2582 return note;
2585 static void
2586 insert_init_stmt (copy_body_data *id, basic_block bb, gimple init_stmt)
2588 /* If VAR represents a zero-sized variable, it's possible that the
2589 assignment statement may result in no gimple statements. */
2590 if (init_stmt)
2592 gimple_stmt_iterator si = gsi_last_bb (bb);
2594 /* We can end up with init statements that store to a non-register
2595 from a rhs with a conversion. Handle that here by forcing the
2596 rhs into a temporary. gimple_regimplify_operands is not
2597 prepared to do this for us. */
2598 if (!is_gimple_debug (init_stmt)
2599 && !is_gimple_reg (gimple_assign_lhs (init_stmt))
2600 && is_gimple_reg_type (TREE_TYPE (gimple_assign_lhs (init_stmt)))
2601 && gimple_assign_rhs_class (init_stmt) == GIMPLE_UNARY_RHS)
2603 tree rhs = build1 (gimple_assign_rhs_code (init_stmt),
2604 gimple_expr_type (init_stmt),
2605 gimple_assign_rhs1 (init_stmt));
2606 rhs = force_gimple_operand_gsi (&si, rhs, true, NULL_TREE, false,
2607 GSI_NEW_STMT);
2608 gimple_assign_set_rhs_code (init_stmt, TREE_CODE (rhs));
2609 gimple_assign_set_rhs1 (init_stmt, rhs);
2611 gsi_insert_after (&si, init_stmt, GSI_NEW_STMT);
2612 gimple_regimplify_operands (init_stmt, &si);
2614 if (!is_gimple_debug (init_stmt) && MAY_HAVE_DEBUG_STMTS)
2616 tree def = gimple_assign_lhs (init_stmt);
2617 insert_init_debug_bind (id, bb, def, def, init_stmt);
2622 /* Initialize parameter P with VALUE. If needed, produce an init statement
2623 at the end of BB. When BB is NULL, we return the init statement to be
2624 output later. */
2625 static gimple
2626 setup_one_parameter (copy_body_data *id, tree p, tree value, tree fn,
2627 basic_block bb, tree *vars)
2629 gimple init_stmt = NULL;
2630 tree var;
2631 tree rhs = value;
2632 tree def = (gimple_in_ssa_p (cfun)
2633 ? ssa_default_def (id->src_cfun, p) : NULL);
2635 if (value
2636 && value != error_mark_node
2637 && !useless_type_conversion_p (TREE_TYPE (p), TREE_TYPE (value)))
2639 /* If we can match up types by promotion/demotion do so. */
2640 if (fold_convertible_p (TREE_TYPE (p), value))
2641 rhs = fold_convert (TREE_TYPE (p), value);
2642 else
2644 /* ??? For valid programs we should not end up here.
2645 Still if we end up with truly mismatched types here, fall back
2646 to using a VIEW_CONVERT_EXPR or a literal zero to not leak invalid
2647 GIMPLE to the following passes. */
2648 if (!is_gimple_reg_type (TREE_TYPE (value))
2649 || TYPE_SIZE (TREE_TYPE (p)) == TYPE_SIZE (TREE_TYPE (value)))
2650 rhs = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (p), value);
2651 else
2652 rhs = build_zero_cst (TREE_TYPE (p));
2656 /* Make an equivalent VAR_DECL. Note that we must NOT remap the type
2657 here since the type of this decl must be visible to the calling
2658 function. */
2659 var = copy_decl_to_var (p, id);
2661 /* Declare this new variable. */
2662 DECL_CHAIN (var) = *vars;
2663 *vars = var;
2665 /* Make gimplifier happy about this variable. */
2666 DECL_SEEN_IN_BIND_EXPR_P (var) = 1;
2668 /* If the parameter is never assigned to, has no SSA_NAMEs created,
2669 we would not need to create a new variable here at all, if it
2670 weren't for debug info. Still, we can just use the argument
2671 value. */
2672 if (TREE_READONLY (p)
2673 && !TREE_ADDRESSABLE (p)
2674 && value && !TREE_SIDE_EFFECTS (value)
2675 && !def)
2677 /* We may produce non-gimple trees by adding NOPs or introduce
2678 invalid sharing when the operand is not really constant.
2679 It is not a big deal to prohibit constant propagation here, as
2680 we will constant propagate in the DOM1 pass anyway. */
2681 if (is_gimple_min_invariant (value)
2682 && useless_type_conversion_p (TREE_TYPE (p),
2683 TREE_TYPE (value))
2684 /* We have to be very careful about ADDR_EXPR. Make sure
2685 the base variable isn't a local variable of the inlined
2686 function, e.g., when doing recursive inlining, direct or
2687 mutually-recursive or whatever, which is why we don't
2688 just test whether fn == current_function_decl. */
2689 && ! self_inlining_addr_expr (value, fn))
2691 insert_decl_map (id, p, value);
2692 insert_debug_decl_map (id, p, var);
2693 return insert_init_debug_bind (id, bb, var, value, NULL);
2697 /* Register the VAR_DECL as the equivalent for the PARM_DECL;
2698 that way, when the PARM_DECL is encountered, it will be
2699 automatically replaced by the VAR_DECL. */
2700 insert_decl_map (id, p, var);
2702 /* Even if P was TREE_READONLY, the new VAR should not be.
2703 In the original code, we would have constructed a
2704 temporary, and then the function body would have never
2705 changed the value of P. However, now, we will be
2706 constructing VAR directly. The constructor body may
2707 change its value multiple times as it is being
2708 constructed. Therefore, it must not be TREE_READONLY;
2709 the back-end assumes that TREE_READONLY variable is
2710 assigned to only once. */
2711 if (TYPE_NEEDS_CONSTRUCTING (TREE_TYPE (p)))
2712 TREE_READONLY (var) = 0;
2714 /* If there is no setup required and we are in SSA, take the easy route,
2715 replacing all SSA names representing the function parameter by the
2716 SSA name passed to the function.
2718 We need to construct a map for the variable anyway, as it might be used
2719 in different SSA names when the parameter is set in the function.
2721 Do the replacement at -O0 for const arguments replaced by a constant.
2722 This is important for builtin_constant_p and other constructs requiring
2723 a constant argument to be visible in the inlined function body. */
2724 if (gimple_in_ssa_p (cfun) && rhs && def && is_gimple_reg (p)
2725 && (optimize
2726 || (TREE_READONLY (p)
2727 && is_gimple_min_invariant (rhs)))
2728 && (TREE_CODE (rhs) == SSA_NAME
2729 || is_gimple_min_invariant (rhs))
2730 && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (def))
2732 insert_decl_map (id, def, rhs);
2733 return insert_init_debug_bind (id, bb, var, rhs, NULL);
2736 /* If the value of the argument is never used, don't bother initializing
2737 it. */
2738 if (optimize && gimple_in_ssa_p (cfun) && !def && is_gimple_reg (p))
2740 gcc_assert (!value || !TREE_SIDE_EFFECTS (value));
2741 return insert_init_debug_bind (id, bb, var, rhs, NULL);
2744 /* Initialize this VAR_DECL from the equivalent argument. Convert
2745 the argument to the proper type in case it was promoted. */
2746 if (value)
2748 if (rhs == error_mark_node)
2750 insert_decl_map (id, p, var);
2751 return insert_init_debug_bind (id, bb, var, rhs, NULL);
2754 STRIP_USELESS_TYPE_CONVERSION (rhs);
2756 /* If we are in SSA form, properly remap the default definition
2757 or assign to a dummy SSA name if the parameter is unused and
2758 we are not optimizing. */
2759 if (gimple_in_ssa_p (cfun) && is_gimple_reg (p))
2761 if (def)
2763 def = remap_ssa_name (def, id);
2764 init_stmt = gimple_build_assign (def, rhs);
2765 SSA_NAME_IS_DEFAULT_DEF (def) = 0;
2766 set_ssa_default_def (cfun, var, NULL);
2768 else if (!optimize)
2770 def = make_ssa_name (var, NULL);
2771 init_stmt = gimple_build_assign (def, rhs);
2774 else
2775 init_stmt = gimple_build_assign (var, rhs);
2777 if (bb && init_stmt)
2778 insert_init_stmt (id, bb, init_stmt);
2780 return init_stmt;
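/* A sketch of the common outcomes for a call f (5) into
   int f (int p) (hypothetical names):
   - optimizing, P's default def p_1(D) exists and 5 is invariant:
     only the mapping p_1(D) -> 5 is recorded, no statement emitted;
   - at -O0 with P used: an explicit assignment of 5 to the remapped
     default def is inserted at the end of BB;
   - P unused while optimizing: just a debug bind is produced.  */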
2783 /* Generate code to initialize the parameters of the function at the
2784 top of the stack in ID from the GIMPLE_CALL STMT. */
2786 static void
2787 initialize_inlined_parameters (copy_body_data *id, gimple stmt,
2788 tree fn, basic_block bb)
2790 tree parms;
2791 size_t i;
2792 tree p;
2793 tree vars = NULL_TREE;
2794 tree static_chain = gimple_call_chain (stmt);
2796 /* Figure out what the parameters are. */
2797 parms = DECL_ARGUMENTS (fn);
2799 /* Loop through the parameter declarations, replacing each with an
2800 equivalent VAR_DECL, appropriately initialized. */
2801 for (p = parms, i = 0; p; p = DECL_CHAIN (p), i++)
2803 tree val;
2804 val = i < gimple_call_num_args (stmt) ? gimple_call_arg (stmt, i) : NULL;
2805 setup_one_parameter (id, p, val, fn, bb, &vars);
2807 /* After remapping parameters remap their types. This has to be done
2808 in a second loop over all parameters to appropriately remap
2809 variable sized arrays when the size is specified in a
2810 parameter following the array. */
2811 for (p = parms, i = 0; p; p = DECL_CHAIN (p), i++)
2813 tree *varp = (tree *) pointer_map_contains (id->decl_map, p);
2814 if (varp
2815 && TREE_CODE (*varp) == VAR_DECL)
2817 tree def = (gimple_in_ssa_p (cfun) && is_gimple_reg (p)
2818 ? ssa_default_def (id->src_cfun, p) : NULL);
2819 tree var = *varp;
2820 TREE_TYPE (var) = remap_type (TREE_TYPE (var), id);
2821 /* Also remap the type of the default definition if it was remapped
2822 to the default definition of the parameter replacement
2823 by the parameter setup. */
2824 if (def)
2826 tree *defp = (tree *) pointer_map_contains (id->decl_map, def);
2827 if (defp
2828 && TREE_CODE (*defp) == SSA_NAME
2829 && SSA_NAME_VAR (*defp) == var)
2830 TREE_TYPE (*defp) = TREE_TYPE (var);
2835 /* Initialize the static chain. */
2836 p = DECL_STRUCT_FUNCTION (fn)->static_chain_decl;
2837 gcc_assert (fn != current_function_decl);
2838 if (p)
2840 /* No static chain? Seems like a bug in tree-nested.c. */
2841 gcc_assert (static_chain);
2843 setup_one_parameter (id, p, static_chain, fn, bb, &vars);
2846 declare_inline_vars (id->block, vars);
2850 /* Declare a return variable to replace the RESULT_DECL for the
2851 function we are calling, and register it in the decl map so that
2852 uses of the RESULT_DECL are automatically replaced by uses of the
2853 new variable.
2855 RETURN_SLOT, if non-null, is the place where the result is to be
2856 stored. It is set only for CALL_EXPR_RETURN_SLOT_OPT. MODIFY_DEST,
2857 if non-null, was the LHS of the MODIFY_EXPR to which this call is the RHS.
2859 The return value is a (possibly null) value that holds the result
2860 as seen by the caller. */
2862 static tree
2863 declare_return_variable (copy_body_data *id, tree return_slot, tree modify_dest,
2864 basic_block entry_bb)
2866 tree callee = id->src_fn;
2867 tree result = DECL_RESULT (callee);
2868 tree callee_type = TREE_TYPE (result);
2869 tree caller_type;
2870 tree var, use;
2872 /* Handle type-mismatches in the function declaration return type
2873 vs. the call expression. */
2874 if (modify_dest)
2875 caller_type = TREE_TYPE (modify_dest);
2876 else
2877 caller_type = TREE_TYPE (TREE_TYPE (callee));
2879 /* We don't need to do anything for functions that don't return anything. */
2880 if (VOID_TYPE_P (callee_type))
2881 return NULL_TREE;
2883 /* If there was a return slot, then the return value is the
2884 dereferenced address of that object. */
2885 if (return_slot)
2887 /* The front end shouldn't have used both return_slot and
2888 a modify expression. */
2889 gcc_assert (!modify_dest);
2890 if (DECL_BY_REFERENCE (result))
2892 tree return_slot_addr = build_fold_addr_expr (return_slot);
2893 STRIP_USELESS_TYPE_CONVERSION (return_slot_addr);
2895 /* We are going to construct *&return_slot and we can't do that
2896 for variables believed to be non-addressable.
2898 FIXME: This check can possibly trigger, because values returned
2899 via return slot optimization are not believed to have their address
2900 taken by alias analysis. */
2901 gcc_assert (TREE_CODE (return_slot) != SSA_NAME);
2902 var = return_slot_addr;
2904 else
2906 var = return_slot;
2907 gcc_assert (TREE_CODE (var) != SSA_NAME);
2908 TREE_ADDRESSABLE (var) |= TREE_ADDRESSABLE (result);
2910 if ((TREE_CODE (TREE_TYPE (result)) == COMPLEX_TYPE
2911 || TREE_CODE (TREE_TYPE (result)) == VECTOR_TYPE)
2912 && !DECL_GIMPLE_REG_P (result)
2913 && DECL_P (var))
2914 DECL_GIMPLE_REG_P (var) = 0;
2915 use = NULL;
2916 goto done;
2919 /* All types requiring non-trivial constructors should have been handled. */
2920 gcc_assert (!TREE_ADDRESSABLE (callee_type));
2922 /* Attempt to avoid creating a new temporary variable. */
2923 if (modify_dest
2924 && TREE_CODE (modify_dest) != SSA_NAME)
2926 bool use_it = false;
2928 /* We can't use MODIFY_DEST if there's type promotion involved. */
2929 if (!useless_type_conversion_p (callee_type, caller_type))
2930 use_it = false;
2932 /* ??? If we're assigning to a variable sized type, then we must
2933 reuse the destination variable, because we've no good way to
2934 create variable sized temporaries at this point. */
2935 else if (TREE_CODE (TYPE_SIZE_UNIT (caller_type)) != INTEGER_CST)
2936 use_it = true;
2938 /* If the callee cannot possibly modify MODIFY_DEST, then we can
2939 reuse it as the result of the call directly. Don't do this if
2940 it would promote MODIFY_DEST to addressable. */
2941 else if (TREE_ADDRESSABLE (result))
2942 use_it = false;
2943 else
2945 tree base_m = get_base_address (modify_dest);
2947 /* If the base isn't a decl, then it's a pointer, and we don't
2948 know where that's going to go. */
2949 if (!DECL_P (base_m))
2950 use_it = false;
2951 else if (is_global_var (base_m))
2952 use_it = false;
2953 else if ((TREE_CODE (TREE_TYPE (result)) == COMPLEX_TYPE
2954 || TREE_CODE (TREE_TYPE (result)) == VECTOR_TYPE)
2955 && !DECL_GIMPLE_REG_P (result)
2956 && DECL_GIMPLE_REG_P (base_m))
2957 use_it = false;
2958 else if (!TREE_ADDRESSABLE (base_m))
2959 use_it = true;
2962 if (use_it)
2964 var = modify_dest;
2965 use = NULL;
2966 goto done;
2970 gcc_assert (TREE_CODE (TYPE_SIZE_UNIT (callee_type)) == INTEGER_CST);
2972 var = copy_result_decl_to_var (result, id);
2973 DECL_SEEN_IN_BIND_EXPR_P (var) = 1;
2975 /* Do not have the rest of GCC warn about this variable as it should
2976 not be visible to the user. */
2977 TREE_NO_WARNING (var) = 1;
2979 declare_inline_vars (id->block, var);
2981 /* Build the use expr. If the return type of the function was
2982 promoted, convert it back to the expected type. */
2983 use = var;
2984 if (!useless_type_conversion_p (caller_type, TREE_TYPE (var)))
2986 /* If we can match up types by promotion/demotion do so. */
2987 if (fold_convertible_p (caller_type, var))
2988 use = fold_convert (caller_type, var);
2989 else
2991 /* ??? For valid programs we should not end up here.
2992 Still if we end up with truly mismatched types here, fall back
2993 to using a MEM_REF to not leak invalid GIMPLE to the following
2994 passes. */
2995 /* Prevent var from being written into SSA form. */
2996 if (TREE_CODE (TREE_TYPE (var)) == VECTOR_TYPE
2997 || TREE_CODE (TREE_TYPE (var)) == COMPLEX_TYPE)
2998 DECL_GIMPLE_REG_P (var) = false;
2999 else if (is_gimple_reg_type (TREE_TYPE (var)))
3000 TREE_ADDRESSABLE (var) = true;
3001 use = fold_build2 (MEM_REF, caller_type,
3002 build_fold_addr_expr (var),
3003 build_int_cst (ptr_type_node, 0));
3007 STRIP_USELESS_TYPE_CONVERSION (use);
3009 if (DECL_BY_REFERENCE (result))
3011 TREE_ADDRESSABLE (var) = 1;
3012 var = build_fold_addr_expr (var);
3015 done:
3016 /* Register the VAR_DECL as the equivalent for the RESULT_DECL; that
3017 way, when the RESULT_DECL is encountered, it will be
3018 automatically replaced by the VAR_DECL.
3020 When returning by reference, ensure that RESULT_DECL remaps to
3021 gimple_val. */
3022 if (DECL_BY_REFERENCE (result)
3023 && !is_gimple_val (var))
3025 tree temp = create_tmp_var (TREE_TYPE (result), "retvalptr");
3026 insert_decl_map (id, result, temp);
3027 /* When the RESULT_DECL is in SSA form, we need to remap and initialize
3028 its default_def SSA_NAME. */
3029 if (gimple_in_ssa_p (id->src_cfun)
3030 && is_gimple_reg (result))
3032 temp = make_ssa_name (temp, NULL);
3033 insert_decl_map (id, ssa_default_def (id->src_cfun, result), temp);
3035 insert_init_stmt (id, entry_bb, gimple_build_assign (temp, var));
3037 else
3038 insert_decl_map (id, result, var);
3040 /* Remember this so we can ignore it in remap_decls. */
3041 id->retvar = var;
3043 return use;
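/* For instance, when inlining s = f () where f returns a struct via
   CALL_EXPR_RETURN_SLOT_OPT, RETURN_SLOT is s and the RESULT_DECL
   remaps straight to s with no temporary; in the general case a
   fresh temporary is declared and USE is that temporary, converted
   back to the caller's type if the return type was promoted.  */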
3046 /* Callback through walk_tree. Determine if a DECL_INITIAL makes reference
3047 to a local label. */
3049 static tree
3050 has_label_address_in_static_1 (tree *nodep, int *walk_subtrees, void *fnp)
3052 tree node = *nodep;
3053 tree fn = (tree) fnp;
3055 if (TREE_CODE (node) == LABEL_DECL && DECL_CONTEXT (node) == fn)
3056 return node;
3058 if (TYPE_P (node))
3059 *walk_subtrees = 0;
3061 return NULL_TREE;
3064 /* Determine if the function can be copied. If so, return NULL. If
3065 not, return a string describing the reason for failure. */
3067 static const char *
3068 copy_forbidden (struct function *fun, tree fndecl)
3070 const char *reason = fun->cannot_be_copied_reason;
3071 tree decl;
3072 unsigned ix;
3074 /* Only examine the function once. */
3075 if (fun->cannot_be_copied_set)
3076 return reason;
3078 /* We cannot copy a function that receives a non-local goto
3079 because we cannot remap the destination label used in the
3080 function that is performing the non-local goto. */
3081 /* ??? Actually, this should be possible, if we work at it.
3082 No doubt there's just a handful of places that simply
3083 assume it doesn't happen and don't substitute properly. */
3084 if (fun->has_nonlocal_label)
3086 reason = G_("function %q+F can never be copied "
3087 "because it receives a non-local goto");
3088 goto fail;
3091 FOR_EACH_LOCAL_DECL (fun, ix, decl)
3092 if (TREE_CODE (decl) == VAR_DECL
3093 && TREE_STATIC (decl)
3094 && !DECL_EXTERNAL (decl)
3095 && DECL_INITIAL (decl)
3096 && walk_tree_without_duplicates (&DECL_INITIAL (decl),
3097 has_label_address_in_static_1,
3098 fndecl))
3100 reason = G_("function %q+F can never be copied because it saves "
3101 "address of local label in a static variable");
3102 goto fail;
3105 fail:
3106 fun->cannot_be_copied_reason = reason;
3107 fun->cannot_be_copied_set = true;
3108 return reason;
3112 static const char *inline_forbidden_reason;
3114 /* A callback for walk_gimple_seq to handle statements. Returns non-NULL
3115 iff a function cannot be inlined. Also sets the reason why. */
3117 static tree
3118 inline_forbidden_p_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
3119 struct walk_stmt_info *wip)
3121 tree fn = (tree) wip->info;
3122 tree t;
3123 gimple stmt = gsi_stmt (*gsi);
3125 switch (gimple_code (stmt))
3127 case GIMPLE_CALL:
3128 /* Refuse to inline an alloca call unless the user explicitly forced it,
3129 as this may change the program's memory overhead drastically when the
3130 function using alloca is called in a loop. In the GCC version present in
3131 SPEC2000, inlining into schedule_block caused it to require 2GB of
3132 RAM instead of 256MB. Don't do so for alloca calls emitted for
3133 VLA objects, as those can't cause unbounded growth (they're always
3134 wrapped inside stack_save/stack_restore regions). */
3135 if (gimple_alloca_call_p (stmt)
3136 && !gimple_call_alloca_for_var_p (stmt)
3137 && !lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn)))
3139 inline_forbidden_reason
3140 = G_("function %q+F can never be inlined because it uses "
3141 "alloca (override using the always_inline attribute)");
3142 *handled_ops_p = true;
3143 return fn;
3146 t = gimple_call_fndecl (stmt);
3147 if (t == NULL_TREE)
3148 break;
3150 /* We cannot inline functions that call setjmp. */
3151 if (setjmp_call_p (t))
3153 inline_forbidden_reason
3154 = G_("function %q+F can never be inlined because it uses setjmp");
3155 *handled_ops_p = true;
3156 return t;
3159 if (DECL_BUILT_IN_CLASS (t) == BUILT_IN_NORMAL)
3160 switch (DECL_FUNCTION_CODE (t))
3162 /* We cannot inline functions that take a variable number of
3163 arguments. */
3164 case BUILT_IN_VA_START:
3165 case BUILT_IN_NEXT_ARG:
3166 case BUILT_IN_VA_END:
3167 inline_forbidden_reason
3168 = G_("function %q+F can never be inlined because it "
3169 "uses variable argument lists");
3170 *handled_ops_p = true;
3171 return t;
3173 case BUILT_IN_LONGJMP:
3174 /* We can't inline functions that call __builtin_longjmp at
3175 all. The non-local goto machinery really requires the
3176 destination be in a different function. If we allow the
3177 function calling __builtin_longjmp to be inlined into the
3178 function calling __builtin_setjmp, Things will Go Awry. */
3179 inline_forbidden_reason
3180 = G_("function %q+F can never be inlined because "
3181 "it uses setjmp-longjmp exception handling");
3182 *handled_ops_p = true;
3183 return t;
3185 case BUILT_IN_NONLOCAL_GOTO:
3186 /* Similarly. */
3187 inline_forbidden_reason
3188 = G_("function %q+F can never be inlined because "
3189 "it uses non-local goto");
3190 *handled_ops_p = true;
3191 return t;
3193 case BUILT_IN_RETURN:
3194 case BUILT_IN_APPLY_ARGS:
3195 /* If a __builtin_apply_args caller would be inlined,
3196 it would be saving arguments of the function it has
3197 been inlined into. Similarly __builtin_return would
3198 return from the function the inline has been inlined into. */
3199 inline_forbidden_reason
3200 = G_("function %q+F can never be inlined because "
3201 "it uses __builtin_return or __builtin_apply_args");
3202 *handled_ops_p = true;
3203 return t;
3205 default:
3206 break;
3208 break;
3210 case GIMPLE_GOTO:
3211 t = gimple_goto_dest (stmt);
3213 /* We will not inline a function which uses computed goto. The
3214 addresses of its local labels, which may be tucked into
3215 global storage, are of course not constant across
3216 instantiations, which causes unexpected behavior. */
3217 if (TREE_CODE (t) != LABEL_DECL)
3219 inline_forbidden_reason
3220 = G_("function %q+F can never be inlined "
3221 "because it contains a computed goto");
3222 *handled_ops_p = true;
3223 return t;
3225 break;
3227 default:
3228 break;
3231 *handled_ops_p = false;
3232 return NULL_TREE;
3235 /* Return true if FNDECL is a function that cannot be inlined into
3236 another one. */
3238 static bool
3239 inline_forbidden_p (tree fndecl)
3241 struct function *fun = DECL_STRUCT_FUNCTION (fndecl);
3242 struct walk_stmt_info wi;
3243 struct pointer_set_t *visited_nodes;
3244 basic_block bb;
3245 bool forbidden_p = false;
3247 /* First check for shared reasons not to copy the code. */
3248 inline_forbidden_reason = copy_forbidden (fun, fndecl);
3249 if (inline_forbidden_reason != NULL)
3250 return true;
3252 /* Next, walk the statements of the function looking for
3253 constructs we can't handle, or that are non-optimal for inlining. */
3254 visited_nodes = pointer_set_create ();
3255 memset (&wi, 0, sizeof (wi));
3256 wi.info = (void *) fndecl;
3257 wi.pset = visited_nodes;
3259 FOR_EACH_BB_FN (bb, fun)
3261 gimple ret;
3262 gimple_seq seq = bb_seq (bb);
3263 ret = walk_gimple_seq (seq, inline_forbidden_p_stmt, NULL, &wi);
3264 forbidden_p = (ret != NULL);
3265 if (forbidden_p)
3266 break;
3269 pointer_set_destroy (visited_nodes);
3270 return forbidden_p;
3273 /* Return false if the function FNDECL cannot be inlined on account of its
3274 attributes, true otherwise. */
3275 static bool
3276 function_attribute_inlinable_p (const_tree fndecl)
3278 if (targetm.attribute_table)
3280 const_tree a;
3282 for (a = DECL_ATTRIBUTES (fndecl); a; a = TREE_CHAIN (a))
3284 const_tree name = TREE_PURPOSE (a);
3285 int i;
3287 for (i = 0; targetm.attribute_table[i].name != NULL; i++)
3288 if (is_attribute_p (targetm.attribute_table[i].name, name))
3289 return targetm.function_attribute_inlinable_p (fndecl);
3293 return true;
3296 /* Returns nonzero if FN is a function that does not have any
3297 fundamental inline blocking properties. */
3299 bool
3300 tree_inlinable_function_p (tree fn)
3302 bool inlinable = true;
3303 bool do_warning;
3304 tree always_inline;
3306 /* If we've already decided this function shouldn't be inlined,
3307 there's no need to check again. But the cached bit from the analysis
3308 can be reset during decl merge in multi-module compilation (C FE only).
3309 The problem is that we cannot really use a two-state cached value --
3310 we cannot tell the initial state (unknown value) from a computed value. */
3311 if (DECL_UNINLINABLE (fn)
3312 && (!L_IPO_COMP_MODE
3313 || lookup_attribute ("noinline", DECL_ATTRIBUTES (fn))))
3314 return false;
3316 /* We only warn for functions declared `inline' by the user. */
3317 do_warning = (warn_inline
3318 && DECL_DECLARED_INLINE_P (fn)
3319 && !DECL_NO_INLINE_WARNING_P (fn)
3320 && !DECL_IN_SYSTEM_HEADER (fn));
3322 always_inline = lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn));
3324 if (flag_no_inline
3325 && always_inline == NULL)
3327 if (do_warning)
3328 warning (OPT_Winline, "function %q+F can never be inlined because it "
3329 "is suppressed using -fno-inline", fn);
3330 inlinable = false;
3333 else if (!function_attribute_inlinable_p (fn))
3335 if (do_warning)
3336 warning (OPT_Winline, "function %q+F can never be inlined because it "
3337 "uses attributes conflicting with inlining", fn);
3338 inlinable = false;
3341 else if (inline_forbidden_p (fn))
3343 /* See if we should warn about uninlinable functions. Previously,
3344 some of these warnings would be issued while trying to expand
3345 the function inline, but that would cause multiple warnings
3346 about functions that would for example call alloca. But since
3347 this is a property of the function, just one warning is enough.
3348 As a bonus we can now give more details about the reason why a
3349 function is not inlinable. */
3350 if (always_inline)
3351 error (inline_forbidden_reason, fn);
3352 else if (do_warning)
3353 warning (OPT_Winline, inline_forbidden_reason, fn);
3355 inlinable = false;
3358 /* Squirrel away the result so that we don't have to check again. */
3359 DECL_UNINLINABLE (fn) = !inlinable;
3361 return inlinable;
3364 /* Estimate the cost of a memory move. Use the machine-dependent
3365 word size and take a possible memcpy call into account. */
3367 static int
3368 estimate_move_cost (tree type)
3370 HOST_WIDE_INT size;
3372 gcc_assert (!VOID_TYPE_P (type));
3374 if (TREE_CODE (type) == VECTOR_TYPE)
3376 enum machine_mode inner = TYPE_MODE (TREE_TYPE (type));
3377 enum machine_mode simd
3378 = targetm.vectorize.preferred_simd_mode (inner);
3379 int simd_mode_size = GET_MODE_SIZE (simd);
3380 return ((GET_MODE_SIZE (TYPE_MODE (type)) + simd_mode_size - 1)
3381 / simd_mode_size);
3384 size = int_size_in_bytes (type);
3386 if (size < 0 || size > MOVE_MAX_PIECES * MOVE_RATIO (!optimize_size))
3387 /* Cost of a memcpy call, 3 arguments and the call. */
3388 return 4;
3389 else
3390 return ((size + MOVE_MAX_PIECES - 1) / MOVE_MAX_PIECES);
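/* Worked example (illustrative target values): with MOVE_MAX_PIECES
   = 8 and MOVE_RATIO = 4, a 64-byte struct exceeds the 32-byte limit
   and is charged 4 (a memcpy call), while a 20-byte struct costs
   (20 + 8 - 1) / 8 = 3 piecewise moves.  */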
3393 /* Returns cost of operation CODE, according to WEIGHTS */
3395 static int
3396 estimate_operator_cost (enum tree_code code, eni_weights *weights,
3397 tree op1 ATTRIBUTE_UNUSED, tree op2)
3399 switch (code)
3401 /* These are "free" conversions, or their presumed cost
3402 is folded into other operations. */
3403 case RANGE_EXPR:
3404 CASE_CONVERT:
3405 case COMPLEX_EXPR:
3406 case PAREN_EXPR:
3407 case VIEW_CONVERT_EXPR:
3408 return 0;
3410 /* Assign cost of 1 to usual operations.
3411 ??? We may consider mapping RTL costs to this. */
3412 case COND_EXPR:
3413 case VEC_COND_EXPR:
3414 case VEC_PERM_EXPR:
3416 case PLUS_EXPR:
3417 case POINTER_PLUS_EXPR:
3418 case MINUS_EXPR:
3419 case MULT_EXPR:
3420 case MULT_HIGHPART_EXPR:
3421 case FMA_EXPR:
3423 case ADDR_SPACE_CONVERT_EXPR:
3424 case FIXED_CONVERT_EXPR:
3425 case FIX_TRUNC_EXPR:
3427 case NEGATE_EXPR:
3428 case FLOAT_EXPR:
3429 case MIN_EXPR:
3430 case MAX_EXPR:
3431 case ABS_EXPR:
3433 case LSHIFT_EXPR:
3434 case RSHIFT_EXPR:
3435 case LROTATE_EXPR:
3436 case RROTATE_EXPR:
3437 case VEC_LSHIFT_EXPR:
3438 case VEC_RSHIFT_EXPR:
3440 case BIT_IOR_EXPR:
3441 case BIT_XOR_EXPR:
3442 case BIT_AND_EXPR:
3443 case BIT_NOT_EXPR:
3445 case TRUTH_ANDIF_EXPR:
3446 case TRUTH_ORIF_EXPR:
3447 case TRUTH_AND_EXPR:
3448 case TRUTH_OR_EXPR:
3449 case TRUTH_XOR_EXPR:
3450 case TRUTH_NOT_EXPR:
3452 case LT_EXPR:
3453 case LE_EXPR:
3454 case GT_EXPR:
3455 case GE_EXPR:
3456 case EQ_EXPR:
3457 case NE_EXPR:
3458 case ORDERED_EXPR:
3459 case UNORDERED_EXPR:
3461 case UNLT_EXPR:
3462 case UNLE_EXPR:
3463 case UNGT_EXPR:
3464 case UNGE_EXPR:
3465 case UNEQ_EXPR:
3466 case LTGT_EXPR:
3468 case CONJ_EXPR:
3470 case PREDECREMENT_EXPR:
3471 case PREINCREMENT_EXPR:
3472 case POSTDECREMENT_EXPR:
3473 case POSTINCREMENT_EXPR:
3475 case REALIGN_LOAD_EXPR:
3477 case REDUC_MAX_EXPR:
3478 case REDUC_MIN_EXPR:
3479 case REDUC_PLUS_EXPR:
3480 case WIDEN_SUM_EXPR:
3481 case WIDEN_MULT_EXPR:
3482 case DOT_PROD_EXPR:
3483 case WIDEN_MULT_PLUS_EXPR:
3484 case WIDEN_MULT_MINUS_EXPR:
3485 case WIDEN_LSHIFT_EXPR:
3487 case VEC_WIDEN_MULT_HI_EXPR:
3488 case VEC_WIDEN_MULT_LO_EXPR:
3489 case VEC_WIDEN_MULT_EVEN_EXPR:
3490 case VEC_WIDEN_MULT_ODD_EXPR:
3491 case VEC_UNPACK_HI_EXPR:
3492 case VEC_UNPACK_LO_EXPR:
3493 case VEC_UNPACK_FLOAT_HI_EXPR:
3494 case VEC_UNPACK_FLOAT_LO_EXPR:
3495 case VEC_PACK_TRUNC_EXPR:
3496 case VEC_PACK_SAT_EXPR:
3497 case VEC_PACK_FIX_TRUNC_EXPR:
3498 case VEC_WIDEN_LSHIFT_HI_EXPR:
3499 case VEC_WIDEN_LSHIFT_LO_EXPR:
3501 return 1;
3503 /* A few special cases of expensive operations. This is useful
3504 to avoid inlining functions having too many of these. */
3505 case TRUNC_DIV_EXPR:
3506 case CEIL_DIV_EXPR:
3507 case FLOOR_DIV_EXPR:
3508 case ROUND_DIV_EXPR:
3509 case EXACT_DIV_EXPR:
3510 case TRUNC_MOD_EXPR:
3511 case CEIL_MOD_EXPR:
3512 case FLOOR_MOD_EXPR:
3513 case ROUND_MOD_EXPR:
3514 case RDIV_EXPR:
3515 if (TREE_CODE (op2) != INTEGER_CST)
3516 return weights->div_mod_cost;
3517 return 1;
3519 default:
3520 /* We expect a copy assignment with no operator. */
3521 gcc_assert (get_gimple_rhs_class (code) == GIMPLE_SINGLE_RHS);
3522 return 0;
3527 /* Estimate number of instructions that will be created by expanding
3528 the statements in the statement sequence STMTS.
3529 WEIGHTS contains weights attributed to various constructs. */
3531 static
3532 int estimate_num_insns_seq (gimple_seq stmts, eni_weights *weights)
3534 int cost;
3535 gimple_stmt_iterator gsi;
3537 cost = 0;
3538 for (gsi = gsi_start (stmts); !gsi_end_p (gsi); gsi_next (&gsi))
3539 cost += estimate_num_insns (gsi_stmt (gsi), weights);
3541 return cost;
3545 /* Estimate the number of instructions that will be created by expanding
3546 STMT. WEIGHTS contains weights attributed to various constructs. */
3548 int
3549 estimate_num_insns (gimple stmt, eni_weights *weights)
3551 unsigned cost, i;
3552 enum gimple_code code = gimple_code (stmt);
3553 tree lhs;
3554 tree rhs;
3556 switch (code)
3558 case GIMPLE_ASSIGN:
3559 /* Try to estimate the cost of assignments. We have two cases to
3560 deal with:
3561 1) Simple assignments to registers;
3562 2) Stores to things that must live in memory. This includes
3563 "normal" stores to scalars, but also assignments of large
3564 structures, or constructors of big arrays;
3566 Let us look at both cases, assuming we have "a = b + C":
3567 <GIMPLE_ASSIGN <var_decl "a">
3568 <plus_expr <var_decl "b"> <constant C>>
3569 If "a" is a GIMPLE register, the assignment to it is free on almost
3570 any target, because "a" usually ends up in a real register. Hence
3571 the only cost of this expression comes from the PLUS_EXPR, and we
3572 can ignore the GIMPLE_ASSIGN.
3573 If "a" is not a GIMPLE register, the assignment to "a" will most
3574 likely be a real store, so the cost of the GIMPLE_ASSIGN is the cost
3575 of moving something into "a", which we compute using the function
3576 estimate_move_cost. */
3577 if (gimple_clobber_p (stmt))
3578 return 0; /* ={v} {CLOBBER} stmt expands to nothing. */
3580 lhs = gimple_assign_lhs (stmt);
3581 rhs = gimple_assign_rhs1 (stmt);
3583 cost = 0;
3585 /* Account for the cost of moving to / from memory. */
3586 if (gimple_store_p (stmt))
3587 cost += estimate_move_cost (TREE_TYPE (lhs));
3588 if (gimple_assign_load_p (stmt))
3589 cost += estimate_move_cost (TREE_TYPE (rhs));
3591 cost += estimate_operator_cost (gimple_assign_rhs_code (stmt), weights,
3592 gimple_assign_rhs1 (stmt),
3593 get_gimple_rhs_class (gimple_assign_rhs_code (stmt))
3594 == GIMPLE_BINARY_RHS
3595 ? gimple_assign_rhs2 (stmt) : NULL);
3596 break;
3598 case GIMPLE_COND:
3599 cost = 1 + estimate_operator_cost (gimple_cond_code (stmt), weights,
3600 gimple_op (stmt, 0),
3601 gimple_op (stmt, 1));
3602 break;
3604 case GIMPLE_SWITCH:
3605 /* Take into account the cost of the switch + guess 2 conditional jumps
3606 for each case label.
3608 TODO: once the switch expansion logic is sufficiently separated, we can
3609 do a better job of estimating the cost of the switch. */
3610 if (weights->time_based)
3611 cost = floor_log2 (gimple_switch_num_labels (stmt)) * 2;
3612 else
3613 cost = gimple_switch_num_labels (stmt) * 2;
3614 break;
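/* For the GIMPLE_SWITCH case above: a switch with 16 labels
   (including the default) is charged floor_log2 (16) * 2 = 8 when
   estimating time, reflecting a balanced decision tree, but
   16 * 2 = 32 when estimating size.  */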
3616 case GIMPLE_CALL:
3618 tree decl = gimple_call_fndecl (stmt);
3619 struct cgraph_node *node = NULL;
3621 /* Do not special-case builtins where we see the body.
3622 This just confuses the inliner. */
3623 if (!decl || !(node = cgraph_get_node (decl)) || node->analyzed)
3625 /* For builtins that are likely expanded to nothing or
3626 inlined, do not account operand costs. */
3627 else if (is_simple_builtin (decl))
3628 return 0;
3629 else if (is_inexpensive_builtin (decl))
3630 return weights->target_builtin_call_cost;
3631 else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
3633 /* We canonicalize x * x to pow (x, 2.0) with -ffast-math, so
3634 specialize the cheap expansion we do here.
3635 ??? This asks for a more general solution. */
3636 switch (DECL_FUNCTION_CODE (decl))
3638 case BUILT_IN_POW:
3639 case BUILT_IN_POWF:
3640 case BUILT_IN_POWL:
3641 if (TREE_CODE (gimple_call_arg (stmt, 1)) == REAL_CST
3642 && REAL_VALUES_EQUAL
3643 (TREE_REAL_CST (gimple_call_arg (stmt, 1)), dconst2))
3644 return estimate_operator_cost (MULT_EXPR, weights,
3645 gimple_call_arg (stmt, 0),
3646 gimple_call_arg (stmt, 0));
3647 break;
3649 default:
3650 break;
3654 cost = node ? weights->call_cost : weights->indirect_call_cost;
3655 if (gimple_call_lhs (stmt))
3656 cost += estimate_move_cost (TREE_TYPE (gimple_call_lhs (stmt)));
3657 for (i = 0; i < gimple_call_num_args (stmt); i++)
3659 tree arg = gimple_call_arg (stmt, i);
3660 cost += estimate_move_cost (TREE_TYPE (arg));
3662 break;
3665 case GIMPLE_RETURN:
3666 return weights->return_cost;
3668 case GIMPLE_GOTO:
3669 case GIMPLE_LABEL:
3670 case GIMPLE_NOP:
3671 case GIMPLE_PHI:
3672 case GIMPLE_PREDICT:
3673 case GIMPLE_DEBUG:
3674 return 0;
3676 case GIMPLE_ASM:
3677 return asm_str_count (gimple_asm_string (stmt));
3679 case GIMPLE_RESX:
3680 /* This is either going to be an external function call with one
3681 argument, or two register copy statements plus a goto. */
3682 return 2;
3684 case GIMPLE_EH_DISPATCH:
3685 /* ??? This is going to turn into a switch statement. Ideally
3686 we'd have a look at the eh region and estimate the number of
3687 edges involved. */
3688 return 10;
3690 case GIMPLE_BIND:
3691 return estimate_num_insns_seq (gimple_bind_body (stmt), weights);
3693 case GIMPLE_EH_FILTER:
3694 return estimate_num_insns_seq (gimple_eh_filter_failure (stmt), weights);
3696 case GIMPLE_CATCH:
3697 return estimate_num_insns_seq (gimple_catch_handler (stmt), weights);
3699 case GIMPLE_TRY:
3700 return (estimate_num_insns_seq (gimple_try_eval (stmt), weights)
3701 + estimate_num_insns_seq (gimple_try_cleanup (stmt), weights));
3703 /* OpenMP directives are generally very expensive. */
3705 case GIMPLE_OMP_RETURN:
3706 case GIMPLE_OMP_SECTIONS_SWITCH:
3707 case GIMPLE_OMP_ATOMIC_STORE:
3708 case GIMPLE_OMP_CONTINUE:
3709 /* ...except these, which are cheap. */
3710 return 0;
3712 case GIMPLE_OMP_ATOMIC_LOAD:
3713 return weights->omp_cost;
3715 case GIMPLE_OMP_FOR:
3716 return (weights->omp_cost
3717 + estimate_num_insns_seq (gimple_omp_body (stmt), weights)
3718 + estimate_num_insns_seq (gimple_omp_for_pre_body (stmt), weights));
3720 case GIMPLE_OMP_PARALLEL:
3721 case GIMPLE_OMP_TASK:
3722 case GIMPLE_OMP_CRITICAL:
3723 case GIMPLE_OMP_MASTER:
3724 case GIMPLE_OMP_ORDERED:
3725 case GIMPLE_OMP_SECTION:
3726 case GIMPLE_OMP_SECTIONS:
3727 case GIMPLE_OMP_SINGLE:
3728 return (weights->omp_cost
3729 + estimate_num_insns_seq (gimple_omp_body (stmt), weights));
3731 case GIMPLE_TRANSACTION:
3732 return (weights->tm_cost
3733 + estimate_num_insns_seq (gimple_transaction_body (stmt),
3734 weights));
3736 default:
3737 gcc_unreachable ();
3740 return cost;
3743 /* Estimate number of instructions that will be created by expanding
3744 function FNDECL. WEIGHTS contains weights attributed to various
3745 constructs. */
3748 estimate_num_insns_fn (tree fndecl, eni_weights *weights)
3750 struct function *my_function = DECL_STRUCT_FUNCTION (fndecl);
3751 gimple_stmt_iterator bsi;
3752 basic_block bb;
3753 int n = 0;
3755 gcc_assert (my_function && my_function->cfg);
3756 FOR_EACH_BB_FN (bb, my_function)
3758 for (bsi = gsi_start_bb (bb); !gsi_end_p (bsi); gsi_next (&bsi))
3759 n += estimate_num_insns (gsi_stmt (bsi), weights);
3762 return n;
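/* A minimal usage sketch (hypothetical, not part of the original source,
   kept out of the build): compare the size and time estimates of a
   function body, assuming FNDECL is a FUNCTION_DECL with a CFG and that
   init_inline_once (below) has already initialized the weight sets.  */
#if 0
int size_estimate = estimate_num_insns_fn (fndecl, &eni_size_weights);
int time_estimate = estimate_num_insns_fn (fndecl, &eni_time_weights);
#endif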
3766 /* Initializes weights used by estimate_num_insns. */
3768 void
3769 init_inline_once (void)
3771 eni_size_weights.call_cost = 1;
3772 eni_size_weights.indirect_call_cost = 3;
3773 eni_size_weights.target_builtin_call_cost = 1;
3774 eni_size_weights.div_mod_cost = 1;
3775 eni_size_weights.omp_cost = 40;
3776 eni_size_weights.tm_cost = 10;
3777 eni_size_weights.time_based = false;
3778 eni_size_weights.return_cost = 1;
3780 /* Estimating time for call is difficult, since we have no idea what the
3781 called function does. In the current uses of eni_time_weights,
3782 underestimating the cost does less harm than overestimating it, so
3783 we choose a rather small value here. */
3784 eni_time_weights.call_cost = 10;
3785 eni_time_weights.indirect_call_cost = 15;
3786 eni_time_weights.target_builtin_call_cost = 1;
3787 eni_time_weights.div_mod_cost = 10;
3788 eni_time_weights.omp_cost = 40;
3789 eni_time_weights.tm_cost = 40;
3790 eni_time_weights.time_based = true;
3791 eni_time_weights.return_cost = 2;
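/* For instance, a direct call is charged 1 under the size weights but 10
   under the time weights: for size a call is just one call instruction,
   while for time the unknown callee body makes calls far more expensive
   than ordinary statements, though the value is still deliberately on the
   low side, per the comment above.  */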
3794 /* Estimate the number of instructions in a gimple_seq. */
3797 count_insns_seq (gimple_seq seq, eni_weights *weights)
3799 gimple_stmt_iterator gsi;
3800 int n = 0;
3801 for (gsi = gsi_start (seq); !gsi_end_p (gsi); gsi_next (&gsi))
3802 n += estimate_num_insns (gsi_stmt (gsi), weights);
3804 return n;
3808 /* Install new lexical TREE_BLOCK underneath 'current_block'. */
3810 static void
3811 prepend_lexical_block (tree current_block, tree new_block)
3813 BLOCK_CHAIN (new_block) = BLOCK_SUBBLOCKS (current_block);
3814 BLOCK_SUBBLOCKS (current_block) = new_block;
3815 BLOCK_SUPERCONTEXT (new_block) = current_block;
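/* For example, if CURRENT_BLOCK's subblocks are B1 -> B2, prepending
   NEW_BLOCK yields the subblock chain NEW_BLOCK -> B1 -> B2, with
   NEW_BLOCK's BLOCK_SUPERCONTEXT pointing back at CURRENT_BLOCK.  */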
3818 /* Add local variables from CALLEE to CALLER. */
3820 static inline void
3821 add_local_variables (struct function *callee, struct function *caller,
3822 copy_body_data *id)
3824 tree var;
3825 unsigned ix;
3827 FOR_EACH_LOCAL_DECL (callee, ix, var)
3828 if (!can_be_nonlocal (var, id))
3830 tree new_var = remap_decl (var, id);
3832 /* Remap debug-expressions. */
3833 if (TREE_CODE (new_var) == VAR_DECL
3834 && DECL_DEBUG_EXPR_IS_FROM (new_var)
3835 && new_var != var)
3837 tree tem = DECL_DEBUG_EXPR (var);
3838 bool old_regimplify = id->regimplify;
3840 /* The mapped debug expression might be deleted
3841 as a varpool node (the reachability analysis
3842 of varpool nodes does not check references
3843 from debug expressions).
3844 Set it to 0 for all global vars.  */
3845 if (L_IPO_COMP_MODE && tem && is_global_var (tem))
3846 tem = NULL;
3848 id->remapping_type_depth++;
3849 walk_tree (&tem, copy_tree_body_r, id, NULL);
3850 id->remapping_type_depth--;
3851 id->regimplify = old_regimplify;
3852 SET_DECL_DEBUG_EXPR (new_var, tem);
3854 add_local_decl (caller, new_var);
3858 /* If STMT is a GIMPLE_CALL, replace it with its inline expansion. */
3860 static bool
3861 expand_call_inline (basic_block bb, gimple stmt, copy_body_data *id)
3863 tree use_retvar;
3864 tree fn;
3865 struct pointer_map_t *st, *dst;
3866 tree return_slot;
3867 tree modify_dest;
3868 location_t saved_location;
3869 struct cgraph_edge *cg_edge;
3870 cgraph_inline_failed_t reason;
3871 basic_block return_block;
3872 edge e;
3873 gimple_stmt_iterator gsi, stmt_gsi;
3874 bool successfully_inlined = FALSE;
3875 bool purge_dead_abnormal_edges;
3877 /* Set input_location here so we get the right instantiation context
3878 if we call instantiate_decl from inlinable_function_p. */
3879 /* FIXME: instantiate_decl isn't called by inlinable_function_p. */
3880 saved_location = input_location;
3881 input_location = gimple_location (stmt);
3883 /* From here on, we're only interested in CALL_EXPRs. */
3884 if (gimple_code (stmt) != GIMPLE_CALL)
3885 goto egress;
3887 cg_edge = cgraph_edge (id->dst_node, stmt);
3888 gcc_checking_assert (cg_edge);
3889 /* First, see if we can figure out what function is being called.
3890 If we cannot, then there is no hope of inlining the function. */
3891 if (cg_edge->indirect_unknown_callee)
3892 goto egress;
3893 fn = cg_edge->callee->symbol.decl;
3894 gcc_checking_assert (fn);
3896 /* If FN is a declaration of a function in a nested scope that was
3897 globally declared inline, we don't set its DECL_INITIAL.
3898 However, we can't blindly follow DECL_ABSTRACT_ORIGIN because the
3899 C++ front-end uses it for cdtors to refer to their internal
3900 declarations, that are not real functions. Fortunately those
3901 don't have trees to be saved, so we can tell by checking their
3902 gimple_body. */
3903 if (!DECL_INITIAL (fn)
3904 && DECL_ABSTRACT_ORIGIN (fn)
3905 && gimple_has_body_p (DECL_ABSTRACT_ORIGIN (fn)))
3906 fn = DECL_ABSTRACT_ORIGIN (fn);
3908 /* Don't try to inline functions that are not well-suited to inlining. */
3909 if (cg_edge->inline_failed)
3911 reason = cg_edge->inline_failed;
3912 /* If this call was originally indirect, we do not want to emit any
3913 inlining related warnings or sorry messages because there are no
3914 guarantees regarding those. */
3915 if (cg_edge->indirect_inlining_edge)
3916 goto egress;
3918 if (lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn))
3919 /* For extern inline functions that get redefined we have always
3920 silently ignored the always_inline flag.  Better behaviour would
3921 be to keep both bodies and use the extern inline body
3922 for inlining, but we can't do that because frontends overwrite
3923 the body.  */
3924 && !cg_edge->callee->local.redefined_extern_inline
3925 /* During early inline pass, report only when optimization is
3926 not turned on. */
3927 && (cgraph_global_info_ready
3928 || !optimize)
3929 /* PR 20090218-1_0.c. Body can be provided by another module. */
3930 && (reason != CIF_BODY_NOT_AVAILABLE || !flag_generate_lto))
3932 error ("inlining failed in call to always_inline %q+F: %s", fn,
3933 cgraph_inline_failed_string (reason));
3934 error ("called from here");
3936 else if (warn_inline
3937 && DECL_DECLARED_INLINE_P (fn)
3938 && !DECL_NO_INLINE_WARNING_P (fn)
3939 && !DECL_IN_SYSTEM_HEADER (fn)
3940 && reason != CIF_UNSPECIFIED
3941 && !lookup_attribute ("noinline", DECL_ATTRIBUTES (fn))
3942 /* Do not warn about recursive calls that were not inlined.  */
3943 && !cgraph_edge_recursive_p (cg_edge)
3944 /* Avoid warnings during early inline pass. */
3945 && cgraph_global_info_ready)
3947 warning (OPT_Winline, "inlining failed in call to %q+F: %s",
3948 fn, _(cgraph_inline_failed_string (reason)));
3949 warning (OPT_Winline, "called from here");
3951 goto egress;
3953 fn = cg_edge->callee->symbol.decl;
3955 #ifdef ENABLE_CHECKING
3956 if (cg_edge->callee->symbol.decl != id->dst_node->symbol.decl)
3957 verify_cgraph_node (cg_edge->callee);
3958 #endif
3960 /* We will be inlining this callee. */
3961 id->eh_lp_nr = lookup_stmt_eh_lp (stmt);
3963 /* Update the caller's EH personality.  */
3964 if (DECL_FUNCTION_PERSONALITY (cg_edge->callee->symbol.decl))
3965 DECL_FUNCTION_PERSONALITY (cg_edge->caller->symbol.decl)
3966 = DECL_FUNCTION_PERSONALITY (cg_edge->callee->symbol.decl);
3968 /* Split the block holding the GIMPLE_CALL. */
3969 e = split_block (bb, stmt);
3970 bb = e->src;
3971 return_block = e->dest;
3972 remove_edge (e);
3974 /* split_block splits after the statement; work around this by
3975 moving the call into the second block manually. Not pretty,
3976 but seems easier than doing the CFG manipulation by hand
3977 when the GIMPLE_CALL is in the last statement of BB. */
3978 stmt_gsi = gsi_last_bb (bb);
3979 gsi_remove (&stmt_gsi, false);
3981 /* If the GIMPLE_CALL was in the last statement of BB, it may have
3982 been the source of abnormal edges. In this case, schedule
3983 the removal of dead abnormal edges. */
3984 gsi = gsi_start_bb (return_block);
3985 if (gsi_end_p (gsi))
3987 gsi_insert_after (&gsi, stmt, GSI_NEW_STMT);
3988 purge_dead_abnormal_edges = true;
3990 else
3992 gsi_insert_before (&gsi, stmt, GSI_NEW_STMT);
3993 purge_dead_abnormal_edges = false;
3996 stmt_gsi = gsi_start_bb (return_block);
3998 /* Build a block containing code to initialize the arguments, the
3999 actual inline expansion of the body, and a label for the return
4000 statements within the function to jump to. The type of the
4001 statement expression is the return type of the function call.
4002 ??? If the call does not have an associated block then we will
4003 remap all callee blocks to NULL, effectively dropping most of
4004 its debug information. This should only happen for calls to
4005 artificial decls inserted by the compiler itself. We need to
4006 either link the inlined blocks into the caller block tree or
4007 not refer to them in any way to not break GC for locations. */
4008 if (gimple_block (stmt))
4010 id->block = make_node (BLOCK);
4011 BLOCK_ABSTRACT_ORIGIN (id->block) = fn;
4012 BLOCK_SOURCE_LOCATION (id->block) = LOCATION_LOCUS (input_location);
4013 prepend_lexical_block (gimple_block (stmt), id->block);
4016 /* Local declarations will be replaced by their equivalents in this
4017 map. */
4018 st = id->decl_map;
4019 id->decl_map = pointer_map_create ();
4020 dst = id->debug_map;
4021 id->debug_map = NULL;
4023 /* Record the function we are about to inline. */
4024 id->src_fn = fn;
4025 id->src_node = cg_edge->callee;
4026 id->src_cfun = DECL_STRUCT_FUNCTION (fn);
4027 id->gimple_call = stmt;
4029 gcc_assert (!id->src_cfun->after_inlining);
4031 id->entry_bb = bb;
4032 if (lookup_attribute ("cold", DECL_ATTRIBUTES (fn)))
4034 gimple_stmt_iterator si = gsi_last_bb (bb);
4035 gsi_insert_after (&si, gimple_build_predict (PRED_COLD_FUNCTION,
4036 NOT_TAKEN),
4037 GSI_NEW_STMT);
4039 initialize_inlined_parameters (id, stmt, fn, bb);
4041 if (DECL_INITIAL (fn))
4043 if (gimple_block (stmt))
4045 tree *var;
4047 prepend_lexical_block (id->block,
4048 remap_blocks (DECL_INITIAL (fn), id));
4049 gcc_checking_assert (BLOCK_SUBBLOCKS (id->block)
4050 && (BLOCK_CHAIN (BLOCK_SUBBLOCKS (id->block))
4051 == NULL_TREE));
4052 /* Move vars for PARM_DECLs from the DECL_INITIAL block to id->block;
4053 otherwise, for DWARF, DW_TAG_formal_parameter will not be children of
4054 DW_TAG_inlined_subroutine, but of a DW_TAG_lexical_block
4055 under it.  The parameters could then still be evaluated in the
4056 debugger, but would not show up in backtraces.  */
4057 for (var = &BLOCK_VARS (BLOCK_SUBBLOCKS (id->block)); *var; )
4058 if (TREE_CODE (DECL_ORIGIN (*var)) == PARM_DECL)
4060 tree v = *var;
4061 *var = TREE_CHAIN (v);
4062 TREE_CHAIN (v) = BLOCK_VARS (id->block);
4063 BLOCK_VARS (id->block) = v;
4065 else
4066 var = &TREE_CHAIN (*var);
4068 else
4069 remap_blocks_to_null (DECL_INITIAL (fn), id);
4072 /* Return statements in the function body will be replaced by jumps
4073 to the RET_LABEL. */
4074 gcc_assert (DECL_INITIAL (fn));
4075 gcc_assert (TREE_CODE (DECL_INITIAL (fn)) == BLOCK);
4077 /* Find the LHS to which the result of this call is assigned. */
4078 return_slot = NULL;
4079 if (gimple_call_lhs (stmt))
4081 modify_dest = gimple_call_lhs (stmt);
4083 /* The function which we are inlining might not return a value,
4084 in which case we should issue a warning that the function
4085 does not return a value. In that case the optimizers will
4086 see that the variable to which the value is assigned was not
4087 initialized. We do not want to issue a warning about that
4088 uninitialized variable. */
4089 if (DECL_P (modify_dest))
4090 TREE_NO_WARNING (modify_dest) = 1;
4092 if (gimple_call_return_slot_opt_p (stmt))
4094 return_slot = modify_dest;
4095 modify_dest = NULL;
4098 else
4099 modify_dest = NULL;
4101 /* If we are inlining a call to the C++ operator new, we don't want
4102 to use type based alias analysis on the return value. Otherwise
4103 we may get confused if the compiler sees that the inlined new
4104 function returns a pointer which was just deleted. See bug
4105 33407. */
4106 if (DECL_IS_OPERATOR_NEW (fn))
4108 return_slot = NULL;
4109 modify_dest = NULL;
4112 /* Declare the return variable for the function. */
4113 use_retvar = declare_return_variable (id, return_slot, modify_dest, bb);
4115 /* Add local vars in this inlined callee to caller. */
4116 add_local_variables (id->src_cfun, cfun, id);
4118 if (dump_file && (dump_flags & TDF_DETAILS))
4120 fprintf (dump_file, "Inlining ");
4121 print_generic_expr (dump_file, id->src_fn, 0);
4122 fprintf (dump_file, " to ");
4123 print_generic_expr (dump_file, id->dst_fn, 0);
4124 fprintf (dump_file, " with frequency %i\n", cg_edge->frequency);
4127 /* This is it. Duplicate the callee body. Assume callee is
4128 pre-gimplified. Note that we must not alter the caller
4129 function in any way before this point, as this CALL_EXPR may be
4130 a self-referential call; if we're calling ourselves, we need to
4131 duplicate our body before altering anything. */
4132 copy_body (id, bb->count,
4133 cg_edge->frequency * REG_BR_PROB_BASE / CGRAPH_FREQ_BASE,
4134 bb, return_block, NULL, NULL);
4136 /* Reset the escaped solution. */
4137 if (cfun->gimple_df)
4138 pt_solution_reset (&cfun->gimple_df->escaped);
4140 /* Clean up. */
4141 if (id->debug_map)
4143 pointer_map_destroy (id->debug_map);
4144 id->debug_map = dst;
4146 pointer_map_destroy (id->decl_map);
4147 id->decl_map = st;
4149 /* Unlink the call's virtual operands before replacing it.  */
4150 unlink_stmt_vdef (stmt);
4152 /* If the inlined function returns a result that we care about,
4153 substitute the GIMPLE_CALL with an assignment of the return
4154 variable to the LHS of the call. That is, if STMT was
4155 'a = foo (...)', substitute the call with 'a = USE_RETVAR'. */
4156 if (use_retvar && gimple_call_lhs (stmt))
4158 gimple old_stmt = stmt;
4159 stmt = gimple_build_assign (gimple_call_lhs (stmt), use_retvar);
4160 gsi_replace (&stmt_gsi, stmt, false);
4161 maybe_clean_or_replace_eh_stmt (old_stmt, stmt);
4163 else
4165 /* Handle the case of inlining a function with no return
4166 statement, which causes the return value to become undefined. */
4167 if (gimple_call_lhs (stmt)
4168 && TREE_CODE (gimple_call_lhs (stmt)) == SSA_NAME)
4170 tree name = gimple_call_lhs (stmt);
4171 tree var = SSA_NAME_VAR (name);
4172 tree def = ssa_default_def (cfun, var);
4174 if (def)
4176 /* If the variable is used undefined, make this name
4177 undefined via a move. */
4178 stmt = gimple_build_assign (gimple_call_lhs (stmt), def);
4179 gsi_replace (&stmt_gsi, stmt, true);
4181 else
4183 /* Otherwise make this variable undefined. */
4184 gsi_remove (&stmt_gsi, true);
4185 set_ssa_default_def (cfun, var, name);
4186 SSA_NAME_DEF_STMT (name) = gimple_build_nop ();
4189 else
4190 gsi_remove (&stmt_gsi, true);
4193 if (purge_dead_abnormal_edges)
4195 gimple_purge_dead_eh_edges (return_block);
4196 gimple_purge_dead_abnormal_call_edges (return_block);
4199 /* If the value of the new expression is ignored, that's OK. We
4200 don't warn about this for CALL_EXPRs, so we shouldn't warn about
4201 the equivalent inlined version either. */
4202 if (is_gimple_assign (stmt))
4204 gcc_assert (gimple_assign_single_p (stmt)
4205 || CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt)));
4206 TREE_USED (gimple_assign_rhs1 (stmt)) = 1;
4209 /* Output the inlining info for this abstract function, since it has been
4210 inlined. If we don't do this now, we can lose the information about the
4211 variables in the function when the blocks get blown away as soon as we
4212 remove the cgraph node. */
4213 if (gimple_block (stmt))
4214 (*debug_hooks->outlining_inline_function) (cg_edge->callee->symbol.decl);
4216 /* Update callgraph if needed. */
4217 cgraph_remove_node (cg_edge->callee);
4219 id->block = NULL_TREE;
4220 successfully_inlined = TRUE;
4222 egress:
4223 input_location = saved_location;
4224 return successfully_inlined;
4227 /* Expand call statements in basic block BB.
4228 We can only have CALL_EXPRs as the "toplevel" tree code or nested
4229 in a MODIFY_EXPR.  */
4231 static bool
4232 gimple_expand_calls_inline (basic_block bb, copy_body_data *id)
4234 gimple_stmt_iterator gsi;
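  /* Note: a successful inline expansion splits BB at the call and moves the
     statements after it into a new fall-through block, so stopping the scan
     of BB after the first inline is safe; the caller's FOR_EACH_BB walk
     reaches the newly created blocks later.  */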
4236 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
4238 gimple stmt = gsi_stmt (gsi);
4240 if (is_gimple_call (stmt)
4241 && expand_call_inline (bb, stmt, id))
4242 return true;
4245 return false;
4249 /* Walk all basic blocks created after FIRST and try to fold every statement
4250 in the STATEMENTS pointer set. */
4252 static void
4253 fold_marked_statements (int first, struct pointer_set_t *statements)
4255 for (; first < n_basic_blocks; first++)
4256 if (BASIC_BLOCK (first))
4258 gimple_stmt_iterator gsi;
4260 for (gsi = gsi_start_bb (BASIC_BLOCK (first));
4261 !gsi_end_p (gsi);
4262 gsi_next (&gsi))
4263 if (pointer_set_contains (statements, gsi_stmt (gsi)))
4265 gimple old_stmt = gsi_stmt (gsi);
4266 tree old_decl = is_gimple_call (old_stmt) ? gimple_call_fndecl (old_stmt) : 0;
4268 if (old_decl && DECL_BUILT_IN (old_decl))
4270 /* Folding builtins can create multiple instructions;
4271 we need to look at all of them.  */
4272 gimple_stmt_iterator i2 = gsi;
4273 gsi_prev (&i2);
4274 if (fold_stmt (&gsi))
4276 gimple new_stmt;
4277 /* If a builtin at the end of a bb folded into nothing,
4278 the following loop won't work. */
4279 if (gsi_end_p (gsi))
4281 cgraph_update_edges_for_call_stmt (old_stmt,
4282 old_decl, NULL);
4283 break;
4285 if (gsi_end_p (i2))
4286 i2 = gsi_start_bb (BASIC_BLOCK (first));
4287 else
4288 gsi_next (&i2);
4289 while (1)
4291 new_stmt = gsi_stmt (i2);
4292 update_stmt (new_stmt);
4293 cgraph_update_edges_for_call_stmt (old_stmt, old_decl,
4294 new_stmt);
4296 if (new_stmt == gsi_stmt (gsi))
4298 /* It is okay to check only the very last
4299 of these statements.  If it is a throwing
4300 statement nothing will change.  If it isn't,
4301 this can remove EH edges.  The only way this
4302 could be wrong is if some intermediate
4303 statement throws but the last one does not;
4304 that would mean we'd have to split the block,
4305 which we can't do here and would lose
4306 anyway.  And as builtins probably never
4307 throw, this is all moot anyway.  */
4308 if (maybe_clean_or_replace_eh_stmt (old_stmt,
4309 new_stmt))
4310 gimple_purge_dead_eh_edges (BASIC_BLOCK (first));
4311 break;
4313 gsi_next (&i2);
4317 else if (fold_stmt (&gsi))
4319 /* Re-read the statement from GSI as fold_stmt() may
4320 have changed it. */
4321 gimple new_stmt = gsi_stmt (gsi);
4322 update_stmt (new_stmt);
4324 if (is_gimple_call (old_stmt)
4325 || is_gimple_call (new_stmt))
4326 cgraph_update_edges_for_call_stmt (old_stmt, old_decl,
4327 new_stmt);
4329 if (maybe_clean_or_replace_eh_stmt (old_stmt, new_stmt))
4330 gimple_purge_dead_eh_edges (BASIC_BLOCK (first));
4336 /* Return true if BB has at least one abnormal outgoing edge. */
4338 static inline bool
4339 has_abnormal_outgoing_edge_p (basic_block bb)
4341 edge e;
4342 edge_iterator ei;
4344 FOR_EACH_EDGE (e, ei, bb->succs)
4345 if (e->flags & EDGE_ABNORMAL)
4346 return true;
4348 return false;
4351 /* Expand calls to inline functions in the body of FN. */
4353 unsigned int
4354 optimize_inline_calls (tree fn)
4356 copy_body_data id;
4357 basic_block bb;
4358 int last = n_basic_blocks;
4359 struct gimplify_ctx gctx;
4360 bool inlined_p = false;
4362 /* Clear out ID. */
4363 memset (&id, 0, sizeof (id));
4365 id.src_node = id.dst_node = cgraph_get_node (fn);
4366 gcc_assert (id.dst_node->analyzed);
4367 id.dst_fn = fn;
4368 /* Or any functions that aren't finished yet. */
4369 if (current_function_decl)
4370 id.dst_fn = current_function_decl;
4372 id.copy_decl = copy_decl_maybe_to_var;
4373 id.transform_call_graph_edges = CB_CGE_DUPLICATE;
4374 id.transform_new_cfg = false;
4375 id.transform_return_to_modify = true;
4376 id.transform_lang_insert_block = NULL;
4377 id.statements_to_fold = pointer_set_create ();
4379 push_gimplify_context (&gctx);
4381 /* We make no attempts to keep dominance info up-to-date. */
4382 free_dominance_info (CDI_DOMINATORS);
4383 free_dominance_info (CDI_POST_DOMINATORS);
4385 /* Register specific gimple functions. */
4386 gimple_register_cfg_hooks ();
4388 /* Reach the trees by walking over the CFG, and note the
4389 enclosing basic-blocks in the call edges. */
4390 /* We walk the blocks going forward, because inlined function bodies
4391 will split id->current_basic_block, and the new blocks will
4392 follow it; we'll trudge through them, processing their CALL_EXPRs
4393 along the way. */
4394 FOR_EACH_BB (bb)
4395 inlined_p |= gimple_expand_calls_inline (bb, &id);
4397 pop_gimplify_context (NULL);
4399 #ifdef ENABLE_CHECKING
4401 struct cgraph_edge *e;
4403 verify_cgraph_node (id.dst_node);
4405 /* Double check that we inlined everything we are supposed to inline. */
4406 for (e = id.dst_node->callees; e; e = e->next_callee)
4407 gcc_assert (e->inline_failed || !e->call_stmt /*fake edge*/);
4409 #endif
4411 /* Fold queued statements. */
4412 fold_marked_statements (last, id.statements_to_fold);
4413 pointer_set_destroy (id.statements_to_fold);
4415 gcc_assert (!id.debug_stmts.exists ());
4417 /* If we didn't inline into the function there is nothing to do. */
4418 if (!inlined_p)
4419 return 0;
4421 /* Renumber the lexical scoping (non-code) blocks consecutively. */
4422 number_blocks (fn);
4424 delete_unreachable_blocks_update_callgraph (&id);
4425 #ifdef ENABLE_CHECKING
4426 verify_cgraph_node (id.dst_node);
4427 #endif
4429 /* It would be nice to check SSA/CFG/statement consistency here, but it is
4430 not possible yet - the IPA passes might make various functions not
4431 throw and they don't care to proactively update local EH info.  This is
4432 done later in the fixup_cfg pass, which also executes the verification.  */
4433 return (TODO_update_ssa
4434 | TODO_cleanup_cfg
4435 | (gimple_in_ssa_p (cfun) ? TODO_remove_unused_locals : 0)
4436 | (gimple_in_ssa_p (cfun) ? TODO_update_address_taken : 0)
4437 | (profile_status != PROFILE_ABSENT ? TODO_rebuild_frequencies : 0));
4440 /* Passed to walk_tree. Copies the node pointed to, if appropriate. */
4442 tree
4443 copy_tree_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
4445 enum tree_code code = TREE_CODE (*tp);
4446 enum tree_code_class cl = TREE_CODE_CLASS (code);
4448 /* We make copies of most nodes. */
4449 if (IS_EXPR_CODE_CLASS (cl)
4450 || code == TREE_LIST
4451 || code == TREE_VEC
4452 || code == TYPE_DECL
4453 || code == OMP_CLAUSE)
4455 /* Because the chain gets clobbered when we make a copy, we save it
4456 here. */
4457 tree chain = NULL_TREE, new_tree;
4459 if (CODE_CONTAINS_STRUCT (code, TS_COMMON))
4460 chain = TREE_CHAIN (*tp);
4462 /* Copy the node. */
4463 new_tree = copy_node (*tp);
4465 /* Propagate mudflap marked-ness. */
4466 if (flag_mudflap && mf_marked_p (*tp))
4467 mf_mark (new_tree);
4469 *tp = new_tree;
4471 /* Now, restore the chain, if appropriate. That will cause
4472 walk_tree to walk into the chain as well. */
4473 if (code == PARM_DECL
4474 || code == TREE_LIST
4475 || code == OMP_CLAUSE)
4476 TREE_CHAIN (*tp) = chain;
4478 /* For now, we don't update BLOCKs when we make copies. So, we
4479 have to nullify all BIND_EXPRs. */
4480 if (TREE_CODE (*tp) == BIND_EXPR)
4481 BIND_EXPR_BLOCK (*tp) = NULL_TREE;
4483 else if (code == CONSTRUCTOR)
4485 /* CONSTRUCTOR nodes need special handling because
4486 we need to duplicate the vector of elements. */
4487 tree new_tree;
4489 new_tree = copy_node (*tp);
4491 /* Propagate mudflap marked-ness. */
4492 if (flag_mudflap && mf_marked_p (*tp))
4493 mf_mark (new_tree);
4495 CONSTRUCTOR_ELTS (new_tree) = vec_safe_copy (CONSTRUCTOR_ELTS (*tp));
4496 *tp = new_tree;
4498 else if (code == STATEMENT_LIST)
4499 /* We used to just abort on STATEMENT_LIST, but we can run into them
4500 with statement-expressions (c++/40975). */
4501 copy_statement_list (tp);
4502 else if (TREE_CODE_CLASS (code) == tcc_type)
4503 *walk_subtrees = 0;
4504 else if (TREE_CODE_CLASS (code) == tcc_declaration)
4506 *walk_subtrees = 0;
4507 if (L_IPO_COMP_MODE
4508 && (code == VAR_DECL)
4509 && (TREE_STATIC (*tp) || DECL_EXTERNAL (*tp)))
4511 tree resolved_decl = real_varpool_node (*tp)->symbol.decl;
4512 if (resolved_decl != *tp)
4514 *tp = resolved_decl;
4518 else if (TREE_CODE_CLASS (code) == tcc_constant)
4519 *walk_subtrees = 0;
4520 return NULL_TREE;
4523 /* The SAVE_EXPR pointed to by TP is being copied.  If ST contains
4524 information indicating to what new SAVE_EXPR this one should be mapped,
4525 use that one.  Otherwise, create a new node and enter it in ST.  */
4528 static void
4529 remap_save_expr (tree *tp, void *st_, int *walk_subtrees)
4531 struct pointer_map_t *st = (struct pointer_map_t *) st_;
4532 tree *n;
4533 tree t;
4535 /* See if we already encountered this SAVE_EXPR. */
4536 n = (tree *) pointer_map_contains (st, *tp);
4538 /* If we didn't already remap this SAVE_EXPR, do so now. */
4539 if (!n)
4541 t = copy_node (*tp);
4543 /* Remember this SAVE_EXPR. */
4544 *pointer_map_insert (st, *tp) = t;
4545 /* Make sure we don't remap an already-remapped SAVE_EXPR. */
4546 *pointer_map_insert (st, t) = t;
4548 else
4550 /* We've already walked into this SAVE_EXPR; don't do it again. */
4551 *walk_subtrees = 0;
4552 t = *n;
4555 /* Replace this SAVE_EXPR with the copy. */
4556 *tp = t;
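/* For example, if the same SAVE_EXPR S occurs twice in the tree being
   copied, the first visit copies it to S' and records both S -> S' and
   S' -> S' in ST; the second visit finds the mapping, reuses S' and skips
   its subtrees, preserving SAVE_EXPR's evaluate-once semantics.  */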
4559 /* Called via walk_tree.  If *TP points to a LABEL_EXPR for a local label,
4560 copies the declaration and enters it in the pointer map in DATA (which is
4561 really a `copy_body_data *').  */
4563 static tree
4564 mark_local_for_remap_r (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED,
4565 void *data)
4567 copy_body_data *id = (copy_body_data *) data;
4569 /* Don't walk into types. */
4570 if (TYPE_P (*tp))
4571 *walk_subtrees = 0;
4573 else if (TREE_CODE (*tp) == LABEL_EXPR)
4575 tree decl = TREE_OPERAND (*tp, 0);
4577 /* Copy the decl and remember the copy. */
4578 insert_decl_map (id, decl, id->copy_decl (decl, id));
4581 return NULL_TREE;
4584 /* Perform any modifications to EXPR required when it is unsaved. Does
4585 not recurse into EXPR's subtrees. */
4587 static void
4588 unsave_expr_1 (tree expr)
4590 switch (TREE_CODE (expr))
4592 case TARGET_EXPR:
4593 /* Don't mess with a TARGET_EXPR that hasn't been expanded.
4594 It's OK for this to happen if it was part of a subtree that
4595 isn't immediately expanded, such as operand 2 of another
4596 TARGET_EXPR. */
4597 if (TREE_OPERAND (expr, 1))
4598 break;
4600 TREE_OPERAND (expr, 1) = TREE_OPERAND (expr, 3);
4601 TREE_OPERAND (expr, 3) = NULL_TREE;
4602 break;
4604 default:
4605 break;
4609 /* Called via walk_tree when an expression is unsaved.  Using the
4610 pointer map in DATA (which is really a `copy_body_data *'),
4611 remaps all local declarations to appropriate replacements.  */
4613 static tree
4614 unsave_r (tree *tp, int *walk_subtrees, void *data)
4616 copy_body_data *id = (copy_body_data *) data;
4617 struct pointer_map_t *st = id->decl_map;
4618 tree *n;
4620 /* Only a local declaration (variable or label). */
4621 if ((TREE_CODE (*tp) == VAR_DECL && !TREE_STATIC (*tp))
4622 || TREE_CODE (*tp) == LABEL_DECL)
4624 /* Lookup the declaration. */
4625 n = (tree *) pointer_map_contains (st, *tp);
4627 /* If it's there, remap it. */
4628 if (n)
4629 *tp = *n;
4632 else if (TREE_CODE (*tp) == STATEMENT_LIST)
4633 gcc_unreachable ();
4634 else if (TREE_CODE (*tp) == BIND_EXPR)
4635 copy_bind_expr (tp, walk_subtrees, id);
4636 else if (TREE_CODE (*tp) == SAVE_EXPR
4637 || TREE_CODE (*tp) == TARGET_EXPR)
4638 remap_save_expr (tp, st, walk_subtrees);
4639 else
4641 copy_tree_r (tp, walk_subtrees, NULL);
4643 /* Do whatever unsaving is required. */
4644 unsave_expr_1 (*tp);
4647 /* Keep iterating. */
4648 return NULL_TREE;
4651 /* Copies everything in EXPR and replaces variables, labels
4652 and SAVE_EXPRs local to EXPR. */
4654 tree
4655 unsave_expr_now (tree expr)
4657 copy_body_data id;
4659 /* There's nothing to do for NULL_TREE. */
4660 if (expr == 0)
4661 return expr;
4663 /* Set up ID. */
4664 memset (&id, 0, sizeof (id));
4665 id.src_fn = current_function_decl;
4666 id.dst_fn = current_function_decl;
4667 id.decl_map = pointer_map_create ();
4668 id.debug_map = NULL;
4670 id.copy_decl = copy_decl_no_change;
4671 id.transform_call_graph_edges = CB_CGE_DUPLICATE;
4672 id.transform_new_cfg = false;
4673 id.transform_return_to_modify = false;
4674 id.transform_lang_insert_block = NULL;
4676 /* Walk the tree once to find local labels. */
4677 walk_tree_without_duplicates (&expr, mark_local_for_remap_r, &id);
4679 /* Walk the tree again, copying, remapping, and unsaving. */
4680 walk_tree (&expr, unsave_r, &id, NULL);
4682 /* Clean up. */
4683 pointer_map_destroy (id.decl_map);
4684 if (id.debug_map)
4685 pointer_map_destroy (id.debug_map);
4687 return expr;
4690 /* Called via walk_gimple_seq.  If *GSIP points to a GIMPLE_LABEL for a local
4691 label, copies the declaration and enters it in the pointer map in DATA (which
4692 is really a `copy_body_data *').  */
4694 static tree
4695 mark_local_labels_stmt (gimple_stmt_iterator *gsip,
4696 bool *handled_ops_p ATTRIBUTE_UNUSED,
4697 struct walk_stmt_info *wi)
4699 copy_body_data *id = (copy_body_data *) wi->info;
4700 gimple stmt = gsi_stmt (*gsip);
4702 if (gimple_code (stmt) == GIMPLE_LABEL)
4704 tree decl = gimple_label_label (stmt);
4706 /* Copy the decl and remember the copy. */
4707 insert_decl_map (id, decl, id->copy_decl (decl, id));
4710 return NULL_TREE;
4714 /* Called via walk_gimple_seq by copy_gimple_seq_and_replace_locals.
4715 Using the pointer map pointed to by ST (the decl_map of the
4716 `copy_body_data' passed through WI), remaps all local declarations
4717 to appropriate replacements in gimple operands.  */
4719 static tree
4720 replace_locals_op (tree *tp, int *walk_subtrees, void *data)
4722 struct walk_stmt_info *wi = (struct walk_stmt_info*) data;
4723 copy_body_data *id = (copy_body_data *) wi->info;
4724 struct pointer_map_t *st = id->decl_map;
4725 tree *n;
4726 tree expr = *tp;
4728 /* Only a local declaration (variable or label). */
4729 if ((TREE_CODE (expr) == VAR_DECL
4730 && !TREE_STATIC (expr))
4731 || TREE_CODE (expr) == LABEL_DECL)
4733 /* Lookup the declaration. */
4734 n = (tree *) pointer_map_contains (st, expr);
4736 /* If it's there, remap it. */
4737 if (n)
4738 *tp = *n;
4739 *walk_subtrees = 0;
4741 else if (TREE_CODE (expr) == STATEMENT_LIST
4742 || TREE_CODE (expr) == BIND_EXPR
4743 || TREE_CODE (expr) == SAVE_EXPR)
4744 gcc_unreachable ();
4745 else if (TREE_CODE (expr) == TARGET_EXPR)
4747 /* Don't mess with a TARGET_EXPR that hasn't been expanded.
4748 It's OK for this to happen if it was part of a subtree that
4749 isn't immediately expanded, such as operand 2 of another
4750 TARGET_EXPR. */
4751 if (!TREE_OPERAND (expr, 1))
4753 TREE_OPERAND (expr, 1) = TREE_OPERAND (expr, 3);
4754 TREE_OPERAND (expr, 3) = NULL_TREE;
4758 /* Keep iterating. */
4759 return NULL_TREE;
4763 /* Called via walk_gimple_seq by copy_gimple_seq_and_replace_locals.
4764 Using the pointer map in the `copy_body_data' passed through WI,
4765 remaps all local declarations to appropriate replacements in gimple
4766 statements.  */
4768 static tree
4769 replace_locals_stmt (gimple_stmt_iterator *gsip,
4770 bool *handled_ops_p ATTRIBUTE_UNUSED,
4771 struct walk_stmt_info *wi)
4773 copy_body_data *id = (copy_body_data *) wi->info;
4774 gimple stmt = gsi_stmt (*gsip);
4776 if (gimple_code (stmt) == GIMPLE_BIND)
4778 tree block = gimple_bind_block (stmt);
4780 if (block)
4782 remap_block (&block, id);
4783 gimple_bind_set_block (stmt, block);
4786 /* This will remap a lot of the same decls again, but this should be
4787 harmless. */
4788 if (gimple_bind_vars (stmt))
4789 gimple_bind_set_vars (stmt, remap_decls (gimple_bind_vars (stmt),
4790 NULL, id));
4793 /* Keep iterating. */
4794 return NULL_TREE;
4798 /* Copies everything in SEQ and replaces variables and labels local to
4799 current_function_decl. */
4801 gimple_seq
4802 copy_gimple_seq_and_replace_locals (gimple_seq seq)
4804 copy_body_data id;
4805 struct walk_stmt_info wi;
4806 struct pointer_set_t *visited;
4807 gimple_seq copy;
4809 /* There's nothing to do for an empty sequence.  */
4810 if (seq == NULL)
4811 return seq;
4813 /* Set up ID. */
4814 memset (&id, 0, sizeof (id));
4815 id.src_fn = current_function_decl;
4816 id.dst_fn = current_function_decl;
4817 id.decl_map = pointer_map_create ();
4818 id.debug_map = NULL;
4820 id.copy_decl = copy_decl_no_change;
4821 id.transform_call_graph_edges = CB_CGE_DUPLICATE;
4822 id.transform_new_cfg = false;
4823 id.transform_return_to_modify = false;
4824 id.transform_lang_insert_block = NULL;
4826 /* Walk the tree once to find local labels. */
4827 memset (&wi, 0, sizeof (wi));
4828 visited = pointer_set_create ();
4829 wi.info = &id;
4830 wi.pset = visited;
4831 walk_gimple_seq (seq, mark_local_labels_stmt, NULL, &wi);
4832 pointer_set_destroy (visited);
4834 copy = gimple_seq_copy (seq);
4836 /* Walk the copy, remapping decls. */
4837 memset (&wi, 0, sizeof (wi));
4838 wi.info = &id;
4839 walk_gimple_seq (copy, replace_locals_stmt, replace_locals_op, &wi);
4841 /* Clean up. */
4842 pointer_map_destroy (id.decl_map);
4843 if (id.debug_map)
4844 pointer_map_destroy (id.debug_map);
4846 return copy;
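/* Design note: labels are collected in a separate first pass because a
   GIMPLE_GOTO may appear before the GIMPLE_LABEL it targets; remapping
   operands lazily in a single pass would miss such forward references.  */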
4850 /* Allow someone to determine if SEARCH is a child of TOP from gdb. */
4852 static tree
4853 debug_find_tree_1 (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED, void *data)
4855 if (*tp == data)
4856 return (tree) data;
4857 else
4858 return NULL;
4861 DEBUG_FUNCTION bool
4862 debug_find_tree (tree top, tree search)
4864 return walk_tree_without_duplicates (&top, debug_find_tree_1, search) != 0;
4868 /* Declare the variables created by the inliner.  Add all the variables in
4869 VARS to BLOCK.  */
4871 static void
4872 declare_inline_vars (tree block, tree vars)
4874 tree t;
4875 for (t = vars; t; t = DECL_CHAIN (t))
4877 DECL_SEEN_IN_BIND_EXPR_P (t) = 1;
4878 gcc_assert (!TREE_STATIC (t) && !TREE_ASM_WRITTEN (t));
4879 add_local_decl (cfun, t);
4882 if (block)
4883 BLOCK_VARS (block) = chainon (BLOCK_VARS (block), vars);
4886 /* Finish copying DECL (whose copy is COPY).  DECL originally lived in the
4887 source function (ID->src_fn); COPY will live in the destination function
4888 (ID->dst_fn), except for globals and function-scoped statics, which keep
their original context.  */
4890 static tree
4891 copy_decl_for_dup_finish (copy_body_data *id, tree decl, tree copy)
4893 /* Don't generate debug information for the copy if we wouldn't have
4894 generated it for the original either.  */
4895 DECL_ARTIFICIAL (copy) = DECL_ARTIFICIAL (decl);
4896 DECL_IGNORED_P (copy) = DECL_IGNORED_P (decl);
4898 /* Set the DECL_ABSTRACT_ORIGIN so the debugging routines know what
4899 declaration inspired this copy. */
4900 DECL_ABSTRACT_ORIGIN (copy) = DECL_ORIGIN (decl);
4902 /* The new variable/label has no RTL, yet. */
4903 if (CODE_CONTAINS_STRUCT (TREE_CODE (copy), TS_DECL_WRTL)
4904 && !TREE_STATIC (copy) && !DECL_EXTERNAL (copy))
4905 SET_DECL_RTL (copy, 0);
4907 /* These args would always appear unused, if not for this. */
4908 TREE_USED (copy) = 1;
4910 /* Set the context for the new declaration. */
4911 if (!DECL_CONTEXT (decl))
4912 /* Globals stay global. */
4914 else if (DECL_CONTEXT (decl) != id->src_fn)
4915 /* Things that weren't in the scope of the function we're inlining
4916 from aren't in the scope we're inlining to, either. */
4918 else if (TREE_STATIC (decl))
4919 /* Function-scoped static variables should stay in the original
4920 function. */
4922 else
4923 /* Ordinary automatic local variables are now in the scope of the
4924 new function. */
4925 DECL_CONTEXT (copy) = id->dst_fn;
4927 return copy;
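/* For example, an automatic local from the inlined callee is re-homed to
   the caller (ID->dst_fn above), while a function-scoped static keeps its
   original DECL_CONTEXT so that all inline copies share the single
   instance.  */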
4930 static tree
4931 copy_decl_to_var (tree decl, copy_body_data *id)
4933 tree copy, type;
4935 gcc_assert (TREE_CODE (decl) == PARM_DECL
4936 || TREE_CODE (decl) == RESULT_DECL);
4938 type = TREE_TYPE (decl);
4940 copy = build_decl (DECL_SOURCE_LOCATION (id->dst_fn),
4941 VAR_DECL, DECL_NAME (decl), type);
4942 if (DECL_PT_UID_SET_P (decl))
4943 SET_DECL_PT_UID (copy, DECL_PT_UID (decl));
4944 TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (decl);
4945 TREE_READONLY (copy) = TREE_READONLY (decl);
4946 TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (decl);
4947 DECL_GIMPLE_REG_P (copy) = DECL_GIMPLE_REG_P (decl);
4949 return copy_decl_for_dup_finish (id, decl, copy);
4952 /* Like copy_decl_to_var, but create a return slot object instead of a
4953 pointer variable for return by invisible reference. */
4955 static tree
4956 copy_result_decl_to_var (tree decl, copy_body_data *id)
4958 tree copy, type;
4960 gcc_assert (TREE_CODE (decl) == PARM_DECL
4961 || TREE_CODE (decl) == RESULT_DECL);
4963 type = TREE_TYPE (decl);
4964 if (DECL_BY_REFERENCE (decl))
4965 type = TREE_TYPE (type);
4967 copy = build_decl (DECL_SOURCE_LOCATION (id->dst_fn),
4968 VAR_DECL, DECL_NAME (decl), type);
4969 if (DECL_PT_UID_SET_P (decl))
4970 SET_DECL_PT_UID (copy, DECL_PT_UID (decl));
4971 TREE_READONLY (copy) = TREE_READONLY (decl);
4972 TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (decl);
4973 if (!DECL_BY_REFERENCE (decl))
4975 TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (decl);
4976 DECL_GIMPLE_REG_P (copy) = DECL_GIMPLE_REG_P (decl);
4979 return copy_decl_for_dup_finish (id, decl, copy);
4982 tree
4983 copy_decl_no_change (tree decl, copy_body_data *id)
4985 tree copy;
4987 copy = copy_node (decl);
4989 /* The COPY is not abstract; it will be generated in DST_FN. */
4990 DECL_ABSTRACT (copy) = 0;
4991 lang_hooks.dup_lang_specific_decl (copy);
4993 /* TREE_ADDRESSABLE isn't used to indicate that a label's address has
4994 been taken; it's for internal bookkeeping in expand_goto_internal. */
4995 if (TREE_CODE (copy) == LABEL_DECL)
4997 TREE_ADDRESSABLE (copy) = 0;
4998 LABEL_DECL_UID (copy) = -1;
5001 return copy_decl_for_dup_finish (id, decl, copy);
5004 static tree
5005 copy_decl_maybe_to_var (tree decl, copy_body_data *id)
5007 if (TREE_CODE (decl) == PARM_DECL || TREE_CODE (decl) == RESULT_DECL)
5008 return copy_decl_to_var (decl, id);
5009 else
5010 return copy_decl_no_change (decl, id);
5013 /* Return a copy of the function's argument tree. */
5014 static tree
5015 copy_arguments_for_versioning (tree orig_parm, copy_body_data * id,
5016 bitmap args_to_skip, tree *vars)
5018 tree arg, *parg;
5019 tree new_parm = NULL;
5020 int i = 0;
5022 parg = &new_parm;
5024 for (arg = orig_parm; arg; arg = DECL_CHAIN (arg), i++)
5025 if (!args_to_skip || !bitmap_bit_p (args_to_skip, i))
5027 tree new_tree = remap_decl (arg, id);
5028 if (TREE_CODE (new_tree) != PARM_DECL)
5029 new_tree = id->copy_decl (arg, id);
5030 lang_hooks.dup_lang_specific_decl (new_tree);
5031 *parg = new_tree;
5032 parg = &DECL_CHAIN (new_tree);
5034 else if (!pointer_map_contains (id->decl_map, arg))
5036 /* Make an equivalent VAR_DECL.  If the argument was used
5037 as a temporary variable later in the function, the uses will be
5038 replaced by the local variable.  */
5039 tree var = copy_decl_to_var (arg, id);
5040 insert_decl_map (id, arg, var);
5041 /* Declare this new variable. */
5042 DECL_CHAIN (var) = *vars;
5043 *vars = var;
5045 return new_parm;
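/* Worked example (illustrative): with parameters (a, b, c) and bit 1 set
   in ARGS_TO_SKIP, the returned chain contains copies of a and c only;
   b is instead mapped to a fresh VAR_DECL pushed onto *VARS, so any
   remaining uses of b in the body still remap to a local variable.  */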
5048 /* Return a copy of the function's static chain. */
5049 static tree
5050 copy_static_chain (tree static_chain, copy_body_data * id)
5052 tree *chain_copy, *pvar;
5054 chain_copy = &static_chain;
5055 for (pvar = chain_copy; *pvar; pvar = &DECL_CHAIN (*pvar))
5057 tree new_tree = remap_decl (*pvar, id);
5058 lang_hooks.dup_lang_specific_decl (new_tree);
5059 DECL_CHAIN (new_tree) = DECL_CHAIN (*pvar);
5060 *pvar = new_tree;
5062 return static_chain;
5065 /* Return true if the function is allowed to be versioned.
5066 This is a guard for the versioning functionality. */
5068 bool
5069 tree_versionable_function_p (tree fndecl)
5071 return (!lookup_attribute ("noclone", DECL_ATTRIBUTES (fndecl))
5072 && copy_forbidden (DECL_STRUCT_FUNCTION (fndecl), fndecl) == NULL);
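/* For example, a function carrying the "noclone" attribute is rejected
   here, as is one for which copy_forbidden reports a reason, such as a
   function that receives a non-local goto.  */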
5075 /* Delete all unreachable basic blocks and update callgraph.
5076 Doing so is somewhat nontrivial because we need to update all clones and
5077 remove inline functions that become unreachable.  */
5079 static bool
5080 delete_unreachable_blocks_update_callgraph (copy_body_data *id)
5082 bool changed = false;
5083 basic_block b, next_bb;
5085 find_unreachable_blocks ();
5087 /* Delete all unreachable basic blocks. */
5089 for (b = ENTRY_BLOCK_PTR->next_bb; b != EXIT_BLOCK_PTR; b = next_bb)
5091 next_bb = b->next_bb;
5093 if (!(b->flags & BB_REACHABLE))
5095 gimple_stmt_iterator bsi;
5097 for (bsi = gsi_start_bb (b); !gsi_end_p (bsi); gsi_next (&bsi))
5098 if (gimple_code (gsi_stmt (bsi)) == GIMPLE_CALL)
5100 struct cgraph_edge *e;
5101 struct cgraph_node *node;
5103 if ((e = cgraph_edge (id->dst_node, gsi_stmt (bsi))) != NULL)
5105 if (!e->inline_failed)
5106 cgraph_remove_node_and_inline_clones (e->callee, id->dst_node);
5107 else
5108 cgraph_remove_edge (e);
5110 if (id->transform_call_graph_edges == CB_CGE_MOVE_CLONES
5111 && id->dst_node->clones)
5112 for (node = id->dst_node->clones; node != id->dst_node;)
5114 if ((e = cgraph_edge (node, gsi_stmt (bsi))) != NULL)
5116 if (!e->inline_failed)
5117 cgraph_remove_node_and_inline_clones (e->callee, id->dst_node);
5118 else
5119 cgraph_remove_edge (e);
5122 if (node->clones)
5123 node = node->clones;
5124 else if (node->next_sibling_clone)
5125 node = node->next_sibling_clone;
5126 else
5128 while (node != id->dst_node && !node->next_sibling_clone)
5129 node = node->clone_of;
5130 if (node != id->dst_node)
5131 node = node->next_sibling_clone;
5135 delete_basic_block (b);
5136 changed = true;
5140 return changed;
5143 /* Update clone info after duplication. */
5145 static void
5146 update_clone_info (copy_body_data * id)
5148 struct cgraph_node *node;
5149 if (!id->dst_node->clones)
5150 return;
5151 for (node = id->dst_node->clones; node != id->dst_node;)
5153 /* First update replace maps to match the new body. */
5154 if (node->clone.tree_map)
5156 unsigned int i;
5157 for (i = 0; i < vec_safe_length (node->clone.tree_map); i++)
5159 struct ipa_replace_map *replace_info;
5160 replace_info = (*node->clone.tree_map)[i];
5161 walk_tree (&replace_info->old_tree, copy_tree_body_r, id, NULL);
5162 walk_tree (&replace_info->new_tree, copy_tree_body_r, id, NULL);
5165 if (node->clones)
5166 node = node->clones;
5167 else if (node->next_sibling_clone)
5168 node = node->next_sibling_clone;
5169 else
5171 while (node != id->dst_node && !node->next_sibling_clone)
5172 node = node->clone_of;
5173 if (node != id->dst_node)
5174 node = node->next_sibling_clone;
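/* The loop above walks the clone tree iteratively: descend into `clones'
   first, then advance through `next_sibling_clone', and climb back via
   `clone_of' once a subtree is exhausted.  The same traversal pattern
   appears in delete_unreachable_blocks_update_callgraph above.  */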
5179 /* Create a copy of a function's tree.
5180 OLD_DECL and NEW_DECL are FUNCTION_DECL tree nodes
5181 of the original function and the new copied function
5182 respectively. In case we want to replace a DECL
5183 tree with another tree while duplicating the function's
5184 body, TREE_MAP represents the mapping between these
5185 trees. If UPDATE_CLONES is set, the call_stmt fields
5186 of edges of clones of the function will be updated.
5188 If non-NULL, ARGS_TO_SKIP determines the function parameters to remove
5189 from the new version.
5190 If SKIP_RETURN is true, the new version will return void.
5191 If non-NULL, BLOCKS_TO_COPY determines which basic blocks to copy.
5192 If non-NULL, NEW_ENTRY determines the new entry BB of the clone.  */
5194 void
5195 tree_function_versioning (tree old_decl, tree new_decl,
5196 vec<ipa_replace_map_p, va_gc> *tree_map,
5197 bool update_clones, bitmap args_to_skip,
5198 bool skip_return, bitmap blocks_to_copy,
5199 basic_block new_entry)
5201 struct cgraph_node *old_version_node;
5202 struct cgraph_node *new_version_node;
5203 copy_body_data id;
5204 tree p;
5205 unsigned i;
5206 struct ipa_replace_map *replace_info;
5207 basic_block old_entry_block, bb;
5208 vec<gimple> init_stmts;
5209 init_stmts.create (10);
5210 tree vars = NULL_TREE;
5212 gcc_assert (TREE_CODE (old_decl) == FUNCTION_DECL
5213 && TREE_CODE (new_decl) == FUNCTION_DECL);
5214 DECL_POSSIBLY_INLINED (old_decl) = 1;
5216 old_version_node = cgraph_get_node (old_decl);
5217 gcc_checking_assert (old_version_node);
5218 new_version_node = cgraph_get_node (new_decl);
5219 gcc_checking_assert (new_version_node);
5221 /* Copy over debug args. */
5222 if (DECL_HAS_DEBUG_ARGS_P (old_decl))
5224 vec<tree, va_gc> **new_debug_args, **old_debug_args;
5225 gcc_checking_assert (decl_debug_args_lookup (new_decl) == NULL);
5226 DECL_HAS_DEBUG_ARGS_P (new_decl) = 0;
5227 old_debug_args = decl_debug_args_lookup (old_decl);
5228 if (old_debug_args)
5230 new_debug_args = decl_debug_args_insert (new_decl);
5231 *new_debug_args = vec_safe_copy (*old_debug_args);
5235 /* Output the inlining info for this abstract function, since it has been
5236 inlined. If we don't do this now, we can lose the information about the
5237 variables in the function when the blocks get blown away as soon as we
5238 remove the cgraph node. */
5239 (*debug_hooks->outlining_inline_function) (old_decl);
5241 DECL_ARTIFICIAL (new_decl) = 1;
5242 DECL_ABSTRACT_ORIGIN (new_decl) = DECL_ORIGIN (old_decl);
5243 DECL_FUNCTION_PERSONALITY (new_decl) = DECL_FUNCTION_PERSONALITY (old_decl);
5245 /* Prepare the data structures for the tree copy. */
5246 memset (&id, 0, sizeof (id));
5248 /* Generate a new name for the new version. */
5249 id.statements_to_fold = pointer_set_create ();
5251 id.decl_map = pointer_map_create ();
5252 id.debug_map = NULL;
5253 id.src_fn = old_decl;
5254 id.dst_fn = new_decl;
5255 id.src_node = old_version_node;
5256 id.dst_node = new_version_node;
5257 id.src_cfun = DECL_STRUCT_FUNCTION (old_decl);
5258 if (id.src_node->ipa_transforms_to_apply.exists ())
5260 vec<ipa_opt_pass> old_transforms_to_apply
5261 = id.dst_node->ipa_transforms_to_apply;
5262 unsigned int i;
5264 id.dst_node->ipa_transforms_to_apply
5265 = id.src_node->ipa_transforms_to_apply.copy ();
5266 for (i = 0; i < old_transforms_to_apply.length (); i++)
5267 id.dst_node->ipa_transforms_to_apply.safe_push (old_transforms_to_apply[i]);
5268 old_transforms_to_apply.release ();
5271 id.copy_decl = copy_decl_no_change;
5272 id.transform_call_graph_edges
5273 = update_clones ? CB_CGE_MOVE_CLONES : CB_CGE_MOVE;
5274 id.transform_new_cfg = true;
5275 id.transform_return_to_modify = false;
5276 id.transform_lang_insert_block = NULL;
5278 old_entry_block = ENTRY_BLOCK_PTR_FOR_FUNCTION
5279 (DECL_STRUCT_FUNCTION (old_decl));
5280 initialize_cfun (new_decl, old_decl,
5281 old_entry_block->count);
5282 DECL_STRUCT_FUNCTION (new_decl)->gimple_df->ipa_pta
5283 = id.src_cfun->gimple_df->ipa_pta;
5285 /* Copy the function's static chain. */
5286 p = DECL_STRUCT_FUNCTION (old_decl)->static_chain_decl;
5287 if (p)
5288 DECL_STRUCT_FUNCTION (new_decl)->static_chain_decl =
5289 copy_static_chain (DECL_STRUCT_FUNCTION (old_decl)->static_chain_decl,
5290 &id);
5292 /* If there's a tree_map, prepare for substitution. */
5293 if (tree_map)
5294 for (i = 0; i < tree_map->length (); i++)
5296 gimple init;
5297 replace_info = (*tree_map)[i];
5298 if (replace_info->replace_p)
5300 if (!replace_info->old_tree)
5302 int i = replace_info->parm_num;
5303 tree parm;
5304 for (parm = DECL_ARGUMENTS (old_decl); i; parm = DECL_CHAIN (parm))
5305 i --;
5306 replace_info->old_tree = parm;
5308 gcc_assert (TREE_CODE (replace_info->old_tree) == PARM_DECL);
5309 init = setup_one_parameter (&id, replace_info->old_tree,
5310 replace_info->new_tree, id.src_fn,
5311 NULL,
5312 &vars);
5313 if (init)
5314 init_stmts.safe_push (init);
5317 /* Copy the function's arguments. */
5318 if (DECL_ARGUMENTS (old_decl) != NULL_TREE)
5319 DECL_ARGUMENTS (new_decl) =
5320 copy_arguments_for_versioning (DECL_ARGUMENTS (old_decl), &id,
5321 args_to_skip, &vars);
5323 DECL_INITIAL (new_decl) = remap_blocks (DECL_INITIAL (id.src_fn), &id);
5324 BLOCK_SUPERCONTEXT (DECL_INITIAL (new_decl)) = new_decl;
5326 declare_inline_vars (DECL_INITIAL (new_decl), vars);
5328 if (!vec_safe_is_empty (DECL_STRUCT_FUNCTION (old_decl)->local_decls))
5329 /* Add local vars. */
5330 add_local_variables (DECL_STRUCT_FUNCTION (old_decl), cfun, &id);
5332 if (DECL_RESULT (old_decl) == NULL_TREE)
5334 else if (skip_return && !VOID_TYPE_P (TREE_TYPE (DECL_RESULT (old_decl))))
5336 DECL_RESULT (new_decl)
5337 = build_decl (DECL_SOURCE_LOCATION (DECL_RESULT (old_decl)),
5338 RESULT_DECL, NULL_TREE, void_type_node);
5339 DECL_CONTEXT (DECL_RESULT (new_decl)) = new_decl;
5340 cfun->returns_struct = 0;
5341 cfun->returns_pcc_struct = 0;
5343 else
5345 tree old_name;
5346 DECL_RESULT (new_decl) = remap_decl (DECL_RESULT (old_decl), &id);
5347 lang_hooks.dup_lang_specific_decl (DECL_RESULT (new_decl));
5348 if (gimple_in_ssa_p (id.src_cfun)
5349 && DECL_BY_REFERENCE (DECL_RESULT (old_decl))
5350 && (old_name = ssa_default_def (id.src_cfun, DECL_RESULT (old_decl))))
5352 tree new_name = make_ssa_name (DECL_RESULT (new_decl), NULL);
5353 insert_decl_map (&id, old_name, new_name);
5354 SSA_NAME_DEF_STMT (new_name) = gimple_build_nop ();
5355 set_ssa_default_def (cfun, DECL_RESULT (new_decl), new_name);
5359 /* Copy the function's body.  */
5360 copy_body (&id, old_entry_block->count, REG_BR_PROB_BASE,
5361 ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR, blocks_to_copy, new_entry);
5363 /* Renumber the lexical scoping (non-code) blocks consecutively. */
5364 number_blocks (new_decl);
5366 /* We want to create the BB unconditionally, so that the addition of
5367 debug stmts doesn't affect BB count, which may in the end cause
5368 codegen differences. */
5369 bb = split_edge (single_succ_edge (ENTRY_BLOCK_PTR));
5370 while (init_stmts.length ())
5371 insert_init_stmt (&id, bb, init_stmts.pop ());
5372 update_clone_info (&id);
5374 /* Remap the nonlocal_goto_save_area, if any. */
5375 if (cfun->nonlocal_goto_save_area)
5377 struct walk_stmt_info wi;
5379 memset (&wi, 0, sizeof (wi));
5380 wi.info = &id;
5381 walk_tree (&cfun->nonlocal_goto_save_area, remap_gimple_op_r, &wi, NULL);
5384 /* Clean up. */
5385 pointer_map_destroy (id.decl_map);
5386 if (id.debug_map)
5387 pointer_map_destroy (id.debug_map);
5388 free_dominance_info (CDI_DOMINATORS);
5389 free_dominance_info (CDI_POST_DOMINATORS);
5391 fold_marked_statements (0, id.statements_to_fold);
5392 pointer_set_destroy (id.statements_to_fold);
5393 fold_cond_expr_cond ();
5394 delete_unreachable_blocks_update_callgraph (&id);
5395 if (id.dst_node->analyzed)
5396 cgraph_rebuild_references ();
5397 update_ssa (TODO_update_ssa);
5399 /* After partial cloning we need to rescale frequencies, so they are
5400 within proper range in the cloned function. */
5401 if (new_entry)
5403 struct cgraph_edge *e;
5404 rebuild_frequencies ();
5406 new_version_node->count = ENTRY_BLOCK_PTR->count;
5407 for (e = new_version_node->callees; e; e = e->next_callee)
5409 basic_block bb = gimple_bb (e->call_stmt);
5410 e->frequency = compute_call_stmt_bb_frequency (current_function_decl,
5411 bb);
5412 e->count = bb->count;
5414 for (e = new_version_node->indirect_calls; e; e = e->next_callee)
5416 basic_block bb = gimple_bb (e->call_stmt);
5417 e->frequency = compute_call_stmt_bb_frequency (current_function_decl,
5418 bb);
5419 e->count = bb->count;
5423 free_dominance_info (CDI_DOMINATORS);
5424 free_dominance_info (CDI_POST_DOMINATORS);
5426 gcc_assert (!id.debug_stmts.exists ());
5427 init_stmts.release ();
5428 pop_cfun ();
5429 return;
5432 /* EXP is a CALL_EXPR present in a GENERIC expression tree.  Try to integrate
5433 the callee and return the inlined body on success.  */
5435 tree
5436 maybe_inline_call_in_expr (tree exp)
5438 tree fn = get_callee_fndecl (exp);
5440 /* We can only try to inline "const" functions. */
5441 if (fn && TREE_READONLY (fn) && DECL_SAVED_TREE (fn))
5443 struct pointer_map_t *decl_map = pointer_map_create ();
5444 call_expr_arg_iterator iter;
5445 copy_body_data id;
5446 tree param, arg, t;
5448 /* Remap the parameters. */
5449 for (param = DECL_ARGUMENTS (fn), arg = first_call_expr_arg (exp, &iter);
5450 param;
5451 param = DECL_CHAIN (param), arg = next_call_expr_arg (&iter))
5452 *pointer_map_insert (decl_map, param) = arg;
5454 memset (&id, 0, sizeof (id));
5455 id.src_fn = fn;
5456 id.dst_fn = current_function_decl;
5457 id.src_cfun = DECL_STRUCT_FUNCTION (fn);
5458 id.decl_map = decl_map;
5460 id.copy_decl = copy_decl_no_change;
5461 id.transform_call_graph_edges = CB_CGE_DUPLICATE;
5462 id.transform_new_cfg = false;
5463 id.transform_return_to_modify = true;
5464 id.transform_lang_insert_block = NULL;
5466 /* Make sure not to unshare trees behind the front-end's back
5467 since front-end specific mechanisms may rely on sharing. */
5468 id.regimplify = false;
5469 id.do_not_unshare = true;
5471 /* We're not inside any EH region. */
5472 id.eh_lp_nr = 0;
5474 t = copy_tree_body (&id);
5475 pointer_map_destroy (decl_map);
5477 /* We can only return something suitable for use in a GENERIC
5478 expression tree. */
5479 if (TREE_CODE (t) == MODIFY_EXPR)
5480 return TREE_OPERAND (t, 1);
5483 return NULL_TREE;
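/* A sketch of the intended use (hypothetical example): for a "const"
   function `double sq (double x) { return x * x; }' with a simple enough
   saved body, a call `sq (3.0)' maps the parameter x to the argument 3.0
   via DECL_MAP, copies the body with returns transformed to MODIFY_EXPRs,
   and hands back the RHS of the resulting MODIFY_EXPR, i.e. `3.0 * 3.0'
   as a GENERIC expression the caller can fold further.  */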
5486 /* Duplicate a type, fields and all. */
5488 tree
5489 build_duplicate_type (tree type)
5491 struct copy_body_data id;
5493 memset (&id, 0, sizeof (id));
5494 id.src_fn = current_function_decl;
5495 id.dst_fn = current_function_decl;
5496 id.src_cfun = cfun;
5497 id.decl_map = pointer_map_create ();
5498 id.debug_map = NULL;
5499 id.copy_decl = copy_decl_no_change;
5501 type = remap_type_1 (type, &id);
5503 pointer_map_destroy (id.decl_map);
5504 if (id.debug_map)
5505 pointer_map_destroy (id.debug_map);
5507 TYPE_CANONICAL (type) = type;
5509 return type;