/* contrib/gcc-4.7/gcc/tree-inline.c */

/* Tree inlining.
   Copyright 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, 2011,
   2012 Free Software Foundation, Inc.
   Contributed by Alexandre Oliva <aoliva@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "diagnostic-core.h"
#include "tree.h"
#include "tree-inline.h"
#include "flags.h"
#include "params.h"
#include "input.h"
#include "insn-config.h"
#include "hashtab.h"
#include "langhooks.h"
#include "basic-block.h"
#include "tree-iterator.h"
#include "cgraph.h"
#include "intl.h"
#include "tree-mudflap.h"
#include "tree-flow.h"
#include "function.h"
#include "tree-pretty-print.h"
#include "except.h"
#include "debug.h"
#include "pointer-set.h"
#include "ipa-prop.h"
#include "value-prof.h"
#include "tree-pass.h"
#include "target.h"
#include "integrate.h"

#include "rtl.h"	/* FIXME: For asm_str_count.  */

/* I'm not really happy about this, but we need to handle gimple and
   non-gimple trees.  */
#include "gimple.h"

/* Inlining, Cloning, Versioning, Parallelization

   Inlining: a function body is duplicated, but the PARM_DECLs are
   remapped into VAR_DECLs, and non-void RETURN_EXPRs become
   MODIFY_EXPRs that store to a dedicated returned-value variable.
   The duplicated eh_region info of the copy will later be appended
   to the info for the caller; the eh_region info in copied throwing
   statements and RESX statements is adjusted accordingly.

   Cloning: (only in C++) We have one body for a con/de/structor, and
   multiple function decls, each with a unique parameter list.
   Duplicate the body, using the given splay tree; some parameters
   will become constants (like 0 or 1).

   Versioning: a function body is duplicated and the result is a new
   function, rather than being inserted into blocks of an existing
   function as with inlining.  Some parameters will become constants.

   Parallelization: a region of a function is duplicated, resulting in
   a new function.  Variables may be replaced with complex expressions
   to enable shared variable semantics.

   All of these will simultaneously look up any callgraph edges.  If
   we're going to inline the duplicated function body, and the given
   function has some cloned callgraph nodes (one for each place this
   function will be inlined), those callgraph edges will be duplicated.
   If we're cloning the body, those callgraph edges will be
   updated to point into the new body.  (Note that the original
   callgraph node and edge list will not be altered.)

   See the CALL_EXPR handling case in copy_tree_body_r ().  */
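
/* A worked example of the inlining transformation described above
   (illustrative only; the temporary names x.1 and retval.2 are
   hypothetical).  Given

       int f (int x) { return x + 1; }
       ...
       y = f (3);

   the duplicated body has its PARM_DECL x remapped to a local
   VAR_DECL initialized from the argument, and the RETURN_EXPR
   becomes an assignment to a returned-value variable:

       x.1 = 3;
       retval.2 = x.1 + 1;
       y = retval.2;  */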

/* To Do:

   o In order to make inlining-on-trees work, we pessimized
     function-local static constants.  In particular, they are now
     always output, even when not addressed.  Fix this by treating
     function-local static constants just like global static
     constants; the back-end already knows not to output them if they
     are not needed.

   o Provide heuristics to clamp inlining of recursive template
     calls?  */

/* Weights that estimate_num_insns uses to estimate the size of the
   produced code.  */

eni_weights eni_size_weights;

/* Weights that estimate_num_insns uses to estimate the time necessary
   to execute the produced code.  */

eni_weights eni_time_weights;

/* Prototypes.  */

static tree declare_return_variable (copy_body_data *, tree, tree, basic_block);
static void remap_block (tree *, copy_body_data *);
static void copy_bind_expr (tree *, int *, copy_body_data *);
static tree mark_local_for_remap_r (tree *, int *, void *);
static void unsave_expr_1 (tree);
static tree unsave_r (tree *, int *, void *);
static void declare_inline_vars (tree, tree);
static void remap_save_expr (tree *, void *, int *);
static void prepend_lexical_block (tree current_block, tree new_block);
static tree copy_decl_to_var (tree, copy_body_data *);
static tree copy_result_decl_to_var (tree, copy_body_data *);
static tree copy_decl_maybe_to_var (tree, copy_body_data *);
static gimple remap_gimple_stmt (gimple, copy_body_data *);
static bool delete_unreachable_blocks_update_callgraph (copy_body_data *id);

/* Insert a tree->tree mapping for ID.  Although the name suggests
   that the trees should be variables, it is used for more than that.  */

void
insert_decl_map (copy_body_data *id, tree key, tree value)
{
  *pointer_map_insert (id->decl_map, key) = value;

  /* Always insert an identity map as well.  If we see this same new
     node again, we won't want to duplicate it a second time.  */
  if (key != value)
    *pointer_map_insert (id->decl_map, value) = value;
}
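
/* Usage sketch (illustrative only; old_decl and new_decl are
   hypothetical names).  A caller typically records a mapping right
   after copying a node; the identity entry added above means a later
   lookup of the copy harmlessly maps it to itself:

       tree new_decl = copy_node (old_decl);
       insert_decl_map (id, old_decl, new_decl);
       n = (tree *) pointer_map_contains (id->decl_map, new_decl);
       gcc_assert (*n == new_decl);  */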

/* Insert a tree->tree mapping for ID.  This is only used for
   variables.  */

static void
insert_debug_decl_map (copy_body_data *id, tree key, tree value)
{
  if (!gimple_in_ssa_p (id->src_cfun))
    return;

  if (!MAY_HAVE_DEBUG_STMTS)
    return;

  if (!target_for_debug_bind (key))
    return;

  gcc_assert (TREE_CODE (key) == PARM_DECL);
  gcc_assert (TREE_CODE (value) == VAR_DECL);

  if (!id->debug_map)
    id->debug_map = pointer_map_create ();

  *pointer_map_insert (id->debug_map, key) = value;
}

/* If nonzero, we're remapping the contents of inlined debug
   statements.  If negative, an error has occurred, such as a
   reference to a variable that isn't available in the inlined
   context.  */
static int processing_debug_stmt = 0;

/* Construct new SSA name for old NAME.  ID is the inline context.  */

static tree
remap_ssa_name (tree name, copy_body_data *id)
{
  tree new_tree;
  tree *n;

  gcc_assert (TREE_CODE (name) == SSA_NAME);

  n = (tree *) pointer_map_contains (id->decl_map, name);
  if (n)
    return unshare_expr (*n);

  if (processing_debug_stmt)
    {
      if (TREE_CODE (SSA_NAME_VAR (name)) == PARM_DECL
          && SSA_NAME_IS_DEFAULT_DEF (name)
          && id->entry_bb == NULL
          && single_succ_p (ENTRY_BLOCK_PTR))
        {
          tree vexpr = make_node (DEBUG_EXPR_DECL);
          gimple def_temp;
          gimple_stmt_iterator gsi;
          tree val = SSA_NAME_VAR (name);

          n = (tree *) pointer_map_contains (id->decl_map, val);
          if (n != NULL)
            val = *n;
          if (TREE_CODE (val) != PARM_DECL)
            {
              processing_debug_stmt = -1;
              return name;
            }
          def_temp = gimple_build_debug_source_bind (vexpr, val, NULL);
          DECL_ARTIFICIAL (vexpr) = 1;
          TREE_TYPE (vexpr) = TREE_TYPE (name);
          DECL_MODE (vexpr) = DECL_MODE (SSA_NAME_VAR (name));
          gsi = gsi_after_labels (single_succ (ENTRY_BLOCK_PTR));
          gsi_insert_before (&gsi, def_temp, GSI_SAME_STMT);
          return vexpr;
        }

      processing_debug_stmt = -1;
      return name;
    }

  /* Do not set DEF_STMT yet as the statement is not copied yet.  We do
     that in copy_bb.  */
  new_tree = remap_decl (SSA_NAME_VAR (name), id);

  /* We might've substituted a constant or another SSA_NAME for
     the variable.

     Replace the SSA name representing RESULT_DECL by the variable during
     inlining:  this saves us from the need to introduce a PHI node in the
     case the return value is just partly initialized.  */
  if ((TREE_CODE (new_tree) == VAR_DECL || TREE_CODE (new_tree) == PARM_DECL)
      && (TREE_CODE (SSA_NAME_VAR (name)) != RESULT_DECL
          || !id->transform_return_to_modify))
    {
      struct ptr_info_def *pi;
      new_tree = make_ssa_name (new_tree, NULL);
      insert_decl_map (id, name, new_tree);
      SSA_NAME_OCCURS_IN_ABNORMAL_PHI (new_tree)
        = SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name);
      TREE_TYPE (new_tree) = TREE_TYPE (SSA_NAME_VAR (new_tree));
      /* At least IPA points-to info can be directly transferred.  */
      if (id->src_cfun->gimple_df
          && id->src_cfun->gimple_df->ipa_pta
          && (pi = SSA_NAME_PTR_INFO (name))
          && !pi->pt.anything)
        {
          struct ptr_info_def *new_pi = get_ptr_info (new_tree);
          new_pi->pt = pi->pt;
        }
      if (gimple_nop_p (SSA_NAME_DEF_STMT (name)))
        {
          /* By inlining a function having an uninitialized variable, we
             might extend its lifetime (the variable might get reused).
             This causes an ICE in the case we end up extending the
             lifetime of an SSA name across an abnormal edge, and also
             increases register pressure.

             We simply initialize all uninitialized vars by 0 except
             for the case where we are inlining to the very first BB.
             We can avoid this for all BBs that are not inside strongly
             connected regions of the CFG, but this is expensive to
             test.  */
          if (id->entry_bb
              && is_gimple_reg (SSA_NAME_VAR (name))
              && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name)
              && TREE_CODE (SSA_NAME_VAR (name)) != PARM_DECL
              && (id->entry_bb != EDGE_SUCC (ENTRY_BLOCK_PTR, 0)->dest
                  || EDGE_COUNT (id->entry_bb->preds) != 1))
            {
              gimple_stmt_iterator gsi = gsi_last_bb (id->entry_bb);
              gimple init_stmt;
              tree zero = build_zero_cst (TREE_TYPE (new_tree));

              init_stmt = gimple_build_assign (new_tree, zero);
              gsi_insert_after (&gsi, init_stmt, GSI_NEW_STMT);
              SSA_NAME_IS_DEFAULT_DEF (new_tree) = 0;
            }
          else
            {
              SSA_NAME_DEF_STMT (new_tree) = gimple_build_nop ();
              if (gimple_default_def (id->src_cfun, SSA_NAME_VAR (name))
                  == name)
                set_default_def (SSA_NAME_VAR (new_tree), new_tree);
            }
        }
    }
  else
    insert_decl_map (id, name, new_tree);
  return new_tree;
}
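
/* Illustrative note on the zero-initialization above (not GCC code;
   the names x_1(D) and x_2 are hypothetical).  If the inlined body
   uses an uninitialized local through its default definition x_1(D),
   the copy may receive an explicit definition

       x_2 = 0;

   at the end of ID->entry_bb, so that the remapped name has a real
   defining statement and its lifetime cannot be extended, while
   still undefined, across an abnormal edge.  */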

/* Remap DECL during the copying of the BLOCK tree for the function.  */

tree
remap_decl (tree decl, copy_body_data *id)
{
  tree *n;

  /* We only remap local variables in the current function.  */

  /* See if we have remapped this declaration.  */

  n = (tree *) pointer_map_contains (id->decl_map, decl);

  if (!n && processing_debug_stmt)
    {
      processing_debug_stmt = -1;
      return decl;
    }

  /* If we didn't already have an equivalent for this declaration,
     create one now.  */
  if (!n)
    {
      /* Make a copy of the variable or label.  */
      tree t = id->copy_decl (decl, id);

      /* Remember it, so that if we encounter this local entity again
         we can reuse this copy.  Do this early because remap_type may
         need this decl for TYPE_STUB_DECL.  */
      insert_decl_map (id, decl, t);

      if (!DECL_P (t))
        return t;

      /* Remap types, if necessary.  */
      TREE_TYPE (t) = remap_type (TREE_TYPE (t), id);
      if (TREE_CODE (t) == TYPE_DECL)
        DECL_ORIGINAL_TYPE (t) = remap_type (DECL_ORIGINAL_TYPE (t), id);

      /* Remap sizes as necessary.  */
      walk_tree (&DECL_SIZE (t), copy_tree_body_r, id, NULL);
      walk_tree (&DECL_SIZE_UNIT (t), copy_tree_body_r, id, NULL);

      /* If fields, do likewise for offset and qualifier.  */
      if (TREE_CODE (t) == FIELD_DECL)
        {
          walk_tree (&DECL_FIELD_OFFSET (t), copy_tree_body_r, id, NULL);
          if (TREE_CODE (DECL_CONTEXT (t)) == QUAL_UNION_TYPE)
            walk_tree (&DECL_QUALIFIER (t), copy_tree_body_r, id, NULL);
        }

      if ((TREE_CODE (t) == VAR_DECL
           || TREE_CODE (t) == RESULT_DECL
           || TREE_CODE (t) == PARM_DECL)
          && id->src_fn && DECL_STRUCT_FUNCTION (id->src_fn)
          && gimple_referenced_vars (DECL_STRUCT_FUNCTION (id->src_fn))
          /* We don't want to mark as referenced VAR_DECLs that were
             not marked as such in the src function.  */
          && (TREE_CODE (decl) != VAR_DECL
              || referenced_var_lookup (DECL_STRUCT_FUNCTION (id->src_fn),
                                        DECL_UID (decl))))
        add_referenced_var (t);
      return t;
    }

  if (id->do_not_unshare)
    return *n;
  else
    return unshare_expr (*n);
}

static tree
remap_type_1 (tree type, copy_body_data *id)
{
  tree new_tree, t;

  /* We do need a copy.  Build and register it now.  If this is a pointer or
     reference type, remap the designated type and make a new pointer or
     reference type.  */
  if (TREE_CODE (type) == POINTER_TYPE)
    {
      new_tree = build_pointer_type_for_mode (remap_type (TREE_TYPE (type), id),
                                              TYPE_MODE (type),
                                              TYPE_REF_CAN_ALIAS_ALL (type));
      if (TYPE_ATTRIBUTES (type) || TYPE_QUALS (type))
        new_tree = build_type_attribute_qual_variant (new_tree,
                                                      TYPE_ATTRIBUTES (type),
                                                      TYPE_QUALS (type));
      insert_decl_map (id, type, new_tree);
      return new_tree;
    }
  else if (TREE_CODE (type) == REFERENCE_TYPE)
    {
      new_tree = build_reference_type_for_mode (remap_type (TREE_TYPE (type), id),
                                                TYPE_MODE (type),
                                                TYPE_REF_CAN_ALIAS_ALL (type));
      if (TYPE_ATTRIBUTES (type) || TYPE_QUALS (type))
        new_tree = build_type_attribute_qual_variant (new_tree,
                                                      TYPE_ATTRIBUTES (type),
                                                      TYPE_QUALS (type));
      insert_decl_map (id, type, new_tree);
      return new_tree;
    }
  else
    new_tree = copy_node (type);

  insert_decl_map (id, type, new_tree);

  /* This is a new type, not a copy of an old type.  Need to reassociate
     variants.  We can handle everything except the main variant lazily.  */
  t = TYPE_MAIN_VARIANT (type);
  if (type != t)
    {
      t = remap_type (t, id);
      TYPE_MAIN_VARIANT (new_tree) = t;
      TYPE_NEXT_VARIANT (new_tree) = TYPE_NEXT_VARIANT (t);
      TYPE_NEXT_VARIANT (t) = new_tree;
    }
  else
    {
      TYPE_MAIN_VARIANT (new_tree) = new_tree;
      TYPE_NEXT_VARIANT (new_tree) = NULL;
    }

  if (TYPE_STUB_DECL (type))
    TYPE_STUB_DECL (new_tree) = remap_decl (TYPE_STUB_DECL (type), id);

  /* Lazily create pointer and reference types.  */
  TYPE_POINTER_TO (new_tree) = NULL;
  TYPE_REFERENCE_TO (new_tree) = NULL;

  switch (TREE_CODE (new_tree))
    {
    case INTEGER_TYPE:
    case REAL_TYPE:
    case FIXED_POINT_TYPE:
    case ENUMERAL_TYPE:
    case BOOLEAN_TYPE:
      t = TYPE_MIN_VALUE (new_tree);
      if (t && TREE_CODE (t) != INTEGER_CST)
        walk_tree (&TYPE_MIN_VALUE (new_tree), copy_tree_body_r, id, NULL);

      t = TYPE_MAX_VALUE (new_tree);
      if (t && TREE_CODE (t) != INTEGER_CST)
        walk_tree (&TYPE_MAX_VALUE (new_tree), copy_tree_body_r, id, NULL);
      return new_tree;

    case FUNCTION_TYPE:
      TREE_TYPE (new_tree) = remap_type (TREE_TYPE (new_tree), id);
      walk_tree (&TYPE_ARG_TYPES (new_tree), copy_tree_body_r, id, NULL);
      return new_tree;

    case ARRAY_TYPE:
      TREE_TYPE (new_tree) = remap_type (TREE_TYPE (new_tree), id);
      TYPE_DOMAIN (new_tree) = remap_type (TYPE_DOMAIN (new_tree), id);
      break;

    case RECORD_TYPE:
    case UNION_TYPE:
    case QUAL_UNION_TYPE:
      {
        tree f, nf = NULL;

        for (f = TYPE_FIELDS (new_tree); f ; f = DECL_CHAIN (f))
          {
            t = remap_decl (f, id);
            DECL_CONTEXT (t) = new_tree;
            DECL_CHAIN (t) = nf;
            nf = t;
          }
        TYPE_FIELDS (new_tree) = nreverse (nf);
      }
      break;

    case OFFSET_TYPE:
    default:
      /* Shouldn't have been thought variable sized.  */
      gcc_unreachable ();
    }

  walk_tree (&TYPE_SIZE (new_tree), copy_tree_body_r, id, NULL);
  walk_tree (&TYPE_SIZE_UNIT (new_tree), copy_tree_body_r, id, NULL);

  return new_tree;
}

tree
remap_type (tree type, copy_body_data *id)
{
  tree *node;
  tree tmp;

  if (type == NULL)
    return type;

  /* See if we have remapped this type.  */
  node = (tree *) pointer_map_contains (id->decl_map, type);
  if (node)
    return *node;

  /* The type only needs remapping if it's variably modified.  */
  if (! variably_modified_type_p (type, id->src_fn))
    {
      insert_decl_map (id, type, type);
      return type;
    }

  id->remapping_type_depth++;
  tmp = remap_type_1 (type, id);
  id->remapping_type_depth--;

  return tmp;
}
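
/* Example of a variably modified type (illustrative only).  For a
   C99 VLA such as

       void f (int n) { int a[n]; ... }

   the type int[n] has a TYPE_SIZE that refers to n, so inlining f
   must rebuild the type with the size expression remapped to the
   caller's copy of n.  An ordinary type like int is not variably
   modified and is entered into the map as its own identity.  */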

/* Return previously remapped type of TYPE in ID.  Return NULL if TYPE
   is NULL or TYPE has not been remapped before.  */

static tree
remapped_type (tree type, copy_body_data *id)
{
  tree *node;

  if (type == NULL)
    return type;

  /* See if we have remapped this type.  */
  node = (tree *) pointer_map_contains (id->decl_map, type);
  if (node)
    return *node;
  else
    return NULL;
}

/* Decide if DECL can be put into BLOCK_NONLOCAL_VARs.  */

static bool
can_be_nonlocal (tree decl, copy_body_data *id)
{
  /* We cannot duplicate function decls.  */
  if (TREE_CODE (decl) == FUNCTION_DECL)
    return true;

  /* Local static vars must be non-local or we get multiple declaration
     problems.  */
  if (TREE_CODE (decl) == VAR_DECL
      && !auto_var_in_fn_p (decl, id->src_fn))
    return true;

  /* At the moment dwarf2out can handle only these types of nodes.  We
     can support more later.  */
  if (TREE_CODE (decl) != VAR_DECL && TREE_CODE (decl) != PARM_DECL)
    return false;

  /* We must use the global type.  We call remapped_type instead of
     remap_type since we don't want to remap this type here if it
     hasn't been remapped before.  */
  if (TREE_TYPE (decl) != remapped_type (TREE_TYPE (decl), id))
    return false;

  /* Without SSA we can't tell if the variable is used.  */
  if (!gimple_in_ssa_p (cfun))
    return false;

  /* Live variables must be copied so we can attach DECL_RTL.  */
  if (var_ann (decl))
    return false;

  return true;
}
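
/* Example (illustrative only): a function-local

       static int counter;

   inside an inlined body cannot be duplicated; it stays non-local so
   that every inlined copy references the single original decl, and it
   may be recorded in BLOCK_NONLOCALIZED_VARS purely so that debug
   info can still name it in the inlined copy.  */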

static tree
remap_decls (tree decls, VEC(tree,gc) **nonlocalized_list, copy_body_data *id)
{
  tree old_var;
  tree new_decls = NULL_TREE;

  /* Remap its variables.  */
  for (old_var = decls; old_var; old_var = DECL_CHAIN (old_var))
    {
      tree new_var;

      if (can_be_nonlocal (old_var, id))
        {
          if (TREE_CODE (old_var) == VAR_DECL
              && ! DECL_EXTERNAL (old_var)
              && (var_ann (old_var) || !gimple_in_ssa_p (cfun)))
            add_local_decl (cfun, old_var);
          if ((!optimize || debug_info_level > DINFO_LEVEL_TERSE)
              && !DECL_IGNORED_P (old_var)
              && nonlocalized_list)
            VEC_safe_push (tree, gc, *nonlocalized_list, old_var);
          continue;
        }

      /* Remap the variable.  */
      new_var = remap_decl (old_var, id);

      /* If we didn't remap this variable, we can't mess with its
         TREE_CHAIN.  If we remapped this variable to the return slot, it's
         already declared somewhere else, so don't declare it here.  */

      if (new_var == id->retvar)
        ;
      else if (!new_var)
        {
          if ((!optimize || debug_info_level > DINFO_LEVEL_TERSE)
              && !DECL_IGNORED_P (old_var)
              && nonlocalized_list)
            VEC_safe_push (tree, gc, *nonlocalized_list, old_var);
        }
      else
        {
          gcc_assert (DECL_P (new_var));
          DECL_CHAIN (new_var) = new_decls;
          new_decls = new_var;

          /* Also copy value-expressions.  */
          if (TREE_CODE (new_var) == VAR_DECL
              && DECL_HAS_VALUE_EXPR_P (new_var))
            {
              tree tem = DECL_VALUE_EXPR (new_var);
              bool old_regimplify = id->regimplify;
              id->remapping_type_depth++;
              walk_tree (&tem, copy_tree_body_r, id, NULL);
              id->remapping_type_depth--;
              id->regimplify = old_regimplify;
              SET_DECL_VALUE_EXPR (new_var, tem);
            }
        }
    }

  return nreverse (new_decls);
}

/* Copy the BLOCK to contain remapped versions of the variables
   therein.  And hook the new block into the block-tree.  */

static void
remap_block (tree *block, copy_body_data *id)
{
  tree old_block;
  tree new_block;

  /* Make the new block.  */
  old_block = *block;
  new_block = make_node (BLOCK);
  TREE_USED (new_block) = TREE_USED (old_block);
  BLOCK_ABSTRACT_ORIGIN (new_block) = old_block;
  BLOCK_SOURCE_LOCATION (new_block) = BLOCK_SOURCE_LOCATION (old_block);
  BLOCK_NONLOCALIZED_VARS (new_block)
    = VEC_copy (tree, gc, BLOCK_NONLOCALIZED_VARS (old_block));
  *block = new_block;

  /* Remap its variables.  */
  BLOCK_VARS (new_block) = remap_decls (BLOCK_VARS (old_block),
                                        &BLOCK_NONLOCALIZED_VARS (new_block),
                                        id);

  if (id->transform_lang_insert_block)
    id->transform_lang_insert_block (new_block);

  /* Remember the remapped block.  */
  insert_decl_map (id, old_block, new_block);
}

/* Copy the whole block tree and root it in id->block.  */
static tree
remap_blocks (tree block, copy_body_data *id)
{
  tree t;
  tree new_tree = block;

  if (!block)
    return NULL;

  remap_block (&new_tree, id);
  gcc_assert (new_tree != block);
  for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
    prepend_lexical_block (new_tree, remap_blocks (t, id));
  /* Blocks are in arbitrary order, but make things slightly prettier and do
     not swap order when producing a copy.  */
  BLOCK_SUBBLOCKS (new_tree) = blocks_nreverse (BLOCK_SUBBLOCKS (new_tree));
  return new_tree;
}

static void
copy_statement_list (tree *tp)
{
  tree_stmt_iterator oi, ni;
  tree new_tree;

  new_tree = alloc_stmt_list ();
  ni = tsi_start (new_tree);
  oi = tsi_start (*tp);
  TREE_TYPE (new_tree) = TREE_TYPE (*tp);
  *tp = new_tree;

  for (; !tsi_end_p (oi); tsi_next (&oi))
    {
      tree stmt = tsi_stmt (oi);
      if (TREE_CODE (stmt) == STATEMENT_LIST)
        /* This copy is not redundant; tsi_link_after will smash this
           STATEMENT_LIST into the end of the one we're building, and we
           don't want to do that with the original.  */
        copy_statement_list (&stmt);
      tsi_link_after (&ni, stmt, TSI_CONTINUE_LINKING);
    }
}

static void
copy_bind_expr (tree *tp, int *walk_subtrees, copy_body_data *id)
{
  tree block = BIND_EXPR_BLOCK (*tp);
  /* Copy (and replace) the statement.  */
  copy_tree_r (tp, walk_subtrees, NULL);
  if (block)
    {
      remap_block (&block, id);
      BIND_EXPR_BLOCK (*tp) = block;
    }

  if (BIND_EXPR_VARS (*tp))
    /* This will remap a lot of the same decls again, but this should be
       harmless.  */
    BIND_EXPR_VARS (*tp) = remap_decls (BIND_EXPR_VARS (*tp), NULL, id);
}

/* Create a new gimple_seq by remapping all the statements in BODY
   using the inlining information in ID.  */

static gimple_seq
remap_gimple_seq (gimple_seq body, copy_body_data *id)
{
  gimple_stmt_iterator si;
  gimple_seq new_body = NULL;

  for (si = gsi_start (body); !gsi_end_p (si); gsi_next (&si))
    {
      gimple new_stmt = remap_gimple_stmt (gsi_stmt (si), id);
      gimple_seq_add_stmt (&new_body, new_stmt);
    }

  return new_body;
}

/* Copy a GIMPLE_BIND statement STMT, remapping all the symbols in its
   block using the mapping information in ID.  */

static gimple
copy_gimple_bind (gimple stmt, copy_body_data *id)
{
  gimple new_bind;
  tree new_block, new_vars;
  gimple_seq body, new_body;

  /* Copy the statement.  Note that we purposely don't use copy_stmt
     here because we need to remap statements as we copy.  */
  body = gimple_bind_body (stmt);
  new_body = remap_gimple_seq (body, id);

  new_block = gimple_bind_block (stmt);
  if (new_block)
    remap_block (&new_block, id);

  /* This will remap a lot of the same decls again, but this should be
     harmless.  */
  new_vars = gimple_bind_vars (stmt);
  if (new_vars)
    new_vars = remap_decls (new_vars, NULL, id);

  new_bind = gimple_build_bind (new_vars, new_body, new_block);

  return new_bind;
}

/* Remap the GIMPLE operand pointed to by *TP.  DATA is really a
   'struct walk_stmt_info *'.  DATA->INFO is a 'copy_body_data *'.
   WALK_SUBTREES is used to indicate to walk_gimple_op whether to keep
   recursing into the children nodes of *TP.  */

static tree
remap_gimple_op_r (tree *tp, int *walk_subtrees, void *data)
{
  struct walk_stmt_info *wi_p = (struct walk_stmt_info *) data;
  copy_body_data *id = (copy_body_data *) wi_p->info;
  tree fn = id->src_fn;

  if (TREE_CODE (*tp) == SSA_NAME)
    {
      *tp = remap_ssa_name (*tp, id);
      *walk_subtrees = 0;
      return NULL;
    }
  else if (auto_var_in_fn_p (*tp, fn))
    {
      /* Local variables and labels need to be replaced by equivalent
         variables.  We don't want to copy static variables; there's
         only one of those, no matter how many times we inline the
         containing function.  Similarly for globals from an outer
         function.  */
      tree new_decl;

      /* Remap the declaration.  */
      new_decl = remap_decl (*tp, id);
      gcc_assert (new_decl);
      /* Replace this variable with the copy.  */
      STRIP_TYPE_NOPS (new_decl);
      /* ???  The C++ frontend uses void * pointer zero to initialize
         any other type.  This confuses the middle-end type verification.
         As cloned bodies do not go through gimplification again the fixup
         there doesn't trigger.  */
      if (TREE_CODE (new_decl) == INTEGER_CST
          && !useless_type_conversion_p (TREE_TYPE (*tp), TREE_TYPE (new_decl)))
        new_decl = fold_convert (TREE_TYPE (*tp), new_decl);
      *tp = new_decl;
      *walk_subtrees = 0;
    }
  else if (TREE_CODE (*tp) == STATEMENT_LIST)
    gcc_unreachable ();
  else if (TREE_CODE (*tp) == SAVE_EXPR)
    gcc_unreachable ();
  else if (TREE_CODE (*tp) == LABEL_DECL
           && (!DECL_CONTEXT (*tp)
               || decl_function_context (*tp) == id->src_fn))
    /* These may need to be remapped for EH handling.  */
    *tp = remap_decl (*tp, id);
  else if (TREE_CODE (*tp) == FIELD_DECL)
    {
      /* If the enclosing record type is variably_modified_type_p, the field
         has already been remapped.  Otherwise, it need not be.  */
      tree *n = (tree *) pointer_map_contains (id->decl_map, *tp);
      if (n)
        *tp = *n;
      *walk_subtrees = 0;
    }
  else if (TYPE_P (*tp))
    /* Types may need remapping as well.  */
    *tp = remap_type (*tp, id);
  else if (CONSTANT_CLASS_P (*tp))
    {
      /* If this is a constant, we have to copy the node iff the type
         will be remapped.  copy_tree_r will not copy a constant.  */
      tree new_type = remap_type (TREE_TYPE (*tp), id);

      if (new_type == TREE_TYPE (*tp))
        *walk_subtrees = 0;

      else if (TREE_CODE (*tp) == INTEGER_CST)
        *tp = build_int_cst_wide (new_type, TREE_INT_CST_LOW (*tp),
                                  TREE_INT_CST_HIGH (*tp));
      else
        {
          *tp = copy_node (*tp);
          TREE_TYPE (*tp) = new_type;
        }
    }
  else
    {
      /* Otherwise, just copy the node.  Note that copy_tree_r already
         knows not to copy VAR_DECLs, etc., so this is safe.  */

      /* We should never have TREE_BLOCK set on non-statements.  */
      if (EXPR_P (*tp))
        gcc_assert (!TREE_BLOCK (*tp));

      if (TREE_CODE (*tp) == MEM_REF)
        {
          tree ptr = TREE_OPERAND (*tp, 0);
          tree type = remap_type (TREE_TYPE (*tp), id);
          tree old = *tp;

          /* We need to re-canonicalize MEM_REFs from inline substitutions
             that can happen when a pointer argument is an ADDR_EXPR.
             Recurse here manually to allow that.  */
          walk_tree (&ptr, remap_gimple_op_r, data, NULL);
          *tp = fold_build2 (MEM_REF, type,
                             ptr, TREE_OPERAND (*tp, 1));
          TREE_THIS_NOTRAP (*tp) = TREE_THIS_NOTRAP (old);
          TREE_THIS_VOLATILE (*tp) = TREE_THIS_VOLATILE (old);
          TREE_SIDE_EFFECTS (*tp) = TREE_SIDE_EFFECTS (old);
          TREE_NO_WARNING (*tp) = TREE_NO_WARNING (old);
          *walk_subtrees = 0;
          return NULL;
        }

      /* Here is the "usual case".  Copy this tree node, and then
         tweak some special cases.  */
      copy_tree_r (tp, walk_subtrees, NULL);

      if (TREE_CODE (*tp) != OMP_CLAUSE)
        TREE_TYPE (*tp) = remap_type (TREE_TYPE (*tp), id);

      /* Global variables we haven't seen yet need to go into referenced
         vars, unless they are referenced only from types.  */
      if (gimple_in_ssa_p (cfun)
          && TREE_CODE (*tp) == VAR_DECL
          && id->remapping_type_depth == 0
          && !processing_debug_stmt)
        add_referenced_var (*tp);

      if (TREE_CODE (*tp) == TARGET_EXPR && TREE_OPERAND (*tp, 3))
        {
          /* The copied TARGET_EXPR has never been expanded, even if the
             original node was expanded already.  */
          TREE_OPERAND (*tp, 1) = TREE_OPERAND (*tp, 3);
          TREE_OPERAND (*tp, 3) = NULL_TREE;
        }
      else if (TREE_CODE (*tp) == ADDR_EXPR)
        {
          /* Variable substitution need not be simple.  In particular,
             the MEM_REF substitution above.  Make sure that
             TREE_CONSTANT and friends are up-to-date.  But make sure
             to not improperly set TREE_BLOCK on some sub-expressions.  */
          int invariant = is_gimple_min_invariant (*tp);
          tree block = id->block;
          id->block = NULL_TREE;
          walk_tree (&TREE_OPERAND (*tp, 0), remap_gimple_op_r, data, NULL);
          id->block = block;
          recompute_tree_invariant_for_addr_expr (*tp);

          /* If this used to be invariant, but is not any longer,
             then regimplification is probably needed.  */
          if (invariant && !is_gimple_min_invariant (*tp))
            id->regimplify = true;

          *walk_subtrees = 0;
        }
    }

  /* Keep iterating.  */
  return NULL_TREE;
}
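
/* Illustrative example of the MEM_REF re-canonicalization above (not
   GCC code; p, x and tmp_1 are hypothetical names).  If the caller
   passes &x for the pointer parameter p, an inlined load

       tmp_1 = MEM[p];

   sees p substituted by &x; recursing manually into the pointer
   operand and rebuilding the reference with fold_build2 yields the
   canonical MEM[&x], which folds to a direct access of x instead of
   a dereference.  */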

/* Called from copy_body_id via walk_tree.  DATA is really a
   `copy_body_data *'.  */

tree
copy_tree_body_r (tree *tp, int *walk_subtrees, void *data)
{
  copy_body_data *id = (copy_body_data *) data;
  tree fn = id->src_fn;
  tree new_block;

  /* Begin by recognizing trees that we'll completely rewrite for the
     inlining context.  Our output for these trees is completely
     different from our input (e.g. RETURN_EXPR is deleted, and morphs
     into an edge).  Further down, we'll handle trees that get
     duplicated and/or tweaked.  */

  /* When requested, RETURN_EXPRs should be transformed to just the
     contained MODIFY_EXPR.  The branch semantics of the return will
     be handled elsewhere by manipulating the CFG rather than a statement.  */
  if (TREE_CODE (*tp) == RETURN_EXPR && id->transform_return_to_modify)
    {
      tree assignment = TREE_OPERAND (*tp, 0);

      /* If we're returning something, just turn that into an
         assignment into the equivalent of the original RESULT_DECL.
         If the "assignment" is just the result decl, the result
         decl has already been set (e.g. a recent "foo (&result_decl,
         ...)"); just toss the entire RETURN_EXPR.  */
      if (assignment && TREE_CODE (assignment) == MODIFY_EXPR)
        {
          /* Replace the RETURN_EXPR with (a copy of) the
             MODIFY_EXPR hanging underneath.  */
          *tp = copy_node (assignment);
        }
      else /* Else the RETURN_EXPR returns no value.  */
        {
          *tp = NULL;
          return (tree) (void *)1;
        }
    }
  else if (TREE_CODE (*tp) == SSA_NAME)
    {
      *tp = remap_ssa_name (*tp, id);
      *walk_subtrees = 0;
      return NULL;
    }

  /* Local variables and labels need to be replaced by equivalent
     variables.  We don't want to copy static variables; there's only
     one of those, no matter how many times we inline the containing
     function.  Similarly for globals from an outer function.  */
  else if (auto_var_in_fn_p (*tp, fn))
    {
      tree new_decl;

      /* Remap the declaration.  */
      new_decl = remap_decl (*tp, id);
      gcc_assert (new_decl);
      /* Replace this variable with the copy.  */
      STRIP_TYPE_NOPS (new_decl);
      *tp = new_decl;
      *walk_subtrees = 0;
    }
  else if (TREE_CODE (*tp) == STATEMENT_LIST)
    copy_statement_list (tp);
  else if (TREE_CODE (*tp) == SAVE_EXPR
           || TREE_CODE (*tp) == TARGET_EXPR)
    remap_save_expr (tp, id->decl_map, walk_subtrees);
  else if (TREE_CODE (*tp) == LABEL_DECL
           && (! DECL_CONTEXT (*tp)
               || decl_function_context (*tp) == id->src_fn))
    /* These may need to be remapped for EH handling.  */
    *tp = remap_decl (*tp, id);
  else if (TREE_CODE (*tp) == BIND_EXPR)
    copy_bind_expr (tp, walk_subtrees, id);
  /* Types may need remapping as well.  */
  else if (TYPE_P (*tp))
    *tp = remap_type (*tp, id);

  /* If this is a constant, we have to copy the node iff the type will be
     remapped.  copy_tree_r will not copy a constant.  */
  else if (CONSTANT_CLASS_P (*tp))
    {
      tree new_type = remap_type (TREE_TYPE (*tp), id);

      if (new_type == TREE_TYPE (*tp))
        *walk_subtrees = 0;

      else if (TREE_CODE (*tp) == INTEGER_CST)
        *tp = build_int_cst_wide (new_type, TREE_INT_CST_LOW (*tp),
                                  TREE_INT_CST_HIGH (*tp));
      else
        {
          *tp = copy_node (*tp);
          TREE_TYPE (*tp) = new_type;
        }
    }

  /* Otherwise, just copy the node.  Note that copy_tree_r already
     knows not to copy VAR_DECLs, etc., so this is safe.  */
  else
    {
      /* Here we handle trees that are not completely rewritten.
         First we detect some inlining-induced bogosities for
         discarding.  */
      if (TREE_CODE (*tp) == MODIFY_EXPR
          && TREE_OPERAND (*tp, 0) == TREE_OPERAND (*tp, 1)
          && (auto_var_in_fn_p (TREE_OPERAND (*tp, 0), fn)))
        {
          /* Some assignments VAR = VAR; don't generate any rtl code
             and thus don't count as variable modification.  Avoid
             keeping bogosities like 0 = 0.  */
          tree decl = TREE_OPERAND (*tp, 0), value;
          tree *n;

          n = (tree *) pointer_map_contains (id->decl_map, decl);
          if (n)
            {
              value = *n;
              STRIP_TYPE_NOPS (value);
              if (TREE_CONSTANT (value) || TREE_READONLY (value))
                {
                  *tp = build_empty_stmt (EXPR_LOCATION (*tp));
                  return copy_tree_body_r (tp, walk_subtrees, data);
                }
            }
        }
      else if (TREE_CODE (*tp) == INDIRECT_REF)
        {
          /* Get rid of *& from inline substitutions that can happen when a
             pointer argument is an ADDR_EXPR.  */
          tree decl = TREE_OPERAND (*tp, 0);
          tree *n;

          n = (tree *) pointer_map_contains (id->decl_map, decl);
          if (n)
            {
              tree new_tree;
              tree old;
              /* If we happen to get an ADDR_EXPR in n->value, strip
                 it manually here as we'll eventually get ADDR_EXPRs
                 which lie about their types pointed to.  In this case
                 build_fold_indirect_ref wouldn't strip the INDIRECT_REF,
                 but we absolutely rely on that.  As fold_indirect_ref
                 does other useful transformations, try that first, though.  */
              tree type = TREE_TYPE (TREE_TYPE (*n));
              if (id->do_not_unshare)
                new_tree = *n;
              else
                new_tree = unshare_expr (*n);
              old = *tp;
              *tp = gimple_fold_indirect_ref (new_tree);
              if (! *tp)
                {
                  if (TREE_CODE (new_tree) == ADDR_EXPR)
                    {
                      *tp = fold_indirect_ref_1 (EXPR_LOCATION (new_tree),
                                                 type, new_tree);
                      /* ???  We should either assert here or build
                         a VIEW_CONVERT_EXPR instead of blindly leaking
                         incompatible types to our IL.  */
                      if (! *tp)
                        *tp = TREE_OPERAND (new_tree, 0);
                    }
                  else
                    {
                      *tp = build1 (INDIRECT_REF, type, new_tree);
                      TREE_THIS_VOLATILE (*tp) = TREE_THIS_VOLATILE (old);
                      TREE_SIDE_EFFECTS (*tp) = TREE_SIDE_EFFECTS (old);
                      TREE_READONLY (*tp) = TREE_READONLY (old);
                      TREE_THIS_NOTRAP (*tp) = TREE_THIS_NOTRAP (old);
                    }
                }
              *walk_subtrees = 0;
              return NULL;
            }
        }
      else if (TREE_CODE (*tp) == MEM_REF)
        {
          /* We need to re-canonicalize MEM_REFs from inline substitutions
             that can happen when a pointer argument is an ADDR_EXPR.  */
          tree decl = TREE_OPERAND (*tp, 0);
          tree *n;

          n = (tree *) pointer_map_contains (id->decl_map, decl);
          if (n)
            {
              tree old = *tp;
              *tp = fold_build2 (MEM_REF, TREE_TYPE (*tp),
                                 unshare_expr (*n), TREE_OPERAND (*tp, 1));
              TREE_THIS_VOLATILE (*tp) = TREE_THIS_VOLATILE (old);
              TREE_NO_WARNING (*tp) = TREE_NO_WARNING (old);
              *walk_subtrees = 0;
              return NULL;
            }
        }

      /* Here is the "usual case".  Copy this tree node, and then
         tweak some special cases.  */
      copy_tree_r (tp, walk_subtrees, NULL);

      /* Global variables we haven't seen yet need to go into referenced
         vars, unless they are referenced only from types or debug stmts.  */
      if (gimple_in_ssa_p (cfun)
          && TREE_CODE (*tp) == VAR_DECL
          && id->remapping_type_depth == 0
          && !processing_debug_stmt)
        add_referenced_var (*tp);

      /* If EXPR has a block defined, map it to the newly constructed block.
         When inlining we want EXPRs without a block to appear in the block
         of the function call if we are not remapping a type.  */
      if (EXPR_P (*tp))
        {
          new_block = id->remapping_type_depth == 0 ? id->block : NULL;
          if (TREE_BLOCK (*tp))
            {
              tree *n;
              n = (tree *) pointer_map_contains (id->decl_map,
                                                 TREE_BLOCK (*tp));
              gcc_assert (n || id->remapping_type_depth != 0);
              if (n)
                new_block = *n;
            }
          TREE_BLOCK (*tp) = new_block;
        }

      if (TREE_CODE (*tp) != OMP_CLAUSE)
        TREE_TYPE (*tp) = remap_type (TREE_TYPE (*tp), id);

      /* The copied TARGET_EXPR has never been expanded, even if the
         original node was expanded already.  */
      if (TREE_CODE (*tp) == TARGET_EXPR && TREE_OPERAND (*tp, 3))
        {
          TREE_OPERAND (*tp, 1) = TREE_OPERAND (*tp, 3);
          TREE_OPERAND (*tp, 3) = NULL_TREE;
        }

      /* Variable substitution need not be simple.  In particular, the
         INDIRECT_REF substitution above.  Make sure that TREE_CONSTANT
         and friends are up-to-date.  */
      else if (TREE_CODE (*tp) == ADDR_EXPR)
        {
          int invariant = is_gimple_min_invariant (*tp);
          walk_tree (&TREE_OPERAND (*tp, 0), copy_tree_body_r, id, NULL);

          /* Handle the case where we substituted an INDIRECT_REF
             into the operand of the ADDR_EXPR.  */
          if (TREE_CODE (TREE_OPERAND (*tp, 0)) == INDIRECT_REF)
            *tp = TREE_OPERAND (TREE_OPERAND (*tp, 0), 0);
          else
            recompute_tree_invariant_for_addr_expr (*tp);

          /* If this used to be invariant, but is not any longer,
             then regimplification is probably needed.  */
          if (invariant && !is_gimple_min_invariant (*tp))
            id->regimplify = true;

          *walk_subtrees = 0;
        }
    }

  /* Keep iterating.  */
  return NULL_TREE;
}

/* Helper for remap_gimple_stmt.  Given an EH region number for the
   source function, map that to the duplicate EH region number in
   the destination function.  */

static int
remap_eh_region_nr (int old_nr, copy_body_data *id)
{
  eh_region old_r, new_r;
  void **slot;

  old_r = get_eh_region_from_number_fn (id->src_cfun, old_nr);
  slot = pointer_map_contains (id->eh_map, old_r);
  new_r = (eh_region) *slot;

  return new_r->index;
}

/* Similar, but operate on INTEGER_CSTs.  */

static tree
remap_eh_region_tree_nr (tree old_t_nr, copy_body_data *id)
{
  int old_nr, new_nr;

  old_nr = tree_low_cst (old_t_nr, 0);
  new_nr = remap_eh_region_nr (old_nr, id);

  return build_int_cst (integer_type_node, new_nr);
}

/* Helper for copy_bb.  Remap statement STMT using the inlining
   information in ID.  Return the new statement copy.  */

static gimple
remap_gimple_stmt (gimple stmt, copy_body_data *id)
{
  gimple copy = NULL;
  struct walk_stmt_info wi;
  tree new_block;
  bool skip_first = false;

  /* Begin by recognizing trees that we'll completely rewrite for the
     inlining context.  Our output for these trees is completely
     different from our input (e.g. RETURN_EXPR is deleted, and morphs
     into an edge).  Further down, we'll handle trees that get
     duplicated and/or tweaked.  */

  /* When requested, GIMPLE_RETURNs should be transformed to just the
     contained GIMPLE_ASSIGN.  The branch semantics of the return will
     be handled elsewhere by manipulating the CFG rather than the
     statement.  */
  if (gimple_code (stmt) == GIMPLE_RETURN && id->transform_return_to_modify)
    {
      tree retval = gimple_return_retval (stmt);

      /* If we're returning something, just turn that into an
         assignment into the equivalent of the original RESULT_DECL.
         If RETVAL is just the result decl, the result decl has
         already been set (e.g. a recent "foo (&result_decl, ...)");
         just toss the entire GIMPLE_RETURN.  */
      if (retval
          && (TREE_CODE (retval) != RESULT_DECL
              && (TREE_CODE (retval) != SSA_NAME
                  || TREE_CODE (SSA_NAME_VAR (retval)) != RESULT_DECL)))
        {
          copy = gimple_build_assign (id->retvar, retval);
          /* id->retvar is already substituted.  Skip it on later remapping.  */
          skip_first = true;
        }
      else
        return gimple_build_nop ();
    }
  else if (gimple_has_substatements (stmt))
    {
      gimple_seq s1, s2;

      /* When cloning bodies from the C++ front end, we will be handed bodies
         in High GIMPLE form.  Handle here all the High GIMPLE statements that
         have embedded statements.  */
      switch (gimple_code (stmt))
        {
        case GIMPLE_BIND:
          copy = copy_gimple_bind (stmt, id);
          break;

        case GIMPLE_CATCH:
          s1 = remap_gimple_seq (gimple_catch_handler (stmt), id);
          copy = gimple_build_catch (gimple_catch_types (stmt), s1);
          break;

        case GIMPLE_EH_FILTER:
          s1 = remap_gimple_seq (gimple_eh_filter_failure (stmt), id);
          copy = gimple_build_eh_filter (gimple_eh_filter_types (stmt), s1);
          break;

        case GIMPLE_TRY:
          s1 = remap_gimple_seq (gimple_try_eval (stmt), id);
          s2 = remap_gimple_seq (gimple_try_cleanup (stmt), id);
          copy = gimple_build_try (s1, s2, gimple_try_kind (stmt));
          break;

        case GIMPLE_WITH_CLEANUP_EXPR:
          s1 = remap_gimple_seq (gimple_wce_cleanup (stmt), id);
          copy = gimple_build_wce (s1);
          break;

        case GIMPLE_OMP_PARALLEL:
          s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
          copy = gimple_build_omp_parallel
                   (s1,
                    gimple_omp_parallel_clauses (stmt),
                    gimple_omp_parallel_child_fn (stmt),
                    gimple_omp_parallel_data_arg (stmt));
          break;

        case GIMPLE_OMP_TASK:
          s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
          copy = gimple_build_omp_task
                   (s1,
                    gimple_omp_task_clauses (stmt),
                    gimple_omp_task_child_fn (stmt),
                    gimple_omp_task_data_arg (stmt),
                    gimple_omp_task_copy_fn (stmt),
                    gimple_omp_task_arg_size (stmt),
                    gimple_omp_task_arg_align (stmt));
          break;

        case GIMPLE_OMP_FOR:
          s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
          s2 = remap_gimple_seq (gimple_omp_for_pre_body (stmt), id);
          copy = gimple_build_omp_for (s1, gimple_omp_for_clauses (stmt),
                                       gimple_omp_for_collapse (stmt), s2);
          {
            size_t i;
            for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
              {
                gimple_omp_for_set_index (copy, i,
                                          gimple_omp_for_index (stmt, i));
                gimple_omp_for_set_initial (copy, i,
                                            gimple_omp_for_initial (stmt, i));
                gimple_omp_for_set_final (copy, i,
                                          gimple_omp_for_final (stmt, i));
                gimple_omp_for_set_incr (copy, i,
                                         gimple_omp_for_incr (stmt, i));
                gimple_omp_for_set_cond (copy, i,
                                         gimple_omp_for_cond (stmt, i));
              }
          }
          break;

        case GIMPLE_OMP_MASTER:
          s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
          copy = gimple_build_omp_master (s1);
          break;

        case GIMPLE_OMP_ORDERED:
          s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
          copy = gimple_build_omp_ordered (s1);
          break;

        case GIMPLE_OMP_SECTION:
          s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
          copy = gimple_build_omp_section (s1);
          break;

        case GIMPLE_OMP_SECTIONS:
          s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
          copy = gimple_build_omp_sections
                   (s1, gimple_omp_sections_clauses (stmt));
          break;

        case GIMPLE_OMP_SINGLE:
          s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
          copy = gimple_build_omp_single
                   (s1, gimple_omp_single_clauses (stmt));
          break;

        case GIMPLE_OMP_CRITICAL:
          s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
          copy
            = gimple_build_omp_critical (s1, gimple_omp_critical_name (stmt));
          break;

        case GIMPLE_TRANSACTION:
          s1 = remap_gimple_seq (gimple_transaction_body (stmt), id);
          copy = gimple_build_transaction (s1, gimple_transaction_label (stmt));
          gimple_transaction_set_subcode (copy, gimple_transaction_subcode (stmt));
          break;

        default:
          gcc_unreachable ();
        }
    }
  else
    {
      if (gimple_assign_copy_p (stmt)
          && gimple_assign_lhs (stmt) == gimple_assign_rhs1 (stmt)
          && auto_var_in_fn_p (gimple_assign_lhs (stmt), id->src_fn))
        {
          /* Here we handle statements that are not completely rewritten.
             First we detect some inlining-induced bogosities for
             discarding.  */

          /* Some assignments VAR = VAR; don't generate any rtl code
             and thus don't count as variable modification.  Avoid
             keeping bogosities like 0 = 0.  */
          tree decl = gimple_assign_lhs (stmt), value;
          tree *n;

          n = (tree *) pointer_map_contains (id->decl_map, decl);
          if (n)
            {
              value = *n;
              STRIP_TYPE_NOPS (value);
              if (TREE_CONSTANT (value) || TREE_READONLY (value))
                return gimple_build_nop ();
            }
        }

      if (gimple_debug_bind_p (stmt))
        {
          copy = gimple_build_debug_bind (gimple_debug_bind_get_var (stmt),
                                          gimple_debug_bind_get_value (stmt),
                                          stmt);
          VEC_safe_push (gimple, heap, id->debug_stmts, copy);
          return copy;
        }
      if (gimple_debug_source_bind_p (stmt))
        {
          copy = gimple_build_debug_source_bind
                   (gimple_debug_source_bind_get_var (stmt),
                    gimple_debug_source_bind_get_value (stmt), stmt);
          VEC_safe_push (gimple, heap, id->debug_stmts, copy);
          return copy;
        }

      /* Create a new deep copy of the statement.  */
      copy = gimple_copy (stmt);

      /* Remap the region numbers for __builtin_eh_{pointer,filter},
         RESX and EH_DISPATCH.  */
      if (id->eh_map)
        switch (gimple_code (copy))
          {
          case GIMPLE_CALL:
            {
              tree r, fndecl = gimple_call_fndecl (copy);
              if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
                switch (DECL_FUNCTION_CODE (fndecl))
                  {
                  case BUILT_IN_EH_COPY_VALUES:
                    r = gimple_call_arg (copy, 1);
                    r = remap_eh_region_tree_nr (r, id);
                    gimple_call_set_arg (copy, 1, r);
                    /* FALLTHRU */

                  case BUILT_IN_EH_POINTER:
                  case BUILT_IN_EH_FILTER:
                    r = gimple_call_arg (copy, 0);
                    r = remap_eh_region_tree_nr (r, id);
                    gimple_call_set_arg (copy, 0, r);
                    break;

                  default:
                    break;
                  }

              /* Reset alias info if we didn't apply measures to
                 keep it valid over inlining by setting DECL_PT_UID.  */
              if (!id->src_cfun->gimple_df
                  || !id->src_cfun->gimple_df->ipa_pta)
                gimple_call_reset_alias_info (copy);
            }
            break;

          case GIMPLE_RESX:
            {
              int r = gimple_resx_region (copy);
              r = remap_eh_region_nr (r, id);
              gimple_resx_set_region (copy, r);
            }
            break;

          case GIMPLE_EH_DISPATCH:
            {
              int r = gimple_eh_dispatch_region (copy);
              r = remap_eh_region_nr (r, id);
              gimple_eh_dispatch_set_region (copy, r);
            }
            break;

          default:
            break;
          }
    }

  /* If STMT has a block defined, map it to the newly constructed
     block.  When inlining we want statements without a block to
     appear in the block of the function call.  */
  new_block = id->block;
  if (gimple_block (copy))
    {
      tree *n;
      n = (tree *) pointer_map_contains (id->decl_map, gimple_block (copy));
      gcc_assert (n);
      new_block = *n;
    }

  gimple_set_block (copy, new_block);

  if (gimple_debug_bind_p (copy) || gimple_debug_source_bind_p (copy))
    return copy;

  /* Remap all the operands in COPY.  */
  memset (&wi, 0, sizeof (wi));
  wi.info = id;
  if (skip_first)
    walk_tree (gimple_op_ptr (copy, 1), remap_gimple_op_r, &wi, NULL);
  else
    walk_gimple_op (copy, remap_gimple_op_r, &wi);

  /* Clear the copied virtual operands.  We are not remapping them here
     but are going to recreate them from scratch.  */
  if (gimple_has_mem_ops (copy))
    {
      gimple_set_vdef (copy, NULL_TREE);
      gimple_set_vuse (copy, NULL_TREE);
    }

  return copy;
}

/* Copy basic block, scale profile accordingly.  Edges will be taken care of
   later.  */

static basic_block
copy_bb (copy_body_data *id, basic_block bb, int frequency_scale,
         gcov_type count_scale)
{
  gimple_stmt_iterator gsi, copy_gsi, seq_gsi;
  basic_block copy_basic_block;
  tree decl;
  gcov_type freq;
  basic_block prev;

  /* Search for previous copied basic block.  */
  prev = bb->prev_bb;
  while (!prev->aux)
    prev = prev->prev_bb;

  /* create_basic_block() will append every new block to
     basic_block_info automatically.  */
  copy_basic_block = create_basic_block (NULL, (void *) 0,
                                         (basic_block) prev->aux);
  copy_basic_block->count = bb->count * count_scale / REG_BR_PROB_BASE;

  /* We are going to rebuild frequencies from scratch.  These values
     are of only minor importance for driving canonicalize_loop_headers.  */
  freq = ((gcov_type)bb->frequency * frequency_scale / REG_BR_PROB_BASE);

  /* We recompute frequencies after inlining, so this is quite safe.  */
  if (freq > BB_FREQ_MAX)
    freq = BB_FREQ_MAX;
  copy_basic_block->frequency = freq;

  copy_gsi = gsi_start_bb (copy_basic_block);

  for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      gimple stmt = gsi_stmt (gsi);
      gimple orig_stmt = stmt;

      id->regimplify = false;
      stmt = remap_gimple_stmt (stmt, id);
      if (gimple_nop_p (stmt))
        continue;

      gimple_duplicate_stmt_histograms (cfun, stmt, id->src_cfun, orig_stmt);
      seq_gsi = copy_gsi;

      /* With return slot optimization we can end up with
         non-gimple (foo *)&this->m, fix that here.  */
      if (is_gimple_assign (stmt)
          && gimple_assign_rhs_code (stmt) == NOP_EXPR
          && !is_gimple_val (gimple_assign_rhs1 (stmt)))
        {
          tree new_rhs;
          new_rhs = force_gimple_operand_gsi (&seq_gsi,
                                              gimple_assign_rhs1 (stmt),
                                              true, NULL, false,
                                              GSI_CONTINUE_LINKING);
          gimple_assign_set_rhs1 (stmt, new_rhs);
          id->regimplify = false;
        }

      gsi_insert_after (&seq_gsi, stmt, GSI_NEW_STMT);

      if (id->regimplify)
        gimple_regimplify_operands (stmt, &seq_gsi);

      /* If copy_basic_block has been empty at the start of this iteration,
         call gsi_start_bb again to get at the newly added statements.  */
      if (gsi_end_p (copy_gsi))
        copy_gsi = gsi_start_bb (copy_basic_block);
      else
        gsi_next (&copy_gsi);

      /* Process the new statement.  The call to gimple_regimplify_operands
         possibly turned the statement into multiple statements, we
         need to process all of them.  */
      do
        {
          tree fn;

          stmt = gsi_stmt (copy_gsi);
          if (is_gimple_call (stmt)
              && gimple_call_va_arg_pack_p (stmt)
              && id->gimple_call)
            {
              /* __builtin_va_arg_pack () should be replaced by
                 all arguments corresponding to ... in the caller.  */
              tree p;
              gimple new_call;
              VEC(tree, heap) *argarray;
              size_t nargs = gimple_call_num_args (id->gimple_call);
              size_t n;

              for (p = DECL_ARGUMENTS (id->src_fn); p; p = DECL_CHAIN (p))
                nargs--;

              /* Create the new array of arguments.  */
              n = nargs + gimple_call_num_args (stmt);
              argarray = VEC_alloc (tree, heap, n);
              VEC_safe_grow (tree, heap, argarray, n);

              /* Copy all the arguments before '...'  */
              memcpy (VEC_address (tree, argarray),
                      gimple_call_arg_ptr (stmt, 0),
                      gimple_call_num_args (stmt) * sizeof (tree));

              /* Append the arguments passed in '...'  */
              memcpy (VEC_address (tree, argarray) + gimple_call_num_args (stmt),
                      gimple_call_arg_ptr (id->gimple_call, 0)
                        + (gimple_call_num_args (id->gimple_call) - nargs),
                      nargs * sizeof (tree));

              new_call = gimple_build_call_vec (gimple_call_fn (stmt),
                                                argarray);

              VEC_free (tree, heap, argarray);

              /* Copy all GIMPLE_CALL flags, location and block, except
                 GF_CALL_VA_ARG_PACK.  */
              gimple_call_copy_flags (new_call, stmt);
              gimple_call_set_va_arg_pack (new_call, false);
              gimple_set_location (new_call, gimple_location (stmt));
              gimple_set_block (new_call, gimple_block (stmt));
              gimple_call_set_lhs (new_call, gimple_call_lhs (stmt));

              gsi_replace (&copy_gsi, new_call, false);
              stmt = new_call;
            }
          else if (is_gimple_call (stmt)
                   && id->gimple_call
                   && (decl = gimple_call_fndecl (stmt))
                   && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
                   && DECL_FUNCTION_CODE (decl) == BUILT_IN_VA_ARG_PACK_LEN)
            {
              /* __builtin_va_arg_pack_len () should be replaced by
                 the number of anonymous arguments.  */
              size_t nargs = gimple_call_num_args (id->gimple_call);
              tree count, p;
              gimple new_stmt;

              for (p = DECL_ARGUMENTS (id->src_fn); p; p = DECL_CHAIN (p))
                nargs--;

              count = build_int_cst (integer_type_node, nargs);
              new_stmt = gimple_build_assign (gimple_call_lhs (stmt), count);
              gsi_replace (&copy_gsi, new_stmt, false);
              stmt = new_stmt;
            }

          /* Statements produced by inlining can be unfolded, especially
             when we constant propagated some operands.  We can't fold
             them right now for two reasons:
             1) folding requires SSA_NAME_DEF_STMTs to be correct
             2) we can't change function calls to builtins.
             So we just mark the statement for later folding.  We mark
             all new statements, instead of just the statements that have
             changed by some nontrivial substitution, so that even statements
             made foldable indirectly are updated.  If this turns out to be
             expensive, copy_body can be told to watch for nontrivial
             changes.  */
          if (id->statements_to_fold)
            pointer_set_insert (id->statements_to_fold, stmt);

          /* We're duplicating a CALL_EXPR.  Find any corresponding
             callgraph edges and update or duplicate them.  */
          if (is_gimple_call (stmt))
            {
              struct cgraph_edge *edge;
              int flags;

              switch (id->transform_call_graph_edges)
                {
                case CB_CGE_DUPLICATE:
                  edge = cgraph_edge (id->src_node, orig_stmt);
                  if (edge)
                    {
                      int edge_freq = edge->frequency;
                      edge = cgraph_clone_edge (edge, id->dst_node, stmt,
                                                gimple_uid (stmt),
                                                REG_BR_PROB_BASE, CGRAPH_FREQ_BASE,
                                                true);
                      /* We could also just rescale the frequency, but
                         doing so would introduce roundoff errors and make
                         the verifier unhappy.  */
                      edge->frequency
                        = compute_call_stmt_bb_frequency (id->dst_node->decl,
                                                          copy_basic_block);
                      if (dump_file
                          && profile_status_for_function (cfun) != PROFILE_ABSENT
                          && (edge_freq > edge->frequency + 10
                              || edge_freq < edge->frequency - 10))
                        {
                          fprintf (dump_file, "Edge frequency estimated by "
                                   "cgraph %i diverges from inliner's estimate %i\n",
                                   edge_freq,
                                   edge->frequency);
                          fprintf (dump_file,
                                   "Orig bb: %i, orig bb freq %i, new bb freq %i\n",
                                   bb->index,
                                   bb->frequency,
                                   copy_basic_block->frequency);
                        }
                      stmt = cgraph_redirect_edge_call_stmt_to_callee (edge);
                    }
                  break;

                case CB_CGE_MOVE_CLONES:
                  cgraph_set_call_stmt_including_clones (id->dst_node,
                                                         orig_stmt, stmt);
                  edge = cgraph_edge (id->dst_node, stmt);
                  break;

                case CB_CGE_MOVE:
                  edge = cgraph_edge (id->dst_node, orig_stmt);
                  if (edge)
                    cgraph_set_call_stmt (edge, stmt);
                  break;

                default:
                  gcc_unreachable ();
                }

              /* Constant propagation on arguments done during inlining
                 may create a new direct call.  Produce an edge for it.  */
              if ((!edge
                   || (edge->indirect_inlining_edge
                       && id->transform_call_graph_edges == CB_CGE_MOVE_CLONES))
                  && id->dst_node->analyzed
                  && (fn = gimple_call_fndecl (stmt)) != NULL)
                {
                  struct cgraph_node *dest = cgraph_get_node (fn);

                  /* We have a missing edge in the callgraph.  This can happen
                     when previous inlining turned an indirect call into a
                     direct call by constant propagating arguments or we are
                     producing a dead clone (for further cloning).  In all
                     other cases we hit a bug (incorrect node sharing is the
                     most common reason for missing edges).  */
                  gcc_assert (dest->needed || !dest->analyzed
                              || dest->address_taken
                              || !id->src_node->analyzed
                              || !id->dst_node->analyzed);
                  if (id->transform_call_graph_edges == CB_CGE_MOVE_CLONES)
                    cgraph_create_edge_including_clones
                      (id->dst_node, dest, orig_stmt, stmt, bb->count,
                       compute_call_stmt_bb_frequency (id->dst_node->decl,
                                                       copy_basic_block),
                       CIF_ORIGINALLY_INDIRECT_CALL);
                  else
                    cgraph_create_edge (id->dst_node, dest, stmt,
                                        bb->count,
                                        compute_call_stmt_bb_frequency
                                          (id->dst_node->decl, copy_basic_block))->inline_failed
                      = CIF_ORIGINALLY_INDIRECT_CALL;
                  if (dump_file)
                    {
                      fprintf (dump_file, "Created new direct edge to %s\n",
                               cgraph_node_name (dest));
                    }
                }

              flags = gimple_call_flags (stmt);
              if (flags & ECF_MAY_BE_ALLOCA)
                cfun->calls_alloca = true;
              if (flags & ECF_RETURNS_TWICE)
                cfun->calls_setjmp = true;
            }

          maybe_duplicate_eh_stmt_fn (cfun, stmt, id->src_cfun, orig_stmt,
                                      id->eh_map, id->eh_lp_nr);

          if (gimple_in_ssa_p (cfun) && !is_gimple_debug (stmt))
            {
              ssa_op_iter i;
              tree def;

              find_new_referenced_vars (gsi_stmt (copy_gsi));
              FOR_EACH_SSA_TREE_OPERAND (def, stmt, i, SSA_OP_DEF)
                if (TREE_CODE (def) == SSA_NAME)
                  SSA_NAME_DEF_STMT (def) = stmt;
            }

          gsi_next (&copy_gsi);
        }
      while (!gsi_end_p (copy_gsi));

      copy_gsi = gsi_last_bb (copy_basic_block);
    }

  return copy_basic_block;
}
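
/* Illustrative example of the va_arg_pack handling above (not GCC
   code; f and g are hypothetical).  Given a callee such as

       static inline int f (int a, ...)
       { return g (a, __builtin_va_arg_pack ()); }

   inlining the call f (1, 2, 3) rewrites the inner call to
   g (1, 2, 3), and a use of __builtin_va_arg_pack_len () in f would
   be replaced by the constant 2.  */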
1822 /* Inserting a Single Entry Multiple Exit region in SSA form into code in SSA
1823 form is quite easy, since the dominator relationship for the old basic blocks
1824 does not change.
1826 There is however an exception: inlining might change the dominator relation
1827 across EH edges from basic blocks within the inlined function to
1828 landing pads in the function we inline into.
1830 The function fills in the PHI_RESULTs of such PHI nodes if they refer
1831 to gimple regs. Otherwise, the function marks the PHI_RESULTs of such
1832 PHI nodes for renaming. For non-gimple regs, renaming is safe: the
1833 EH edges are abnormal and SSA_NAME_OCCURS_IN_ABNORMAL_PHI must be
1834 set, and this means that there will be no overlapping live ranges
1835 for the underlying symbol.
1837 This might change in the future if we allow redirecting of EH edges and
1838 we might then want to change the way we build the CFG pre-inlining to
1839 include all the possible edges. */
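/* For illustration, a hypothetical sketch: suppose an inlined block B
   can throw to a landing pad L in the caller, and L starts with
   x_3 = PHI <x_1(E1), x_2(E2)>. The copy of B gains a new EH edge
   into L, so the PHI needs an argument for that edge; it is taken
   from the corresponding edge leaving RET_BB, which by construction
   carries the same EH/abnormal flags. */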
1840 static void
1841 update_ssa_across_abnormal_edges (basic_block bb, basic_block ret_bb,
1842 bool can_throw, bool nonlocal_goto)
1844 edge e;
1845 edge_iterator ei;
1847 FOR_EACH_EDGE (e, ei, bb->succs)
1848 if (!e->dest->aux
1849 || ((basic_block)e->dest->aux)->index == ENTRY_BLOCK)
1851 gimple phi;
1852 gimple_stmt_iterator si;
1854 if (!nonlocal_goto)
1855 gcc_assert (e->flags & EDGE_EH);
1857 if (!can_throw)
1858 gcc_assert (!(e->flags & EDGE_EH));
1860 for (si = gsi_start_phis (e->dest); !gsi_end_p (si); gsi_next (&si))
1862 edge re;
1864 phi = gsi_stmt (si);
1866 /* There shouldn't be any PHI nodes in the ENTRY_BLOCK. */
1867 gcc_assert (!e->dest->aux);
1869 gcc_assert ((e->flags & EDGE_EH)
1870 || SSA_NAME_OCCURS_IN_ABNORMAL_PHI (PHI_RESULT (phi)));
1872 if (!is_gimple_reg (PHI_RESULT (phi)))
1874 mark_sym_for_renaming (SSA_NAME_VAR (PHI_RESULT (phi)));
1875 continue;
1878 re = find_edge (ret_bb, e->dest);
1879 gcc_assert (re);
1880 gcc_assert ((re->flags & (EDGE_EH | EDGE_ABNORMAL))
1881 == (e->flags & (EDGE_EH | EDGE_ABNORMAL)));
1883 SET_USE (PHI_ARG_DEF_PTR_FROM_EDGE (phi, e),
1884 USE_FROM_PTR (PHI_ARG_DEF_PTR_FROM_EDGE (phi, re)));
1890 /* Copy edges from BB into its copy constructed earlier, scaling the profile
1891 accordingly. Edges will be taken care of later. Assume that aux
1892 pointers point to the copies of each BB. Return true if any
1893 debug stmts are left after a statement that must end the basic block. */
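/* A worked example of the scaling below, with hypothetical numbers and
   assuming REG_BR_PROB_BASE is 10000: if the callee's entry count is
   1000 and the call site's count is 500, count_scale is 5000, so a
   callee edge with count 200 receives count 200 * 5000 / 10000 = 100
   in the copy. Probabilities are relative, so they are copied
   unchanged. */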
1895 static bool
1896 copy_edges_for_bb (basic_block bb, gcov_type count_scale, basic_block ret_bb)
1898 basic_block new_bb = (basic_block) bb->aux;
1899 edge_iterator ei;
1900 edge old_edge;
1901 gimple_stmt_iterator si;
1902 int flags;
1903 bool need_debug_cleanup = false;
1905 /* Use the indices from the original blocks to create edges for the
1906 new ones. */
1907 FOR_EACH_EDGE (old_edge, ei, bb->succs)
1908 if (!(old_edge->flags & EDGE_EH))
1910 edge new_edge;
1912 flags = old_edge->flags;
1914 /* Return edges do get a FALLTHRU flag when they get inlined. */
1915 if (old_edge->dest->index == EXIT_BLOCK && !old_edge->flags
1916 && old_edge->dest->aux != EXIT_BLOCK_PTR)
1917 flags |= EDGE_FALLTHRU;
1918 new_edge = make_edge (new_bb, (basic_block) old_edge->dest->aux, flags);
1919 new_edge->count = old_edge->count * count_scale / REG_BR_PROB_BASE;
1920 new_edge->probability = old_edge->probability;
1923 if (bb->index == ENTRY_BLOCK || bb->index == EXIT_BLOCK)
1924 return false;
1926 for (si = gsi_start_bb (new_bb); !gsi_end_p (si);)
1928 gimple copy_stmt;
1929 bool can_throw, nonlocal_goto;
1931 copy_stmt = gsi_stmt (si);
1932 if (!is_gimple_debug (copy_stmt))
1934 update_stmt (copy_stmt);
1935 if (gimple_in_ssa_p (cfun))
1936 mark_symbols_for_renaming (copy_stmt);
1939 /* Do this before the possible split_block. */
1940 gsi_next (&si);
1942 /* If this tree could throw an exception, there are two
1943 cases where we need to add abnormal edge(s): the
1944 tree wasn't in a region and there is a "current
1945 region" in the caller; or the original tree had
1946 EH edges. In both cases split the block after the tree,
1947 and add abnormal edge(s) as needed; we need both
1948 those from the callee and the caller.
1949 We check whether the copy can throw, because the const
1950 propagation can change an INDIRECT_REF which throws
1951 into a COMPONENT_REF which doesn't. If the copy
1952 can throw, the original could also throw. */
1953 can_throw = stmt_can_throw_internal (copy_stmt);
1954 nonlocal_goto = stmt_can_make_abnormal_goto (copy_stmt);
1956 if (can_throw || nonlocal_goto)
1958 if (!gsi_end_p (si))
1960 while (!gsi_end_p (si) && is_gimple_debug (gsi_stmt (si)))
1961 gsi_next (&si);
1962 if (gsi_end_p (si))
1963 need_debug_cleanup = true;
1965 if (!gsi_end_p (si))
1966 /* Note that bb's predecessor edges aren't necessarily
1967 right at this point; split_block doesn't care. */
1969 edge e = split_block (new_bb, copy_stmt);
1971 new_bb = e->dest;
1972 new_bb->aux = e->src->aux;
1973 si = gsi_start_bb (new_bb);
1977 if (gimple_code (copy_stmt) == GIMPLE_EH_DISPATCH)
1978 make_eh_dispatch_edges (copy_stmt);
1979 else if (can_throw)
1980 make_eh_edges (copy_stmt);
1982 if (nonlocal_goto)
1983 make_abnormal_goto_edges (gimple_bb (copy_stmt), true);
1985 if ((can_throw || nonlocal_goto)
1986 && gimple_in_ssa_p (cfun))
1987 update_ssa_across_abnormal_edges (gimple_bb (copy_stmt), ret_bb,
1988 can_throw, nonlocal_goto);
1990 return need_debug_cleanup;
1993 /* Copy the PHIs. All blocks and edges are copied, some blocks
1994 were possibly split and new outgoing EH edges inserted.
1995 BB points to the block of the original function and aux pointers
1996 link the original and newly copied blocks. */
1998 static void
1999 copy_phis_for_bb (basic_block bb, copy_body_data *id)
2001 basic_block const new_bb = (basic_block) bb->aux;
2002 edge_iterator ei;
2003 gimple phi;
2004 gimple_stmt_iterator si;
2005 edge new_edge;
2006 bool inserted = false;
2008 for (si = gsi_start (phi_nodes (bb)); !gsi_end_p (si); gsi_next (&si))
2010 tree res, new_res;
2011 gimple new_phi;
2013 phi = gsi_stmt (si);
2014 res = PHI_RESULT (phi);
2015 new_res = res;
2016 if (is_gimple_reg (res))
2018 walk_tree (&new_res, copy_tree_body_r, id, NULL);
2019 SSA_NAME_DEF_STMT (new_res)
2020 = new_phi = create_phi_node (new_res, new_bb);
2021 FOR_EACH_EDGE (new_edge, ei, new_bb->preds)
2023 edge old_edge = find_edge ((basic_block) new_edge->src->aux, bb);
2024 tree arg;
2025 tree new_arg;
2026 tree block = id->block;
2027 edge_iterator ei2;
2029 /* When doing partial cloning, we allow PHIs on the entry block
2030 as long as all the arguments are the same. Find any incoming
2031 edge to get the argument to copy. */
2032 if (!old_edge)
2033 FOR_EACH_EDGE (old_edge, ei2, bb->preds)
2034 if (!old_edge->src->aux)
2035 break;
2037 arg = PHI_ARG_DEF_FROM_EDGE (phi, old_edge);
2038 new_arg = arg;
2039 id->block = NULL_TREE;
2040 walk_tree (&new_arg, copy_tree_body_r, id, NULL);
2041 id->block = block;
2042 gcc_assert (new_arg);
2043 /* With return slot optimization we can end up with
2044 non-gimple (foo *)&this->m, fix that here. */
2045 if (TREE_CODE (new_arg) != SSA_NAME
2046 && TREE_CODE (new_arg) != FUNCTION_DECL
2047 && !is_gimple_val (new_arg))
2049 gimple_seq stmts = NULL;
2050 new_arg = force_gimple_operand (new_arg, &stmts, true, NULL);
2051 gsi_insert_seq_on_edge (new_edge, stmts);
2052 inserted = true;
2054 add_phi_arg (new_phi, new_arg, new_edge,
2055 gimple_phi_arg_location_from_edge (phi, old_edge));
2060 /* Commit the delayed edge insertions. */
2061 if (inserted)
2062 FOR_EACH_EDGE (new_edge, ei, new_bb->preds)
2063 gsi_commit_one_edge_insert (new_edge, NULL);
2067 /* Wrapper for remap_decl so it can be used as a callback. */
2069 static tree
2070 remap_decl_1 (tree decl, void *data)
2072 return remap_decl (decl, (copy_body_data *) data);
2075 /* Build the struct function and associated datastructures for the new clone
2076 NEW_FNDECL to be built. CALLEE_FNDECL is the original. */
2078 static void
2079 initialize_cfun (tree new_fndecl, tree callee_fndecl, gcov_type count)
2081 struct function *src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
2082 gcov_type count_scale;
2084 if (ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count)
2085 count_scale = (REG_BR_PROB_BASE * count
2086 / ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count);
2087 else
2088 count_scale = REG_BR_PROB_BASE;
2090 /* Register specific tree functions. */
2091 gimple_register_cfg_hooks ();
2093 /* Get clean struct function. */
2094 push_struct_function (new_fndecl);
2096 /* We will rebuild these, so just sanity check that they are empty. */
2097 gcc_assert (VALUE_HISTOGRAMS (cfun) == NULL);
2098 gcc_assert (cfun->local_decls == NULL);
2099 gcc_assert (cfun->cfg == NULL);
2100 gcc_assert (cfun->decl == new_fndecl);
2102 /* Copy items we preserve during cloning. */
2103 cfun->static_chain_decl = src_cfun->static_chain_decl;
2104 cfun->nonlocal_goto_save_area = src_cfun->nonlocal_goto_save_area;
2105 cfun->function_end_locus = src_cfun->function_end_locus;
2106 cfun->curr_properties = src_cfun->curr_properties;
2107 cfun->last_verified = src_cfun->last_verified;
2108 cfun->va_list_gpr_size = src_cfun->va_list_gpr_size;
2109 cfun->va_list_fpr_size = src_cfun->va_list_fpr_size;
2110 cfun->has_nonlocal_label = src_cfun->has_nonlocal_label;
2111 cfun->stdarg = src_cfun->stdarg;
2112 cfun->after_inlining = src_cfun->after_inlining;
2113 cfun->can_throw_non_call_exceptions
2114 = src_cfun->can_throw_non_call_exceptions;
2115 cfun->returns_struct = src_cfun->returns_struct;
2116 cfun->returns_pcc_struct = src_cfun->returns_pcc_struct;
2117 cfun->after_tree_profile = src_cfun->after_tree_profile;
2119 init_empty_tree_cfg ();
2121 profile_status_for_function (cfun) = profile_status_for_function (src_cfun);
2122 ENTRY_BLOCK_PTR->count =
2123 (ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count * count_scale /
2124 REG_BR_PROB_BASE);
2125 ENTRY_BLOCK_PTR->frequency
2126 = ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->frequency;
2127 EXIT_BLOCK_PTR->count =
2128 (EXIT_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count * count_scale /
2129 REG_BR_PROB_BASE);
2130 EXIT_BLOCK_PTR->frequency =
2131 EXIT_BLOCK_PTR_FOR_FUNCTION (src_cfun)->frequency;
2132 if (src_cfun->eh)
2133 init_eh_for_function ();
2135 if (src_cfun->gimple_df)
2137 init_tree_ssa (cfun);
2138 cfun->gimple_df->in_ssa_p = true;
2139 init_ssa_operands ();
2141 pop_cfun ();
2144 /* Helper function for copy_cfg_body. Move debug stmts from the end
2145 of NEW_BB to the beginning of successor basic blocks when needed. If the
2146 successor has multiple predecessors, reset the values of the moved
2147 debug stmts, otherwise keep them. */
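/* A hypothetical sketch of the problem: if the copied block ends in
   x_1 = may_throw (); # DEBUG y => x_1
   the debug bind sits after a stmt that must end the block once EH
   edges are added, so it is moved into the successors; on a successor
   with several predecessors the bound value is reset, because it is
   not known to be x_1 on every incoming path. */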
2149 static void
2150 maybe_move_debug_stmts_to_successors (copy_body_data *id, basic_block new_bb)
2152 edge e;
2153 edge_iterator ei;
2154 gimple_stmt_iterator si = gsi_last_nondebug_bb (new_bb);
2156 if (gsi_end_p (si)
2157 || gsi_one_before_end_p (si)
2158 || !(stmt_can_throw_internal (gsi_stmt (si))
2159 || stmt_can_make_abnormal_goto (gsi_stmt (si))))
2160 return;
2162 FOR_EACH_EDGE (e, ei, new_bb->succs)
2164 gimple_stmt_iterator ssi = gsi_last_bb (new_bb);
2165 gimple_stmt_iterator dsi = gsi_after_labels (e->dest);
2166 while (is_gimple_debug (gsi_stmt (ssi)))
2168 gimple stmt = gsi_stmt (ssi), new_stmt;
2169 tree var;
2170 tree value;
2172 /* For the last edge move the debug stmts instead of copying
2173 them. */
2174 if (ei_one_before_end_p (ei))
2176 si = ssi;
2177 gsi_prev (&ssi);
2178 if (!single_pred_p (e->dest) && gimple_debug_bind_p (stmt))
2179 gimple_debug_bind_reset_value (stmt);
2180 gsi_remove (&si, false);
2181 gsi_insert_before (&dsi, stmt, GSI_SAME_STMT);
2182 continue;
2185 if (gimple_debug_bind_p (stmt))
2187 var = gimple_debug_bind_get_var (stmt);
2188 if (single_pred_p (e->dest))
2190 value = gimple_debug_bind_get_value (stmt);
2191 value = unshare_expr (value);
2193 else
2194 value = NULL_TREE;
2195 new_stmt = gimple_build_debug_bind (var, value, stmt);
2197 else if (gimple_debug_source_bind_p (stmt))
2199 var = gimple_debug_source_bind_get_var (stmt);
2200 value = gimple_debug_source_bind_get_value (stmt);
2201 new_stmt = gimple_build_debug_source_bind (var, value, stmt);
2203 else
2204 gcc_unreachable ();
2205 gsi_insert_before (&dsi, new_stmt, GSI_SAME_STMT);
2206 VEC_safe_push (gimple, heap, id->debug_stmts, new_stmt);
2207 gsi_prev (&ssi);
2212 /* Make a copy of the body of FN so that it can be inserted inline in
2213 another function. Walks FN via CFG, returns new fndecl. */
2215 static tree
2216 copy_cfg_body (copy_body_data * id, gcov_type count, int frequency_scale,
2217 basic_block entry_block_map, basic_block exit_block_map,
2218 bitmap blocks_to_copy, basic_block new_entry)
2220 tree callee_fndecl = id->src_fn;
2221 /* Original cfun for the callee, doesn't change. */
2222 struct function *src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
2223 struct function *cfun_to_copy;
2224 basic_block bb;
2225 tree new_fndecl = NULL;
2226 bool need_debug_cleanup = false;
2227 gcov_type count_scale;
2228 int last;
2229 int incoming_frequency = 0;
2230 gcov_type incoming_count = 0;
2232 if (ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count)
2233 count_scale = (REG_BR_PROB_BASE * count
2234 / ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count);
2235 else
2236 count_scale = REG_BR_PROB_BASE;
2238 /* Register specific tree functions. */
2239 gimple_register_cfg_hooks ();
2241 /* If we are inlining just a region of the function, make sure to connect the new entry
2242 to ENTRY_BLOCK_PTR. Since the new entry can be part of a loop, we must compute the
2243 frequency and probability of ENTRY_BLOCK_PTR based on the frequencies and
2244 probabilities of edges incoming from the nonduplicated region. */
2245 if (new_entry)
2247 edge e;
2248 edge_iterator ei;
2250 FOR_EACH_EDGE (e, ei, new_entry->preds)
2251 if (!e->src->aux)
2253 incoming_frequency += EDGE_FREQUENCY (e);
2254 incoming_count += e->count;
2256 incoming_count = incoming_count * count_scale / REG_BR_PROB_BASE;
2257 incoming_frequency
2258 = incoming_frequency * frequency_scale / REG_BR_PROB_BASE;
2259 ENTRY_BLOCK_PTR->count = incoming_count;
2260 ENTRY_BLOCK_PTR->frequency = incoming_frequency;
2263 /* Must have a CFG here at this point. */
2264 gcc_assert (ENTRY_BLOCK_PTR_FOR_FUNCTION
2265 (DECL_STRUCT_FUNCTION (callee_fndecl)));
2267 cfun_to_copy = id->src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
2269 ENTRY_BLOCK_PTR_FOR_FUNCTION (cfun_to_copy)->aux = entry_block_map;
2270 EXIT_BLOCK_PTR_FOR_FUNCTION (cfun_to_copy)->aux = exit_block_map;
2271 entry_block_map->aux = ENTRY_BLOCK_PTR_FOR_FUNCTION (cfun_to_copy);
2272 exit_block_map->aux = EXIT_BLOCK_PTR_FOR_FUNCTION (cfun_to_copy);
2274 /* Duplicate any exception-handling regions. */
2275 if (cfun->eh)
2276 id->eh_map = duplicate_eh_regions (cfun_to_copy, NULL, id->eh_lp_nr,
2277 remap_decl_1, id);
2279 /* Use aux pointers to map the original blocks to their copies. */
2280 FOR_EACH_BB_FN (bb, cfun_to_copy)
2281 if (!blocks_to_copy || bitmap_bit_p (blocks_to_copy, bb->index))
2283 basic_block new_bb = copy_bb (id, bb, frequency_scale, count_scale);
2284 bb->aux = new_bb;
2285 new_bb->aux = bb;
2288 last = last_basic_block;
2290 /* Now that we've duplicated the blocks, duplicate their edges. */
2291 FOR_ALL_BB_FN (bb, cfun_to_copy)
2292 if (!blocks_to_copy
2293 || (bb->index > 0 && bitmap_bit_p (blocks_to_copy, bb->index)))
2294 need_debug_cleanup |= copy_edges_for_bb (bb, count_scale, exit_block_map);
2296 if (new_entry)
2298 edge e = make_edge (entry_block_map, (basic_block)new_entry->aux, EDGE_FALLTHRU);
2299 e->probability = REG_BR_PROB_BASE;
2300 e->count = incoming_count;
2303 if (gimple_in_ssa_p (cfun))
2304 FOR_ALL_BB_FN (bb, cfun_to_copy)
2305 if (!blocks_to_copy
2306 || (bb->index > 0 && bitmap_bit_p (blocks_to_copy, bb->index)))
2307 copy_phis_for_bb (bb, id);
2309 FOR_ALL_BB_FN (bb, cfun_to_copy)
2310 if (bb->aux)
2312 if (need_debug_cleanup
2313 && bb->index != ENTRY_BLOCK
2314 && bb->index != EXIT_BLOCK)
2315 maybe_move_debug_stmts_to_successors (id, (basic_block) bb->aux);
2316 ((basic_block)bb->aux)->aux = NULL;
2317 bb->aux = NULL;
2320 /* Zero out AUX fields of blocks newly created during EH edge
2321 insertion. */
2322 for (; last < last_basic_block; last++)
2324 if (need_debug_cleanup)
2325 maybe_move_debug_stmts_to_successors (id, BASIC_BLOCK (last));
2326 BASIC_BLOCK (last)->aux = NULL;
2328 entry_block_map->aux = NULL;
2329 exit_block_map->aux = NULL;
2331 if (id->eh_map)
2333 pointer_map_destroy (id->eh_map);
2334 id->eh_map = NULL;
2337 return new_fndecl;
2340 /* Copy the debug STMT using ID. We deal with these statements in a
2341 special way: if any variable in their VALUE expression wasn't
2342 remapped yet, we won't remap it, because that would get decl uids
2343 out of sync, causing codegen differences between -g and -g0. If
2344 this arises, we drop the VALUE expression altogether. */
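/* For example (hypothetical names): when copying "# DEBUG d => a + b"
   where "a" was never remapped, remapping it now would allocate a
   fresh decl uid that a -g0 compilation would not allocate, so the
   bind is instead reset and becomes "# DEBUG d => NULL". */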
2346 static void
2347 copy_debug_stmt (gimple stmt, copy_body_data *id)
2349 tree t, *n;
2350 struct walk_stmt_info wi;
2352 t = id->block;
2353 if (gimple_block (stmt))
2355 n = (tree *) pointer_map_contains (id->decl_map, gimple_block (stmt));
2356 if (n)
2357 t = *n;
2359 gimple_set_block (stmt, t);
2361 /* Remap all the operands in COPY. */
2362 memset (&wi, 0, sizeof (wi));
2363 wi.info = id;
2365 processing_debug_stmt = 1;
2367 if (gimple_debug_source_bind_p (stmt))
2368 t = gimple_debug_source_bind_get_var (stmt);
2369 else
2370 t = gimple_debug_bind_get_var (stmt);
2372 if (TREE_CODE (t) == PARM_DECL && id->debug_map
2373 && (n = (tree *) pointer_map_contains (id->debug_map, t)))
2375 gcc_assert (TREE_CODE (*n) == VAR_DECL);
2376 t = *n;
2378 else if (TREE_CODE (t) == VAR_DECL
2379 && !TREE_STATIC (t)
2380 && gimple_in_ssa_p (cfun)
2381 && !pointer_map_contains (id->decl_map, t)
2382 && !var_ann (t))
2383 /* T is a non-localized variable. */;
2384 else
2385 walk_tree (&t, remap_gimple_op_r, &wi, NULL);
2387 if (gimple_debug_bind_p (stmt))
2389 gimple_debug_bind_set_var (stmt, t);
2391 if (gimple_debug_bind_has_value_p (stmt))
2392 walk_tree (gimple_debug_bind_get_value_ptr (stmt),
2393 remap_gimple_op_r, &wi, NULL);
2395 /* Punt if any decl couldn't be remapped. */
2396 if (processing_debug_stmt < 0)
2397 gimple_debug_bind_reset_value (stmt);
2399 else if (gimple_debug_source_bind_p (stmt))
2401 gimple_debug_source_bind_set_var (stmt, t);
2402 walk_tree (gimple_debug_source_bind_get_value_ptr (stmt),
2403 remap_gimple_op_r, &wi, NULL);
2406 processing_debug_stmt = 0;
2408 update_stmt (stmt);
2409 if (gimple_in_ssa_p (cfun))
2410 mark_symbols_for_renaming (stmt);
2413 /* Process deferred debug stmts. In order to give values better odds
2414 of being successfully remapped, we delay the processing of debug
2415 stmts until all other stmts that might require remapping are
2416 processed. */
2418 static void
2419 copy_debug_stmts (copy_body_data *id)
2421 size_t i;
2422 gimple stmt;
2424 if (!id->debug_stmts)
2425 return;
2427 FOR_EACH_VEC_ELT (gimple, id->debug_stmts, i, stmt)
2428 copy_debug_stmt (stmt, id);
2430 VEC_free (gimple, heap, id->debug_stmts);
2433 /* Make a copy of the body of SRC_FN so that it can be inserted inline in
2434 another function. */
2436 static tree
2437 copy_tree_body (copy_body_data *id)
2439 tree fndecl = id->src_fn;
2440 tree body = DECL_SAVED_TREE (fndecl);
2442 walk_tree (&body, copy_tree_body_r, id, NULL);
2444 return body;
2447 /* Make a copy of the body of FN so that it can be inserted inline in
2448 another function. */
2450 static tree
2451 copy_body (copy_body_data *id, gcov_type count, int frequency_scale,
2452 basic_block entry_block_map, basic_block exit_block_map,
2453 bitmap blocks_to_copy, basic_block new_entry)
2455 tree fndecl = id->src_fn;
2456 tree body;
2458 /* If this body has a CFG, walk CFG and copy. */
2459 gcc_assert (ENTRY_BLOCK_PTR_FOR_FUNCTION (DECL_STRUCT_FUNCTION (fndecl)));
2460 body = copy_cfg_body (id, count, frequency_scale, entry_block_map, exit_block_map,
2461 blocks_to_copy, new_entry);
2462 copy_debug_stmts (id);
2464 return body;
2467 /* Return true if VALUE is an ADDR_EXPR of an automatic variable
2468 defined in function FN, or of a data member thereof. */
2470 static bool
2471 self_inlining_addr_expr (tree value, tree fn)
2473 tree var;
2475 if (TREE_CODE (value) != ADDR_EXPR)
2476 return false;
2478 var = get_base_address (TREE_OPERAND (value, 0));
2480 return var && auto_var_in_fn_p (var, fn);
2483 /* Append to BB a debug annotation that binds VAR to VALUE, inheriting
2484 lexical block and line number information from base_stmt, if given,
2485 or from the last stmt of the block otherwise. */
2487 static gimple
2488 insert_init_debug_bind (copy_body_data *id,
2489 basic_block bb, tree var, tree value,
2490 gimple base_stmt)
2492 gimple note;
2493 gimple_stmt_iterator gsi;
2494 tree tracked_var;
2496 if (!gimple_in_ssa_p (id->src_cfun))
2497 return NULL;
2499 if (!MAY_HAVE_DEBUG_STMTS)
2500 return NULL;
2502 tracked_var = target_for_debug_bind (var);
2503 if (!tracked_var)
2504 return NULL;
2506 if (bb)
2508 gsi = gsi_last_bb (bb);
2509 if (!base_stmt && !gsi_end_p (gsi))
2510 base_stmt = gsi_stmt (gsi);
2513 note = gimple_build_debug_bind (tracked_var, value, base_stmt);
2515 if (bb)
2517 if (!gsi_end_p (gsi))
2518 gsi_insert_after (&gsi, note, GSI_SAME_STMT);
2519 else
2520 gsi_insert_before (&gsi, note, GSI_SAME_STMT);
2523 return note;
2526 static void
2527 insert_init_stmt (copy_body_data *id, basic_block bb, gimple init_stmt)
2529 /* If VAR represents a zero-sized variable, it's possible that the
2530 assignment statement may result in no gimple statements. */
2531 if (init_stmt)
2533 gimple_stmt_iterator si = gsi_last_bb (bb);
2535 /* We can end up with init statements that store to a non-register
2536 from a rhs with a conversion. Handle that here by forcing the
2537 rhs into a temporary. gimple_regimplify_operands is not
2538 prepared to do this for us. */
2539 if (!is_gimple_debug (init_stmt)
2540 && !is_gimple_reg (gimple_assign_lhs (init_stmt))
2541 && is_gimple_reg_type (TREE_TYPE (gimple_assign_lhs (init_stmt)))
2542 && gimple_assign_rhs_class (init_stmt) == GIMPLE_UNARY_RHS)
2544 tree rhs = build1 (gimple_assign_rhs_code (init_stmt),
2545 gimple_expr_type (init_stmt),
2546 gimple_assign_rhs1 (init_stmt));
2547 rhs = force_gimple_operand_gsi (&si, rhs, true, NULL_TREE, false,
2548 GSI_NEW_STMT);
2549 gimple_assign_set_rhs_code (init_stmt, TREE_CODE (rhs));
2550 gimple_assign_set_rhs1 (init_stmt, rhs);
2552 gsi_insert_after (&si, init_stmt, GSI_NEW_STMT);
2553 gimple_regimplify_operands (init_stmt, &si);
2554 mark_symbols_for_renaming (init_stmt);
2556 if (!is_gimple_debug (init_stmt) && MAY_HAVE_DEBUG_STMTS)
2558 tree var, def = gimple_assign_lhs (init_stmt);
2560 if (TREE_CODE (def) == SSA_NAME)
2561 var = SSA_NAME_VAR (def);
2562 else
2563 var = def;
2565 insert_init_debug_bind (id, bb, var, def, init_stmt);
2570 /* Initialize parameter P with VALUE. If needed, produce the init statement
2571 at the end of BB. When BB is NULL, we return the init statement to be
2572 output later. */
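/* A hypothetical sketch of the common cases below: for a call "g (3)"
   to "void g (int p)", if we are in SSA form and p's default
   definition p_1 exists, p_1 can simply be mapped to the constant 3
   and no statement is emitted; otherwise a VAR_DECL replacement for
   p is created and an init statement like "p = 3;" is appended to
   the entry block of the inlined body. */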
2573 static gimple
2574 setup_one_parameter (copy_body_data *id, tree p, tree value, tree fn,
2575 basic_block bb, tree *vars)
2577 gimple init_stmt = NULL;
2578 tree var;
2579 tree rhs = value;
2580 tree def = (gimple_in_ssa_p (cfun)
2581 ? gimple_default_def (id->src_cfun, p) : NULL);
2583 if (value
2584 && value != error_mark_node
2585 && !useless_type_conversion_p (TREE_TYPE (p), TREE_TYPE (value)))
2587 /* If we can match up types by promotion/demotion do so. */
2588 if (fold_convertible_p (TREE_TYPE (p), value))
2589 rhs = fold_convert (TREE_TYPE (p), value);
2590 else
2592 /* ??? For valid programs we should not end up here.
2593 Still if we end up with truly mismatched types here, fall back
2594 to using a VIEW_CONVERT_EXPR or a literal zero to not leak invalid
2595 GIMPLE to the following passes. */
2596 if (!is_gimple_reg_type (TREE_TYPE (value))
2597 || TYPE_SIZE (TREE_TYPE (p)) == TYPE_SIZE (TREE_TYPE (value)))
2598 rhs = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (p), value);
2599 else
2600 rhs = build_zero_cst (TREE_TYPE (p));
2604 /* Make an equivalent VAR_DECL. Note that we must NOT remap the type
2605 here since the type of this decl must be visible to the calling
2606 function. */
2607 var = copy_decl_to_var (p, id);
2609 /* We're actually using the newly-created var. */
2610 if (gimple_in_ssa_p (cfun) && TREE_CODE (var) == VAR_DECL)
2611 add_referenced_var (var);
2613 /* Declare this new variable. */
2614 DECL_CHAIN (var) = *vars;
2615 *vars = var;
2617 /* Make gimplifier happy about this variable. */
2618 DECL_SEEN_IN_BIND_EXPR_P (var) = 1;
2620 /* We are eventually using the value - make sure all variables
2621 referenced therein are properly recorded. */
2622 if (value
2623 && gimple_in_ssa_p (cfun)
2624 && TREE_CODE (value) == ADDR_EXPR)
2626 tree base = get_base_address (TREE_OPERAND (value, 0));
2627 if (base && TREE_CODE (base) == VAR_DECL)
2628 add_referenced_var (base);
2631 /* If the parameter is never assigned to and has no SSA_NAMEs created,
2632 we would not need to create a new variable here at all, if it
2633 weren't for debug info. Still, we can just use the argument
2634 value. */
2635 if (TREE_READONLY (p)
2636 && !TREE_ADDRESSABLE (p)
2637 && value && !TREE_SIDE_EFFECTS (value)
2638 && !def)
2640 /* We may produce non-gimple trees by adding NOPs or introduce
2641 invalid sharing when the operand is not really constant.
2642 It is not a big deal to prohibit constant propagation here as
2643 we will constant propagate in the DOM1 pass anyway. */
2644 if (is_gimple_min_invariant (value)
2645 && useless_type_conversion_p (TREE_TYPE (p),
2646 TREE_TYPE (value))
2647 /* We have to be very careful about ADDR_EXPR. Make sure
2648 the base variable isn't a local variable of the inlined
2649 function, e.g., when doing recursive inlining, direct or
2650 mutually-recursive or whatever, which is why we don't
2651 just test whether fn == current_function_decl. */
2652 && ! self_inlining_addr_expr (value, fn))
2654 insert_decl_map (id, p, value);
2655 insert_debug_decl_map (id, p, var);
2656 return insert_init_debug_bind (id, bb, var, value, NULL);
2660 /* Register the VAR_DECL as the equivalent for the PARM_DECL;
2661 that way, when the PARM_DECL is encountered, it will be
2662 automatically replaced by the VAR_DECL. */
2663 insert_decl_map (id, p, var);
2665 /* Even if P was TREE_READONLY, the new VAR should not be.
2666 In the original code, we would have constructed a
2667 temporary, and then the function body would have never
2668 changed the value of P. However, now, we will be
2669 constructing VAR directly. The constructor body may
2670 change its value multiple times as it is being
2671 constructed. Therefore, it must not be TREE_READONLY;
2672 the back-end assumes that a TREE_READONLY variable is
2673 assigned to only once. */
2674 if (TYPE_NEEDS_CONSTRUCTING (TREE_TYPE (p)))
2675 TREE_READONLY (var) = 0;
2677 /* If there is no setup required and we are in SSA, take the easy route
2678 replacing all SSA names representing the function parameter by the
2679 SSA name passed to the function.
2681 We need to construct a map for the variable anyway as it might be used
2682 in different SSA names when the parameter is set in the function.
2684 Do the replacement at -O0 for const arguments replaced by a constant.
2685 This is important for builtin_constant_p and other constructs requiring
2686 a constant argument to be visible in the inlined function body. */
2687 if (gimple_in_ssa_p (cfun) && rhs && def && is_gimple_reg (p)
2688 && (optimize
2689 || (TREE_READONLY (p)
2690 && is_gimple_min_invariant (rhs)))
2691 && (TREE_CODE (rhs) == SSA_NAME
2692 || is_gimple_min_invariant (rhs))
2693 && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (def))
2695 insert_decl_map (id, def, rhs);
2696 return insert_init_debug_bind (id, bb, var, rhs, NULL);
2699 /* If the value of the argument is never used, don't bother
2700 initializing it. */
2701 if (optimize && gimple_in_ssa_p (cfun) && !def && is_gimple_reg (p))
2703 gcc_assert (!value || !TREE_SIDE_EFFECTS (value));
2704 return insert_init_debug_bind (id, bb, var, rhs, NULL);
2707 /* Initialize this VAR_DECL from the equivalent argument. Convert
2708 the argument to the proper type in case it was promoted. */
2709 if (value)
2711 if (rhs == error_mark_node)
2713 insert_decl_map (id, p, var);
2714 return insert_init_debug_bind (id, bb, var, rhs, NULL);
2717 STRIP_USELESS_TYPE_CONVERSION (rhs);
2719 /* We want to use MODIFY_EXPR, not INIT_EXPR here so that we
2720 keep our trees in gimple form. */
2721 if (def && gimple_in_ssa_p (cfun) && is_gimple_reg (p))
2723 def = remap_ssa_name (def, id);
2724 init_stmt = gimple_build_assign (def, rhs);
2725 SSA_NAME_IS_DEFAULT_DEF (def) = 0;
2726 set_default_def (var, NULL);
2728 else
2729 init_stmt = gimple_build_assign (var, rhs);
2731 if (bb && init_stmt)
2732 insert_init_stmt (id, bb, init_stmt);
2734 return init_stmt;
2737 /* Generate code to initialize the parameters of the function at the
2738 top of the stack in ID from the GIMPLE_CALL STMT. */
2740 static void
2741 initialize_inlined_parameters (copy_body_data *id, gimple stmt,
2742 tree fn, basic_block bb)
2744 tree parms;
2745 size_t i;
2746 tree p;
2747 tree vars = NULL_TREE;
2748 tree static_chain = gimple_call_chain (stmt);
2750 /* Figure out what the parameters are. */
2751 parms = DECL_ARGUMENTS (fn);
2753 /* Loop through the parameter declarations, replacing each with an
2754 equivalent VAR_DECL, appropriately initialized. */
2755 for (p = parms, i = 0; p; p = DECL_CHAIN (p), i++)
2757 tree val;
2758 val = i < gimple_call_num_args (stmt) ? gimple_call_arg (stmt, i) : NULL;
2759 setup_one_parameter (id, p, val, fn, bb, &vars);
2761 /* After remapping the parameters, remap their types. This has to be done
2762 in a second loop over all parameters to appropriately remap
2763 variable sized arrays when the size is specified in a
2764 parameter following the array. */
2765 for (p = parms, i = 0; p; p = DECL_CHAIN (p), i++)
2767 tree *varp = (tree *) pointer_map_contains (id->decl_map, p);
2768 if (varp
2769 && TREE_CODE (*varp) == VAR_DECL)
2771 tree def = (gimple_in_ssa_p (cfun) && is_gimple_reg (p)
2772 ? gimple_default_def (id->src_cfun, p) : NULL);
2773 tree var = *varp;
2774 TREE_TYPE (var) = remap_type (TREE_TYPE (var), id);
2775 /* Also remap the default definition if it was remapped
2776 to the default definition of the parameter replacement
2777 by the parameter setup. */
2778 if (def)
2780 tree *defp = (tree *) pointer_map_contains (id->decl_map, def);
2781 if (defp
2782 && TREE_CODE (*defp) == SSA_NAME
2783 && SSA_NAME_VAR (*defp) == var)
2784 TREE_TYPE (*defp) = TREE_TYPE (var);
2789 /* Initialize the static chain. */
2790 p = DECL_STRUCT_FUNCTION (fn)->static_chain_decl;
2791 gcc_assert (fn != current_function_decl);
2792 if (p)
2794 /* No static chain? Seems like a bug in tree-nested.c. */
2795 gcc_assert (static_chain);
2797 setup_one_parameter (id, p, static_chain, fn, bb, &vars);
2800 declare_inline_vars (id->block, vars);
2804 /* Declare a return variable to replace the RESULT_DECL for the
2805 function we are calling. An appropriate DECL_STMT is returned.
2806 The USE_STMT is filled to contain a use of the declaration to
2807 indicate the return value of the function.
2809 RETURN_SLOT, if non-null, is the place in which to store the result. It
2810 is set only for CALL_EXPR_RETURN_SLOT_OPT. MODIFY_DEST, if non-null,
2811 was the LHS of the MODIFY_EXPR to which this call is the RHS.
2813 The return value is a (possibly null) value that holds the result
2814 as seen by the caller. */
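/* A hypothetical sketch: for "y = f ();" MODIFY_DEST is "y". When it
   is safe to reuse, the callee's RESULT_DECL is mapped directly to
   "y" and NULL is returned as the use. Otherwise a temporary is
   declared, the RESULT_DECL is mapped to it, and the temporary is
   returned so the caller can substitute it for the call's LHS. */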
2816 static tree
2817 declare_return_variable (copy_body_data *id, tree return_slot, tree modify_dest,
2818 basic_block entry_bb)
2820 tree callee = id->src_fn;
2821 tree result = DECL_RESULT (callee);
2822 tree callee_type = TREE_TYPE (result);
2823 tree caller_type;
2824 tree var, use;
2826 /* Handle type-mismatches in the function declaration return type
2827 vs. the call expression. */
2828 if (modify_dest)
2829 caller_type = TREE_TYPE (modify_dest);
2830 else
2831 caller_type = TREE_TYPE (TREE_TYPE (callee));
2833 /* We don't need to do anything for functions that don't return anything. */
2834 if (VOID_TYPE_P (callee_type))
2835 return NULL_TREE;
2837 /* If there was a return slot, then the return value is the
2838 dereferenced address of that object. */
2839 if (return_slot)
2841 /* The front end shouldn't have used both return_slot and
2842 a modify expression. */
2843 gcc_assert (!modify_dest);
2844 if (DECL_BY_REFERENCE (result))
2846 tree return_slot_addr = build_fold_addr_expr (return_slot);
2847 STRIP_USELESS_TYPE_CONVERSION (return_slot_addr);
2849 /* We are going to construct *&return_slot and we can't do that
2850 for variables believed to be not addressable.
2852 FIXME: This check can possibly match, because values returned
2853 via return slot optimization are not believed to have address
2854 taken by alias analysis. */
2855 gcc_assert (TREE_CODE (return_slot) != SSA_NAME);
2856 var = return_slot_addr;
2858 else
2860 var = return_slot;
2861 gcc_assert (TREE_CODE (var) != SSA_NAME);
2862 TREE_ADDRESSABLE (var) |= TREE_ADDRESSABLE (result);
2864 if ((TREE_CODE (TREE_TYPE (result)) == COMPLEX_TYPE
2865 || TREE_CODE (TREE_TYPE (result)) == VECTOR_TYPE)
2866 && !DECL_GIMPLE_REG_P (result)
2867 && DECL_P (var))
2868 DECL_GIMPLE_REG_P (var) = 0;
2869 use = NULL;
2870 goto done;
2873 /* All types requiring non-trivial constructors should have been handled. */
2874 gcc_assert (!TREE_ADDRESSABLE (callee_type));
2876 /* Attempt to avoid creating a new temporary variable. */
2877 if (modify_dest
2878 && TREE_CODE (modify_dest) != SSA_NAME)
2880 bool use_it = false;
2882 /* We can't use MODIFY_DEST if there's type promotion involved. */
2883 if (!useless_type_conversion_p (callee_type, caller_type))
2884 use_it = false;
2886 /* ??? If we're assigning to a variable sized type, then we must
2887 reuse the destination variable, because we've no good way to
2888 create variable sized temporaries at this point. */
2889 else if (TREE_CODE (TYPE_SIZE_UNIT (caller_type)) != INTEGER_CST)
2890 use_it = true;
2892 /* If the callee cannot possibly modify MODIFY_DEST, then we can
2893 reuse it as the result of the call directly. Don't do this if
2894 it would promote MODIFY_DEST to addressable. */
2895 else if (TREE_ADDRESSABLE (result))
2896 use_it = false;
2897 else
2899 tree base_m = get_base_address (modify_dest);
2901 /* If the base isn't a decl, then it's a pointer, and we don't
2902 know where that's going to go. */
2903 if (!DECL_P (base_m))
2904 use_it = false;
2905 else if (is_global_var (base_m))
2906 use_it = false;
2907 else if ((TREE_CODE (TREE_TYPE (result)) == COMPLEX_TYPE
2908 || TREE_CODE (TREE_TYPE (result)) == VECTOR_TYPE)
2909 && !DECL_GIMPLE_REG_P (result)
2910 && DECL_GIMPLE_REG_P (base_m))
2911 use_it = false;
2912 else if (!TREE_ADDRESSABLE (base_m))
2913 use_it = true;
2916 if (use_it)
2918 var = modify_dest;
2919 use = NULL;
2920 goto done;
2924 gcc_assert (TREE_CODE (TYPE_SIZE_UNIT (callee_type)) == INTEGER_CST);
2926 var = copy_result_decl_to_var (result, id);
2927 if (gimple_in_ssa_p (cfun))
2928 add_referenced_var (var);
2930 DECL_SEEN_IN_BIND_EXPR_P (var) = 1;
2932 /* Do not have the rest of GCC warn about this variable as it should
2933 not be visible to the user. */
2934 TREE_NO_WARNING (var) = 1;
2936 declare_inline_vars (id->block, var);
2938 /* Build the use expr. If the return type of the function was
2939 promoted, convert it back to the expected type. */
2940 use = var;
2941 if (!useless_type_conversion_p (caller_type, TREE_TYPE (var)))
2943 /* If we can match up types by promotion/demotion do so. */
2944 if (fold_convertible_p (caller_type, var))
2945 use = fold_convert (caller_type, var);
2946 else
2948 /* ??? For valid programs we should not end up here.
2949 Still if we end up with truly mismatched types here, fall back
2950 to using a MEM_REF to not leak invalid GIMPLE to the following
2951 passes. */
2952 /* Prevent var from being written into SSA form. */
2953 if (TREE_CODE (TREE_TYPE (var)) == VECTOR_TYPE
2954 || TREE_CODE (TREE_TYPE (var)) == COMPLEX_TYPE)
2955 DECL_GIMPLE_REG_P (var) = false;
2956 else if (is_gimple_reg_type (TREE_TYPE (var)))
2957 TREE_ADDRESSABLE (var) = true;
2958 use = fold_build2 (MEM_REF, caller_type,
2959 build_fold_addr_expr (var),
2960 build_int_cst (ptr_type_node, 0));
2964 STRIP_USELESS_TYPE_CONVERSION (use);
2966 if (DECL_BY_REFERENCE (result))
2968 TREE_ADDRESSABLE (var) = 1;
2969 var = build_fold_addr_expr (var);
2972 done:
2973 /* Register the VAR_DECL as the equivalent for the RESULT_DECL; that
2974 way, when the RESULT_DECL is encountered, it will be
2975 automatically replaced by the VAR_DECL.
2977 When returning by reference, ensure that RESULT_DECL remaps to
2978 gimple_val. */
2979 if (DECL_BY_REFERENCE (result)
2980 && !is_gimple_val (var))
2982 tree temp = create_tmp_var (TREE_TYPE (result), "retvalptr");
2983 if (gimple_in_ssa_p (id->src_cfun))
2984 add_referenced_var (temp);
2985 insert_decl_map (id, result, temp);
2986 /* When RESULT_DECL is in SSA form, we need to remap and initialize
2987 its default_def SSA_NAME. */
2988 if (gimple_in_ssa_p (id->src_cfun)
2989 && is_gimple_reg (result))
2991 temp = make_ssa_name (temp, NULL);
2992 insert_decl_map (id, gimple_default_def (id->src_cfun, result),
2993 temp);
2995 insert_init_stmt (id, entry_bb, gimple_build_assign (temp, var));
2997 else
2998 insert_decl_map (id, result, var);
3000 /* Remember this so we can ignore it in remap_decls. */
3001 id->retvar = var;
3003 return use;
3006 /* Callback through walk_tree. Determine if a DECL_INITIAL makes reference
3007 to a local label. */
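/* For example (hypothetical), this catches a function containing
   "static void *tbl[] = { &&lab };": the label's address is baked
   into the static initializer and cannot be remapped when the body
   is copied. */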
3009 static tree
3010 has_label_address_in_static_1 (tree *nodep, int *walk_subtrees, void *fnp)
3012 tree node = *nodep;
3013 tree fn = (tree) fnp;
3015 if (TREE_CODE (node) == LABEL_DECL && DECL_CONTEXT (node) == fn)
3016 return node;
3018 if (TYPE_P (node))
3019 *walk_subtrees = 0;
3021 return NULL_TREE;
3024 /* Determine if the function can be copied. If so return NULL. If
3025 not return a string describing the reason for failure. */
3027 static const char *
3028 copy_forbidden (struct function *fun, tree fndecl)
3030 const char *reason = fun->cannot_be_copied_reason;
3031 tree decl;
3032 unsigned ix;
3034 /* Only examine the function once. */
3035 if (fun->cannot_be_copied_set)
3036 return reason;
3038 /* We cannot copy a function that receives a non-local goto
3039 because we cannot remap the destination label used in the
3040 function that is performing the non-local goto. */
3041 /* ??? Actually, this should be possible, if we work at it.
3042 No doubt there's just a handful of places that simply
3043 assume it doesn't happen and don't substitute properly. */
3044 if (fun->has_nonlocal_label)
3046 reason = G_("function %q+F can never be copied "
3047 "because it receives a non-local goto");
3048 goto fail;
3051 FOR_EACH_LOCAL_DECL (fun, ix, decl)
3052 if (TREE_CODE (decl) == VAR_DECL
3053 && TREE_STATIC (decl)
3054 && !DECL_EXTERNAL (decl)
3055 && DECL_INITIAL (decl)
3056 && walk_tree_without_duplicates (&DECL_INITIAL (decl),
3057 has_label_address_in_static_1,
3058 fndecl))
3060 reason = G_("function %q+F can never be copied because it saves "
3061 "address of local label in a static variable");
3062 goto fail;
3065 fail:
3066 fun->cannot_be_copied_reason = reason;
3067 fun->cannot_be_copied_set = true;
3068 return reason;
3072 static const char *inline_forbidden_reason;
3074 /* A callback for walk_gimple_seq to handle statements. Returns non-null
3075 iff a function cannot be inlined. Also sets the reason why. */
3077 static tree
3078 inline_forbidden_p_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
3079 struct walk_stmt_info *wip)
3081 tree fn = (tree) wip->info;
3082 tree t;
3083 gimple stmt = gsi_stmt (*gsi);
3085 switch (gimple_code (stmt))
3087 case GIMPLE_CALL:
3088 /* Refuse to inline an alloca call unless the user explicitly forced it, as
3089 this may change the program's memory overhead drastically when the
3090 function using alloca is called in a loop. In the GCC present in
3091 SPEC2000, inlining into schedule_block caused it to require 2GB of
3092 RAM instead of 256MB. Don't do so for alloca calls emitted for
3093 VLA objects, as those can't cause unbounded growth (they're always
3094 wrapped inside stack_save/stack_restore regions). */
3095 if (gimple_alloca_call_p (stmt)
3096 && !gimple_call_alloca_for_var_p (stmt)
3097 && !lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn)))
3099 inline_forbidden_reason
3100 = G_("function %q+F can never be inlined because it uses "
3101 "alloca (override using the always_inline attribute)");
3102 *handled_ops_p = true;
3103 return fn;
3106 t = gimple_call_fndecl (stmt);
3107 if (t == NULL_TREE)
3108 break;
3110 /* We cannot inline functions that call setjmp. */
3111 if (setjmp_call_p (t))
3113 inline_forbidden_reason
3114 = G_("function %q+F can never be inlined because it uses setjmp");
3115 *handled_ops_p = true;
3116 return t;
3119 if (DECL_BUILT_IN_CLASS (t) == BUILT_IN_NORMAL)
3120 switch (DECL_FUNCTION_CODE (t))
3122 /* We cannot inline functions that take a variable number of
3123 arguments. */
3124 case BUILT_IN_VA_START:
3125 case BUILT_IN_NEXT_ARG:
3126 case BUILT_IN_VA_END:
3127 inline_forbidden_reason
3128 = G_("function %q+F can never be inlined because it "
3129 "uses variable argument lists");
3130 *handled_ops_p = true;
3131 return t;
3133 case BUILT_IN_LONGJMP:
3134 /* We can't inline functions that call __builtin_longjmp at
3135 all. The non-local goto machinery really requires the
3136 destination be in a different function. If we allow the
3137 function calling __builtin_longjmp to be inlined into the
3138 function calling __builtin_setjmp, Things will Go Awry. */
3139 inline_forbidden_reason
3140 = G_("function %q+F can never be inlined because "
3141 "it uses setjmp-longjmp exception handling");
3142 *handled_ops_p = true;
3143 return t;
3145 case BUILT_IN_NONLOCAL_GOTO:
3146 /* Similarly. */
3147 inline_forbidden_reason
3148 = G_("function %q+F can never be inlined because "
3149 "it uses non-local goto");
3150 *handled_ops_p = true;
3151 return t;
3153 case BUILT_IN_RETURN:
3154 case BUILT_IN_APPLY_ARGS:
3155 /* If a __builtin_apply_args caller would be inlined,
3156 it would be saving arguments of the function it has
3157 been inlined into. Similarly __builtin_return would
3158 return from the function the inline has been inlined into. */
3159 inline_forbidden_reason
3160 = G_("function %q+F can never be inlined because "
3161 "it uses __builtin_return or __builtin_apply_args");
3162 *handled_ops_p = true;
3163 return t;
3165 default:
3166 break;
3168 break;
3170 case GIMPLE_GOTO:
3171 t = gimple_goto_dest (stmt);
3173 /* We will not inline a function which uses computed goto. The
3174 addresses of its local labels, which may be tucked into
3175 global storage, are of course not constant across
3176 instantiations, which causes unexpected behavior. */
3177 if (TREE_CODE (t) != LABEL_DECL)
3179 inline_forbidden_reason
3180 = G_("function %q+F can never be inlined "
3181 "because it contains a computed goto");
3182 *handled_ops_p = true;
3183 return t;
3185 break;
3187 default:
3188 break;
3191 *handled_ops_p = false;
3192 return NULL_TREE;
3195 /* Return true if FNDECL is a function that cannot be inlined into
3196 another one. */
3198 static bool
3199 inline_forbidden_p (tree fndecl)
3201 struct function *fun = DECL_STRUCT_FUNCTION (fndecl);
3202 struct walk_stmt_info wi;
3203 struct pointer_set_t *visited_nodes;
3204 basic_block bb;
3205 bool forbidden_p = false;
3207 /* First check for shared reasons not to copy the code. */
3208 inline_forbidden_reason = copy_forbidden (fun, fndecl);
3209 if (inline_forbidden_reason != NULL)
3210 return true;
3212 /* Next, walk the statements of the function looking for
3213 constructs we can't handle, or that are non-optimal for inlining. */
3214 visited_nodes = pointer_set_create ();
3215 memset (&wi, 0, sizeof (wi));
3216 wi.info = (void *) fndecl;
3217 wi.pset = visited_nodes;
3219 FOR_EACH_BB_FN (bb, fun)
3221 gimple ret;
3222 gimple_seq seq = bb_seq (bb);
3223 ret = walk_gimple_seq (seq, inline_forbidden_p_stmt, NULL, &wi);
3224 forbidden_p = (ret != NULL);
3225 if (forbidden_p)
3226 break;
3229 pointer_set_destroy (visited_nodes);
3230 return forbidden_p;
3233 /* Returns nonzero if FN is a function that does not have any
3234 fundamental inline blocking properties. */
3236 bool
3237 tree_inlinable_function_p (tree fn)
3239 bool inlinable = true;
3240 bool do_warning;
3241 tree always_inline;
3243 /* If we've already decided this function shouldn't be inlined,
3244 there's no need to check again. */
3245 if (DECL_UNINLINABLE (fn))
3246 return false;
3248 /* We only warn for functions declared `inline' by the user. */
3249 do_warning = (warn_inline
3250 && DECL_DECLARED_INLINE_P (fn)
3251 && !DECL_NO_INLINE_WARNING_P (fn)
3252 && !DECL_IN_SYSTEM_HEADER (fn));
3254 always_inline = lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn));
3256 if (flag_no_inline
3257 && always_inline == NULL)
3259 if (do_warning)
3260 warning (OPT_Winline, "function %q+F can never be inlined because it "
3261 "is suppressed using -fno-inline", fn);
3262 inlinable = false;
3265 else if (!function_attribute_inlinable_p (fn))
3267 if (do_warning)
3268 warning (OPT_Winline, "function %q+F can never be inlined because it "
3269 "uses attributes conflicting with inlining", fn);
3270 inlinable = false;
3273 else if (inline_forbidden_p (fn))
3275 /* See if we should warn about uninlinable functions. Previously,
3276 some of these warnings would be issued while trying to expand
3277 the function inline, but that would cause multiple warnings
3278 about functions that would for example call alloca. But since
3279 this is a property of the function, just one warning is enough.
3280 As a bonus we can now give more details about the reason why a
3281 function is not inlinable. */
3282 if (always_inline)
3283 error (inline_forbidden_reason, fn);
3284 else if (do_warning)
3285 warning (OPT_Winline, inline_forbidden_reason, fn);
3287 inlinable = false;
3290 /* Squirrel away the result so that we don't have to check again. */
3291 DECL_UNINLINABLE (fn) = !inlinable;
3293 return inlinable;
3296 /* Estimate the cost of a memory move. Use machine-dependent
3297 word size and take a possible memcpy call into account. */
3299 static int
3300 estimate_move_cost (tree type)
3302 HOST_WIDE_INT size;
3304 gcc_assert (!VOID_TYPE_P (type));
3306 if (TREE_CODE (type) == VECTOR_TYPE)
3308 enum machine_mode inner = TYPE_MODE (TREE_TYPE (type));
3309 enum machine_mode simd
3310 = targetm.vectorize.preferred_simd_mode (inner);
3311 int simd_mode_size = GET_MODE_SIZE (simd);
3312 return ((GET_MODE_SIZE (TYPE_MODE (type)) + simd_mode_size - 1)
3313 / simd_mode_size);
3316 size = int_size_in_bytes (type);
3318 if (size < 0 || size > MOVE_MAX_PIECES * MOVE_RATIO (!optimize_size))
3319 /* Cost of a memcpy call, 3 arguments and the call. */
3320 return 4;
3321 else
3322 return ((size + MOVE_MAX_PIECES - 1) / MOVE_MAX_PIECES);
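/* Worked example with hypothetical target parameters: if
   MOVE_MAX_PIECES is 8 and MOVE_RATIO is 4, a 24-byte struct costs
   (24 + 8 - 1) / 8 = 3, while a 64-byte struct exceeds the
   8 * 4 = 32 byte threshold and is charged the flat memcpy cost
   of 4. */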
3325 /* Returns the cost of operation CODE, according to WEIGHTS. */
3327 static int
3328 estimate_operator_cost (enum tree_code code, eni_weights *weights,
3329 tree op1 ATTRIBUTE_UNUSED, tree op2)
3331 switch (code)
3333 /* These are "free" conversions, or their presumed cost
3334 is folded into other operations. */
3335 case RANGE_EXPR:
3336 CASE_CONVERT:
3337 case COMPLEX_EXPR:
3338 case PAREN_EXPR:
3339 case VIEW_CONVERT_EXPR:
3340 return 0;
3342 /* Assign cost of 1 to usual operations.
3343 ??? We may consider mapping RTL costs to this. */
3344 case COND_EXPR:
3345 case VEC_COND_EXPR:
3346 case VEC_PERM_EXPR:
3348 case PLUS_EXPR:
3349 case POINTER_PLUS_EXPR:
3350 case MINUS_EXPR:
3351 case MULT_EXPR:
3352 case FMA_EXPR:
3354 case ADDR_SPACE_CONVERT_EXPR:
3355 case FIXED_CONVERT_EXPR:
3356 case FIX_TRUNC_EXPR:
3358 case NEGATE_EXPR:
3359 case FLOAT_EXPR:
3360 case MIN_EXPR:
3361 case MAX_EXPR:
3362 case ABS_EXPR:
3364 case LSHIFT_EXPR:
3365 case RSHIFT_EXPR:
3366 case LROTATE_EXPR:
3367 case RROTATE_EXPR:
3368 case VEC_LSHIFT_EXPR:
3369 case VEC_RSHIFT_EXPR:
3371 case BIT_IOR_EXPR:
3372 case BIT_XOR_EXPR:
3373 case BIT_AND_EXPR:
3374 case BIT_NOT_EXPR:
3376 case TRUTH_ANDIF_EXPR:
3377 case TRUTH_ORIF_EXPR:
3378 case TRUTH_AND_EXPR:
3379 case TRUTH_OR_EXPR:
3380 case TRUTH_XOR_EXPR:
3381 case TRUTH_NOT_EXPR:
3383 case LT_EXPR:
3384 case LE_EXPR:
3385 case GT_EXPR:
3386 case GE_EXPR:
3387 case EQ_EXPR:
3388 case NE_EXPR:
3389 case ORDERED_EXPR:
3390 case UNORDERED_EXPR:
3392 case UNLT_EXPR:
3393 case UNLE_EXPR:
3394 case UNGT_EXPR:
3395 case UNGE_EXPR:
3396 case UNEQ_EXPR:
3397 case LTGT_EXPR:
3399 case CONJ_EXPR:
3401 case PREDECREMENT_EXPR:
3402 case PREINCREMENT_EXPR:
3403 case POSTDECREMENT_EXPR:
3404 case POSTINCREMENT_EXPR:
3406 case REALIGN_LOAD_EXPR:
3408 case REDUC_MAX_EXPR:
3409 case REDUC_MIN_EXPR:
3410 case REDUC_PLUS_EXPR:
3411 case WIDEN_SUM_EXPR:
3412 case WIDEN_MULT_EXPR:
3413 case DOT_PROD_EXPR:
3414 case WIDEN_MULT_PLUS_EXPR:
3415 case WIDEN_MULT_MINUS_EXPR:
3416 case WIDEN_LSHIFT_EXPR:
3418 case VEC_WIDEN_MULT_HI_EXPR:
3419 case VEC_WIDEN_MULT_LO_EXPR:
3420 case VEC_UNPACK_HI_EXPR:
3421 case VEC_UNPACK_LO_EXPR:
3422 case VEC_UNPACK_FLOAT_HI_EXPR:
3423 case VEC_UNPACK_FLOAT_LO_EXPR:
3424 case VEC_PACK_TRUNC_EXPR:
3425 case VEC_PACK_SAT_EXPR:
3426 case VEC_PACK_FIX_TRUNC_EXPR:
3427 case VEC_WIDEN_LSHIFT_HI_EXPR:
3428 case VEC_WIDEN_LSHIFT_LO_EXPR:
3430 return 1;
3432 /* A few special cases of expensive operations. This is useful
3433 to avoid inlining functions having too many of these. */
3434 case TRUNC_DIV_EXPR:
3435 case CEIL_DIV_EXPR:
3436 case FLOOR_DIV_EXPR:
3437 case ROUND_DIV_EXPR:
3438 case EXACT_DIV_EXPR:
3439 case TRUNC_MOD_EXPR:
3440 case CEIL_MOD_EXPR:
3441 case FLOOR_MOD_EXPR:
3442 case ROUND_MOD_EXPR:
3443 case RDIV_EXPR:
3444 if (TREE_CODE (op2) != INTEGER_CST)
3445 return weights->div_mod_cost;
3446 return 1;
3448 default:
3449 /* We expect a copy assignment with no operator. */
3450 gcc_assert (get_gimple_rhs_class (code) == GIMPLE_SINGLE_RHS);
3451 return 0;
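/* Illustration (hypothetical): "x / y" with a non-constant divisor is
   charged weights->div_mod_cost (10 in the time weights, 1 in the
   size weights initialized below), while "x / 16" is charged 1 like
   any other simple operation. */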
3456 /* Estimate number of instructions that will be created by expanding
3457 the statements in the statement sequence STMTS.
3458 WEIGHTS contains weights attributed to various constructs. */
3460 static
3461 int estimate_num_insns_seq (gimple_seq stmts, eni_weights *weights)
3463 int cost;
3464 gimple_stmt_iterator gsi;
3466 cost = 0;
3467 for (gsi = gsi_start (stmts); !gsi_end_p (gsi); gsi_next (&gsi))
3468 cost += estimate_num_insns (gsi_stmt (gsi), weights);
3470 return cost;
3474 /* Estimate number of instructions that will be created by expanding STMT.
3475 WEIGHTS contains weights attributed to various constructs. */
3477 int
3478 estimate_num_insns (gimple stmt, eni_weights *weights)
3480 unsigned cost, i;
3481 enum gimple_code code = gimple_code (stmt);
3482 tree lhs;
3483 tree rhs;
3485 switch (code)
3487 case GIMPLE_ASSIGN:
3488 /* Try to estimate the cost of assignments. We have two cases to
3489 deal with:
3490 1) Simple assignments to registers;
3491 2) Stores to things that must live in memory. This includes
3492 "normal" stores to scalars, but also assignments of large
3493 structures, or constructors of big arrays;
3495 Let us look at these two cases, assuming we have "a = b + C":
3496 <GIMPLE_ASSIGN <var_decl "a">
3497 <plus_expr <var_decl "b"> <constant C>>
3498 If "a" is a GIMPLE register, the assignment to it is free on almost
3499 any target, because "a" usually ends up in a real register. Hence
3500 the only cost of this expression comes from the PLUS_EXPR, and we
3501 can ignore the GIMPLE_ASSIGN.
3502 If "a" is not a GIMPLE register, the assignment to "a" will most
3503 likely be a real store, so the cost of the GIMPLE_ASSIGN is the cost
3504 of moving something into "a", which we compute using the function
3505 estimate_move_cost. */
3506 if (gimple_clobber_p (stmt))
3507 return 0; /* ={v} {CLOBBER} stmt expands to nothing. */
3509 lhs = gimple_assign_lhs (stmt);
3510 rhs = gimple_assign_rhs1 (stmt);
3512 if (is_gimple_reg (lhs))
3513 cost = 0;
3514 else
3515 cost = estimate_move_cost (TREE_TYPE (lhs));
3517 if (!is_gimple_reg (rhs) && !is_gimple_min_invariant (rhs))
3518 cost += estimate_move_cost (TREE_TYPE (rhs));
3520 cost += estimate_operator_cost (gimple_assign_rhs_code (stmt), weights,
3521 gimple_assign_rhs1 (stmt),
3522 get_gimple_rhs_class (gimple_assign_rhs_code (stmt))
3523 == GIMPLE_BINARY_RHS
3524 ? gimple_assign_rhs2 (stmt) : NULL);
3525 break;
3527 case GIMPLE_COND:
3528 cost = 1 + estimate_operator_cost (gimple_cond_code (stmt), weights,
3529 gimple_op (stmt, 0),
3530 gimple_op (stmt, 1));
3531 break;
3533 case GIMPLE_SWITCH:
3534 /* Take into account the cost of the switch + guess 2 conditional jumps for
3535 each case label.
3537 TODO: once the switch expansion logic is sufficiently separated, we can
3538 do a better job of estimating the cost of the switch. */
3539 if (weights->time_based)
3540 cost = floor_log2 (gimple_switch_num_labels (stmt)) * 2;
3541 else
3542 cost = gimple_switch_num_labels (stmt) * 2;
3543 break;
3545 case GIMPLE_CALL:
3547 tree decl = gimple_call_fndecl (stmt);
3548 struct cgraph_node *node = NULL;
3550 /* Do not special case builtins where we see the body.
3551 This just confuses the inliner. */
3552 if (!decl || !(node = cgraph_get_node (decl)) || node->analyzed)
3554 /* For builtins that are likely expanded to nothing or
3555 inlined, do not account operand costs. */
3556 else if (is_simple_builtin (decl))
3557 return 0;
3558 else if (is_inexpensive_builtin (decl))
3559 return weights->target_builtin_call_cost;
3560 else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
3562 /* We canonicalize x * x to pow (x, 2.0) with -ffast-math, so
3563 specialize the cheap expansion we do here.
3564 ??? This asks for a more general solution. */
3565 switch (DECL_FUNCTION_CODE (decl))
3567 case BUILT_IN_POW:
3568 case BUILT_IN_POWF:
3569 case BUILT_IN_POWL:
3570 if (TREE_CODE (gimple_call_arg (stmt, 1)) == REAL_CST
3571 && REAL_VALUES_EQUAL
3572 (TREE_REAL_CST (gimple_call_arg (stmt, 1)), dconst2))
3573 return estimate_operator_cost (MULT_EXPR, weights,
3574 gimple_call_arg (stmt, 0),
3575 gimple_call_arg (stmt, 0));
3576 break;
3578 default:
3579 break;
3583 cost = node ? weights->call_cost : weights->indirect_call_cost;
3584 if (gimple_call_lhs (stmt))
3585 cost += estimate_move_cost (TREE_TYPE (gimple_call_lhs (stmt)));
3586 for (i = 0; i < gimple_call_num_args (stmt); i++)
3588 tree arg = gimple_call_arg (stmt, i);
3589 cost += estimate_move_cost (TREE_TYPE (arg));
3591 break;
3594 case GIMPLE_RETURN:
3595 return weights->return_cost;
3597 case GIMPLE_GOTO:
3598 case GIMPLE_LABEL:
3599 case GIMPLE_NOP:
3600 case GIMPLE_PHI:
3601 case GIMPLE_PREDICT:
3602 case GIMPLE_DEBUG:
3603 return 0;
3605 case GIMPLE_ASM:
3606 return asm_str_count (gimple_asm_string (stmt));
3608 case GIMPLE_RESX:
3609 /* This is either going to be an external function call with one
3610 argument, or two register copy statements plus a goto. */
3611 return 2;
3613 case GIMPLE_EH_DISPATCH:
3614 /* ??? This is going to turn into a switch statement. Ideally
3615 we'd have a look at the eh region and estimate the number of
3616 edges involved. */
3617 return 10;
3619 case GIMPLE_BIND:
3620 return estimate_num_insns_seq (gimple_bind_body (stmt), weights);
3622 case GIMPLE_EH_FILTER:
3623 return estimate_num_insns_seq (gimple_eh_filter_failure (stmt), weights);
3625 case GIMPLE_CATCH:
3626 return estimate_num_insns_seq (gimple_catch_handler (stmt), weights);
3628 case GIMPLE_TRY:
3629 return (estimate_num_insns_seq (gimple_try_eval (stmt), weights)
3630 + estimate_num_insns_seq (gimple_try_cleanup (stmt), weights));
3632 /* OpenMP directives are generally very expensive. */
3634 case GIMPLE_OMP_RETURN:
3635 case GIMPLE_OMP_SECTIONS_SWITCH:
3636 case GIMPLE_OMP_ATOMIC_STORE:
3637 case GIMPLE_OMP_CONTINUE:
3638 /* ...except these, which are cheap. */
3639 return 0;
3641 case GIMPLE_OMP_ATOMIC_LOAD:
3642 return weights->omp_cost;
3644 case GIMPLE_OMP_FOR:
3645 return (weights->omp_cost
3646 + estimate_num_insns_seq (gimple_omp_body (stmt), weights)
3647 + estimate_num_insns_seq (gimple_omp_for_pre_body (stmt), weights));
3649 case GIMPLE_OMP_PARALLEL:
3650 case GIMPLE_OMP_TASK:
3651 case GIMPLE_OMP_CRITICAL:
3652 case GIMPLE_OMP_MASTER:
3653 case GIMPLE_OMP_ORDERED:
3654 case GIMPLE_OMP_SECTION:
3655 case GIMPLE_OMP_SECTIONS:
3656 case GIMPLE_OMP_SINGLE:
3657 return (weights->omp_cost
3658 + estimate_num_insns_seq (gimple_omp_body (stmt), weights));
3660 case GIMPLE_TRANSACTION:
3661 return (weights->tm_cost
3662 + estimate_num_insns_seq (gimple_transaction_body (stmt),
3663 weights));
3665 default:
3666 gcc_unreachable ();
3669 return cost;
3672 /* Estimate number of instructions that will be created by expanding
3673 function FNDECL. WEIGHTS contains weights attributed to various
3674 constructs. */
3676 int
3677 estimate_num_insns_fn (tree fndecl, eni_weights *weights)
3679 struct function *my_function = DECL_STRUCT_FUNCTION (fndecl);
3680 gimple_stmt_iterator bsi;
3681 basic_block bb;
3682 int n = 0;
3684 gcc_assert (my_function && my_function->cfg);
3685 FOR_EACH_BB_FN (bb, my_function)
3687 for (bsi = gsi_start_bb (bb); !gsi_end_p (bsi); gsi_next (&bsi))
3688 n += estimate_num_insns (gsi_stmt (bsi), weights);
3691 return n;
3695 /* Initializes weights used by estimate_num_insns. */
3697 void
3698 init_inline_once (void)
3700 eni_size_weights.call_cost = 1;
3701 eni_size_weights.indirect_call_cost = 3;
3702 eni_size_weights.target_builtin_call_cost = 1;
3703 eni_size_weights.div_mod_cost = 1;
3704 eni_size_weights.omp_cost = 40;
3705 eni_size_weights.tm_cost = 10;
3706 eni_size_weights.time_based = false;
3707 eni_size_weights.return_cost = 1;
3709 /* Estimating time for call is difficult, since we have no idea what the
3710 called function does. In the current uses of eni_time_weights,
3711 underestimating the cost does less harm than overestimating it, so
3712 we choose a rather small value here. */
3713 eni_time_weights.call_cost = 10;
3714 eni_time_weights.indirect_call_cost = 15;
3715 eni_time_weights.target_builtin_call_cost = 1;
3716 eni_time_weights.div_mod_cost = 10;
3717 eni_time_weights.omp_cost = 40;
3718 eni_time_weights.tm_cost = 40;
3719 eni_time_weights.time_based = true;
3720 eni_time_weights.return_cost = 2;
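/* For instance, a known call 'f ()' with no LHS and no arguments is
   estimated at 1 unit by eni_size_weights but 10 by eni_time_weights:
   a call is cheap to emit yet comparatively expensive to execute. */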
3723 /* Estimate the number of instructions in a gimple_seq. */
3725 int
3726 count_insns_seq (gimple_seq seq, eni_weights *weights)
3728 gimple_stmt_iterator gsi;
3729 int n = 0;
3730 for (gsi = gsi_start (seq); !gsi_end_p (gsi); gsi_next (&gsi))
3731 n += estimate_num_insns (gsi_stmt (gsi), weights);
3733 return n;
3737 /* Install new lexical TREE_BLOCK underneath 'current_block'. */
3739 static void
3740 prepend_lexical_block (tree current_block, tree new_block)
3742 BLOCK_CHAIN (new_block) = BLOCK_SUBBLOCKS (current_block);
3743 BLOCK_SUBBLOCKS (current_block) = new_block;
3744 BLOCK_SUPERCONTEXT (new_block) = current_block;
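/* E.g. if CURRENT_BLOCK's subblocks were B1 -> B2 before, they are
   NEW_BLOCK -> B1 -> B2 afterwards, with NEW_BLOCK's supercontext
   pointing back at CURRENT_BLOCK. */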
3747 /* Add local variables from CALLEE to CALLER. */
3749 static inline void
3750 add_local_variables (struct function *callee, struct function *caller,
3751 copy_body_data *id, bool check_var_ann)
3753 tree var;
3754 unsigned ix;
3756 FOR_EACH_LOCAL_DECL (callee, ix, var)
3757 if (TREE_STATIC (var) && !TREE_ASM_WRITTEN (var))
3759 if (!check_var_ann
3760 || (var_ann (var) && add_referenced_var (var)))
3761 add_local_decl (caller, var);
3763 else if (!can_be_nonlocal (var, id))
3765 tree new_var = remap_decl (var, id);
3767 /* Remap debug-expressions. */
3768 if (TREE_CODE (new_var) == VAR_DECL
3769 && DECL_DEBUG_EXPR_IS_FROM (new_var)
3770 && new_var != var)
3772 tree tem = DECL_DEBUG_EXPR (var);
3773 bool old_regimplify = id->regimplify;
3774 id->remapping_type_depth++;
3775 walk_tree (&tem, copy_tree_body_r, id, NULL);
3776 id->remapping_type_depth--;
3777 id->regimplify = old_regimplify;
3778 SET_DECL_DEBUG_EXPR (new_var, tem);
3780 add_local_decl (caller, new_var);
3784 /* If STMT is a GIMPLE_CALL, replace it with its inline expansion. */
3786 static bool
3787 expand_call_inline (basic_block bb, gimple stmt, copy_body_data *id)
3789 tree use_retvar;
3790 tree fn;
3791 struct pointer_map_t *st, *dst;
3792 tree return_slot;
3793 tree modify_dest;
3794 location_t saved_location;
3795 struct cgraph_edge *cg_edge;
3796 cgraph_inline_failed_t reason;
3797 basic_block return_block;
3798 edge e;
3799 gimple_stmt_iterator gsi, stmt_gsi;
3800 bool successfully_inlined = FALSE;
3801 bool purge_dead_abnormal_edges;
3803 /* Set input_location here so we get the right instantiation context
3804 if we call instantiate_decl from inlinable_function_p. */
3805 saved_location = input_location;
3806 if (gimple_has_location (stmt))
3807 input_location = gimple_location (stmt);
3809 /* From here on, we're only interested in CALL_EXPRs. */
3810 if (gimple_code (stmt) != GIMPLE_CALL)
3811 goto egress;
3813 cg_edge = cgraph_edge (id->dst_node, stmt);
3814 gcc_checking_assert (cg_edge);
3815 /* First, see if we can figure out what function is being called.
3816 If we cannot, then there is no hope of inlining the function. */
3817 if (cg_edge->indirect_unknown_callee)
3818 goto egress;
3819 fn = cg_edge->callee->decl;
3820 gcc_checking_assert (fn);
3822 /* If FN is a declaration of a function in a nested scope that was
3823 globally declared inline, we don't set its DECL_INITIAL.
3824 However, we can't blindly follow DECL_ABSTRACT_ORIGIN because the
3825 C++ front-end uses it for cdtors to refer to their internal
3826 declarations that are not real functions. Fortunately those
3827 don't have trees to be saved, so we can tell by checking their
3828 gimple_body. */
3829 if (!DECL_INITIAL (fn)
3830 && DECL_ABSTRACT_ORIGIN (fn)
3831 && gimple_has_body_p (DECL_ABSTRACT_ORIGIN (fn)))
3832 fn = DECL_ABSTRACT_ORIGIN (fn);
3834 /* Don't try to inline functions that are not well-suited to inlining. */
3835 if (!cgraph_inline_p (cg_edge, &reason))
3837 /* If this call was originally indirect, we do not want to emit any
3838 inlining related warnings or sorry messages because there are no
3839 guarantees regarding those. */
3840 if (cg_edge->indirect_inlining_edge)
3841 goto egress;
3843 if (lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn))
3844 /* For extern inline functions that get redefined we have always
3845 silently ignored the always_inline flag. Better behaviour would
3846 be to keep both bodies and use the extern inline body for
3847 inlining, but we can't do that because front ends overwrite
3848 the body. */
3849 && !cg_edge->callee->local.redefined_extern_inline
3850 /* Avoid warnings during early inline pass. */
3851 && cgraph_global_info_ready
3852 /* PR 20090218-1_0.c. Body can be provided by another module. */
3853 && (reason != CIF_BODY_NOT_AVAILABLE || !flag_generate_lto))
3855 error ("inlining failed in call to always_inline %q+F: %s", fn,
3856 cgraph_inline_failed_string (reason));
3857 error ("called from here");
3859 else if (warn_inline
3860 && DECL_DECLARED_INLINE_P (fn)
3861 && !DECL_NO_INLINE_WARNING_P (fn)
3862 && !DECL_IN_SYSTEM_HEADER (fn)
3863 && reason != CIF_UNSPECIFIED
3864 && !lookup_attribute ("noinline", DECL_ATTRIBUTES (fn))
3865 /* Do not warn about not inlined recursive calls. */
3866 && !cgraph_edge_recursive_p (cg_edge)
3867 /* Avoid warnings during early inline pass. */
3868 && cgraph_global_info_ready
3869 && reason != CIF_UNLIKELY_CALL)
3871 warning (OPT_Winline, "inlining failed in call to %q+F: %s",
3872 fn, _(cgraph_inline_failed_string (reason)));
3873 warning (OPT_Winline, "called from here");
3875 goto egress;
3877 fn = cg_edge->callee->decl;
3879 #ifdef ENABLE_CHECKING
3880 if (cg_edge->callee->decl != id->dst_node->decl)
3881 verify_cgraph_node (cg_edge->callee);
3882 #endif
3884 /* We will be inlining this callee. */
3885 id->eh_lp_nr = lookup_stmt_eh_lp (stmt);
3888 /* Update the caller's EH personality. */
3888 if (DECL_FUNCTION_PERSONALITY (cg_edge->callee->decl))
3889 DECL_FUNCTION_PERSONALITY (cg_edge->caller->decl)
3890 = DECL_FUNCTION_PERSONALITY (cg_edge->callee->decl);
3892 /* Split the block holding the GIMPLE_CALL. */
3893 e = split_block (bb, stmt);
3894 bb = e->src;
3895 return_block = e->dest;
3896 remove_edge (e);
3898 /* split_block splits after the statement; work around this by
3899 moving the call into the second block manually. Not pretty,
3900 but seems easier than doing the CFG manipulation by hand
3901 when the GIMPLE_CALL is in the last statement of BB. */
3902 stmt_gsi = gsi_last_bb (bb);
3903 gsi_remove (&stmt_gsi, false);
3905 /* If the GIMPLE_CALL was in the last statement of BB, it may have
3906 been the source of abnormal edges. In this case, schedule
3907 the removal of dead abnormal edges. */
3908 gsi = gsi_start_bb (return_block);
3909 if (gsi_end_p (gsi))
3911 gsi_insert_after (&gsi, stmt, GSI_NEW_STMT);
3912 purge_dead_abnormal_edges = true;
3914 else
3916 gsi_insert_before (&gsi, stmt, GSI_NEW_STMT);
3917 purge_dead_abnormal_edges = false;
3920 stmt_gsi = gsi_start_bb (return_block);
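/* At this point BB ends just before the call, RETURN_BLOCK begins
   with it, and no edge connects them; copy_body below splices the
   duplicated callee body in between, with the callee's return
   statements rewritten to reach RETURN_BLOCK. */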
3922 /* Build a block containing code to initialize the arguments, the
3923 actual inline expansion of the body, and a label for the return
3924 statements within the function to jump to. The type of the
3925 statement expression is the return type of the function call. */
3926 id->block = make_node (BLOCK);
3927 BLOCK_ABSTRACT_ORIGIN (id->block) = fn;
3928 BLOCK_SOURCE_LOCATION (id->block) = input_location;
3929 prepend_lexical_block (gimple_block (stmt), id->block);
3931 /* Local declarations will be replaced by their equivalents in this
3932 map. */
3933 st = id->decl_map;
3934 id->decl_map = pointer_map_create ();
3935 dst = id->debug_map;
3936 id->debug_map = NULL;
3938 /* Record the function we are about to inline. */
3939 id->src_fn = fn;
3940 id->src_node = cg_edge->callee;
3941 id->src_cfun = DECL_STRUCT_FUNCTION (fn);
3942 id->gimple_call = stmt;
3944 gcc_assert (!id->src_cfun->after_inlining);
3946 id->entry_bb = bb;
3947 if (lookup_attribute ("cold", DECL_ATTRIBUTES (fn)))
3949 gimple_stmt_iterator si = gsi_last_bb (bb);
3950 gsi_insert_after (&si, gimple_build_predict (PRED_COLD_FUNCTION,
3951 NOT_TAKEN),
3952 GSI_NEW_STMT);
3954 initialize_inlined_parameters (id, stmt, fn, bb);
3956 if (DECL_INITIAL (fn))
3957 prepend_lexical_block (id->block, remap_blocks (DECL_INITIAL (fn), id));
3959 /* Return statements in the function body will be replaced by jumps
3960 to the RET_LABEL. */
3961 gcc_assert (DECL_INITIAL (fn));
3962 gcc_assert (TREE_CODE (DECL_INITIAL (fn)) == BLOCK);
3964 /* Find the LHS to which the result of this call is assigned. */
3965 return_slot = NULL;
3966 if (gimple_call_lhs (stmt))
3968 modify_dest = gimple_call_lhs (stmt);
3970 /* The function which we are inlining might not return a value,
3971 in which case we should issue a warning that the function
3972 does not return a value. In that case the optimizers will
3973 see that the variable to which the value is assigned was not
3974 initialized. We do not want to issue a warning about that
3975 uninitialized variable. */
3976 if (DECL_P (modify_dest))
3977 TREE_NO_WARNING (modify_dest) = 1;
3979 if (gimple_call_return_slot_opt_p (stmt))
3981 return_slot = modify_dest;
3982 modify_dest = NULL;
3985 else
3986 modify_dest = NULL;
3988 /* If we are inlining a call to the C++ operator new, we don't want
3989 to use type based alias analysis on the return value. Otherwise
3990 we may get confused if the compiler sees that the inlined new
3991 function returns a pointer which was just deleted. See bug
3992 33407. */
3993 if (DECL_IS_OPERATOR_NEW (fn))
3995 return_slot = NULL;
3996 modify_dest = NULL;
3999 /* Declare the return variable for the function. */
4000 use_retvar = declare_return_variable (id, return_slot, modify_dest, bb);
4002 /* Add local vars in this inlined callee to caller. */
4003 add_local_variables (id->src_cfun, cfun, id, true);
4005 if (dump_file && (dump_flags & TDF_DETAILS))
4007 fprintf (dump_file, "Inlining ");
4008 print_generic_expr (dump_file, id->src_fn, 0);
4009 fprintf (dump_file, " to ");
4010 print_generic_expr (dump_file, id->dst_fn, 0);
4011 fprintf (dump_file, " with frequency %i\n", cg_edge->frequency);
4014 /* This is it. Duplicate the callee body. Assume callee is
4015 pre-gimplified. Note that we must not alter the caller
4016 function in any way before this point, as this CALL_EXPR may be
4017 a self-referential call; if we're calling ourselves, we need to
4018 duplicate our body before altering anything. */
4019 copy_body (id, bb->count,
4020 cg_edge->frequency * REG_BR_PROB_BASE / CGRAPH_FREQ_BASE,
4021 bb, return_block, NULL, NULL);
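/* The second argument above rescales the edge frequency from the
   CGRAPH_FREQ_BASE scale into the REG_BR_PROB_BASE scale copy_body
   works in; e.g. a call site running at half the caller's entry
   frequency (cg_edge->frequency == CGRAPH_FREQ_BASE / 2) passes
   REG_BR_PROB_BASE / 2. */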
4023 /* Reset the escaped solution. */
4024 if (cfun->gimple_df)
4025 pt_solution_reset (&cfun->gimple_df->escaped);
4027 /* Clean up. */
4028 if (id->debug_map)
4030 pointer_map_destroy (id->debug_map);
4031 id->debug_map = dst;
4033 pointer_map_destroy (id->decl_map);
4034 id->decl_map = st;
4036 /* Unlink the call's virtual operands before replacing it. */
4037 unlink_stmt_vdef (stmt);
4039 /* If the inlined function returns a result that we care about,
4040 substitute the GIMPLE_CALL with an assignment of the return
4041 variable to the LHS of the call. That is, if STMT was
4042 'a = foo (...)', substitute the call with 'a = USE_RETVAR'. */
4043 if (use_retvar && gimple_call_lhs (stmt))
4045 gimple old_stmt = stmt;
4046 stmt = gimple_build_assign (gimple_call_lhs (stmt), use_retvar);
4047 gsi_replace (&stmt_gsi, stmt, false);
4048 if (gimple_in_ssa_p (cfun))
4049 mark_symbols_for_renaming (stmt);
4050 maybe_clean_or_replace_eh_stmt (old_stmt, stmt);
4052 else
4054 /* Handle the case of inlining a function with no return
4055 statement, which causes the return value to become undefined. */
4056 if (gimple_call_lhs (stmt)
4057 && TREE_CODE (gimple_call_lhs (stmt)) == SSA_NAME)
4059 tree name = gimple_call_lhs (stmt);
4060 tree var = SSA_NAME_VAR (name);
4061 tree def = gimple_default_def (cfun, var);
4063 if (def)
4065 /* If the variable is used undefined, make this name
4066 undefined via a move. */
4067 stmt = gimple_build_assign (gimple_call_lhs (stmt), def);
4068 gsi_replace (&stmt_gsi, stmt, true);
4070 else
4072 /* Otherwise make this variable undefined. */
4073 gsi_remove (&stmt_gsi, true);
4074 set_default_def (var, name);
4075 SSA_NAME_DEF_STMT (name) = gimple_build_nop ();
4078 else
4079 gsi_remove (&stmt_gsi, true);
4082 if (purge_dead_abnormal_edges)
4084 gimple_purge_dead_eh_edges (return_block);
4085 gimple_purge_dead_abnormal_call_edges (return_block);
4088 /* If the value of the new expression is ignored, that's OK. We
4089 don't warn about this for CALL_EXPRs, so we shouldn't warn about
4090 the equivalent inlined version either. */
4091 if (is_gimple_assign (stmt))
4093 gcc_assert (gimple_assign_single_p (stmt)
4094 || CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt)));
4095 TREE_USED (gimple_assign_rhs1 (stmt)) = 1;
4098 /* Output the inlining info for this abstract function, since it has been
4099 inlined. If we don't do this now, we can lose the information about the
4100 variables in the function when the blocks get blown away as soon as we
4101 remove the cgraph node. */
4102 (*debug_hooks->outlining_inline_function) (cg_edge->callee->decl);
4104 /* Update callgraph if needed. */
4105 cgraph_remove_node (cg_edge->callee);
4107 id->block = NULL_TREE;
4108 successfully_inlined = TRUE;
4110 egress:
4111 input_location = saved_location;
4112 return successfully_inlined;
4115 /* Expand any inlinable call statements in basic block BB. Return
4116 true if one was expanded; the statements following it end up in a
4117 new block that the caller's CFG walk will still visit. */
4119 static bool
4120 gimple_expand_calls_inline (basic_block bb, copy_body_data *id)
4122 gimple_stmt_iterator gsi;
4124 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
4126 gimple stmt = gsi_stmt (gsi);
4128 if (is_gimple_call (stmt)
4129 && expand_call_inline (bb, stmt, id))
4130 return true;
4133 return false;
4137 /* Walk all basic blocks created after FIRST and try to fold every statement
4138 in the STATEMENTS pointer set. */
4140 static void
4141 fold_marked_statements (int first, struct pointer_set_t *statements)
4143 for (; first < n_basic_blocks; first++)
4144 if (BASIC_BLOCK (first))
4146 gimple_stmt_iterator gsi;
4148 for (gsi = gsi_start_bb (BASIC_BLOCK (first));
4149 !gsi_end_p (gsi);
4150 gsi_next (&gsi))
4151 if (pointer_set_contains (statements, gsi_stmt (gsi)))
4153 gimple old_stmt = gsi_stmt (gsi);
4154 tree old_decl = is_gimple_call (old_stmt) ? gimple_call_fndecl (old_stmt) : 0;
4156 if (old_decl && DECL_BUILT_IN (old_decl))
4158 /* Folding builtins can create multiple instructions,
4159 so we need to look at all of them. */
4160 gimple_stmt_iterator i2 = gsi;
4161 gsi_prev (&i2);
4162 if (fold_stmt (&gsi))
4164 gimple new_stmt;
4165 /* If a builtin at the end of a bb folded into nothing,
4166 the following loop won't work. */
4167 if (gsi_end_p (gsi))
4169 cgraph_update_edges_for_call_stmt (old_stmt,
4170 old_decl, NULL);
4171 break;
4173 if (gsi_end_p (i2))
4174 i2 = gsi_start_bb (BASIC_BLOCK (first));
4175 else
4176 gsi_next (&i2);
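/* I2 now points at the first statement the folding produced; walk
   forward from it up to GSI's statement, updating each new statement
   and its call-graph edges along the way. */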
4177 while (1)
4179 new_stmt = gsi_stmt (i2);
4180 update_stmt (new_stmt);
4181 cgraph_update_edges_for_call_stmt (old_stmt, old_decl,
4182 new_stmt);
4184 if (new_stmt == gsi_stmt (gsi))
4186 /* It is okay to check only for the very last
4187 of these statements. If it is a throwing
4188 statement, nothing will change. If it isn't,
4189 this can remove EH edges. The only way that
4190 could be wrong is if an intermediate statement
4191 throws but the last one doesn't; that would
4192 mean we'd have to split the block, which we
4193 can't do here, and we'd lose anyway. And as
4194 builtins probably never throw, this all
4195 is moot anyway. */
4196 if (maybe_clean_or_replace_eh_stmt (old_stmt,
4197 new_stmt))
4198 gimple_purge_dead_eh_edges (BASIC_BLOCK (first));
4199 break;
4201 gsi_next (&i2);
4205 else if (fold_stmt (&gsi))
4207 /* Re-read the statement from GSI as fold_stmt() may
4208 have changed it. */
4209 gimple new_stmt = gsi_stmt (gsi);
4210 update_stmt (new_stmt);
4212 if (is_gimple_call (old_stmt)
4213 || is_gimple_call (new_stmt))
4214 cgraph_update_edges_for_call_stmt (old_stmt, old_decl,
4215 new_stmt);
4217 if (maybe_clean_or_replace_eh_stmt (old_stmt, new_stmt))
4218 gimple_purge_dead_eh_edges (BASIC_BLOCK (first));
4224 /* Return true if BB has at least one abnormal outgoing edge. */
4226 static inline bool
4227 has_abnormal_outgoing_edge_p (basic_block bb)
4229 edge e;
4230 edge_iterator ei;
4232 FOR_EACH_EDGE (e, ei, bb->succs)
4233 if (e->flags & EDGE_ABNORMAL)
4234 return true;
4236 return false;
4239 /* Expand calls to inline functions in the body of FN. */
4241 unsigned int
4242 optimize_inline_calls (tree fn)
4244 copy_body_data id;
4245 basic_block bb;
4246 int last = n_basic_blocks;
4247 struct gimplify_ctx gctx;
4248 bool inlined_p = false;
4250 /* Clear out ID. */
4251 memset (&id, 0, sizeof (id));
4253 id.src_node = id.dst_node = cgraph_get_node (fn);
4254 gcc_assert (id.dst_node->analyzed);
4255 id.dst_fn = fn;
4256 /* Or any functions that aren't finished yet. */
4257 if (current_function_decl)
4258 id.dst_fn = current_function_decl;
4260 id.copy_decl = copy_decl_maybe_to_var;
4261 id.transform_call_graph_edges = CB_CGE_DUPLICATE;
4262 id.transform_new_cfg = false;
4263 id.transform_return_to_modify = true;
4264 id.transform_lang_insert_block = NULL;
4265 id.statements_to_fold = pointer_set_create ();
4267 push_gimplify_context (&gctx);
4269 /* We make no attempts to keep dominance info up-to-date. */
4270 free_dominance_info (CDI_DOMINATORS);
4271 free_dominance_info (CDI_POST_DOMINATORS);
4273 /* Register specific gimple functions. */
4274 gimple_register_cfg_hooks ();
4276 /* Reach the trees by walking over the CFG, and note the
4277 enclosing basic-blocks in the call edges. */
4278 /* We walk the blocks going forward, because inlined function bodies
4279 will split id->current_basic_block, and the new blocks will
4280 follow it; we'll trudge through them, processing their CALL_EXPRs
4281 along the way. */
4282 FOR_EACH_BB (bb)
4283 inlined_p |= gimple_expand_calls_inline (bb, &id);
4285 pop_gimplify_context (NULL);
4287 #ifdef ENABLE_CHECKING
4289 struct cgraph_edge *e;
4291 verify_cgraph_node (id.dst_node);
4293 /* Double check that we inlined everything we are supposed to inline. */
4294 for (e = id.dst_node->callees; e; e = e->next_callee)
4295 gcc_assert (e->inline_failed);
4297 #endif
4299 /* Fold queued statements. */
4300 fold_marked_statements (last, id.statements_to_fold);
4301 pointer_set_destroy (id.statements_to_fold);
4303 gcc_assert (!id.debug_stmts);
4305 /* If we didn't inline into the function there is nothing to do. */
4306 if (!inlined_p)
4307 return 0;
4309 /* Renumber the lexical scoping (non-code) blocks consecutively. */
4310 number_blocks (fn);
4312 delete_unreachable_blocks_update_callgraph (&id);
4313 #ifdef ENABLE_CHECKING
4314 verify_cgraph_node (id.dst_node);
4315 #endif
4317 /* It would be nice to check SSA/CFG/statement consistency here, but it is
4318 not possible yet - the IPA passes might make various functions not
4319 throw, and they don't care to proactively update local EH info. This is
4320 done later in the fixup_cfg pass, which also executes the verification. */
4321 return (TODO_update_ssa
4322 | TODO_cleanup_cfg
4323 | (gimple_in_ssa_p (cfun) ? TODO_remove_unused_locals : 0)
4324 | (gimple_in_ssa_p (cfun) ? TODO_update_address_taken : 0)
4325 | (profile_status != PROFILE_ABSENT ? TODO_rebuild_frequencies : 0));
4328 /* Passed to walk_tree. Copies the node pointed to, if appropriate. */
4330 tree
4331 copy_tree_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
4333 enum tree_code code = TREE_CODE (*tp);
4334 enum tree_code_class cl = TREE_CODE_CLASS (code);
4336 /* We make copies of most nodes. */
4337 if (IS_EXPR_CODE_CLASS (cl)
4338 || code == TREE_LIST
4339 || code == TREE_VEC
4340 || code == TYPE_DECL
4341 || code == OMP_CLAUSE)
4343 /* Because the chain gets clobbered when we make a copy, we save it
4344 here. */
4345 tree chain = NULL_TREE, new_tree;
4347 if (CODE_CONTAINS_STRUCT (code, TS_COMMON))
4348 chain = TREE_CHAIN (*tp);
4350 /* Copy the node. */
4351 new_tree = copy_node (*tp);
4353 /* Propagate mudflap marked-ness. */
4354 if (flag_mudflap && mf_marked_p (*tp))
4355 mf_mark (new_tree);
4357 *tp = new_tree;
4359 /* Now, restore the chain, if appropriate. That will cause
4360 walk_tree to walk into the chain as well. */
4361 if (code == PARM_DECL
4362 || code == TREE_LIST
4363 || code == OMP_CLAUSE)
4364 TREE_CHAIN (*tp) = chain;
4366 /* For now, we don't update BLOCKs when we make copies. So, we
4367 have to nullify all BIND_EXPRs. */
4368 if (TREE_CODE (*tp) == BIND_EXPR)
4369 BIND_EXPR_BLOCK (*tp) = NULL_TREE;
4371 else if (code == CONSTRUCTOR)
4373 /* CONSTRUCTOR nodes need special handling because
4374 we need to duplicate the vector of elements. */
4375 tree new_tree;
4377 new_tree = copy_node (*tp);
4379 /* Propagate mudflap marked-ness. */
4380 if (flag_mudflap && mf_marked_p (*tp))
4381 mf_mark (new_tree);
4383 CONSTRUCTOR_ELTS (new_tree) = VEC_copy (constructor_elt, gc,
4384 CONSTRUCTOR_ELTS (*tp));
4385 *tp = new_tree;
4387 else if (code == STATEMENT_LIST)
4388 /* We used to just abort on STATEMENT_LIST, but we can run into them
4389 with statement-expressions (c++/40975). */
4390 copy_statement_list (tp);
4391 else if (TREE_CODE_CLASS (code) == tcc_type)
4392 *walk_subtrees = 0;
4393 else if (TREE_CODE_CLASS (code) == tcc_declaration)
4394 *walk_subtrees = 0;
4395 else if (TREE_CODE_CLASS (code) == tcc_constant)
4396 *walk_subtrees = 0;
4397 return NULL_TREE;
4400 /* The SAVE_EXPR pointed to by TP is being copied. If ST contains
4401 information indicating to what new SAVE_EXPR this one should be mapped,
4402 use that one. Otherwise, create a new node and enter it in ST. FN is
4403 the function into which the copy will be placed. */
4405 static void
4406 remap_save_expr (tree *tp, void *st_, int *walk_subtrees)
4408 struct pointer_map_t *st = (struct pointer_map_t *) st_;
4409 tree *n;
4410 tree t;
4412 /* See if we already encountered this SAVE_EXPR. */
4413 n = (tree *) pointer_map_contains (st, *tp);
4415 /* If we didn't already remap this SAVE_EXPR, do so now. */
4416 if (!n)
4418 t = copy_node (*tp);
4420 /* Remember this SAVE_EXPR. */
4421 *pointer_map_insert (st, *tp) = t;
4422 /* Make sure we don't remap an already-remapped SAVE_EXPR. */
4423 *pointer_map_insert (st, t) = t;
4425 else
4427 /* We've already walked into this SAVE_EXPR; don't do it again. */
4428 *walk_subtrees = 0;
4429 t = *n;
4432 /* Replace this SAVE_EXPR with the copy. */
4433 *tp = t;
4436 /* Called via walk_tree. If *TP points to a LABEL_EXPR for a local label,
4437 copies the declaration and enters it in the decl map in DATA (which is
4438 really a `copy_body_data *'). */
4440 static tree
4441 mark_local_for_remap_r (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED,
4442 void *data)
4444 copy_body_data *id = (copy_body_data *) data;
4446 /* Don't walk into types. */
4447 if (TYPE_P (*tp))
4448 *walk_subtrees = 0;
4450 else if (TREE_CODE (*tp) == LABEL_EXPR)
4452 tree decl = TREE_OPERAND (*tp, 0);
4454 /* Copy the decl and remember the copy. */
4455 insert_decl_map (id, decl, id->copy_decl (decl, id));
4458 return NULL_TREE;
4461 /* Perform any modifications to EXPR required when it is unsaved. Does
4462 not recurse into EXPR's subtrees. */
4464 static void
4465 unsave_expr_1 (tree expr)
4467 switch (TREE_CODE (expr))
4469 case TARGET_EXPR:
4470 /* Don't mess with a TARGET_EXPR that hasn't been expanded.
4471 It's OK for this to happen if it was part of a subtree that
4472 isn't immediately expanded, such as operand 2 of another
4473 TARGET_EXPR. */
4474 if (TREE_OPERAND (expr, 1))
4475 break;
4477 TREE_OPERAND (expr, 1) = TREE_OPERAND (expr, 3);
4478 TREE_OPERAND (expr, 3) = NULL_TREE;
4479 break;
4481 default:
4482 break;
4486 /* Called via walk_tree when an expression is unsaved. Using the
4487 decl map pointed to by ST (really a `struct pointer_map_t *'),
4488 remaps all local declarations to appropriate replacements. */
4490 static tree
4491 unsave_r (tree *tp, int *walk_subtrees, void *data)
4493 copy_body_data *id = (copy_body_data *) data;
4494 struct pointer_map_t *st = id->decl_map;
4495 tree *n;
4497 /* Only a local declaration (variable or label). */
4498 if ((TREE_CODE (*tp) == VAR_DECL && !TREE_STATIC (*tp))
4499 || TREE_CODE (*tp) == LABEL_DECL)
4501 /* Lookup the declaration. */
4502 n = (tree *) pointer_map_contains (st, *tp);
4504 /* If it's there, remap it. */
4505 if (n)
4506 *tp = *n;
4509 else if (TREE_CODE (*tp) == STATEMENT_LIST)
4510 gcc_unreachable ();
4511 else if (TREE_CODE (*tp) == BIND_EXPR)
4512 copy_bind_expr (tp, walk_subtrees, id);
4513 else if (TREE_CODE (*tp) == SAVE_EXPR
4514 || TREE_CODE (*tp) == TARGET_EXPR)
4515 remap_save_expr (tp, st, walk_subtrees);
4516 else
4518 copy_tree_r (tp, walk_subtrees, NULL);
4520 /* Do whatever unsaving is required. */
4521 unsave_expr_1 (*tp);
4524 /* Keep iterating. */
4525 return NULL_TREE;
4528 /* Copies everything in EXPR and replaces variables, labels
4529 and SAVE_EXPRs local to EXPR. */
4531 tree
4532 unsave_expr_now (tree expr)
4534 copy_body_data id;
4536 /* There's nothing to do for NULL_TREE. */
4537 if (expr == 0)
4538 return expr;
4540 /* Set up ID. */
4541 memset (&id, 0, sizeof (id));
4542 id.src_fn = current_function_decl;
4543 id.dst_fn = current_function_decl;
4544 id.decl_map = pointer_map_create ();
4545 id.debug_map = NULL;
4547 id.copy_decl = copy_decl_no_change;
4548 id.transform_call_graph_edges = CB_CGE_DUPLICATE;
4549 id.transform_new_cfg = false;
4550 id.transform_return_to_modify = false;
4551 id.transform_lang_insert_block = NULL;
4553 /* Walk the tree once to find local labels. */
4554 walk_tree_without_duplicates (&expr, mark_local_for_remap_r, &id);
4556 /* Walk the tree again, copying, remapping, and unsaving. */
4557 walk_tree (&expr, unsave_r, &id, NULL);
4559 /* Clean up. */
4560 pointer_map_destroy (id.decl_map);
4561 if (id.debug_map)
4562 pointer_map_destroy (id.debug_map);
4564 return expr;
4567 /* Called via walk_gimple_seq. If *GSIP points to a GIMPLE_LABEL for a local
4568 label, copies the declaration and enters it in the decl map in DATA
4569 (which is really a 'copy_body_data *'). */
4571 static tree
4572 mark_local_labels_stmt (gimple_stmt_iterator *gsip,
4573 bool *handled_ops_p ATTRIBUTE_UNUSED,
4574 struct walk_stmt_info *wi)
4576 copy_body_data *id = (copy_body_data *) wi->info;
4577 gimple stmt = gsi_stmt (*gsip);
4579 if (gimple_code (stmt) == GIMPLE_LABEL)
4581 tree decl = gimple_label_label (stmt);
4583 /* Copy the decl and remember the copy. */
4584 insert_decl_map (id, decl, id->copy_decl (decl, id));
4587 return NULL_TREE;
4591 /* Called via walk_gimple_seq by copy_gimple_seq_and_replace_local.
4592 Using the decl map pointed to by ST (really a `struct pointer_map_t *'),
4593 remaps all local declarations to appropriate replacements in gimple
4594 operands. */
4596 static tree
4597 replace_locals_op (tree *tp, int *walk_subtrees, void *data)
4599 struct walk_stmt_info *wi = (struct walk_stmt_info*) data;
4600 copy_body_data *id = (copy_body_data *) wi->info;
4601 struct pointer_map_t *st = id->decl_map;
4602 tree *n;
4603 tree expr = *tp;
4605 /* Only a local declaration (variable or label). */
4606 if ((TREE_CODE (expr) == VAR_DECL
4607 && !TREE_STATIC (expr))
4608 || TREE_CODE (expr) == LABEL_DECL)
4610 /* Lookup the declaration. */
4611 n = (tree *) pointer_map_contains (st, expr);
4613 /* If it's there, remap it. */
4614 if (n)
4615 *tp = *n;
4616 *walk_subtrees = 0;
4618 else if (TREE_CODE (expr) == STATEMENT_LIST
4619 || TREE_CODE (expr) == BIND_EXPR
4620 || TREE_CODE (expr) == SAVE_EXPR)
4621 gcc_unreachable ();
4622 else if (TREE_CODE (expr) == TARGET_EXPR)
4624 /* Don't mess with a TARGET_EXPR that hasn't been expanded.
4625 It's OK for this to happen if it was part of a subtree that
4626 isn't immediately expanded, such as operand 2 of another
4627 TARGET_EXPR. */
4628 if (!TREE_OPERAND (expr, 1))
4630 TREE_OPERAND (expr, 1) = TREE_OPERAND (expr, 3);
4631 TREE_OPERAND (expr, 3) = NULL_TREE;
4635 /* Keep iterating. */
4636 return NULL_TREE;
4640 /* Called via walk_gimple_seq by copy_gimple_seq_and_replace_local.
4641 Using the decl map pointed to by ST (really a `struct pointer_map_t *'),
4642 remaps all local declarations to appropriate replacements in gimple
4643 statements. */
4645 static tree
4646 replace_locals_stmt (gimple_stmt_iterator *gsip,
4647 bool *handled_ops_p ATTRIBUTE_UNUSED,
4648 struct walk_stmt_info *wi)
4650 copy_body_data *id = (copy_body_data *) wi->info;
4651 gimple stmt = gsi_stmt (*gsip);
4653 if (gimple_code (stmt) == GIMPLE_BIND)
4655 tree block = gimple_bind_block (stmt);
4657 if (block)
4659 remap_block (&block, id);
4660 gimple_bind_set_block (stmt, block);
4663 /* This will remap a lot of the same decls again, but this should be
4664 harmless. */
4665 if (gimple_bind_vars (stmt))
4666 gimple_bind_set_vars (stmt, remap_decls (gimple_bind_vars (stmt), NULL, id));
4669 /* Keep iterating. */
4670 return NULL_TREE;
4674 /* Copies everything in SEQ and replaces variables and labels local to
4675 current_function_decl. */
4677 gimple_seq
4678 copy_gimple_seq_and_replace_locals (gimple_seq seq)
4680 copy_body_data id;
4681 struct walk_stmt_info wi;
4682 struct pointer_set_t *visited;
4683 gimple_seq copy;
4685 /* There's nothing to do for a NULL sequence. */
4686 if (seq == NULL)
4687 return seq;
4689 /* Set up ID. */
4690 memset (&id, 0, sizeof (id));
4691 id.src_fn = current_function_decl;
4692 id.dst_fn = current_function_decl;
4693 id.decl_map = pointer_map_create ();
4694 id.debug_map = NULL;
4696 id.copy_decl = copy_decl_no_change;
4697 id.transform_call_graph_edges = CB_CGE_DUPLICATE;
4698 id.transform_new_cfg = false;
4699 id.transform_return_to_modify = false;
4700 id.transform_lang_insert_block = NULL;
4702 /* Walk the tree once to find local labels. */
4703 memset (&wi, 0, sizeof (wi));
4704 visited = pointer_set_create ();
4705 wi.info = &id;
4706 wi.pset = visited;
4707 walk_gimple_seq (seq, mark_local_labels_stmt, NULL, &wi);
4708 pointer_set_destroy (visited);
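/* The labels must be collected in a separate pass up front: a goto
   may refer to a label whose GIMPLE_LABEL occurs later in SEQ, so
   replace_locals_op below needs the complete label map before the
   copy is walked. */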
4710 copy = gimple_seq_copy (seq);
4712 /* Walk the copy, remapping decls. */
4713 memset (&wi, 0, sizeof (wi));
4714 wi.info = &id;
4715 walk_gimple_seq (copy, replace_locals_stmt, replace_locals_op, &wi);
4717 /* Clean up. */
4718 pointer_map_destroy (id.decl_map);
4719 if (id.debug_map)
4720 pointer_map_destroy (id.debug_map);
4722 return copy;
4726 /* Allow someone to determine if SEARCH is a child of TOP from gdb. */
4728 static tree
4729 debug_find_tree_1 (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED, void *data)
4731 if (*tp == data)
4732 return (tree) data;
4733 else
4734 return NULL;
4737 DEBUG_FUNCTION bool
4738 debug_find_tree (tree top, tree search)
4740 return walk_tree_without_duplicates (&top, debug_find_tree_1, search) != 0;
4744 /* Declare the variables created by the inliner. Add all the variables in
4745 VARS to BLOCK. */
4747 static void
4748 declare_inline_vars (tree block, tree vars)
4750 tree t;
4751 for (t = vars; t; t = DECL_CHAIN (t))
4753 DECL_SEEN_IN_BIND_EXPR_P (t) = 1;
4754 gcc_assert (!TREE_STATIC (t) && !TREE_ASM_WRITTEN (t));
4755 add_local_decl (cfun, t);
4758 if (block)
4759 BLOCK_VARS (block) = chainon (BLOCK_VARS (block), vars);
4762 /* Copy NODE (which must be a DECL). The DECL originally lived in
4763 ID->src_fn, but the COPY will live in ID->dst_fn. Fix up debug info,
4764 RTL, context and related bookkeeping, then return COPY. */
4766 static tree
4767 copy_decl_for_dup_finish (copy_body_data *id, tree decl, tree copy)
4769 /* Don't generate debug information for the copy if we wouldn't have
4770 generated it for the original either. */
4771 DECL_ARTIFICIAL (copy) = DECL_ARTIFICIAL (decl);
4772 DECL_IGNORED_P (copy) = DECL_IGNORED_P (decl);
4774 /* Set the DECL_ABSTRACT_ORIGIN so the debugging routines know what
4775 declaration inspired this copy. */
4776 DECL_ABSTRACT_ORIGIN (copy) = DECL_ORIGIN (decl);
4778 /* The new variable/label has no RTL, yet. */
4779 if (CODE_CONTAINS_STRUCT (TREE_CODE (copy), TS_DECL_WRTL)
4780 && !TREE_STATIC (copy) && !DECL_EXTERNAL (copy))
4781 SET_DECL_RTL (copy, 0);
4783 /* These args would always appear unused, if not for this. */
4784 TREE_USED (copy) = 1;
4786 /* Set the context for the new declaration. */
4787 if (!DECL_CONTEXT (decl))
4788 /* Globals stay global. */
4790 else if (DECL_CONTEXT (decl) != id->src_fn)
4791 /* Things that weren't in the scope of the function we're inlining
4792 from aren't in the scope we're inlining to, either. */
4794 else if (TREE_STATIC (decl))
4795 /* Function-scoped static variables should stay in the original
4796 function. */
4798 else
4799 /* Ordinary automatic local variables are now in the scope of the
4800 new function. */
4801 DECL_CONTEXT (copy) = id->dst_fn;
4803 if (TREE_CODE (decl) == VAR_DECL
4804 /* C++ clones functions during parsing, before
4805 referenced_vars. */
4806 && gimple_referenced_vars (DECL_STRUCT_FUNCTION (id->src_fn))
4807 && referenced_var_lookup (DECL_STRUCT_FUNCTION (id->src_fn),
4808 DECL_UID (decl)))
4809 add_referenced_var (copy);
4811 return copy;
4814 static tree
4815 copy_decl_to_var (tree decl, copy_body_data *id)
4817 tree copy, type;
4819 gcc_assert (TREE_CODE (decl) == PARM_DECL
4820 || TREE_CODE (decl) == RESULT_DECL);
4822 type = TREE_TYPE (decl);
4824 copy = build_decl (DECL_SOURCE_LOCATION (id->dst_fn),
4825 VAR_DECL, DECL_NAME (decl), type);
4826 if (DECL_PT_UID_SET_P (decl))
4827 SET_DECL_PT_UID (copy, DECL_PT_UID (decl));
4828 TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (decl);
4829 TREE_READONLY (copy) = TREE_READONLY (decl);
4830 TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (decl);
4831 DECL_GIMPLE_REG_P (copy) = DECL_GIMPLE_REG_P (decl);
4833 return copy_decl_for_dup_finish (id, decl, copy);
4836 /* Like copy_decl_to_var, but create a return slot object instead of a
4837 pointer variable for return by invisible reference. */
4839 static tree
4840 copy_result_decl_to_var (tree decl, copy_body_data *id)
4842 tree copy, type;
4844 gcc_assert (TREE_CODE (decl) == PARM_DECL
4845 || TREE_CODE (decl) == RESULT_DECL);
4847 type = TREE_TYPE (decl);
4848 if (DECL_BY_REFERENCE (decl))
4849 type = TREE_TYPE (type);
4851 copy = build_decl (DECL_SOURCE_LOCATION (id->dst_fn),
4852 VAR_DECL, DECL_NAME (decl), type);
4853 if (DECL_PT_UID_SET_P (decl))
4854 SET_DECL_PT_UID (copy, DECL_PT_UID (decl));
4855 TREE_READONLY (copy) = TREE_READONLY (decl);
4856 TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (decl);
4857 if (!DECL_BY_REFERENCE (decl))
4859 TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (decl);
4860 DECL_GIMPLE_REG_P (copy) = DECL_GIMPLE_REG_P (decl);
4863 return copy_decl_for_dup_finish (id, decl, copy);
4866 tree
4867 copy_decl_no_change (tree decl, copy_body_data *id)
4869 tree copy;
4871 copy = copy_node (decl);
4873 /* The COPY is not abstract; it will be generated in DST_FN. */
4874 DECL_ABSTRACT (copy) = 0;
4875 lang_hooks.dup_lang_specific_decl (copy);
4877 /* TREE_ADDRESSABLE isn't used to indicate that a label's address has
4878 been taken; it's for internal bookkeeping in expand_goto_internal. */
4879 if (TREE_CODE (copy) == LABEL_DECL)
4881 TREE_ADDRESSABLE (copy) = 0;
4882 LABEL_DECL_UID (copy) = -1;
4885 return copy_decl_for_dup_finish (id, decl, copy);
4888 static tree
4889 copy_decl_maybe_to_var (tree decl, copy_body_data *id)
4891 if (TREE_CODE (decl) == PARM_DECL || TREE_CODE (decl) == RESULT_DECL)
4892 return copy_decl_to_var (decl, id);
4893 else
4894 return copy_decl_no_change (decl, id);
4897 /* Return a copy of the function's argument tree. */
4898 static tree
4899 copy_arguments_for_versioning (tree orig_parm, copy_body_data * id,
4900 bitmap args_to_skip, tree *vars)
4902 tree arg, *parg;
4903 tree new_parm = NULL;
4904 int i = 0;
4906 parg = &new_parm;
4908 for (arg = orig_parm; arg; arg = DECL_CHAIN (arg), i++)
4909 if (!args_to_skip || !bitmap_bit_p (args_to_skip, i))
4911 tree new_tree = remap_decl (arg, id);
4912 if (TREE_CODE (new_tree) != PARM_DECL)
4913 new_tree = id->copy_decl (arg, id);
4914 lang_hooks.dup_lang_specific_decl (new_tree);
4915 *parg = new_tree;
4916 parg = &DECL_CHAIN (new_tree);
4918 else if (!pointer_map_contains (id->decl_map, arg))
4920 /* Make an equivalent VAR_DECL. If the argument was used
4921 as a temporary variable later in the function, the uses will be
4922 replaced by the local variable. */
4923 tree var = copy_decl_to_var (arg, id);
4924 add_referenced_var (var);
4925 insert_decl_map (id, arg, var);
4926 /* Declare this new variable. */
4927 DECL_CHAIN (var) = *vars;
4928 *vars = var;
4930 return new_parm;
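/* For example, for arguments (a, b, c) with bit 1 set in
   ARGS_TO_SKIP the result is the chain (a', c'); the skipped b is
   instead mapped to an equivalent VAR_DECL chained onto *VARS. */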
4933 /* Return a copy of the function's static chain. */
4934 static tree
4935 copy_static_chain (tree static_chain, copy_body_data * id)
4937 tree *chain_copy, *pvar;
4939 chain_copy = &static_chain;
4940 for (pvar = chain_copy; *pvar; pvar = &DECL_CHAIN (*pvar))
4942 tree new_tree = remap_decl (*pvar, id);
4943 lang_hooks.dup_lang_specific_decl (new_tree);
4944 DECL_CHAIN (new_tree) = DECL_CHAIN (*pvar);
4945 *pvar = new_tree;
4947 return static_chain;
4950 /* Return true if the function is allowed to be versioned.
4951 This is a guard for the versioning functionality. */
4953 bool
4954 tree_versionable_function_p (tree fndecl)
4956 return (!lookup_attribute ("noclone", DECL_ATTRIBUTES (fndecl))
4957 && copy_forbidden (DECL_STRUCT_FUNCTION (fndecl), fndecl) == NULL);
4960 /* Delete all unreachable basic blocks and update callgraph.
4961 Doing so is somewhat nontrivial because we need to update all clones and
4962 remove inlined functions that become unreachable. */
4964 static bool
4965 delete_unreachable_blocks_update_callgraph (copy_body_data *id)
4967 bool changed = false;
4968 basic_block b, next_bb;
4970 find_unreachable_blocks ();
4972 /* Delete all unreachable basic blocks. */
4974 for (b = ENTRY_BLOCK_PTR->next_bb; b != EXIT_BLOCK_PTR; b = next_bb)
4976 next_bb = b->next_bb;
4978 if (!(b->flags & BB_REACHABLE))
4980 gimple_stmt_iterator bsi;
4982 for (bsi = gsi_start_bb (b); !gsi_end_p (bsi); gsi_next (&bsi))
4983 if (gimple_code (gsi_stmt (bsi)) == GIMPLE_CALL)
4985 struct cgraph_edge *e;
4986 struct cgraph_node *node;
4988 if ((e = cgraph_edge (id->dst_node, gsi_stmt (bsi))) != NULL)
4990 if (!e->inline_failed)
4991 cgraph_remove_node_and_inline_clones (e->callee);
4992 else
4993 cgraph_remove_edge (e);
4995 if (id->transform_call_graph_edges == CB_CGE_MOVE_CLONES
4996 && id->dst_node->clones)
4997 for (node = id->dst_node->clones; node != id->dst_node;)
4999 if ((e = cgraph_edge (node, gsi_stmt (bsi))) != NULL)
5001 if (!e->inline_failed)
5002 cgraph_remove_node_and_inline_clones (e->callee);
5003 else
5004 cgraph_remove_edge (e);
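/* Advance to the next clone in a pre-order walk of the clone tree:
   descend into NODE's own clones first, then move to the next
   sibling, climbing back via clone_of once a subtree is
   exhausted. */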
5007 if (node->clones)
5008 node = node->clones;
5009 else if (node->next_sibling_clone)
5010 node = node->next_sibling_clone;
5011 else
5013 while (node != id->dst_node && !node->next_sibling_clone)
5014 node = node->clone_of;
5015 if (node != id->dst_node)
5016 node = node->next_sibling_clone;
5020 delete_basic_block (b);
5021 changed = true;
5025 return changed;
5028 /* Update clone info after duplication. */
5030 static void
5031 update_clone_info (copy_body_data * id)
5033 struct cgraph_node *node;
5034 if (!id->dst_node->clones)
5035 return;
5036 for (node = id->dst_node->clones; node != id->dst_node;)
5038 /* First update replace maps to match the new body. */
5039 if (node->clone.tree_map)
5041 unsigned int i;
5042 for (i = 0; i < VEC_length (ipa_replace_map_p, node->clone.tree_map); i++)
5044 struct ipa_replace_map *replace_info;
5045 replace_info = VEC_index (ipa_replace_map_p, node->clone.tree_map, i);
5046 walk_tree (&replace_info->old_tree, copy_tree_body_r, id, NULL);
5047 walk_tree (&replace_info->new_tree, copy_tree_body_r, id, NULL);
5050 if (node->clones)
5051 node = node->clones;
5052 else if (node->next_sibling_clone)
5053 node = node->next_sibling_clone;
5054 else
5056 while (node != id->dst_node && !node->next_sibling_clone)
5057 node = node->clone_of;
5058 if (node != id->dst_node)
5059 node = node->next_sibling_clone;
5064 /* Create a copy of a function's tree.
5065 OLD_DECL and NEW_DECL are FUNCTION_DECL tree nodes
5066 of the original function and the new copied function
5067 respectively. In case we want to replace a DECL
5068 tree with another tree while duplicating the function's
5069 body, TREE_MAP represents the mapping between these
5070 trees. If UPDATE_CLONES is set, the call_stmt fields
5071 of edges of clones of the function will be updated.
5073 If non-NULL, ARGS_TO_SKIP determines which function parameters to
5074 remove from the new version.
5075 If SKIP_RETURN is true, the new version will return void.
5076 If non-NULL, BLOCKS_TO_COPY determines which basic blocks to copy.
5077 If non-NULL, NEW_ENTRY determines the new entry BB of the clone. */
5079 void
5080 tree_function_versioning (tree old_decl, tree new_decl,
5081 VEC(ipa_replace_map_p,gc)* tree_map,
5082 bool update_clones, bitmap args_to_skip,
5083 bool skip_return, bitmap blocks_to_copy,
5084 basic_block new_entry)
5086 struct cgraph_node *old_version_node;
5087 struct cgraph_node *new_version_node;
5088 copy_body_data id;
5089 tree p;
5090 unsigned i;
5091 struct ipa_replace_map *replace_info;
5092 basic_block old_entry_block, bb;
5093 VEC (gimple, heap) *init_stmts = VEC_alloc (gimple, heap, 10);
5095 tree old_current_function_decl = current_function_decl;
5096 tree vars = NULL_TREE;
5098 gcc_assert (TREE_CODE (old_decl) == FUNCTION_DECL
5099 && TREE_CODE (new_decl) == FUNCTION_DECL);
5100 DECL_POSSIBLY_INLINED (old_decl) = 1;
5102 old_version_node = cgraph_get_node (old_decl);
5103 gcc_checking_assert (old_version_node);
5104 new_version_node = cgraph_get_node (new_decl);
5105 gcc_checking_assert (new_version_node);
5107 /* Copy over debug args. */
5108 if (DECL_HAS_DEBUG_ARGS_P (old_decl))
5110 VEC(tree, gc) **new_debug_args, **old_debug_args;
5111 gcc_checking_assert (decl_debug_args_lookup (new_decl) == NULL);
5112 DECL_HAS_DEBUG_ARGS_P (new_decl) = 0;
5113 old_debug_args = decl_debug_args_lookup (old_decl);
5114 if (old_debug_args)
5116 new_debug_args = decl_debug_args_insert (new_decl);
5117 *new_debug_args = VEC_copy (tree, gc, *old_debug_args);
5121 /* Output the inlining info for this abstract function, since it has been
5122 inlined. If we don't do this now, we can lose the information about the
5123 variables in the function when the blocks get blown away as soon as we
5124 remove the cgraph node. */
5125 (*debug_hooks->outlining_inline_function) (old_decl);
5127 DECL_ARTIFICIAL (new_decl) = 1;
5128 DECL_ABSTRACT_ORIGIN (new_decl) = DECL_ORIGIN (old_decl);
5129 DECL_FUNCTION_PERSONALITY (new_decl) = DECL_FUNCTION_PERSONALITY (old_decl);
5131 /* Prepare the data structures for the tree copy. */
5132 memset (&id, 0, sizeof (id));
5134 /* Statements to fold after the body is copied are queued here. */
5135 id.statements_to_fold = pointer_set_create ();
5137 id.decl_map = pointer_map_create ();
5138 id.debug_map = NULL;
5139 id.src_fn = old_decl;
5140 id.dst_fn = new_decl;
5141 id.src_node = old_version_node;
5142 id.dst_node = new_version_node;
5143 id.src_cfun = DECL_STRUCT_FUNCTION (old_decl);
5144 if (id.src_node->ipa_transforms_to_apply)
5146 VEC(ipa_opt_pass,heap) * old_transforms_to_apply = id.dst_node->ipa_transforms_to_apply;
5147 unsigned int i;
5149 id.dst_node->ipa_transforms_to_apply = VEC_copy (ipa_opt_pass, heap,
5150 id.src_node->ipa_transforms_to_apply);
5151 for (i = 0; i < VEC_length (ipa_opt_pass, old_transforms_to_apply); i++)
5152 VEC_safe_push (ipa_opt_pass, heap, id.dst_node->ipa_transforms_to_apply,
5153 VEC_index (ipa_opt_pass,
5154 old_transforms_to_apply,
5155 i));
5158 id.copy_decl = copy_decl_no_change;
5159 id.transform_call_graph_edges
5160 = update_clones ? CB_CGE_MOVE_CLONES : CB_CGE_MOVE;
5161 id.transform_new_cfg = true;
5162 id.transform_return_to_modify = false;
5163 id.transform_lang_insert_block = NULL;
5165 current_function_decl = new_decl;
5166 old_entry_block = ENTRY_BLOCK_PTR_FOR_FUNCTION
5167 (DECL_STRUCT_FUNCTION (old_decl));
5168 initialize_cfun (new_decl, old_decl,
5169 old_entry_block->count);
5170 DECL_STRUCT_FUNCTION (new_decl)->gimple_df->ipa_pta
5171 = id.src_cfun->gimple_df->ipa_pta;
5172 push_cfun (DECL_STRUCT_FUNCTION (new_decl));
5174 /* Copy the function's static chain. */
5175 p = DECL_STRUCT_FUNCTION (old_decl)->static_chain_decl;
5176 if (p)
5177 DECL_STRUCT_FUNCTION (new_decl)->static_chain_decl =
5178 copy_static_chain (DECL_STRUCT_FUNCTION (old_decl)->static_chain_decl,
5179 &id);
5181 /* If there's a tree_map, prepare for substitution. */
5182 if (tree_map)
5183 for (i = 0; i < VEC_length (ipa_replace_map_p, tree_map); i++)
5185 gimple init;
5186 replace_info = VEC_index (ipa_replace_map_p, tree_map, i);
5187 if (replace_info->replace_p)
5189 tree op = replace_info->new_tree;
5190 if (!replace_info->old_tree)
5192 int i = replace_info->parm_num;
5193 tree parm;
5194 for (parm = DECL_ARGUMENTS (old_decl); i; parm = DECL_CHAIN (parm))
5195 i --;
5196 replace_info->old_tree = parm;
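/* E.g. parm_num == 2 selects the third parameter of OLD_DECL; from
   here on the entry behaves as if it had named that PARM_DECL
   explicitly. */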
5200 STRIP_NOPS (op);
5202 if (TREE_CODE (op) == VIEW_CONVERT_EXPR)
5203 op = TREE_OPERAND (op, 0);
5205 if (TREE_CODE (op) == ADDR_EXPR)
5207 op = TREE_OPERAND (op, 0);
5208 while (handled_component_p (op))
5209 op = TREE_OPERAND (op, 0);
5210 if (TREE_CODE (op) == VAR_DECL)
5211 add_referenced_var (op);
5213 gcc_assert (TREE_CODE (replace_info->old_tree) == PARM_DECL);
5214 init = setup_one_parameter (&id, replace_info->old_tree,
5215 replace_info->new_tree, id.src_fn,
5216 NULL,
5217 &vars);
5218 if (init)
5219 VEC_safe_push (gimple, heap, init_stmts, init);
5222 /* Copy the function's arguments. */
5223 if (DECL_ARGUMENTS (old_decl) != NULL_TREE)
5224 DECL_ARGUMENTS (new_decl) =
5225 copy_arguments_for_versioning (DECL_ARGUMENTS (old_decl), &id,
5226 args_to_skip, &vars);
5228 DECL_INITIAL (new_decl) = remap_blocks (DECL_INITIAL (id.src_fn), &id);
5229 BLOCK_SUPERCONTEXT (DECL_INITIAL (new_decl)) = new_decl;
5231 declare_inline_vars (DECL_INITIAL (new_decl), vars);
5233 if (!VEC_empty (tree, DECL_STRUCT_FUNCTION (old_decl)->local_decls))
5234 /* Add local vars. */
5235 add_local_variables (DECL_STRUCT_FUNCTION (old_decl), cfun, &id, false);
5237 if (DECL_RESULT (old_decl) == NULL_TREE)
5239 else if (skip_return && !VOID_TYPE_P (TREE_TYPE (DECL_RESULT (old_decl))))
5241 DECL_RESULT (new_decl)
5242 = build_decl (DECL_SOURCE_LOCATION (DECL_RESULT (old_decl)),
5243 RESULT_DECL, NULL_TREE, void_type_node);
5244 DECL_CONTEXT (DECL_RESULT (new_decl)) = new_decl;
5245 cfun->returns_struct = 0;
5246 cfun->returns_pcc_struct = 0;
5248 else
5250 tree old_name;
5251 DECL_RESULT (new_decl) = remap_decl (DECL_RESULT (old_decl), &id);
5252 lang_hooks.dup_lang_specific_decl (DECL_RESULT (new_decl));
5253 if (gimple_in_ssa_p (id.src_cfun)
5254 && DECL_BY_REFERENCE (DECL_RESULT (old_decl))
5255 && (old_name
5256 = gimple_default_def (id.src_cfun, DECL_RESULT (old_decl))))
5258 tree new_name = make_ssa_name (DECL_RESULT (new_decl), NULL);
5259 insert_decl_map (&id, old_name, new_name);
5260 SSA_NAME_DEF_STMT (new_name) = gimple_build_nop ();
5261 set_default_def (DECL_RESULT (new_decl), new_name);
5265 /* Copy the function's body. */
5266 copy_body (&id, old_entry_block->count, REG_BR_PROB_BASE,
5267 ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR, blocks_to_copy, new_entry);
5269 /* Renumber the lexical scoping (non-code) blocks consecutively. */
5270 number_blocks (new_decl);
5272 /* We want to create the BB unconditionally, so that the addition of
5273 debug stmts doesn't affect BB count, which may in the end cause
5274 codegen differences. */
5275 bb = split_edge (single_succ_edge (ENTRY_BLOCK_PTR));
5276 while (VEC_length (gimple, init_stmts))
5277 insert_init_stmt (&id, bb, VEC_pop (gimple, init_stmts));
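/* The INIT_STMTS queued by setup_one_parameter above give the
   replaced parameters their replacement values; they all land in
   this freshly split block right after the entry. */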
5278 update_clone_info (&id);
5280 /* Remap the nonlocal_goto_save_area, if any. */
5281 if (cfun->nonlocal_goto_save_area)
5283 struct walk_stmt_info wi;
5285 memset (&wi, 0, sizeof (wi));
5286 wi.info = &id;
5287 walk_tree (&cfun->nonlocal_goto_save_area, remap_gimple_op_r, &wi, NULL);
5290 /* Clean up. */
5291 pointer_map_destroy (id.decl_map);
5292 if (id.debug_map)
5293 pointer_map_destroy (id.debug_map);
5294 free_dominance_info (CDI_DOMINATORS);
5295 free_dominance_info (CDI_POST_DOMINATORS);
5297 fold_marked_statements (0, id.statements_to_fold);
5298 pointer_set_destroy (id.statements_to_fold);
5299 fold_cond_expr_cond ();
5300 delete_unreachable_blocks_update_callgraph (&id);
5301 if (id.dst_node->analyzed)
5302 cgraph_rebuild_references ();
5303 update_ssa (TODO_update_ssa);
5305 /* After partial cloning we need to rescale frequencies, so they are
5306 within proper range in the cloned function. */
5307 if (new_entry)
5309 struct cgraph_edge *e;
5310 rebuild_frequencies ();
5312 new_version_node->count = ENTRY_BLOCK_PTR->count;
5313 for (e = new_version_node->callees; e; e = e->next_callee)
5315 basic_block bb = gimple_bb (e->call_stmt);
5316 e->frequency = compute_call_stmt_bb_frequency (current_function_decl,
5317 bb);
5318 e->count = bb->count;
5320 for (e = new_version_node->indirect_calls; e; e = e->next_callee)
5322 basic_block bb = gimple_bb (e->call_stmt);
5323 e->frequency = compute_call_stmt_bb_frequency (current_function_decl,
5324 bb);
5325 e->count = bb->count;
5329 free_dominance_info (CDI_DOMINATORS);
5330 free_dominance_info (CDI_POST_DOMINATORS);
5332 gcc_assert (!id.debug_stmts);
5333 VEC_free (gimple, heap, init_stmts);
5334 pop_cfun ();
5335 current_function_decl = old_current_function_decl;
5336 gcc_assert (!current_function_decl
5337 || DECL_STRUCT_FUNCTION (current_function_decl) == cfun);
5338 return;
5341 /* EXP is a CALL_EXPR present in a GENERIC expression tree. Try to integrate
5342 the callee and return the inlined body on success. */
5344 tree
5345 maybe_inline_call_in_expr (tree exp)
5347 tree fn = get_callee_fndecl (exp);
5349 /* We can only try to inline "const" functions. */
5350 if (fn && TREE_READONLY (fn) && DECL_SAVED_TREE (fn))
5352 struct pointer_map_t *decl_map = pointer_map_create ();
5353 call_expr_arg_iterator iter;
5354 copy_body_data id;
5355 tree param, arg, t;
5357 /* Remap the parameters. */
5358 for (param = DECL_ARGUMENTS (fn), arg = first_call_expr_arg (exp, &iter);
5359 param;
5360 param = DECL_CHAIN (param), arg = next_call_expr_arg (&iter))
5361 *pointer_map_insert (decl_map, param) = arg;
5363 memset (&id, 0, sizeof (id));
5364 id.src_fn = fn;
5365 id.dst_fn = current_function_decl;
5366 id.src_cfun = DECL_STRUCT_FUNCTION (fn);
5367 id.decl_map = decl_map;
5369 id.copy_decl = copy_decl_no_change;
5370 id.transform_call_graph_edges = CB_CGE_DUPLICATE;
5371 id.transform_new_cfg = false;
5372 id.transform_return_to_modify = true;
5373 id.transform_lang_insert_block = NULL;
5375 /* Make sure not to unshare trees behind the front-end's back
5376 since front-end specific mechanisms may rely on sharing. */
5377 id.regimplify = false;
5378 id.do_not_unshare = true;
5380 /* We're not inside any EH region. */
5381 id.eh_lp_nr = 0;
5383 t = copy_tree_body (&id);
5384 pointer_map_destroy (decl_map);
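/* Because transform_return_to_modify is set, the body of a function
   with a single return statement copies to one MODIFY_EXPR assigning
   the return value; its right-hand side is the expression we can
   hand back into the GENERIC tree. */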
5386 /* We can only return something suitable for use in a GENERIC
5387 expression tree. */
5388 if (TREE_CODE (t) == MODIFY_EXPR)
5389 return TREE_OPERAND (t, 1);
5392 return NULL_TREE;
5395 /* Duplicate a type, fields and all. */
5397 tree
5398 build_duplicate_type (tree type)
5400 struct copy_body_data id;
5402 memset (&id, 0, sizeof (id));
5403 id.src_fn = current_function_decl;
5404 id.dst_fn = current_function_decl;
5405 id.src_cfun = cfun;
5406 id.decl_map = pointer_map_create ();
5407 id.debug_map = NULL;
5408 id.copy_decl = copy_decl_no_change;
5410 type = remap_type_1 (type, &id);
5412 pointer_map_destroy (id.decl_map);
5413 if (id.debug_map)
5414 pointer_map_destroy (id.debug_map);
5416 TYPE_CANONICAL (type) = type;
5418 return type;