/* Tree inlining.
   Copyright 2001, 2002, 2003, 2004, 2005, 2006 Free Software Foundation, Inc.
   Contributed by Alexandre Oliva <aoliva@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to
the Free Software Foundation, 51 Franklin Street, Fifth Floor,
Boston, MA 02110-1301, USA.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "toplev.h"
#include "tree.h"
#include "tree-inline.h"
#include "rtl.h"
#include "expr.h"
#include "flags.h"
#include "params.h"
#include "input.h"
#include "insn-config.h"
#include "varray.h"
#include "hashtab.h"
#include "langhooks.h"
#include "basic-block.h"
#include "tree-iterator.h"
#include "cgraph.h"
#include "intl.h"
#include "tree-mudflap.h"
#include "tree-flow.h"
#include "function.h"
#include "ggc.h"
#include "diagnostic.h"
#include "except.h"
#include "debug.h"
#include "pointer-set.h"
#include "ipa-prop.h"
#include "value-prof.h"
#include "tree-pass.h"

/* I'm not really happy about this, but we need to handle gimple and
   non-gimple trees.  */
#include "tree-gimple.h"
/* Inlining, Cloning, Versioning, Parallelization

   Inlining: a function body is duplicated, but the PARM_DECLs are
   remapped into VAR_DECLs, and non-void RETURN_EXPRs become
   GIMPLE_MODIFY_STMTs that store to a dedicated returned-value variable.
   The duplicated eh_region info of the copy will later be appended
   to the info for the caller; the eh_region info in copied throwing
   statements and RESX_EXPRs is adjusted accordingly.

   Cloning: (only in C++) We have one body for a con/de/structor, and
   multiple function decls, each with a unique parameter list.
   Duplicate the body, using the given splay tree; some parameters
   will become constants (like 0 or 1).

   Versioning: a function body is duplicated and the result is a new
   function, rather than being inserted into blocks of an existing
   function as with inlining.  Some parameters will become constants.

   Parallelization: a region of a function is duplicated, resulting in
   a new function.  Variables may be replaced with complex expressions
   to enable shared variable semantics.

   All of these will simultaneously look up any callgraph edges.  If
   we're going to inline the duplicated function body, and the given
   function has some cloned callgraph nodes (one for each place this
   function will be inlined) those callgraph edges will be duplicated.
   If we're cloning the body, those callgraph edges will be
   updated to point into the new body.  (Note that the original
   callgraph node and edge list will not be altered.)

   See the CALL_EXPR handling case in copy_body_r ().  */
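
/* As an illustration (a sketch, not a literal GIMPLE dump): when
     int inc (int x) { return x + 1; }
   is inlined at a call `r = inc (a);', the PARM_DECL `x' becomes a
   local VAR_DECL initialized from `a', and the RETURN_EXPR becomes a
   GIMPLE_MODIFY_STMT storing `x + 1' into the returned-value variable
   that feeds `r'; control then falls through to the code after the
   call instead of returning.  */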
/* 0 if we should not perform inlining.
   1 if we should expand function calls inline at the tree level.
   2 if we should consider *all* functions to be inline
   candidates.  */

int flag_inline_trees = 0;
/* To Do:

   o In order to make inlining-on-trees work, we pessimized
     function-local static constants.  In particular, they are now
     always output, even when not addressed.  Fix this by treating
     function-local static constants just like global static
     constants; the back-end already knows not to output them if they
     are not needed.

   o Provide heuristics to clamp inlining of recursive template
     calls?  */
/* Weights that estimate_num_insns uses for heuristics in inlining.  */

eni_weights eni_inlining_weights;

/* Weights that estimate_num_insns uses to estimate the size of the
   produced code.  */

eni_weights eni_size_weights;

/* Weights that estimate_num_insns uses to estimate the time necessary
   to execute the produced code.  */

eni_weights eni_time_weights;
/* Prototypes.  */

static tree declare_return_variable (copy_body_data *, tree, tree, tree *);
static tree copy_generic_body (copy_body_data *);
static bool inlinable_function_p (tree);
static void remap_block (tree *, copy_body_data *);
static tree remap_decls (tree, copy_body_data *);
static void copy_bind_expr (tree *, int *, copy_body_data *);
static tree mark_local_for_remap_r (tree *, int *, void *);
static void unsave_expr_1 (tree);
static tree unsave_r (tree *, int *, void *);
static void declare_inline_vars (tree, tree);
static void remap_save_expr (tree *, void *, int *);
static void add_lexical_block (tree current_block, tree new_block);
static tree copy_decl_to_var (tree, copy_body_data *);
static tree copy_result_decl_to_var (tree, copy_body_data *);
static tree copy_decl_no_change (tree, copy_body_data *);
static tree copy_decl_maybe_to_var (tree, copy_body_data *);
/* Insert a tree->tree mapping for ID.  Although the name suggests
   that the trees should be variables, it is used for more than that.  */

void
insert_decl_map (copy_body_data *id, tree key, tree value)
{
  splay_tree_insert (id->decl_map, (splay_tree_key) key,
                     (splay_tree_value) value);

  /* Always insert an identity map as well.  If we see this same new
     node again, we won't want to duplicate it a second time.  */
  if (key != value)
    splay_tree_insert (id->decl_map, (splay_tree_key) value,
                       (splay_tree_value) value);
}
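
/* For instance (hypothetical names, illustration only): after
   insert_decl_map (id, old_var, new_var) the map contains both
   old_var -> new_var and new_var -> new_var, so a later walk that
   encounters the already-remapped new_var gets it back unchanged
   instead of cloning it a second time.  */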
/* Construct a new SSA name for old NAME.  ID is the inline context.  */

static tree
remap_ssa_name (tree name, copy_body_data *id)
{
  tree new;
  splay_tree_node n;

  gcc_assert (TREE_CODE (name) == SSA_NAME);

  n = splay_tree_lookup (id->decl_map, (splay_tree_key) name);
  if (n)
    return (tree) n->value;

  /* Do not set DEF_STMT yet as the statement is not copied yet.  We do
     that in copy_bb.  */
  new = remap_decl (SSA_NAME_VAR (name), id);
  /* We might've substituted a constant or another SSA_NAME for
     the variable.

     Replace the SSA name representing the RESULT_DECL by the variable
     during inlining: this saves us from the need to introduce a PHI node
     when the return value is only partly initialized.  */
  if ((TREE_CODE (new) == VAR_DECL || TREE_CODE (new) == PARM_DECL)
      && (TREE_CODE (SSA_NAME_VAR (name)) != RESULT_DECL
          || !id->transform_return_to_modify))
    {
      new = make_ssa_name (new, NULL);
      insert_decl_map (id, name, new);
      if (IS_EMPTY_STMT (SSA_NAME_DEF_STMT (name)))
        {
          SSA_NAME_DEF_STMT (new) = build_empty_stmt ();
          if (gimple_default_def (id->src_cfun, SSA_NAME_VAR (name)) == name)
            set_default_def (SSA_NAME_VAR (new), new);
        }
      SSA_NAME_OCCURS_IN_ABNORMAL_PHI (new)
        = SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name);
      TREE_TYPE (new) = TREE_TYPE (SSA_NAME_VAR (new));
    }
  else
    insert_decl_map (id, name, new);
  return new;
}
/* Remap DECL during the copying of the BLOCK tree for the function.  */

tree
remap_decl (tree decl, copy_body_data *id)
{
  splay_tree_node n;
  tree fn;

  /* We only remap local variables in the current function.  */
  fn = id->src_fn;

  /* See if we have remapped this declaration.  */
  n = splay_tree_lookup (id->decl_map, (splay_tree_key) decl);

  /* If we didn't already have an equivalent for this declaration,
     create one now.  */
  if (!n)
    {
      /* Make a copy of the variable or label.  */
      tree t = id->copy_decl (decl, id);

      /* Remember it, so that if we encounter this local entity again
         we can reuse this copy.  Do this early because remap_type may
         need this decl for TYPE_STUB_DECL.  */
      insert_decl_map (id, decl, t);

      if (!DECL_P (t))
        return t;

      /* Remap types, if necessary.  */
      TREE_TYPE (t) = remap_type (TREE_TYPE (t), id);
      if (TREE_CODE (t) == TYPE_DECL)
        DECL_ORIGINAL_TYPE (t) = remap_type (DECL_ORIGINAL_TYPE (t), id);

      /* Remap sizes as necessary.  */
      walk_tree (&DECL_SIZE (t), copy_body_r, id, NULL);
      walk_tree (&DECL_SIZE_UNIT (t), copy_body_r, id, NULL);

      /* If fields, do likewise for offset and qualifier.  */
      if (TREE_CODE (t) == FIELD_DECL)
        {
          walk_tree (&DECL_FIELD_OFFSET (t), copy_body_r, id, NULL);
          if (TREE_CODE (DECL_CONTEXT (t)) == QUAL_UNION_TYPE)
            walk_tree (&DECL_QUALIFIER (t), copy_body_r, id, NULL);
        }

      if (cfun && gimple_in_ssa_p (cfun)
          && (TREE_CODE (t) == VAR_DECL
              || TREE_CODE (t) == RESULT_DECL || TREE_CODE (t) == PARM_DECL))
        {
          tree def = gimple_default_def (id->src_cfun, decl);
          get_var_ann (t);
          if (TREE_CODE (decl) != PARM_DECL && def)
            {
              tree map = remap_ssa_name (def, id);
              /* Watch out for RESULT_DECLs whose SSA names map directly
                 to them.  */
              if (TREE_CODE (map) == SSA_NAME)
                set_default_def (t, map);
            }
          add_referenced_var (t);
        }
      return t;
    }

  return unshare_expr ((tree) n->value);
}
static tree
remap_type_1 (tree type, copy_body_data *id)
{
  splay_tree_node node;
  tree new, t;

  if (type == NULL)
    return type;

  /* See if we have remapped this type.  */
  node = splay_tree_lookup (id->decl_map, (splay_tree_key) type);
  if (node)
    return (tree) node->value;

  /* The type only needs remapping if it's variably modified.  */
  if (! variably_modified_type_p (type, id->src_fn))
    {
      insert_decl_map (id, type, type);
      return type;
    }

  /* We do need a copy.  Build and register it now.  If this is a pointer or
     reference type, remap the designated type and make a new pointer or
     reference type.  */
  if (TREE_CODE (type) == POINTER_TYPE)
    {
      new = build_pointer_type_for_mode (remap_type (TREE_TYPE (type), id),
                                         TYPE_MODE (type),
                                         TYPE_REF_CAN_ALIAS_ALL (type));
      insert_decl_map (id, type, new);
      return new;
    }
  else if (TREE_CODE (type) == REFERENCE_TYPE)
    {
      new = build_reference_type_for_mode (remap_type (TREE_TYPE (type), id),
                                           TYPE_MODE (type),
                                           TYPE_REF_CAN_ALIAS_ALL (type));
      insert_decl_map (id, type, new);
      return new;
    }
  else
    new = copy_node (type);

  insert_decl_map (id, type, new);

  /* This is a new type, not a copy of an old type.  Need to reassociate
     variants.  We can handle everything except the main variant lazily.  */
  t = TYPE_MAIN_VARIANT (type);
  if (type != t)
    {
      t = remap_type (t, id);
      TYPE_MAIN_VARIANT (new) = t;
      TYPE_NEXT_VARIANT (new) = TYPE_MAIN_VARIANT (t);
      TYPE_NEXT_VARIANT (t) = new;
    }
  else
    {
      TYPE_MAIN_VARIANT (new) = new;
      TYPE_NEXT_VARIANT (new) = NULL;
    }

  if (TYPE_STUB_DECL (type))
    TYPE_STUB_DECL (new) = remap_decl (TYPE_STUB_DECL (type), id);

  /* Lazily create pointer and reference types.  */
  TYPE_POINTER_TO (new) = NULL;
  TYPE_REFERENCE_TO (new) = NULL;

  switch (TREE_CODE (new))
    {
    case INTEGER_TYPE:
    case REAL_TYPE:
    case ENUMERAL_TYPE:
    case BOOLEAN_TYPE:
      t = TYPE_MIN_VALUE (new);
      if (t && TREE_CODE (t) != INTEGER_CST)
        walk_tree (&TYPE_MIN_VALUE (new), copy_body_r, id, NULL);

      t = TYPE_MAX_VALUE (new);
      if (t && TREE_CODE (t) != INTEGER_CST)
        walk_tree (&TYPE_MAX_VALUE (new), copy_body_r, id, NULL);
      return new;

    case FUNCTION_TYPE:
      TREE_TYPE (new) = remap_type (TREE_TYPE (new), id);
      walk_tree (&TYPE_ARG_TYPES (new), copy_body_r, id, NULL);
      return new;

    case ARRAY_TYPE:
      TREE_TYPE (new) = remap_type (TREE_TYPE (new), id);
      TYPE_DOMAIN (new) = remap_type (TYPE_DOMAIN (new), id);
      break;

    case RECORD_TYPE:
    case UNION_TYPE:
    case QUAL_UNION_TYPE:
      {
        tree f, nf = NULL;

        for (f = TYPE_FIELDS (new); f ; f = TREE_CHAIN (f))
          {
            t = remap_decl (f, id);
            DECL_CONTEXT (t) = new;
            TREE_CHAIN (t) = nf;
            nf = t;
          }
        TYPE_FIELDS (new) = nreverse (nf);
      }
      break;

    case OFFSET_TYPE:
    default:
      /* Shouldn't have been thought variable sized.  */
      gcc_unreachable ();
    }

  walk_tree (&TYPE_SIZE (new), copy_body_r, id, NULL);
  walk_tree (&TYPE_SIZE_UNIT (new), copy_body_r, id, NULL);

  return new;
}
tree
remap_type (tree type, copy_body_data *id)
{
  splay_tree_node node;

  if (type == NULL)
    return type;

  /* See if we have remapped this type.  */
  node = splay_tree_lookup (id->decl_map, (splay_tree_key) type);
  if (node)
    return (tree) node->value;

  /* The type only needs remapping if it's variably modified.  */
  if (! variably_modified_type_p (type, id->src_fn))
    {
      insert_decl_map (id, type, type);
      return type;
    }

  return remap_type_1 (type, id);
}
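
/* A variably modified type is one whose size or layout depends on the
   enclosing function, e.g. (illustrative C, not from this file):
     void f (int n) { int a[n]; ... }
   Here the type of `a' mentions `n', so copying the body into another
   function must remap the type; fixed-size types like `int[10]' are
   shared unchanged via the identity mapping above.  */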
static tree
remap_decls (tree decls, copy_body_data *id)
{
  tree old_var;
  tree new_decls = NULL_TREE;

  /* Remap its variables.  */
  for (old_var = decls; old_var; old_var = TREE_CHAIN (old_var))
    {
      tree new_var;

      /* Local static declarations cannot be duplicated (doing so would
         break the one-decl rule), so do not remap them; instead, link
         the original decls directly into the current function's
         unexpanded_var_list.  */
      if (!lang_hooks.tree_inlining.auto_var_in_fn_p (old_var, id->src_fn)
          && !DECL_EXTERNAL (old_var))
        {
          cfun->unexpanded_var_list = tree_cons (NULL_TREE, old_var,
                                                 cfun->unexpanded_var_list);
          continue;
        }

      /* Remap the variable.  */
      new_var = remap_decl (old_var, id);

      /* If we didn't remap this variable, we can't mess with its
         TREE_CHAIN.  If we remapped this variable to the return slot, it's
         already declared somewhere else, so don't declare it here.  */
      if (!new_var || new_var == id->retvar)
        ;
      else
        {
          gcc_assert (DECL_P (new_var));
          TREE_CHAIN (new_var) = new_decls;
          new_decls = new_var;
        }
    }

  return nreverse (new_decls);
}
/* Copy the BLOCK to contain remapped versions of the variables
   therein.  And hook the new block into the block-tree.  */

static void
remap_block (tree *block, copy_body_data *id)
{
  tree old_block;
  tree new_block;
  tree fn;

  /* Make the new block.  */
  old_block = *block;
  new_block = make_node (BLOCK);
  TREE_USED (new_block) = TREE_USED (old_block);
  BLOCK_ABSTRACT_ORIGIN (new_block) = old_block;
  BLOCK_SOURCE_LOCATION (new_block) = BLOCK_SOURCE_LOCATION (old_block);
  *block = new_block;

  /* Remap its variables.  */
  BLOCK_VARS (new_block) = remap_decls (BLOCK_VARS (old_block), id);

  fn = id->dst_fn;

  if (id->transform_lang_insert_block)
    lang_hooks.decls.insert_block (new_block);

  /* Remember the remapped block.  */
  insert_decl_map (id, old_block, new_block);
}
/* Copy the whole block tree and root it in id->block.  */
static tree
remap_blocks (tree block, copy_body_data *id)
{
  tree t;
  tree new = block;

  if (!block)
    return NULL;

  remap_block (&new, id);
  gcc_assert (new != block);
  for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
    add_lexical_block (new, remap_blocks (t, id));
  return new;
}
static void
copy_statement_list (tree *tp)
{
  tree_stmt_iterator oi, ni;
  tree new;

  new = alloc_stmt_list ();
  ni = tsi_start (new);
  oi = tsi_start (*tp);
  *tp = new;

  for (; !tsi_end_p (oi); tsi_next (&oi))
    tsi_link_after (&ni, tsi_stmt (oi), TSI_NEW_STMT);
}
static void
copy_bind_expr (tree *tp, int *walk_subtrees, copy_body_data *id)
{
  tree block = BIND_EXPR_BLOCK (*tp);
  /* Copy (and replace) the statement.  */
  copy_tree_r (tp, walk_subtrees, NULL);
  if (block)
    {
      remap_block (&block, id);
      BIND_EXPR_BLOCK (*tp) = block;
    }

  if (BIND_EXPR_VARS (*tp))
    /* This will remap a lot of the same decls again, but this should be
       harmless.  */
    BIND_EXPR_VARS (*tp) = remap_decls (BIND_EXPR_VARS (*tp), id);
}
/* Called from copy_body and related routines via walk_tree.  DATA is
   really a `copy_body_data *'.  */

tree
copy_body_r (tree *tp, int *walk_subtrees, void *data)
{
  copy_body_data *id = (copy_body_data *) data;
  tree fn = id->src_fn;
  tree new_block;

  /* Begin by recognizing trees that we'll completely rewrite for the
     inlining context.  Our output for these trees is completely
     different from our input (e.g. RETURN_EXPR is deleted, and morphs
     into an edge).  Further down, we'll handle trees that get
     duplicated and/or tweaked.  */

  /* When requested, RETURN_EXPRs should be transformed to just the
     contained GIMPLE_MODIFY_STMT.  The branch semantics of the return will
     be handled elsewhere by manipulating the CFG rather than a statement.  */
  if (TREE_CODE (*tp) == RETURN_EXPR && id->transform_return_to_modify)
    {
      tree assignment = TREE_OPERAND (*tp, 0);

      /* If we're returning something, just turn that into an
         assignment into the equivalent of the original RESULT_DECL.
         If the "assignment" is just the result decl, the result
         decl has already been set (e.g. a recent "foo (&result_decl,
         ...)"); just toss the entire RETURN_EXPR.  */
      if (assignment && TREE_CODE (assignment) == GIMPLE_MODIFY_STMT)
        {
          /* Replace the RETURN_EXPR with (a copy of) the
             GIMPLE_MODIFY_STMT hanging underneath.  */
          *tp = copy_node (assignment);
        }
      else /* Else the RETURN_EXPR returns no value.  */
        {
          *tp = NULL;
          return (tree) (void *) 1;
        }
    }
  else if (TREE_CODE (*tp) == SSA_NAME)
    {
      *tp = remap_ssa_name (*tp, id);
      *walk_subtrees = 0;
      return NULL;
    }

  /* Local variables and labels need to be replaced by equivalent
     variables.  We don't want to copy static variables; there's only
     one of those, no matter how many times we inline the containing
     function.  Similarly for globals from an outer function.  */
  else if (lang_hooks.tree_inlining.auto_var_in_fn_p (*tp, fn))
    {
      tree new_decl;

      /* Remap the declaration.  */
      new_decl = remap_decl (*tp, id);
      gcc_assert (new_decl);
      /* Replace this variable with the copy.  */
      STRIP_TYPE_NOPS (new_decl);
      *tp = new_decl;
      *walk_subtrees = 0;
    }
  else if (TREE_CODE (*tp) == STATEMENT_LIST)
    copy_statement_list (tp);
  else if (TREE_CODE (*tp) == SAVE_EXPR)
    remap_save_expr (tp, id->decl_map, walk_subtrees);
  else if (TREE_CODE (*tp) == LABEL_DECL
           && (! DECL_CONTEXT (*tp)
               || decl_function_context (*tp) == id->src_fn))
    /* These may need to be remapped for EH handling.  */
    *tp = remap_decl (*tp, id);
  else if (TREE_CODE (*tp) == BIND_EXPR)
    copy_bind_expr (tp, walk_subtrees, id);
  /* Types may need remapping as well.  */
  else if (TYPE_P (*tp))
    *tp = remap_type (*tp, id);

  /* If this is a constant, we have to copy the node iff the type will be
     remapped.  copy_tree_r will not copy a constant.  */
  else if (CONSTANT_CLASS_P (*tp))
    {
      tree new_type = remap_type (TREE_TYPE (*tp), id);

      if (new_type == TREE_TYPE (*tp))
        *walk_subtrees = 0;

      else if (TREE_CODE (*tp) == INTEGER_CST)
        *tp = build_int_cst_wide (new_type, TREE_INT_CST_LOW (*tp),
                                  TREE_INT_CST_HIGH (*tp));
      else
        {
          *tp = copy_node (*tp);
          TREE_TYPE (*tp) = new_type;
        }
    }

  /* Otherwise, just copy the node.  Note that copy_tree_r already
     knows not to copy VAR_DECLs, etc., so this is safe.  */
  else
    {
      /* Here we handle trees that are not completely rewritten.
         First we detect some inlining-induced bogosities for
         discarding.  */
      if (TREE_CODE (*tp) == GIMPLE_MODIFY_STMT
          && GIMPLE_STMT_OPERAND (*tp, 0) == GIMPLE_STMT_OPERAND (*tp, 1)
          && (lang_hooks.tree_inlining.auto_var_in_fn_p
              (GIMPLE_STMT_OPERAND (*tp, 0), fn)))
        {
          /* Some assignments VAR = VAR; don't generate any rtl code
             and thus don't count as variable modification.  Avoid
             keeping bogosities like 0 = 0.  */
          tree decl = GIMPLE_STMT_OPERAND (*tp, 0), value;
          splay_tree_node n;

          n = splay_tree_lookup (id->decl_map, (splay_tree_key) decl);
          if (n)
            {
              value = (tree) n->value;
              STRIP_TYPE_NOPS (value);
              if (TREE_CONSTANT (value) || TREE_READONLY_DECL_P (value))
                {
                  *tp = build_empty_stmt ();
                  return copy_body_r (tp, walk_subtrees, data);
                }
            }
        }
      else if (TREE_CODE (*tp) == INDIRECT_REF)
        {
          /* Get rid of *& from inline substitutions that can happen when a
             pointer argument is an ADDR_EXPR.  */
          tree decl = TREE_OPERAND (*tp, 0);
          splay_tree_node n;

          n = splay_tree_lookup (id->decl_map, (splay_tree_key) decl);
          if (n)
            {
              tree new;
              tree old;
              /* If we happen to get an ADDR_EXPR in n->value, strip
                 it manually here as we'll eventually get ADDR_EXPRs
                 which lie about their types pointed to.  In this case
                 build_fold_indirect_ref wouldn't strip the INDIRECT_REF,
                 but we absolutely rely on that.  As fold_indirect_ref
                 does other useful transformations, try that first, though.  */
              tree type = TREE_TYPE (TREE_TYPE ((tree) n->value));
              new = unshare_expr ((tree) n->value);
              old = *tp;
              *tp = fold_indirect_ref_1 (type, new);
              if (! *tp)
                {
                  if (TREE_CODE (new) == ADDR_EXPR)
                    *tp = TREE_OPERAND (new, 0);
                  else
                    {
                      *tp = build1 (INDIRECT_REF, type, new);
                      TREE_THIS_VOLATILE (*tp) = TREE_THIS_VOLATILE (old);
                    }
                }
              *walk_subtrees = 0;
              return NULL;
            }
        }

      /* Here is the "usual case".  Copy this tree node, and then
         tweak some special cases.  */
      copy_tree_r (tp, walk_subtrees, NULL);

      /* Global variables we haven't seen yet need to go into referenced
         vars.  */
      if (gimple_in_ssa_p (cfun) && TREE_CODE (*tp) == VAR_DECL)
        add_referenced_var (*tp);

      /* If EXPR has a block defined, map it to the newly constructed block.
         When inlining we want EXPRs without a block to appear in the block
         of the function call.  */
      if (EXPR_P (*tp) || GIMPLE_STMT_P (*tp))
        {
          new_block = id->block;
          if (TREE_BLOCK (*tp))
            {
              splay_tree_node n;
              n = splay_tree_lookup (id->decl_map,
                                     (splay_tree_key) TREE_BLOCK (*tp));
              gcc_assert (n);
              new_block = (tree) n->value;
            }
          TREE_BLOCK (*tp) = new_block;
        }

      if (TREE_CODE (*tp) == RESX_EXPR && id->eh_region_offset)
        TREE_OPERAND (*tp, 0) =
          build_int_cst
            (NULL_TREE,
             id->eh_region_offset + TREE_INT_CST_LOW (TREE_OPERAND (*tp, 0)));

      if (!GIMPLE_TUPLE_P (*tp))
        TREE_TYPE (*tp) = remap_type (TREE_TYPE (*tp), id);

      /* The copied TARGET_EXPR has never been expanded, even if the
         original node was expanded already.  */
      if (TREE_CODE (*tp) == TARGET_EXPR && TREE_OPERAND (*tp, 3))
        {
          TREE_OPERAND (*tp, 1) = TREE_OPERAND (*tp, 3);
          TREE_OPERAND (*tp, 3) = NULL_TREE;
        }

      /* Variable substitution need not be simple.  In particular, the
         INDIRECT_REF substitution above.  Make sure that TREE_CONSTANT
         and friends are up-to-date.  */
      else if (TREE_CODE (*tp) == ADDR_EXPR)
        {
          walk_tree (&TREE_OPERAND (*tp, 0), copy_body_r, id, NULL);
          /* Handle the case where we substituted an INDIRECT_REF
             into the operand of the ADDR_EXPR.  */
          if (TREE_CODE (TREE_OPERAND (*tp, 0)) == INDIRECT_REF)
            *tp = TREE_OPERAND (TREE_OPERAND (*tp, 0), 0);
          else
            recompute_tree_invariant_for_addr_expr (*tp);
          *walk_subtrees = 0;
        }
    }

  /* Keep iterating.  */
  return NULL_TREE;
}
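
/* Illustration of the *& cleanup above (hypothetical source, not IR):
   inlining `int deref (int *p) { return *p; }' at the call `deref (&a)'
   first substitutes &a for p, producing *&a; the INDIRECT_REF case folds
   that back to plain `a', so no spurious memory reference survives.  */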
/* Copy a basic block, scaling its profile accordingly.  Edges will be
   taken care of later.  */

static basic_block
copy_bb (copy_body_data *id, basic_block bb, int frequency_scale, int count_scale)
{
  block_stmt_iterator bsi, copy_bsi;
  basic_block copy_basic_block;

  /* create_basic_block () will append every new block to
     basic_block_info automatically.  */
  copy_basic_block = create_basic_block (NULL, (void *) 0,
                                         (basic_block) bb->prev_bb->aux);
  copy_basic_block->count = bb->count * count_scale / REG_BR_PROB_BASE;
  copy_basic_block->frequency = (bb->frequency
                                 * frequency_scale / REG_BR_PROB_BASE);
  copy_bsi = bsi_start (copy_basic_block);

  for (bsi = bsi_start (bb);
       !bsi_end_p (bsi); bsi_next (&bsi))
    {
      tree stmt = bsi_stmt (bsi);
      tree orig_stmt = stmt;

      walk_tree (&stmt, copy_body_r, id, NULL);

      /* The RETURN_EXPR might have been removed; this is signalled by
         making the stmt pointer NULL.  */
      if (stmt)
        {
          tree call, decl;

          gimple_duplicate_stmt_histograms (cfun, stmt, id->src_cfun, orig_stmt);

          /* With return slot optimization we can end up with
             non-gimple (foo *)&this->m, fix that here.  */
          if (TREE_CODE (stmt) == GIMPLE_MODIFY_STMT
              && TREE_CODE (GIMPLE_STMT_OPERAND (stmt, 1)) == NOP_EXPR
              && !is_gimple_val (TREE_OPERAND (GIMPLE_STMT_OPERAND (stmt, 1), 0)))
            gimplify_stmt (&stmt);

          bsi_insert_after (&copy_bsi, stmt, BSI_NEW_STMT);

          /* Process the new statement.  gimplify_stmt may have turned the
             statement into multiple statements; we need to process all
             of them.  */
          while (!bsi_end_p (copy_bsi))
            {
              stmt = bsi_stmt (copy_bsi);
              call = get_call_expr_in (stmt);

              /* Statements produced by inlining can be unfolded, especially
                 when we constant propagated some operands.  We can't fold
                 them right now for two reasons:
                 1) folding requires SSA_NAME_DEF_STMTs to be correct
                 2) we can't change function calls to builtins.
                 So we just mark the statement for later folding.  We mark
                 all new statements, instead of just the statements changed
                 by some nontrivial substitution, so that even statements
                 made foldable indirectly are updated.  If this turns out
                 to be expensive, copy_body can be told to watch for
                 nontrivial changes.  */
              if (id->statements_to_fold)
                pointer_set_insert (id->statements_to_fold, stmt);
              /* We're duplicating a CALL_EXPR.  Find any corresponding
                 callgraph edges and update or duplicate them.  */
              if (call && (decl = get_callee_fndecl (call)))
                {
                  struct cgraph_node *node;
                  struct cgraph_edge *edge;

                  switch (id->transform_call_graph_edges)
                    {
                    case CB_CGE_DUPLICATE:
                      edge = cgraph_edge (id->src_node, orig_stmt);
                      if (edge)
                        cgraph_clone_edge (edge, id->dst_node, stmt,
                                           REG_BR_PROB_BASE, 1, true);
                      break;

                    case CB_CGE_MOVE_CLONES:
                      for (node = id->dst_node->next_clone;
                           node;
                           node = node->next_clone)
                        {
                          edge = cgraph_edge (node, orig_stmt);
                          gcc_assert (edge);
                          cgraph_set_call_stmt (edge, stmt);
                        }
                      /* FALLTHRU */

                    case CB_CGE_MOVE:
                      edge = cgraph_edge (id->dst_node, orig_stmt);
                      if (edge)
                        cgraph_set_call_stmt (edge, stmt);
                      break;

                    default:
                      gcc_unreachable ();
                    }
                }
              /* If you think we can abort here, you are wrong.
                 There is no region 0 in tree land.  */
              gcc_assert (lookup_stmt_eh_region_fn (id->src_cfun, orig_stmt)
                          != 0);

              if (tree_could_throw_p (stmt))
                {
                  int region = lookup_stmt_eh_region_fn (id->src_cfun, orig_stmt);
                  /* Add an entry for the copied tree in the EH hashtable.
                     When cloning or versioning, use the hashtable in
                     cfun, and just copy the EH number.  When inlining, use the
                     hashtable in the caller, and adjust the region number.  */
                  if (region > 0)
                    add_stmt_to_eh_region (stmt, region + id->eh_region_offset);

                  /* If this tree doesn't have a region associated with it,
                     and there is a "current region,"
                     then associate this tree with the current region
                     and add edges associated with this region.  */
                  if ((lookup_stmt_eh_region_fn (id->src_cfun,
                                                 orig_stmt) <= 0
                       && id->eh_region > 0)
                      && tree_could_throw_p (stmt))
                    add_stmt_to_eh_region (stmt, id->eh_region);
                }
              if (gimple_in_ssa_p (cfun))
                {
                  ssa_op_iter i;
                  tree def;

                  find_new_referenced_vars (bsi_stmt_ptr (copy_bsi));
                  FOR_EACH_SSA_TREE_OPERAND (def, stmt, i, SSA_OP_DEF)
                    if (TREE_CODE (def) == SSA_NAME)
                      SSA_NAME_DEF_STMT (def) = stmt;
                }
              bsi_next (&copy_bsi);
            }
          copy_bsi = bsi_last (copy_basic_block);
        }
    }
  return copy_basic_block;
}
/* Inserting a Single Entry Multiple Exit region in SSA form into code in
   SSA form is quite easy, since the dominator relationship for the old
   basic blocks does not change.

   There is however an exception: inlining might change the dominator
   relation across EH edges from basic blocks within the inlined function
   to landing pads in the function we inline into.

   The function marks PHI_RESULT of such PHI nodes for renaming; this is
   safe because the EH edges are abnormal and SSA_NAME_OCCURS_IN_ABNORMAL_PHI
   must be set.  This means that there will be no overlapping live ranges
   for the underlying symbol.

   This might change in the future if we allow redirecting of EH edges;
   we might then want to change the way we build the CFG pre-inlining to
   include all the possible edges.  */
static void
update_ssa_across_eh_edges (basic_block bb)
{
  edge e;
  edge_iterator ei;

  FOR_EACH_EDGE (e, ei, bb->succs)
    if (!e->dest->aux
        || ((basic_block) e->dest->aux)->index == ENTRY_BLOCK)
      {
        tree phi;

        gcc_assert (e->flags & EDGE_EH);
        for (phi = phi_nodes (e->dest); phi; phi = PHI_CHAIN (phi))
          {
            gcc_assert (SSA_NAME_OCCURS_IN_ABNORMAL_PHI
                        (PHI_RESULT (phi)));
            mark_sym_for_renaming
              (SSA_NAME_VAR (PHI_RESULT (phi)));
          }
      }
}
/* Copy the edges of BB into its copy constructed earlier, scaling the
   profile accordingly.  EH edges will be taken care of later.  Assume
   aux pointers point to the copies of each BB.  */
static void
copy_edges_for_bb (basic_block bb, int count_scale)
{
  basic_block new_bb = (basic_block) bb->aux;
  edge_iterator ei;
  edge old_edge;
  block_stmt_iterator bsi;
  int flags;

  /* Use the indices from the original blocks to create edges for the
     new ones.  */
  FOR_EACH_EDGE (old_edge, ei, bb->succs)
    if (!(old_edge->flags & EDGE_EH))
      {
        edge new;

        flags = old_edge->flags;

        /* Return edges do get a FALLTHRU flag when they get inlined.  */
        if (old_edge->dest->index == EXIT_BLOCK && !old_edge->flags
            && old_edge->dest->aux != EXIT_BLOCK_PTR)
          flags |= EDGE_FALLTHRU;
        new = make_edge (new_bb, (basic_block) old_edge->dest->aux, flags);
        new->count = old_edge->count * count_scale / REG_BR_PROB_BASE;
        new->probability = old_edge->probability;
      }

  if (bb->index == ENTRY_BLOCK || bb->index == EXIT_BLOCK)
    return;

  for (bsi = bsi_start (new_bb); !bsi_end_p (bsi);)
    {
      tree copy_stmt;

      copy_stmt = bsi_stmt (bsi);
      update_stmt (copy_stmt);
      if (gimple_in_ssa_p (cfun))
        mark_symbols_for_renaming (copy_stmt);
      /* Do this before the possible split_block.  */
      bsi_next (&bsi);

      /* If this tree could throw an exception, there are two
         cases where we need to add abnormal edge(s): the
         tree wasn't in a region and there is a "current
         region" in the caller; or the original tree had
         EH edges.  In both cases split the block after the tree,
         and add abnormal edge(s) as needed; we need both
         those from the callee and the caller.
         We check whether the copy can throw, because constant
         propagation can change an INDIRECT_REF which throws
         into a COMPONENT_REF which doesn't.  If the copy
         can throw, the original could also throw.  */

      if (tree_can_throw_internal (copy_stmt))
        {
          if (!bsi_end_p (bsi))
            /* Note that bb's predecessor edges aren't necessarily
               right at this point; split_block doesn't care.  */
            {
              edge e = split_block (new_bb, copy_stmt);

              new_bb = e->dest;
              new_bb->aux = e->src->aux;
              bsi = bsi_start (new_bb);
            }

          make_eh_edges (copy_stmt);

          if (gimple_in_ssa_p (cfun))
            update_ssa_across_eh_edges (bb_for_stmt (copy_stmt));
        }
    }
}
/* Copy the PHIs.  All blocks and edges have been copied; some blocks
   may have been split and new outgoing EH edges inserted.
   BB points to the block of the original function and AUX pointers link
   the original and newly copied blocks.  */

static void
copy_phis_for_bb (basic_block bb, copy_body_data *id)
{
  basic_block new_bb = bb->aux;
  edge_iterator ei;
  tree phi;

  for (phi = phi_nodes (bb); phi; phi = PHI_CHAIN (phi))
    {
      tree res = PHI_RESULT (phi);
      tree new_res = res;
      tree new_phi;
      edge new_edge;

      if (is_gimple_reg (res))
        {
          walk_tree (&new_res, copy_body_r, id, NULL);
          SSA_NAME_DEF_STMT (new_res)
            = new_phi = create_phi_node (new_res, new_bb);
          FOR_EACH_EDGE (new_edge, ei, new_bb->preds)
            {
              edge old_edge = find_edge (new_edge->src->aux, bb);
              tree arg = PHI_ARG_DEF_FROM_EDGE (phi, old_edge);
              tree new_arg = arg;

              walk_tree (&new_arg, copy_body_r, id, NULL);
              gcc_assert (new_arg);
              add_phi_arg (new_phi, new_arg, new_edge);
            }
        }
    }
}
/* Wrapper for remap_decl so it can be used as a callback.  */
static tree
remap_decl_1 (tree decl, void *data)
{
  return remap_decl (decl, (copy_body_data *) data);
}
/* Build a struct function and associated datastructures for the new clone
   NEW_FNDECL to be built.  CALLEE_FNDECL is the original.  */

static void
initialize_cfun (tree new_fndecl, tree callee_fndecl, gcov_type count,
                 int frequency)
{
  struct function *new_cfun
    = (struct function *) ggc_alloc_cleared (sizeof (struct function));
  struct function *src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
  int count_scale, frequency_scale;

  if (ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count)
    count_scale = (REG_BR_PROB_BASE * count
                   / ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count);
  else
    count_scale = 1;

  if (ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->frequency)
    frequency_scale = (REG_BR_PROB_BASE * frequency
                       / ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->frequency);
  else
    frequency_scale = count_scale;

  /* Register specific tree functions.  */
  tree_register_cfg_hooks ();
  *new_cfun = *DECL_STRUCT_FUNCTION (callee_fndecl);
  new_cfun->funcdef_no = get_next_funcdef_no ();
  VALUE_HISTOGRAMS (new_cfun) = NULL;
  new_cfun->unexpanded_var_list = NULL;
  new_cfun->cfg = NULL;
  new_cfun->decl = new_fndecl /*= copy_node (callee_fndecl)*/;
  new_cfun->ib_boundaries_block = NULL;
  DECL_STRUCT_FUNCTION (new_fndecl) = new_cfun;
  push_cfun (new_cfun);
  init_empty_tree_cfg ();

  ENTRY_BLOCK_PTR->count =
    (ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count * count_scale /
     REG_BR_PROB_BASE);
  ENTRY_BLOCK_PTR->frequency =
    (ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->frequency *
     frequency_scale / REG_BR_PROB_BASE);
  EXIT_BLOCK_PTR->count =
    (EXIT_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count * count_scale /
     REG_BR_PROB_BASE);
  EXIT_BLOCK_PTR->frequency =
    (EXIT_BLOCK_PTR_FOR_FUNCTION (src_cfun)->frequency *
     frequency_scale / REG_BR_PROB_BASE);
  if (src_cfun->eh)
    init_eh_for_function ();

  if (src_cfun->gimple_df)
    {
      init_tree_ssa ();
      cfun->gimple_df->in_ssa_p = true;
      init_ssa_operands ();
    }
  pop_cfun ();
}
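
/* Worked example of the scaling above (made-up counts): with
   REG_BR_PROB_BASE == 10000, a callee whose entry block count is 400,
   copied for a call site with count 100, gets
     count_scale = 10000 * 100 / 400 = 2500,
   so each copied block keeps 100/400 = 25% of its original count via
   bb->count * count_scale / REG_BR_PROB_BASE.  */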
/* Make a copy of the body of FN so that it can be inserted inline in
   another function.  Walks FN via the CFG; returns the new fndecl.  */

static tree
copy_cfg_body (copy_body_data * id, gcov_type count, int frequency,
               basic_block entry_block_map, basic_block exit_block_map)
{
  tree callee_fndecl = id->src_fn;
  /* Original cfun for the callee, doesn't change.  */
  struct function *src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
  struct function *cfun_to_copy;
  basic_block bb;
  tree new_fndecl = NULL;
  int count_scale, frequency_scale;
  int last;

  if (ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count)
    count_scale = (REG_BR_PROB_BASE * count
                   / ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count);
  else
    count_scale = 1;

  if (ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->frequency)
    frequency_scale = (REG_BR_PROB_BASE * frequency
                       / ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->frequency);
  else
    frequency_scale = count_scale;

  /* Register specific tree functions.  */
  tree_register_cfg_hooks ();

  /* Must have a CFG here at this point.  */
  gcc_assert (ENTRY_BLOCK_PTR_FOR_FUNCTION
              (DECL_STRUCT_FUNCTION (callee_fndecl)));

  cfun_to_copy = id->src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);

  ENTRY_BLOCK_PTR_FOR_FUNCTION (cfun_to_copy)->aux = entry_block_map;
  EXIT_BLOCK_PTR_FOR_FUNCTION (cfun_to_copy)->aux = exit_block_map;
  entry_block_map->aux = ENTRY_BLOCK_PTR_FOR_FUNCTION (cfun_to_copy);
  exit_block_map->aux = EXIT_BLOCK_PTR_FOR_FUNCTION (cfun_to_copy);

  /* Duplicate any exception-handling regions.  */
  if (cfun->eh)
    id->eh_region_offset
      = duplicate_eh_regions (cfun_to_copy, remap_decl_1, id,
                              0, id->eh_region);

  /* Use aux pointers to map the original blocks to copies.  */
  FOR_EACH_BB_FN (bb, cfun_to_copy)
    {
      basic_block new = copy_bb (id, bb, frequency_scale, count_scale);
      bb->aux = new;
      new->aux = bb;
    }

  last = n_basic_blocks;
  /* Now that we've duplicated the blocks, duplicate their edges.  */
  FOR_ALL_BB_FN (bb, cfun_to_copy)
    copy_edges_for_bb (bb, count_scale);
  if (gimple_in_ssa_p (cfun))
    FOR_ALL_BB_FN (bb, cfun_to_copy)
      copy_phis_for_bb (bb, id);
  FOR_ALL_BB_FN (bb, cfun_to_copy)
    {
      ((basic_block) bb->aux)->aux = NULL;
      bb->aux = NULL;
    }
  /* Zero out AUX fields of blocks newly created during EH edge
     insertion.  */
  for (; last < n_basic_blocks; last++)
    BASIC_BLOCK (last)->aux = NULL;
  entry_block_map->aux = NULL;
  exit_block_map->aux = NULL;

  return new_fndecl;
}
/* Make a copy of the body of FN so that it can be inserted inline in
   another function.  */

static tree
copy_generic_body (copy_body_data *id)
{
  tree body;
  tree fndecl = id->src_fn;

  body = DECL_SAVED_TREE (fndecl);
  walk_tree (&body, copy_body_r, id, NULL);

  return body;
}
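
/* Copy the body of the function in ID->src_fn, which must have a CFG,
   by delegating to copy_cfg_body.  */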
static tree
copy_body (copy_body_data *id, gcov_type count, int frequency,
           basic_block entry_block_map, basic_block exit_block_map)
{
  tree fndecl = id->src_fn;
  tree body;

  /* This body has a CFG; walk the CFG and copy.  */
  gcc_assert (ENTRY_BLOCK_PTR_FOR_FUNCTION (DECL_STRUCT_FUNCTION (fndecl)));
  body = copy_cfg_body (id, count, frequency, entry_block_map, exit_block_map);

  return body;
}
/* Return true if VALUE is an ADDR_EXPR of an automatic variable
   defined in function FN, or of a data member thereof.  */

static bool
self_inlining_addr_expr (tree value, tree fn)
{
  tree var;

  if (TREE_CODE (value) != ADDR_EXPR)
    return false;

  var = get_base_address (TREE_OPERAND (value, 0));

  return var && lang_hooks.tree_inlining.auto_var_in_fn_p (var, fn);
}
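
/* Bind parameter P of the inlined function to VALUE in the caller.
   FN is the inlined function, BB the block that receives any needed
   initialization statement, and *VARS collects the VAR_DECLs declared
   for the parameters.  */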
static void
setup_one_parameter (copy_body_data *id, tree p, tree value, tree fn,
                     basic_block bb, tree *vars)
{
  tree init_stmt;
  tree var;
  tree var_sub;
  tree rhs = value ? fold_convert (TREE_TYPE (p), value) : NULL;
  tree def = (gimple_in_ssa_p (cfun)
              ? gimple_default_def (id->src_cfun, p) : NULL);

  /* If the parameter is never assigned to and has no SSA_NAMEs created,
     we may not need to create a new variable here at all.  Instead, we
     may be able to just use the argument value.  */
  if (TREE_READONLY (p)
      && !TREE_ADDRESSABLE (p)
      && value && !TREE_SIDE_EFFECTS (value)
      && !def)
    {
      /* We may produce non-gimple trees by adding NOPs or introduce
         invalid sharing when the operand is not really constant.
         It is not a big deal to prohibit constant propagation here as
         we will constant propagate in the DOM1 pass anyway.  */
      if (is_gimple_min_invariant (value)
          && lang_hooks.types_compatible_p (TREE_TYPE (value), TREE_TYPE (p))
          /* We have to be very careful about ADDR_EXPR.  Make sure
             the base variable isn't a local variable of the inlined
             function, e.g., when doing recursive inlining, direct or
             mutually-recursive or whatever, which is why we don't
             just test whether fn == current_function_decl.  */
          && ! self_inlining_addr_expr (value, fn))
        {
          insert_decl_map (id, p, value);
          return;
        }
    }

  /* Make an equivalent VAR_DECL.  Note that we must NOT remap the type
     here since the type of this decl must be visible to the calling
     function.  */
  var = copy_decl_to_var (p, id);
  if (gimple_in_ssa_p (cfun) && TREE_CODE (var) == VAR_DECL)
    {
      get_var_ann (var);
      add_referenced_var (var);
    }

  /* See if the frontend wants to pass this by invisible reference.  If
     so, our new VAR_DECL will have REFERENCE_TYPE, and we need to
     replace uses of the PARM_DECL with dereferences.  */
  if (TREE_TYPE (var) != TREE_TYPE (p)
      && POINTER_TYPE_P (TREE_TYPE (var))
      && TREE_TYPE (TREE_TYPE (var)) == TREE_TYPE (p))
    {
      insert_decl_map (id, var, var);
      var_sub = build_fold_indirect_ref (var);
    }
  else
    var_sub = var;

  /* Register the VAR_DECL as the equivalent for the PARM_DECL;
     that way, when the PARM_DECL is encountered, it will be
     automatically replaced by the VAR_DECL.  */
  insert_decl_map (id, p, var_sub);

  /* Declare this new variable.  */
  TREE_CHAIN (var) = *vars;
  *vars = var;

  /* Make the gimplifier happy about this variable.  */
  DECL_SEEN_IN_BIND_EXPR_P (var) = 1;

  /* Even if P was TREE_READONLY, the new VAR should not be.
     In the original code, we would have constructed a
     temporary, and then the function body would have never
     changed the value of P.  However, now, we will be
     constructing VAR directly.  The constructor body may
     change its value multiple times as it is being
     constructed.  Therefore, it must not be TREE_READONLY;
     the back-end assumes that a TREE_READONLY variable is
     assigned to only once.  */
  if (TYPE_NEEDS_CONSTRUCTING (TREE_TYPE (p)))
    TREE_READONLY (var) = 0;

  /* If there is no setup required and we are in SSA, take the easy route
     of replacing all SSA names representing the function parameter by the
     SSA name passed to the function.

     We need to construct a map for the variable anyway, as it might be
     used in different SSA names when the parameter is set in the function.

     FIXME: This usually kills the last connection between the inlined
     function parameter and the actual value in debug info.  Can we do
     better here?  If we just inserted the statement, copy propagation
     would kill it anyway as it always did in older versions of GCC.

     We might want to introduce a notion that a single SSA_NAME might
     represent multiple variables for purposes of debugging.  */
  if (gimple_in_ssa_p (cfun) && rhs && def && is_gimple_reg (p)
      && (TREE_CODE (rhs) == SSA_NAME
          || is_gimple_min_invariant (rhs))
      && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (def))
    {
      insert_decl_map (id, def, rhs);
      return;
    }

  /* Initialize this VAR_DECL from the equivalent argument.  Convert
     the argument to the proper type in case it was promoted.  */
  if (value)
    {
      block_stmt_iterator bsi = bsi_last (bb);

      if (rhs == error_mark_node)
        {
          insert_decl_map (id, p, var_sub);
          return;
        }

      STRIP_USELESS_TYPE_CONVERSION (rhs);

      /* We want to use GIMPLE_MODIFY_STMT, not INIT_EXPR here, so that we
         keep our trees in gimple form.  */
      if (def && gimple_in_ssa_p (cfun) && is_gimple_reg (p))
        {
          def = remap_ssa_name (def, id);
          init_stmt = build2 (GIMPLE_MODIFY_STMT, TREE_TYPE (var), def, rhs);
          SSA_NAME_DEF_STMT (def) = init_stmt;
          SSA_NAME_IS_DEFAULT_DEF (def) = 0;
          set_default_def (var, NULL);
        }
      else
        init_stmt = build2 (GIMPLE_MODIFY_STMT, TREE_TYPE (var), var, rhs);

      /* If we did not create a gimple value and we did not create a gimple
         cast of a gimple value, then we will need to gimplify INIT_STMT
         at the end.  Note that is_gimple_cast only checks the outer
         tree code, not its operand.  Thus the explicit check that its
         operand is a gimple value.  */
      if ((!is_gimple_val (rhs)
           && (!is_gimple_cast (rhs)
               || !is_gimple_val (TREE_OPERAND (rhs, 0))))
          || !is_gimple_reg (var))
        {
          tree_stmt_iterator i;

          push_gimplify_context ();
          gimplify_stmt (&init_stmt);
          if (gimple_in_ssa_p (cfun)
              && init_stmt && TREE_CODE (init_stmt) == STATEMENT_LIST)
            {
              /* The replacement can expose previously unreferenced
                 variables.  */
              for (i = tsi_start (init_stmt); !tsi_end_p (i); tsi_next (&i))
                find_new_referenced_vars (tsi_stmt_ptr (i));
            }
          pop_gimplify_context (NULL);
        }

      /* If VAR represents a zero-sized variable, it's possible that the
         assignment statement may result in no gimple statements.  */
      if (init_stmt)
        bsi_insert_after (&bsi, init_stmt, BSI_NEW_STMT);
      if (gimple_in_ssa_p (cfun))
        for (; !bsi_end_p (bsi); bsi_next (&bsi))
          mark_symbols_for_renaming (bsi_stmt (bsi));
    }
}
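
/* Example of the SSA shortcut above (made-up source): inlining
     static int sq (int x) { return x * x; }
   at `sq (i_3)', where `x' is a readonly gimple register, maps the
   default SSA name of `x' straight to `i_3', so no initialization
   statement and no extra copy need to be emitted.  */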
/* Generate code to initialize the parameters of the function at the
   top of the stack in ID from the ARGS (presented as a TREE_LIST).  */

static void
initialize_inlined_parameters (copy_body_data *id, tree args, tree static_chain,
                               tree fn, basic_block bb)
{
  tree parms;
  tree a;
  tree p;
  tree vars = NULL_TREE;
  int argnum = 0;

  /* Figure out what the parameters are.  */
  parms = DECL_ARGUMENTS (fn);

  /* Loop through the parameter declarations, replacing each with an
     equivalent VAR_DECL, appropriately initialized.  */
  for (p = parms, a = args; p;
       a = a ? TREE_CHAIN (a) : a, p = TREE_CHAIN (p))
    {
      tree value;

      ++argnum;

      /* Find the initializer.  */
      value = lang_hooks.tree_inlining.convert_parm_for_inlining
              (p, a ? TREE_VALUE (a) : NULL_TREE, fn, argnum);

      setup_one_parameter (id, p, value, fn, bb, &vars);
    }

  /* Initialize the static chain.  */
  p = DECL_STRUCT_FUNCTION (fn)->static_chain_decl;
  gcc_assert (fn != current_function_decl);
  if (p)
    {
      /* No static chain?  Seems like a bug in tree-nested.c.  */
      gcc_assert (static_chain);

      setup_one_parameter (id, p, static_chain, fn, bb, &vars);
    }

  declare_inline_vars (id->block, vars);
}
/* Declare a return variable to replace the RESULT_DECL for the
   function we are calling.  An appropriate DECL_STMT is returned.
   The USE_STMT is filled to contain a use of the declaration to
   indicate the return value of the function.

   RETURN_SLOT, if non-null, is a place where to store the result.  It
   is set only for CALL_EXPR_RETURN_SLOT_OPT.  MODIFY_DEST, if non-null,
   was the LHS of the GIMPLE_MODIFY_STMT to which this call is the RHS.

   The return value is a (possibly null) value that is the result of the
   function as seen by the callee.  *USE_P is a (possibly null) value that
   holds the result as seen by the caller.  */

static tree
declare_return_variable (copy_body_data *id, tree return_slot, tree modify_dest,
                         tree *use_p)
{
  tree callee = id->src_fn;
  tree caller = id->dst_fn;
  tree result = DECL_RESULT (callee);
  tree callee_type = TREE_TYPE (result);
  tree caller_type = TREE_TYPE (TREE_TYPE (callee));
  tree var, use;

  /* We don't need to do anything for functions that don't return
     anything.  */
  if (!result || VOID_TYPE_P (callee_type))
    {
      *use_p = NULL_TREE;
      return NULL_TREE;
    }

  /* If there was a return slot, then the return value is the
     dereferenced address of that object.  */
  if (return_slot)
    {
      /* The front end shouldn't have used both return_slot and
         a modify expression.  */
      gcc_assert (!modify_dest);
      if (DECL_BY_REFERENCE (result))
        {
          tree return_slot_addr = build_fold_addr_expr (return_slot);
          STRIP_USELESS_TYPE_CONVERSION (return_slot_addr);

          /* We are going to construct *&return_slot and we can't do that
             for variables believed to be not addressable.

             FIXME: This check possibly can match, because values returned
             via return slot optimization are not believed to have address
             taken by alias analysis.  */
          gcc_assert (TREE_CODE (return_slot) != SSA_NAME);
          if (gimple_in_ssa_p (cfun))
            {
              HOST_WIDE_INT bitsize;
              HOST_WIDE_INT bitpos;
              tree offset;
              enum machine_mode mode;
              int unsignedp;
              int volatilep;
              tree base;
              base = get_inner_reference (return_slot, &bitsize, &bitpos,
                                          &offset,
                                          &mode, &unsignedp, &volatilep,
                                          false);
              if (TREE_CODE (base) == INDIRECT_REF)
                base = TREE_OPERAND (base, 0);
              if (TREE_CODE (base) == SSA_NAME)
                base = SSA_NAME_VAR (base);
              mark_sym_for_renaming (base);
            }
          var = return_slot_addr;
        }
      else
        {
          var = return_slot;
          gcc_assert (TREE_CODE (var) != SSA_NAME);
        }
      if ((TREE_CODE (TREE_TYPE (result)) == COMPLEX_TYPE
           || TREE_CODE (TREE_TYPE (result)) == VECTOR_TYPE)
          && !DECL_GIMPLE_REG_P (result)
          && DECL_P (var))
        DECL_GIMPLE_REG_P (var) = 0;
      use = NULL;
      goto done;
    }

  /* All types requiring non-trivial constructors should have been handled.  */
  gcc_assert (!TREE_ADDRESSABLE (callee_type));

  /* Attempt to avoid creating a new temporary variable.  */
  if (modify_dest
      && TREE_CODE (modify_dest) != SSA_NAME)
    {
      bool use_it = false;

      /* We can't use MODIFY_DEST if there's type promotion involved.  */
      if (!lang_hooks.types_compatible_p (caller_type, callee_type))
        use_it = false;

      /* ??? If we're assigning to a variable sized type, then we must
         reuse the destination variable, because we've no good way to
         create variable sized temporaries at this point.  */
      else if (TREE_CODE (TYPE_SIZE_UNIT (caller_type)) != INTEGER_CST)
        use_it = true;

      /* If the callee cannot possibly modify MODIFY_DEST, then we can
         reuse it as the result of the call directly.  Don't do this if
         it would promote MODIFY_DEST to addressable.  */
      else if (TREE_ADDRESSABLE (result))
        use_it = false;
      else
        {
          tree base_m = get_base_address (modify_dest);

          /* If the base isn't a decl, then it's a pointer, and we don't
             know where that's going to go.  */
          if (!DECL_P (base_m))
            use_it = false;
          else if (is_global_var (base_m))
            use_it = false;
          else if ((TREE_CODE (TREE_TYPE (result)) == COMPLEX_TYPE
                    || TREE_CODE (TREE_TYPE (result)) == VECTOR_TYPE)
                   && !DECL_GIMPLE_REG_P (result)
                   && DECL_GIMPLE_REG_P (base_m))
            use_it = false;
          else if (!TREE_ADDRESSABLE (base_m))
            use_it = true;
        }

      if (use_it)
        {
          var = modify_dest;
          use = NULL;
          goto done;
        }
    }

  gcc_assert (TREE_CODE (TYPE_SIZE_UNIT (callee_type)) == INTEGER_CST);

  var = copy_result_decl_to_var (result, id);
  if (gimple_in_ssa_p (cfun))
    {
      get_var_ann (var);
      add_referenced_var (var);
    }

  DECL_SEEN_IN_BIND_EXPR_P (var) = 1;
  DECL_STRUCT_FUNCTION (caller)->unexpanded_var_list
    = tree_cons (NULL_TREE, var,
                 DECL_STRUCT_FUNCTION (caller)->unexpanded_var_list);

  /* Do not have the rest of GCC warn about this variable as it should
     not be visible to the user.  */
  TREE_NO_WARNING (var) = 1;

  declare_inline_vars (id->block, var);

  /* Build the use expr.  If the return type of the function was
     promoted, convert it back to the expected type.  */
  use = var;
  if (!lang_hooks.types_compatible_p (TREE_TYPE (var), caller_type))
    use = fold_convert (caller_type, var);

  STRIP_USELESS_TYPE_CONVERSION (use);

  if (DECL_BY_REFERENCE (result))
    var = build_fold_addr_expr (var);

 done:
  /* Register the VAR_DECL as the equivalent for the RESULT_DECL; that
     way, when the RESULT_DECL is encountered, it will be
     automatically replaced by the VAR_DECL.  */
  insert_decl_map (id, result, var);

  /* Remember this so we can ignore it in remap_decls.  */
  id->retvar = var;

  *use_p = use;
  return var;
}
/* Returns nonzero if a function can be inlined as a tree.  */

bool
tree_inlinable_function_p (tree fn)
{
  return inlinable_function_p (fn);
}
static const char *inline_forbidden_reason;

static tree
inline_forbidden_p_1 (tree *nodep, int *walk_subtrees ATTRIBUTE_UNUSED,
                      void *fnp)
{
  tree node = *nodep;
  tree fn = (tree) fnp;
  tree t;

  switch (TREE_CODE (node))
    {
    case CALL_EXPR:
      /* Refuse to inline an alloca call unless the user has explicitly
         forced it, as this may change the program's memory overhead
         drastically when the function using alloca is called in a loop.
         In the GCC present in SPEC2000, inlining into schedule_block
         caused it to require 2GB of RAM instead of 256MB.  */
      if (alloca_call_p (node)
          && !lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn)))
        {
          inline_forbidden_reason
            = G_("function %q+F can never be inlined because it uses "
                 "alloca (override using the always_inline attribute)");
          return node;
        }
      t = get_callee_fndecl (node);
      if (! t)
        break;

      /* We cannot inline functions that call setjmp.  */
      if (setjmp_call_p (t))
        {
          inline_forbidden_reason
            = G_("function %q+F can never be inlined because it uses setjmp");
          return node;
        }

      if (DECL_BUILT_IN_CLASS (t) == BUILT_IN_NORMAL)
        switch (DECL_FUNCTION_CODE (t))
          {
            /* We cannot inline functions that take a variable number of
               arguments.  */
          case BUILT_IN_VA_START:
          case BUILT_IN_STDARG_START:
          case BUILT_IN_NEXT_ARG:
          case BUILT_IN_VA_END:
            inline_forbidden_reason
              = G_("function %q+F can never be inlined because it "
                   "uses variable argument lists");
            return node;

          case BUILT_IN_LONGJMP:
            /* We can't inline functions that call __builtin_longjmp at
               all.  The non-local goto machinery really requires the
               destination be in a different function.  If we allow the
               function calling __builtin_longjmp to be inlined into the
               function calling __builtin_setjmp, Things will Go Awry.  */
            inline_forbidden_reason
              = G_("function %q+F can never be inlined because "
                   "it uses setjmp-longjmp exception handling");
            return node;

          case BUILT_IN_NONLOCAL_GOTO:
            /* Similarly.  */
            inline_forbidden_reason
              = G_("function %q+F can never be inlined because "
                   "it uses non-local goto");
            return node;

          case BUILT_IN_RETURN:
          case BUILT_IN_APPLY_ARGS:
            /* If a __builtin_apply_args caller would be inlined,
               it would be saving arguments of the function it has
               been inlined into.  Similarly __builtin_return would
               return from the function the inline has been inlined into.  */
            inline_forbidden_reason
              = G_("function %q+F can never be inlined because "
                   "it uses __builtin_return or __builtin_apply_args");
            return node;

          default:
            break;
          }
      break;

    case GOTO_EXPR:
      t = TREE_OPERAND (node, 0);

      /* We will not inline a function which uses computed goto.  The
         addresses of its local labels, which may be tucked into
         global storage, are of course not constant across
         instantiations, which causes unexpected behavior.  */
      if (TREE_CODE (t) != LABEL_DECL)
        {
          inline_forbidden_reason
            = G_("function %q+F can never be inlined "
                 "because it contains a computed goto");
          return node;
        }
      break;

    case LABEL_EXPR:
      t = TREE_OPERAND (node, 0);
      if (DECL_NONLOCAL (t))
        {
          /* We cannot inline a function that receives a non-local goto
             because we cannot remap the destination label used in the
             function that is performing the non-local goto.  */
          inline_forbidden_reason
            = G_("function %q+F can never be inlined "
                 "because it receives a non-local goto");
          return node;
        }
      break;

    case RECORD_TYPE:
    case UNION_TYPE:
      /* We cannot inline a function of the form

           void F (int i) { struct S { int ar[i]; } s; }

         Attempting to do so produces a catch-22.
         If walk_tree examines the TYPE_FIELDS chain of RECORD_TYPE/
         UNION_TYPE nodes, then it goes into infinite recursion on a
         structure containing a pointer to its own type.  If it doesn't,
         then the type node for S doesn't get adjusted properly when
         F is inlined.

         ??? This is likely no longer true, but it's too late in the 4.0
         cycle to try to find out.  This should be checked for 4.1.  */
      for (t = TYPE_FIELDS (node); t; t = TREE_CHAIN (t))
        if (variably_modified_type_p (TREE_TYPE (t), NULL))
          {
            inline_forbidden_reason
              = G_("function %q+F can never be inlined "
                   "because it uses variable sized variables");
            return node;
          }

    default:
      break;
    }

  return NULL_TREE;
}
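
/* Example of an inline blocker caught above (illustrative GNU C only):

     void *dispatch (int i)
     {
       static void *tab[] = { &&a, &&b };
       goto *tab[i];    /- computed goto: GOTO_EXPR on a non-LABEL_DECL -/
      a: ...;
      b: ...;
     }

   The label addresses stored in `tab' would not be valid for the
   inlined copies of the labels, so inlining is refused.  */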
/* Return a subexpression of FNDECL that forbids inlining it (e.g. an
   alloca call), if any.  */
static tree
inline_forbidden_p (tree fndecl)
{
  location_t saved_loc = input_location;
  block_stmt_iterator bsi;
  basic_block bb;
  tree ret = NULL_TREE;

  FOR_EACH_BB_FN (bb, DECL_STRUCT_FUNCTION (fndecl))
    for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
      {
        ret = walk_tree_without_duplicates (bsi_stmt_ptr (bsi),
                                            inline_forbidden_p_1, fndecl);
        if (ret)
          goto egress;
      }

 egress:
  input_location = saved_loc;
  return ret;
}
1832 /* Returns nonzero if FN is a function that does not have any
1833 fundamental inline blocking properties. */
1835 static bool
1836 inlinable_function_p (tree fn)
1838 bool inlinable = true;
1840 /* If we've already decided this function shouldn't be inlined,
1841 there's no need to check again. */
1842 if (DECL_UNINLINABLE (fn))
1843 return false;
1845 /* See if there is any language-specific reason it cannot be
1846 inlined. (It is important that this hook be called early because
1847 in C++ it may result in template instantiation.)
1848 If the function is not inlinable for language-specific reasons,
1849 it is left up to the langhook to explain why. */
1850 inlinable = !lang_hooks.tree_inlining.cannot_inline_tree_fn (&fn);
1852 /* If we don't have the function body available, we can't inline it.
1853 However, this should not be recorded since we also get here for
1854 forward declared inline functions. Therefore, return at once. */
1855 if (!DECL_SAVED_TREE (fn))
1856 return false;
1858 /* If we're not inlining at all, then we cannot inline this function. */
1859 else if (!flag_inline_trees)
1860 inlinable = false;
1862 /* Only try to inline functions if DECL_INLINE is set. This should be
1863 true for all functions declared `inline', and for all other functions
1864 as well with -finline-functions.
1866 Don't think of disregarding DECL_INLINE when flag_inline_trees == 2;
1867 it's the front-end that must set DECL_INLINE in this case, because
1868 dwarf2out loses if a function that does not have DECL_INLINE set is
1869 inlined anyway. That is why we have both DECL_INLINE and
1870 DECL_DECLARED_INLINE_P. */
1871 /* FIXME: When flag_inline_trees dies, the check for flag_unit_at_a_time
1872 here should be redundant. */
1873 else if (!DECL_INLINE (fn) && !flag_unit_at_a_time)
1874 inlinable = false;
1876 else if (inline_forbidden_p (fn))
1878 /* See if we should warn about uninlinable functions. Previously,
1879 some of these warnings would be issued while trying to expand
1880 the function inline, but that would cause multiple warnings
1881 about functions that would for example call alloca. But since
1882 this is a property of the function, just one warning is enough.
1883 As a bonus we can now give more details about the reason why a
1884 function is not inlinable.
1885 We only warn for functions declared `inline' by the user. */
1886 bool do_warning = (warn_inline
1887 && DECL_INLINE (fn)
1888 && DECL_DECLARED_INLINE_P (fn)
1889 && !DECL_IN_SYSTEM_HEADER (fn));
1891 if (lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn)))
1892 sorry (inline_forbidden_reason, fn);
1893 else if (do_warning)
1894 warning (OPT_Winline, inline_forbidden_reason, fn);
1896 inlinable = false;
1899 /* Squirrel away the result so that we don't have to check again. */
1900 DECL_UNINLINABLE (fn) = !inlinable;
1902 return inlinable;
1905 /* Estimate the cost of a memory move.  Use the machine-dependent
1906 word size and take a possible memcpy call into account. */
1908 static int
1909 estimate_move_cost (tree type)
1911 HOST_WIDE_INT size;
1913 size = int_size_in_bytes (type);
1915 if (size < 0 || size > MOVE_MAX_PIECES * MOVE_RATIO)
1916 /* Cost of a memcpy call, 3 arguments and the call. */
1917 return 4;
1918 else
1919 return ((size + MOVE_MAX_PIECES - 1) / MOVE_MAX_PIECES);
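/* Worked example (assuming a hypothetical target with MOVE_MAX_PIECES
   == 8 and MOVE_RATIO == 4): a 12-byte struct costs (12 + 8 - 1) / 8
   == 2 units, while a 64-byte struct exceeds 8 * 4 == 32 bytes and is
   charged the flat memcpy cost of 4.  */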
1922 /* Arguments for estimate_num_insns_1. */
1924 struct eni_data
1926 /* Used to return the number of insns. */
1927 int count;
1929 /* Weights of various constructs. */
1930 eni_weights *weights;
1933 /* Used by estimate_num_insns.  Estimate the number of instructions
1934 generated for the given statement. */
1936 static tree
1937 estimate_num_insns_1 (tree *tp, int *walk_subtrees, void *data)
1939 struct eni_data *d = data;
1940 tree x = *tp;
1941 unsigned cost;
1943 if (IS_TYPE_OR_DECL_P (x))
1945 *walk_subtrees = 0;
1946 return NULL;
1948 /* Assume that constants and references count as nothing.  Their cost
1949 should be dominated by the operations that use them, which we do
1950 count, and they are common targets of CSE and similar optimizations. */
1951 else if (CONSTANT_CLASS_P (x) || REFERENCE_CLASS_P (x))
1952 return NULL;
1954 switch (TREE_CODE (x))
1956 /* Containers have no cost. */
1957 case TREE_LIST:
1958 case TREE_VEC:
1959 case BLOCK:
1960 case COMPONENT_REF:
1961 case BIT_FIELD_REF:
1962 case INDIRECT_REF:
1963 case ALIGN_INDIRECT_REF:
1964 case MISALIGNED_INDIRECT_REF:
1965 case ARRAY_REF:
1966 case ARRAY_RANGE_REF:
1967 case OBJ_TYPE_REF:
1968 case EXC_PTR_EXPR: /* ??? */
1969 case FILTER_EXPR: /* ??? */
1970 case COMPOUND_EXPR:
1971 case BIND_EXPR:
1972 case WITH_CLEANUP_EXPR:
1973 case NOP_EXPR:
1974 case VIEW_CONVERT_EXPR:
1975 case SAVE_EXPR:
1976 case ADDR_EXPR:
1977 case COMPLEX_EXPR:
1978 case RANGE_EXPR:
1979 case CASE_LABEL_EXPR:
1980 case SSA_NAME:
1981 case CATCH_EXPR:
1982 case EH_FILTER_EXPR:
1983 case STATEMENT_LIST:
1984 case ERROR_MARK:
1985 case NON_LVALUE_EXPR:
1986 case FDESC_EXPR:
1987 case VA_ARG_EXPR:
1988 case TRY_CATCH_EXPR:
1989 case TRY_FINALLY_EXPR:
1990 case LABEL_EXPR:
1991 case GOTO_EXPR:
1992 case RETURN_EXPR:
1993 case EXIT_EXPR:
1994 case LOOP_EXPR:
1995 case PHI_NODE:
1996 case WITH_SIZE_EXPR:
1997 case OMP_CLAUSE:
1998 case OMP_RETURN:
1999 case OMP_CONTINUE:
2000 break;
2002 /* We do not account for constants for now.  Assume that their cost is
2003 amortized by the operations that use them.  We may reconsider this
2004 decision once we are able to optimize the tree before estimating its
2005 size and break out static initializers. */
2006 case IDENTIFIER_NODE:
2007 case INTEGER_CST:
2008 case REAL_CST:
2009 case COMPLEX_CST:
2010 case VECTOR_CST:
2011 case STRING_CST:
2012 *walk_subtrees = 0;
2013 return NULL;
2015 /* Try to estimate the cost of assignments. We have three cases to
2016 deal with:
2017 1) Simple assignments to registers;
2018 2) Stores to things that must live in memory. This includes
2019 "normal" stores to scalars, but also assignments of large
2020 structures, or constructors of big arrays;
2021 3) TARGET_EXPRs.
2023 Let us look at the first two cases, assuming we have "a = b + C":
2024 <GIMPLE_MODIFY_STMT <var_decl "a">
2025 <plus_expr <var_decl "b"> <constant C>>
2026 If "a" is a GIMPLE register, the assignment to it is free on almost
2027 any target, because "a" usually ends up in a real register. Hence
2028 the only cost of this expression comes from the PLUS_EXPR, and we
2029 can ignore the GIMPLE_MODIFY_STMT.
2030 If "a" is not a GIMPLE register, the assignment to "a" will most
2031 likely be a real store, so the cost of the GIMPLE_MODIFY_STMT is the cost
2032 of moving something into "a", which we compute using the function
2033 estimate_move_cost.
2035 The third case deals with TARGET_EXPRs, for which the semantics are
2036 that a temporary is assigned, unless the TARGET_EXPR itself is being
2037 assigned to something else. In the latter case we do not need the
2038 temporary. E.g. in:
2039 <GIMPLE_MODIFY_STMT <var_decl "a"> <target_expr>>, the
2040 GIMPLE_MODIFY_STMT is free. */
2041 case INIT_EXPR:
2042 case GIMPLE_MODIFY_STMT:
2043 /* Is the right-hand side a TARGET_EXPR? */
2044 if (TREE_CODE (GENERIC_TREE_OPERAND (x, 1)) == TARGET_EXPR)
2045 break;
2046 /* ... fall through ... */
2048 case TARGET_EXPR:
2049 x = GENERIC_TREE_OPERAND (x, 0);
2050 /* Is this an assignment to a register? */
2051 if (is_gimple_reg (x))
2052 break;
2053 /* Otherwise it's a store, so fall through to compute the move cost. */
2055 case CONSTRUCTOR:
2056 d->count += estimate_move_cost (TREE_TYPE (x));
2057 break;
2059 /* Assign cost of 1 to usual operations.
2060 ??? We may consider mapping RTL costs to this. */
2061 case COND_EXPR:
2062 case VEC_COND_EXPR:
2064 case PLUS_EXPR:
2065 case MINUS_EXPR:
2066 case MULT_EXPR:
2068 case FIX_TRUNC_EXPR:
2070 case NEGATE_EXPR:
2071 case FLOAT_EXPR:
2072 case MIN_EXPR:
2073 case MAX_EXPR:
2074 case ABS_EXPR:
2076 case LSHIFT_EXPR:
2077 case RSHIFT_EXPR:
2078 case LROTATE_EXPR:
2079 case RROTATE_EXPR:
2080 case VEC_LSHIFT_EXPR:
2081 case VEC_RSHIFT_EXPR:
2083 case BIT_IOR_EXPR:
2084 case BIT_XOR_EXPR:
2085 case BIT_AND_EXPR:
2086 case BIT_NOT_EXPR:
2088 case TRUTH_ANDIF_EXPR:
2089 case TRUTH_ORIF_EXPR:
2090 case TRUTH_AND_EXPR:
2091 case TRUTH_OR_EXPR:
2092 case TRUTH_XOR_EXPR:
2093 case TRUTH_NOT_EXPR:
2095 case LT_EXPR:
2096 case LE_EXPR:
2097 case GT_EXPR:
2098 case GE_EXPR:
2099 case EQ_EXPR:
2100 case NE_EXPR:
2101 case ORDERED_EXPR:
2102 case UNORDERED_EXPR:
2104 case UNLT_EXPR:
2105 case UNLE_EXPR:
2106 case UNGT_EXPR:
2107 case UNGE_EXPR:
2108 case UNEQ_EXPR:
2109 case LTGT_EXPR:
2111 case CONVERT_EXPR:
2113 case CONJ_EXPR:
2115 case PREDECREMENT_EXPR:
2116 case PREINCREMENT_EXPR:
2117 case POSTDECREMENT_EXPR:
2118 case POSTINCREMENT_EXPR:
2120 case ASM_EXPR:
2122 case REALIGN_LOAD_EXPR:
2124 case REDUC_MAX_EXPR:
2125 case REDUC_MIN_EXPR:
2126 case REDUC_PLUS_EXPR:
2127 case WIDEN_SUM_EXPR:
2128 case DOT_PROD_EXPR:
2129 case VEC_WIDEN_MULT_HI_EXPR:
2130 case VEC_WIDEN_MULT_LO_EXPR:
2131 case VEC_UNPACK_HI_EXPR:
2132 case VEC_UNPACK_LO_EXPR:
2133 case VEC_PACK_MOD_EXPR:
2134 case VEC_PACK_SAT_EXPR:
2136 case WIDEN_MULT_EXPR:
2138 case VEC_EXTRACT_EVEN_EXPR:
2139 case VEC_EXTRACT_ODD_EXPR:
2140 case VEC_INTERLEAVE_HIGH_EXPR:
2141 case VEC_INTERLEAVE_LOW_EXPR:
2143 case RESX_EXPR:
2144 d->count += 1;
2145 break;
2147 case SWITCH_EXPR:
2148 /* TODO: Cost of a switch should be derived from the number of
2149 branches. */
2150 d->count += d->weights->switch_cost;
2151 break;
2153 /* A few special cases of expensive operations.  This is useful
2154 for avoiding inlining of functions that contain too many of these. */
2155 case TRUNC_DIV_EXPR:
2156 case CEIL_DIV_EXPR:
2157 case FLOOR_DIV_EXPR:
2158 case ROUND_DIV_EXPR:
2159 case EXACT_DIV_EXPR:
2160 case TRUNC_MOD_EXPR:
2161 case CEIL_MOD_EXPR:
2162 case FLOOR_MOD_EXPR:
2163 case ROUND_MOD_EXPR:
2164 case RDIV_EXPR:
2165 d->count += d->weights->div_mod_cost;
2166 break;
2167 case CALL_EXPR:
2169 tree decl = get_callee_fndecl (x);
2170 tree arg;
2172 cost = d->weights->call_cost;
2173 if (decl && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
2174 switch (DECL_FUNCTION_CODE (decl))
2176 case BUILT_IN_CONSTANT_P:
2177 *walk_subtrees = 0;
2178 return NULL_TREE;
2179 case BUILT_IN_EXPECT:
2180 return NULL_TREE;
2181 /* Prefetch instruction is not expensive. */
2182 case BUILT_IN_PREFETCH:
2183 cost = 1;
2184 break;
2185 default:
2186 break;
2189 /* Our cost must be kept in sync with cgraph_estimate_size_after_inlining,
2190 which uses the function declaration to figure out the arguments. */
2191 if (!decl)
2193 for (arg = TREE_OPERAND (x, 1); arg; arg = TREE_CHAIN (arg))
2194 d->count += estimate_move_cost (TREE_TYPE (TREE_VALUE (arg)));
2196 else
2198 for (arg = DECL_ARGUMENTS (decl); arg; arg = TREE_CHAIN (arg))
2199 d->count += estimate_move_cost (TREE_TYPE (arg));
2202 d->count += cost;
2203 break;
2206 case OMP_PARALLEL:
2207 case OMP_FOR:
2208 case OMP_SECTIONS:
2209 case OMP_SINGLE:
2210 case OMP_SECTION:
2211 case OMP_MASTER:
2212 case OMP_ORDERED:
2213 case OMP_CRITICAL:
2214 case OMP_ATOMIC:
2215 /* OpenMP directives are generally very expensive. */
2216 d->count += d->weights->omp_cost;
2217 break;
2219 default:
2220 gcc_unreachable ();
2222 return NULL;
2225 /* Estimate number of instructions that will be created by expanding EXPR.
2226 WEIGHTS contains weights attributed to various constructs. */
2228 int
2229 estimate_num_insns (tree expr, eni_weights *weights)
2231 struct pointer_set_t *visited_nodes;
2232 basic_block bb;
2233 block_stmt_iterator bsi;
2234 struct function *my_function;
2235 struct eni_data data;
2237 data.count = 0;
2238 data.weights = weights;
2240 /* If we're given an entire function, walk the CFG. */
2241 if (TREE_CODE (expr) == FUNCTION_DECL)
2243 my_function = DECL_STRUCT_FUNCTION (expr);
2244 gcc_assert (my_function && my_function->cfg);
2245 visited_nodes = pointer_set_create ();
2246 FOR_EACH_BB_FN (bb, my_function)
2248 for (bsi = bsi_start (bb);
2249 !bsi_end_p (bsi);
2250 bsi_next (&bsi))
2252 walk_tree (bsi_stmt_ptr (bsi), estimate_num_insns_1,
2253 &data, visited_nodes);
2256 pointer_set_destroy (visited_nodes);
2258 else
2259 walk_tree_without_duplicates (&expr, estimate_num_insns_1, &data);
2261 return data.count;
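/* Usage sketch (assumed; the real callers live in the inliner proper):

     int size = estimate_num_insns (fndecl, &eni_size_weights);
     int time = estimate_num_insns (fndecl, &eni_time_weights);

   walks the whole CFG of FNDECL when given a FUNCTION_DECL, or just
   one expression otherwise.  */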
2264 /* Initializes weights used by estimate_num_insns. */
2266 void
2267 init_inline_once (void)
2269 eni_inlining_weights.call_cost = PARAM_VALUE (PARAM_INLINE_CALL_COST);
2270 eni_inlining_weights.div_mod_cost = 10;
2271 eni_inlining_weights.switch_cost = 1;
2272 eni_inlining_weights.omp_cost = 40;
2274 eni_size_weights.call_cost = 1;
2275 eni_size_weights.div_mod_cost = 1;
2276 eni_size_weights.switch_cost = 10;
2277 eni_size_weights.omp_cost = 40;
2279 /* Estimating the time for a call is difficult, since we have no idea
2280 what the called function does.  In the current uses of
2281 eni_time_weights, underestimating the cost does less harm than
2282 overestimating it, so we choose a rather small value here. */
2283 eni_time_weights.call_cost = 10;
2284 eni_time_weights.div_mod_cost = 10;
2285 eni_time_weights.switch_cost = 4;
2286 eni_time_weights.omp_cost = 40;
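/* The resulting weights, for reference:

     weight set            call                    div/mod  switch  omp
     eni_inlining_weights  PARAM_INLINE_CALL_COST  10       1       40
     eni_size_weights      1                       1        10      40
     eni_time_weights      10                      10       4       40  */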
2289 typedef struct function *function_p;
2291 DEF_VEC_P(function_p);
2292 DEF_VEC_ALLOC_P(function_p,heap);
2294 /* Initialized with NOGC, making this poisonous to the garbage collector. */
2295 static VEC(function_p,heap) *cfun_stack;
2297 void
2298 push_cfun (struct function *new_cfun)
2300 VEC_safe_push (function_p, heap, cfun_stack, cfun);
2301 cfun = new_cfun;
2304 void
2305 pop_cfun (void)
2307 cfun = VEC_pop (function_p, cfun_stack);
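/* The usual pairing, as tree_function_versioning below does:

     push_cfun (DECL_STRUCT_FUNCTION (new_decl));
     ...work with new_decl as the current function...
     pop_cfun ();  */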
2310 /* Install new lexical TREE_BLOCK underneath 'current_block'. */
2311 static void
2312 add_lexical_block (tree current_block, tree new_block)
2314 tree *blk_p;
2316 /* Walk to the last sub-block. */
2317 for (blk_p = &BLOCK_SUBBLOCKS (current_block);
2318 *blk_p;
2319 blk_p = &TREE_CHAIN (*blk_p))
2321 *blk_p = new_block;
2322 BLOCK_SUPERCONTEXT (new_block) = current_block;
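/* E.g. if CURRENT_BLOCK already has sub-blocks B1 -> B2, this yields
   B1 -> B2 -> NEW_BLOCK, with BLOCK_SUPERCONTEXT (NEW_BLOCK) pointing
   back at CURRENT_BLOCK.  */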
2325 /* If *TP is a CALL_EXPR, replace it with its inline expansion. */
2327 static bool
2328 expand_call_inline (basic_block bb, tree stmt, tree *tp, void *data)
2330 copy_body_data *id;
2331 tree t;
2332 tree use_retvar;
2333 tree fn;
2334 splay_tree st;
2335 tree args;
2336 tree return_slot;
2337 tree modify_dest;
2338 location_t saved_location;
2339 struct cgraph_edge *cg_edge;
2340 const char *reason;
2341 basic_block return_block;
2342 edge e;
2343 block_stmt_iterator bsi, stmt_bsi;
2344 bool successfully_inlined = FALSE;
2345 bool purge_dead_abnormal_edges;
2346 tree t_step;
2347 tree var;
2349 /* See what we've got. */
2350 id = (copy_body_data *) data;
2351 t = *tp;
2353 /* Set input_location here so we get the right instantiation context
2354 if we call instantiate_decl from inlinable_function_p. */
2355 saved_location = input_location;
2356 if (EXPR_HAS_LOCATION (t))
2357 input_location = EXPR_LOCATION (t);
2359 /* From here on, we're only interested in CALL_EXPRs. */
2360 if (TREE_CODE (t) != CALL_EXPR)
2361 goto egress;
2363 /* First, see if we can figure out what function is being called.
2364 If we cannot, then there is no hope of inlining the function. */
2365 fn = get_callee_fndecl (t);
2366 if (!fn)
2367 goto egress;
2369 /* Turn forward declarations into real ones. */
2370 fn = cgraph_node (fn)->decl;
2372 /* If fn is a declaration of a function in a nested scope that was
2373 globally declared inline, we don't set its DECL_INITIAL.
2374 However, we can't blindly follow DECL_ABSTRACT_ORIGIN because the
2375 C++ front-end uses it for cdtors to refer to their internal
2376 declarations, that are not real functions. Fortunately those
2377 don't have trees to be saved, so we can tell by checking their
2378 DECL_SAVED_TREE. */
2379 if (! DECL_INITIAL (fn)
2380 && DECL_ABSTRACT_ORIGIN (fn)
2381 && DECL_SAVED_TREE (DECL_ABSTRACT_ORIGIN (fn)))
2382 fn = DECL_ABSTRACT_ORIGIN (fn);
2384 /* Objective-C and Fortran still call tree_rest_of_compilation directly.
2385 Remove this check once that is fixed. */
2386 if (!id->dst_node->analyzed)
2387 goto egress;
2389 cg_edge = cgraph_edge (id->dst_node, stmt);
2391 /* Constant propagation on arguments done during previous inlining
2392 may create a new direct call.  Produce an edge for it. */
2393 if (!cg_edge)
2395 struct cgraph_node *dest = cgraph_node (fn);
2397 /* We have a missing edge in the callgraph.  This can happen when
2398 previous inlining turned an indirect call into a direct call by
2399 constant-propagating arguments.  In all other cases we hit a bug
2400 (incorrect node sharing is the most common reason for missing edges). */
2401 gcc_assert (dest->needed || !flag_unit_at_a_time);
2402 cgraph_create_edge (id->dst_node, dest, stmt,
2403 bb->count, bb->loop_depth)->inline_failed
2404 = N_("originally indirect function call not considered for inlining");
2405 goto egress;
2408 /* Don't try to inline functions that are not well-suited to
2409 inlining. */
2410 if (!cgraph_inline_p (cg_edge, &reason))
2412 if (lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn))
2413 /* Avoid warnings during early inline pass. */
2414 && (!flag_unit_at_a_time || cgraph_global_info_ready))
2416 sorry ("inlining failed in call to %q+F: %s", fn, reason);
2417 sorry ("called from here");
2419 else if (warn_inline && DECL_DECLARED_INLINE_P (fn)
2420 && !DECL_IN_SYSTEM_HEADER (fn)
2421 && strlen (reason)
2422 && !lookup_attribute ("noinline", DECL_ATTRIBUTES (fn))
2423 /* Avoid warnings during early inline pass. */
2424 && (!flag_unit_at_a_time || cgraph_global_info_ready))
2426 warning (OPT_Winline, "inlining failed in call to %q+F: %s",
2427 fn, reason);
2428 warning (OPT_Winline, "called from here");
2430 goto egress;
2432 fn = cg_edge->callee->decl;
2434 #ifdef ENABLE_CHECKING
2435 if (cg_edge->callee->decl != id->dst_node->decl)
2436 verify_cgraph_node (cg_edge->callee);
2437 #endif
2439 /* We will be inlining this callee. */
2440 id->eh_region = lookup_stmt_eh_region (stmt);
2442 /* Split the block holding the CALL_EXPR. */
2443 e = split_block (bb, stmt);
2444 bb = e->src;
2445 return_block = e->dest;
2446 remove_edge (e);
2448 /* split_block splits after the statement; work around this by
2449 moving the call into the second block manually. Not pretty,
2450 but seems easier than doing the CFG manipulation by hand
2451 when the CALL_EXPR is in the last statement of BB. */
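/* Schematically (a sketch):

     before:  BB: [ ...; CALL ] --> succs
     after:   BB: [ ... ]    RETURN_BLOCK: [ CALL; ... ] --> succs

   with the edge between BB and RETURN_BLOCK removed; copy_body will
   wire the inlined body in between the two blocks.  */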
2452 stmt_bsi = bsi_last (bb);
2453 bsi_remove (&stmt_bsi, false);
2455 /* If the CALL_EXPR was in the last statement of BB, it may have
2456 been the source of abnormal edges. In this case, schedule
2457 the removal of dead abnormal edges. */
2458 bsi = bsi_start (return_block);
2459 if (bsi_end_p (bsi))
2461 bsi_insert_after (&bsi, stmt, BSI_NEW_STMT);
2462 purge_dead_abnormal_edges = true;
2464 else
2466 bsi_insert_before (&bsi, stmt, BSI_NEW_STMT);
2467 purge_dead_abnormal_edges = false;
2470 stmt_bsi = bsi_start (return_block);
2472 /* Build a block containing code to initialize the arguments, the
2473 actual inline expansion of the body, and a label for the return
2474 statements within the function to jump to. The type of the
2475 statement expression is the return type of the function call. */
2476 id->block = make_node (BLOCK);
2477 BLOCK_ABSTRACT_ORIGIN (id->block) = fn;
2478 BLOCK_SOURCE_LOCATION (id->block) = input_location;
2479 add_lexical_block (TREE_BLOCK (stmt), id->block);
2481 /* Local declarations will be replaced by their equivalents in this
2482 map. */
2483 st = id->decl_map;
2484 id->decl_map = splay_tree_new (splay_tree_compare_pointers,
2485 NULL, NULL);
2487 /* Initialize the parameters. */
2488 args = TREE_OPERAND (t, 1);
2490 /* Record the function we are about to inline. */
2491 id->src_fn = fn;
2492 id->src_node = cg_edge->callee;
2493 id->src_cfun = DECL_STRUCT_FUNCTION (fn);
2495 initialize_inlined_parameters (id, args, TREE_OPERAND (t, 2), fn, bb);
2497 if (DECL_INITIAL (fn))
2498 add_lexical_block (id->block, remap_blocks (DECL_INITIAL (fn), id));
2500 /* Return statements in the function body will be replaced by jumps
2501 to the RETURN_BLOCK. */
2503 gcc_assert (DECL_INITIAL (fn));
2504 gcc_assert (TREE_CODE (DECL_INITIAL (fn)) == BLOCK);
2506 /* Find the lhs to which the result of this call is assigned. */
2507 return_slot = NULL;
2508 if (TREE_CODE (stmt) == GIMPLE_MODIFY_STMT)
2510 modify_dest = GIMPLE_STMT_OPERAND (stmt, 0);
2512 /* The function which we are inlining might not return a value,
2513 in which case we should issue a warning that the function
2514 does not return a value. In that case the optimizers will
2515 see that the variable to which the value is assigned was not
2516 initialized. We do not want to issue a warning about that
2517 uninitialized variable. */
2518 if (DECL_P (modify_dest))
2519 TREE_NO_WARNING (modify_dest) = 1;
2520 if (CALL_EXPR_RETURN_SLOT_OPT (t))
2522 return_slot = modify_dest;
2523 modify_dest = NULL;
2526 else
2527 modify_dest = NULL;
2529 /* Declare the return variable for the function. */
2530 declare_return_variable (id, return_slot,
2531 modify_dest, &use_retvar);
2533 /* This is it. Duplicate the callee body. Assume callee is
2534 pre-gimplified. Note that we must not alter the caller
2535 function in any way before this point, as this CALL_EXPR may be
2536 a self-referential call; if we're calling ourselves, we need to
2537 duplicate our body before altering anything. */
2538 copy_body (id, bb->count, bb->frequency, bb, return_block);
2540 /* Add local vars in this inlined callee to caller. */
2541 t_step = id->src_cfun->unexpanded_var_list;
2542 for (; t_step; t_step = TREE_CHAIN (t_step))
2544 var = TREE_VALUE (t_step);
2545 if (TREE_STATIC (var) && !TREE_ASM_WRITTEN (var))
2546 cfun->unexpanded_var_list = tree_cons (NULL_TREE, var,
2547 cfun->unexpanded_var_list);
2548 else
2549 cfun->unexpanded_var_list = tree_cons (NULL_TREE, remap_decl (var, id),
2550 cfun->unexpanded_var_list);
2553 /* Clean up. */
2554 splay_tree_delete (id->decl_map);
2555 id->decl_map = st;
2557 /* If the inlined function returns a result that we care about,
2558 clobber the CALL_EXPR with a reference to the return variable. */
2559 if (use_retvar && (TREE_CODE (bsi_stmt (stmt_bsi)) != CALL_EXPR))
2561 *tp = use_retvar;
2562 if (gimple_in_ssa_p (cfun))
2564 update_stmt (stmt);
2565 mark_symbols_for_renaming (stmt);
2567 maybe_clean_or_replace_eh_stmt (stmt, stmt);
2569 else
2570 /* We're modifying a TSI owned by gimple_expand_calls_inline();
2571 tsi_delink() will leave the iterator in a sane state. */
2573 /* Handle the case of inlining a function that lacks a return
2574 statement, so the return value would be undefined. */
2575 if (TREE_CODE (stmt) == GIMPLE_MODIFY_STMT
2576 && TREE_CODE (GIMPLE_STMT_OPERAND (stmt, 0)) == SSA_NAME)
2578 tree name = TREE_OPERAND (stmt, 0);
2579 tree var = SSA_NAME_VAR (TREE_OPERAND (stmt, 0));
2580 tree def = gimple_default_def (cfun, var);
2582 /* If the variable is used undefined, make this name undefined via
2583 move. */
2584 if (def)
2586 TREE_OPERAND (stmt, 1) = def;
2587 update_stmt (stmt);
2589 /* Otherwise make this variable undefined. */
2590 else
2592 bsi_remove (&stmt_bsi, true);
2593 set_default_def (var, name);
2594 SSA_NAME_DEF_STMT (name) = build_empty_stmt ();
2597 else
2598 bsi_remove (&stmt_bsi, true);
2601 if (purge_dead_abnormal_edges)
2602 tree_purge_dead_abnormal_call_edges (return_block);
2604 /* If the value of the new expression is ignored, that's OK. We
2605 don't warn about this for CALL_EXPRs, so we shouldn't warn about
2606 the equivalent inlined version either. */
2607 TREE_USED (*tp) = 1;
2609 /* Output the inlining info for this abstract function, since it has been
2610 inlined. If we don't do this now, we can lose the information about the
2611 variables in the function when the blocks get blown away as soon as we
2612 remove the cgraph node. */
2613 (*debug_hooks->outlining_inline_function) (cg_edge->callee->decl);
2615 /* Update callgraph if needed. */
2616 cgraph_remove_node (cg_edge->callee);
2618 id->block = NULL_TREE;
2619 successfully_inlined = TRUE;
2621 egress:
2622 input_location = saved_location;
2623 return successfully_inlined;
2626 /* Expand call statements reachable from STMT_P.
2627 We can only have CALL_EXPRs as the "toplevel" tree code or nested
2628 in a GIMPLE_MODIFY_STMT.  See tree-gimple.c:get_call_expr_in().
2629 Unfortunately we cannot use that function here because we need a
2630 pointer to the CALL_EXPR, not the tree itself. */
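/* Schematically, the statements of interest look like:

     foo (x);                     <-- bare CALL_EXPR
     a = foo (x);                 <-- CALL_EXPR on the RHS of a
                                      GIMPLE_MODIFY_STMT

   possibly with a WITH_SIZE_EXPR wrapped around the call.  */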
2632 static bool
2633 gimple_expand_calls_inline (basic_block bb, copy_body_data *id)
2635 block_stmt_iterator bsi;
2637 /* Register specific tree functions. */
2638 tree_register_cfg_hooks ();
2639 for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
2641 tree *expr_p = bsi_stmt_ptr (bsi);
2642 tree stmt = *expr_p;
2644 if (TREE_CODE (*expr_p) == GIMPLE_MODIFY_STMT)
2645 expr_p = &GIMPLE_STMT_OPERAND (*expr_p, 1);
2646 if (TREE_CODE (*expr_p) == WITH_SIZE_EXPR)
2647 expr_p = &TREE_OPERAND (*expr_p, 0);
2648 if (TREE_CODE (*expr_p) == CALL_EXPR)
2649 if (expand_call_inline (bb, stmt, expr_p, id))
2650 return true;
2652 return false;
2655 /* Walk all basic blocks created after FIRST and try to fold every statement
2656 in the STATEMENTS pointer set. */
2657 static void
2658 fold_marked_statements (int first, struct pointer_set_t *statements)
2660 for (;first < n_basic_blocks;first++)
2661 if (BASIC_BLOCK (first))
2663 block_stmt_iterator bsi;
2664 for (bsi = bsi_start (BASIC_BLOCK (first));
2665 !bsi_end_p (bsi); bsi_next (&bsi))
2666 if (pointer_set_contains (statements, bsi_stmt (bsi)))
2668 tree old_stmt = bsi_stmt (bsi);
2669 if (fold_stmt (bsi_stmt_ptr (bsi)))
2671 update_stmt (bsi_stmt (bsi));
2672 if (maybe_clean_or_replace_eh_stmt (old_stmt, bsi_stmt (bsi)))
2673 tree_purge_dead_eh_edges (BASIC_BLOCK (first));
2679 /* Return true if BB has at least one abnormal outgoing edge. */
2681 static inline bool
2682 has_abnormal_outgoing_edge_p (basic_block bb)
2684 edge e;
2685 edge_iterator ei;
2687 FOR_EACH_EDGE (e, ei, bb->succs)
2688 if (e->flags & EDGE_ABNORMAL)
2689 return true;
2691 return false;
2694 /* When a block from the inlined function contains a call with side
2695 effects in its middle and gets inlined into a function with non-local
2696 labels, the call becomes a potential non-local goto, so we need to add the appropriate edges. */
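/* Sketch: given

     BB: [ ...; call foo (); more stmts... ]

   in a function with non-local labels, foo may perform a non-local
   goto, so the block is split right after the call and abnormal edges
   are added from the block that now ends in the call.  */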
2698 static void
2699 make_nonlocal_label_edges (void)
2701 block_stmt_iterator bsi;
2702 basic_block bb;
2704 FOR_EACH_BB (bb)
2706 for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
2708 tree stmt = bsi_stmt (bsi);
2709 if (tree_can_make_abnormal_goto (stmt))
2711 if (stmt == bsi_stmt (bsi_last (bb)))
2713 if (!has_abnormal_outgoing_edge_p (bb))
2714 make_abnormal_goto_edges (bb, true);
2716 else
2718 edge e = split_block (bb, stmt);
2719 bb = e->src;
2720 make_abnormal_goto_edges (bb, true);
2722 break;
2725 /* Update PHIs on nonlocal goto receivers we (possibly)
2726 just created new edges into. */
2727 if (TREE_CODE (stmt) == LABEL_EXPR
2728 && gimple_in_ssa_p (cfun))
2730 tree target = LABEL_EXPR_LABEL (stmt);
2731 if (DECL_NONLOCAL (target))
2733 tree phi;
2735 for (phi = phi_nodes (bb); phi; phi = PHI_CHAIN (phi))
2737 gcc_assert (SSA_NAME_OCCURS_IN_ABNORMAL_PHI
2738 (PHI_RESULT (phi)));
2739 mark_sym_for_renaming
2740 (SSA_NAME_VAR (PHI_RESULT (phi)));
2748 /* Expand calls to inline functions in the body of FN. */
2750 unsigned int
2751 optimize_inline_calls (tree fn)
2753 copy_body_data id;
2754 tree prev_fn;
2755 basic_block bb;
2756 int last = n_basic_blocks;
2757 /* There is no point in performing inlining if errors have already
2758 occurred -- and we might crash if we try to inline invalid
2759 code. */
2760 if (errorcount || sorrycount)
2761 return 0;
2763 /* Clear out ID. */
2764 memset (&id, 0, sizeof (id));
2766 id.src_node = id.dst_node = cgraph_node (fn);
2767 id.dst_fn = fn;
2768 /* Or any functions that aren't finished yet. */
2769 prev_fn = NULL_TREE;
2770 if (current_function_decl)
2772 id.dst_fn = current_function_decl;
2773 prev_fn = current_function_decl;
2776 id.copy_decl = copy_decl_maybe_to_var;
2777 id.transform_call_graph_edges = CB_CGE_DUPLICATE;
2778 id.transform_new_cfg = false;
2779 id.transform_return_to_modify = true;
2780 id.transform_lang_insert_block = false;
2781 id.statements_to_fold = pointer_set_create ();
2783 push_gimplify_context ();
2785 /* Reach the trees by walking over the CFG, and note the
2786 enclosing basic-blocks in the call edges. */
2787 /* We walk the blocks going forward, because inlined function bodies
2788 will split id->current_basic_block, and the new blocks will
2789 follow it; we'll trudge through them, processing their CALL_EXPRs
2790 along the way. */
2791 FOR_EACH_BB (bb)
2792 gimple_expand_calls_inline (bb, &id);
2794 pop_gimplify_context (NULL);
2795 /* Renumber the (code) basic_blocks consecutively. */
2796 compact_blocks ();
2797 /* Renumber the lexical scoping (non-code) blocks consecutively. */
2798 number_blocks (fn);
2800 #ifdef ENABLE_CHECKING
2802 struct cgraph_edge *e;
2804 verify_cgraph_node (id.dst_node);
2806 /* Double check that we inlined everything we are supposed to inline. */
2807 for (e = id.dst_node->callees; e; e = e->next_callee)
2808 gcc_assert (e->inline_failed);
2810 #endif
2811 /* We need to rescale frequencies again to peak at REG_BR_PROB_BASE
2812 as inlining loops might increase the maximum. */
2813 if (ENTRY_BLOCK_PTR->count)
2814 counts_to_freqs ();
2816 /* We are not going to maintain the cgraph edges up to date.
2817 Kill it so it won't confuse us. */
2818 cgraph_node_remove_callees (id.dst_node);
2820 fold_marked_statements (last, id.statements_to_fold);
2821 pointer_set_destroy (id.statements_to_fold);
2822 fold_cond_expr_cond ();
2823 if (current_function_has_nonlocal_label)
2824 make_nonlocal_label_edges ();
2825 /* We make no attempts to keep dominance info up-to-date. */
2826 free_dominance_info (CDI_DOMINATORS);
2827 free_dominance_info (CDI_POST_DOMINATORS);
2828 /* It would be nice to check SSA/CFG/statement consistency here, but it is
2829 not possible yet - the IPA passes might make various functions not
2830 throw, and they do not proactively update local EH info.  This is
2831 done later in the fixup_cfg pass, which also executes the verification. */
2832 return (TODO_update_ssa | TODO_cleanup_cfg
2833 | (gimple_in_ssa_p (cfun) ? TODO_remove_unused_locals : 0));
2836 /* FN is a function that has a complete body, and CLONE is a function whose
2837 body is to be set to a copy of FN, mapping argument declarations according
2838 to the ARG_MAP splay_tree. */
2840 void
2841 clone_body (tree clone, tree fn, void *arg_map)
2843 copy_body_data id;
2845 /* Clone the body, as if we were making an inline call. But, remap the
2846 parameters in the callee to the parameters of caller. */
2847 memset (&id, 0, sizeof (id));
2848 id.src_fn = fn;
2849 id.dst_fn = clone;
2850 id.src_cfun = DECL_STRUCT_FUNCTION (fn);
2851 id.decl_map = (splay_tree)arg_map;
2853 id.copy_decl = copy_decl_no_change;
2854 id.transform_call_graph_edges = CB_CGE_DUPLICATE;
2855 id.transform_new_cfg = true;
2856 id.transform_return_to_modify = false;
2857 id.transform_lang_insert_block = true;
2859 /* We're not inside any EH region. */
2860 id.eh_region = -1;
2862 /* Actually copy the body. */
2863 append_to_statement_list_force (copy_generic_body (&id), &DECL_SAVED_TREE (clone));
2866 /* Passed to walk_tree. Copies the node pointed to, if appropriate. */
2868 tree
2869 copy_tree_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
2871 enum tree_code code = TREE_CODE (*tp);
2872 enum tree_code_class cl = TREE_CODE_CLASS (code);
2874 /* We make copies of most nodes. */
2875 if (IS_EXPR_CODE_CLASS (cl)
2876 || IS_GIMPLE_STMT_CODE_CLASS (cl)
2877 || code == TREE_LIST
2878 || code == TREE_VEC
2879 || code == TYPE_DECL
2880 || code == OMP_CLAUSE)
2882 /* Because the chain gets clobbered when we make a copy, we save it
2883 here. */
2884 tree chain = NULL_TREE, new;
2886 if (!GIMPLE_TUPLE_P (*tp))
2887 chain = TREE_CHAIN (*tp);
2889 /* Copy the node. */
2890 new = copy_node (*tp);
2892 /* Propagate mudflap marked-ness. */
2893 if (flag_mudflap && mf_marked_p (*tp))
2894 mf_mark (new);
2896 *tp = new;
2898 /* Now, restore the chain, if appropriate. That will cause
2899 walk_tree to walk into the chain as well. */
2900 if (code == PARM_DECL
2901 || code == TREE_LIST
2902 || code == OMP_CLAUSE)
2903 TREE_CHAIN (*tp) = chain;
2905 /* For now, we don't update BLOCKs when we make copies. So, we
2906 have to nullify all BIND_EXPRs. */
2907 if (TREE_CODE (*tp) == BIND_EXPR)
2908 BIND_EXPR_BLOCK (*tp) = NULL_TREE;
2910 else if (code == CONSTRUCTOR)
2912 /* CONSTRUCTOR nodes need special handling because
2913 we need to duplicate the vector of elements. */
2914 tree new;
2916 new = copy_node (*tp);
2918 /* Propagate mudflap marked-ness. */
2919 if (flag_mudflap && mf_marked_p (*tp))
2920 mf_mark (new);
2922 CONSTRUCTOR_ELTS (new) = VEC_copy (constructor_elt, gc,
2923 CONSTRUCTOR_ELTS (*tp));
2924 *tp = new;
2926 else if (TREE_CODE_CLASS (code) == tcc_type)
2927 *walk_subtrees = 0;
2928 else if (TREE_CODE_CLASS (code) == tcc_declaration)
2929 *walk_subtrees = 0;
2930 else if (TREE_CODE_CLASS (code) == tcc_constant)
2931 *walk_subtrees = 0;
2932 else
2933 gcc_assert (code != STATEMENT_LIST);
2934 return NULL_TREE;
2937 /* The SAVE_EXPR pointed to by TP is being copied. If ST contains
2938 information indicating to what new SAVE_EXPR this one should be mapped,
2939 use that one. Otherwise, create a new node and enter it in ST. FN is
2940 the function into which the copy will be placed. */
2942 static void
2943 remap_save_expr (tree *tp, void *st_, int *walk_subtrees)
2945 splay_tree st = (splay_tree) st_;
2946 splay_tree_node n;
2947 tree t;
2949 /* See if we already encountered this SAVE_EXPR. */
2950 n = splay_tree_lookup (st, (splay_tree_key) *tp);
2952 /* If we didn't already remap this SAVE_EXPR, do so now. */
2953 if (!n)
2955 t = copy_node (*tp);
2957 /* Remember this SAVE_EXPR. */
2958 splay_tree_insert (st, (splay_tree_key) *tp, (splay_tree_value) t);
2959 /* Make sure we don't remap an already-remapped SAVE_EXPR. */
2960 splay_tree_insert (st, (splay_tree_key) t, (splay_tree_value) t);
2962 else
2964 /* We've already walked into this SAVE_EXPR; don't do it again. */
2965 *walk_subtrees = 0;
2966 t = (tree) n->value;
2969 /* Replace this SAVE_EXPR with the copy. */
2970 *tp = t;
2973 /* Called via walk_tree.  If *TP points to a LABEL_EXPR for a local
2974 label, copies the label's declaration and enters it in the decl map
2975 in DATA (which is really a `copy_body_data *'). */
2977 static tree
2978 mark_local_for_remap_r (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED,
2979 void *data)
2981 copy_body_data *id = (copy_body_data *) data;
2983 /* Don't walk into types. */
2984 if (TYPE_P (*tp))
2985 *walk_subtrees = 0;
2987 else if (TREE_CODE (*tp) == LABEL_EXPR)
2989 tree decl = TREE_OPERAND (*tp, 0);
2991 /* Copy the decl and remember the copy. */
2992 insert_decl_map (id, decl, id->copy_decl (decl, id));
2995 return NULL_TREE;
2998 /* Perform any modifications to EXPR required when it is unsaved. Does
2999 not recurse into EXPR's subtrees. */
3001 static void
3002 unsave_expr_1 (tree expr)
3004 switch (TREE_CODE (expr))
3006 case TARGET_EXPR:
3007 /* Don't mess with a TARGET_EXPR that hasn't been expanded.
3008 It's OK for this to happen if it was part of a subtree that
3009 isn't immediately expanded, such as operand 2 of another
3010 TARGET_EXPR. */
3011 if (TREE_OPERAND (expr, 1))
3012 break;
3014 TREE_OPERAND (expr, 1) = TREE_OPERAND (expr, 3);
3015 TREE_OPERAND (expr, 3) = NULL_TREE;
3016 break;
3018 default:
3019 break;
3023 /* Called via walk_tree when an expression is unsaved.  Using the
3024 decl map in the `copy_body_data *' passed in DATA, remaps all local
3025 declarations to appropriate replacements. */
3027 static tree
3028 unsave_r (tree *tp, int *walk_subtrees, void *data)
3030 copy_body_data *id = (copy_body_data *) data;
3031 splay_tree st = id->decl_map;
3032 splay_tree_node n;
3034 /* Only a local declaration (variable or label). */
3035 if ((TREE_CODE (*tp) == VAR_DECL && !TREE_STATIC (*tp))
3036 || TREE_CODE (*tp) == LABEL_DECL)
3038 /* Lookup the declaration. */
3039 n = splay_tree_lookup (st, (splay_tree_key) *tp);
3041 /* If it's there, remap it. */
3042 if (n)
3043 *tp = (tree) n->value;
3046 else if (TREE_CODE (*tp) == STATEMENT_LIST)
3047 copy_statement_list (tp);
3048 else if (TREE_CODE (*tp) == BIND_EXPR)
3049 copy_bind_expr (tp, walk_subtrees, id);
3050 else if (TREE_CODE (*tp) == SAVE_EXPR)
3051 remap_save_expr (tp, st, walk_subtrees);
3052 else
3054 copy_tree_r (tp, walk_subtrees, NULL);
3056 /* Do whatever unsaving is required. */
3057 unsave_expr_1 (*tp);
3060 /* Keep iterating. */
3061 return NULL_TREE;
3064 /* Copies everything in EXPR and replaces variables, labels
3065 and SAVE_EXPRs local to EXPR. */
3067 tree
3068 unsave_expr_now (tree expr)
3070 copy_body_data id;
3072 /* There's nothing to do for NULL_TREE. */
3073 if (expr == 0)
3074 return expr;
3076 /* Set up ID. */
3077 memset (&id, 0, sizeof (id));
3078 id.src_fn = current_function_decl;
3079 id.dst_fn = current_function_decl;
3080 id.decl_map = splay_tree_new (splay_tree_compare_pointers, NULL, NULL);
3082 id.copy_decl = copy_decl_no_change;
3083 id.transform_call_graph_edges = CB_CGE_DUPLICATE;
3084 id.transform_new_cfg = false;
3085 id.transform_return_to_modify = false;
3086 id.transform_lang_insert_block = false;
3088 /* Walk the tree once to find local labels. */
3089 walk_tree_without_duplicates (&expr, mark_local_for_remap_r, &id);
3091 /* Walk the tree again, copying, remapping, and unsaving. */
3092 walk_tree (&expr, unsave_r, &id, NULL);
3094 /* Clean up. */
3095 splay_tree_delete (id.decl_map);
3097 return expr;
3100 /* Allow someone to determine if SEARCH is a child of TOP from gdb. */
3102 static tree
3103 debug_find_tree_1 (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED, void *data)
3105 if (*tp == data)
3106 return (tree) data;
3107 else
3108 return NULL;
3111 bool
3112 debug_find_tree (tree top, tree search)
3114 return walk_tree_without_duplicates (&top, debug_find_tree_1, search) != 0;
3118 /* Declare the variables created by the inliner. Add all the variables in
3119 VARS to BIND_EXPR. */
3121 static void
3122 declare_inline_vars (tree block, tree vars)
3124 tree t;
3125 for (t = vars; t; t = TREE_CHAIN (t))
3127 DECL_SEEN_IN_BIND_EXPR_P (t) = 1;
3128 gcc_assert (!TREE_STATIC (t) && !TREE_ASM_WRITTEN (t));
3129 cfun->unexpanded_var_list =
3130 tree_cons (NULL_TREE, t,
3131 cfun->unexpanded_var_list);
3134 if (block)
3135 BLOCK_VARS (block) = chainon (BLOCK_VARS (block), vars);
3139 /* Finish up the copying of DECL into COPY.  DECL was originally in
3140 ID->src_fn, but COPY will be in ID->dst_fn.  Fix up the debug info,
3141 RTL and DECL_CONTEXT of the copy accordingly. */
3143 static tree
3144 copy_decl_for_dup_finish (copy_body_data *id, tree decl, tree copy)
3146 /* Don't generate debug information for the copy if we wouldn't have
3147 generated it for the original either. */
3148 DECL_ARTIFICIAL (copy) = DECL_ARTIFICIAL (decl);
3149 DECL_IGNORED_P (copy) = DECL_IGNORED_P (decl);
3151 /* Set the DECL_ABSTRACT_ORIGIN so the debugging routines know what
3152 declaration inspired this copy. */
3153 DECL_ABSTRACT_ORIGIN (copy) = DECL_ORIGIN (decl);
3155 /* The new variable/label has no RTL, yet. */
3156 if (CODE_CONTAINS_STRUCT (TREE_CODE (copy), TS_DECL_WRTL)
3157 && !TREE_STATIC (copy) && !DECL_EXTERNAL (copy))
3158 SET_DECL_RTL (copy, NULL_RTX);
3160 /* These args would always appear unused, if not for this. */
3161 TREE_USED (copy) = 1;
3163 /* Set the context for the new declaration. */
3164 if (!DECL_CONTEXT (decl))
3165 /* Globals stay global. */
3167 else if (DECL_CONTEXT (decl) != id->src_fn)
3168 /* Things that weren't in the scope of the function we're inlining
3169 from aren't in the scope we're inlining to, either. */
3171 else if (TREE_STATIC (decl))
3172 /* Function-scoped static variables should stay in the original
3173 function. */
3175 else
3176 /* Ordinary automatic local variables are now in the scope of the
3177 new function. */
3178 DECL_CONTEXT (copy) = id->dst_fn;
3180 return copy;
3183 static tree
3184 copy_decl_to_var (tree decl, copy_body_data *id)
3186 tree copy, type;
3188 gcc_assert (TREE_CODE (decl) == PARM_DECL
3189 || TREE_CODE (decl) == RESULT_DECL);
3191 type = TREE_TYPE (decl);
3193 copy = build_decl (VAR_DECL, DECL_NAME (decl), type);
3194 TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (decl);
3195 TREE_READONLY (copy) = TREE_READONLY (decl);
3196 TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (decl);
3197 DECL_GIMPLE_REG_P (copy) = DECL_GIMPLE_REG_P (decl);
3199 return copy_decl_for_dup_finish (id, decl, copy);
3202 /* Like copy_decl_to_var, but create a return slot object instead of a
3203 pointer variable for return by invisible reference. */
3205 static tree
3206 copy_result_decl_to_var (tree decl, copy_body_data *id)
3208 tree copy, type;
3210 gcc_assert (TREE_CODE (decl) == PARM_DECL
3211 || TREE_CODE (decl) == RESULT_DECL);
3213 type = TREE_TYPE (decl);
3214 if (DECL_BY_REFERENCE (decl))
3215 type = TREE_TYPE (type);
3217 copy = build_decl (VAR_DECL, DECL_NAME (decl), type);
3218 TREE_READONLY (copy) = TREE_READONLY (decl);
3219 TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (decl);
3220 if (!DECL_BY_REFERENCE (decl))
3222 TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (decl);
3223 DECL_GIMPLE_REG_P (copy) = DECL_GIMPLE_REG_P (decl);
3226 return copy_decl_for_dup_finish (id, decl, copy);
3230 static tree
3231 copy_decl_no_change (tree decl, copy_body_data *id)
3233 tree copy;
3235 copy = copy_node (decl);
3237 /* The COPY is not abstract; it will be generated in DST_FN. */
3238 DECL_ABSTRACT (copy) = 0;
3239 lang_hooks.dup_lang_specific_decl (copy);
3241 /* TREE_ADDRESSABLE isn't used to indicate that a label's address has
3242 been taken; it's for internal bookkeeping in expand_goto_internal. */
3243 if (TREE_CODE (copy) == LABEL_DECL)
3245 TREE_ADDRESSABLE (copy) = 0;
3246 LABEL_DECL_UID (copy) = -1;
3249 return copy_decl_for_dup_finish (id, decl, copy);
3252 static tree
3253 copy_decl_maybe_to_var (tree decl, copy_body_data *id)
3255 if (TREE_CODE (decl) == PARM_DECL || TREE_CODE (decl) == RESULT_DECL)
3256 return copy_decl_to_var (decl, id);
3257 else
3258 return copy_decl_no_change (decl, id);
3261 /* Return a copy of the function's argument tree. */
3262 static tree
3263 copy_arguments_for_versioning (tree orig_parm, copy_body_data * id)
3265 tree *arg_copy, *parg;
3267 arg_copy = &orig_parm;
3268 for (parg = arg_copy; *parg; parg = &TREE_CHAIN (*parg))
3270 tree new = remap_decl (*parg, id);
3271 lang_hooks.dup_lang_specific_decl (new);
3272 TREE_CHAIN (new) = TREE_CHAIN (*parg);
3273 *parg = new;
3275 return orig_parm;
3278 /* Return a copy of the function's static chain. */
3279 static tree
3280 copy_static_chain (tree static_chain, copy_body_data * id)
3282 tree *chain_copy, *pvar;
3284 chain_copy = &static_chain;
3285 for (pvar = chain_copy; *pvar; pvar = &TREE_CHAIN (*pvar))
3287 tree new = remap_decl (*pvar, id);
3288 lang_hooks.dup_lang_specific_decl (new);
3289 TREE_CHAIN (new) = TREE_CHAIN (*pvar);
3290 *pvar = new;
3292 return static_chain;
3295 /* Return true if the function is allowed to be versioned.
3296 This is a guard for the versioning functionality. */
3297 bool
3298 tree_versionable_function_p (tree fndecl)
3300 if (fndecl == NULL_TREE)
3301 return false;
3302 /* ??? There are cases where a function is
3303 uninlinable but can be versioned. */
3304 if (!tree_inlinable_function_p (fndecl))
3305 return false;
3307 return true;
3310 /* Create a copy of a function's tree.
3311 OLD_DECL and NEW_DECL are FUNCTION_DECL tree nodes
3312 of the original function and the new copied function
3313 respectively. In case we want to replace a DECL
3314 tree with another tree while duplicating the function's
3315 body, TREE_MAP represents the mapping between these
3316 trees. If UPDATE_CLONES is set, the call_stmt fields
3317 of edges of clones of the function will be updated. */
3318 void
3319 tree_function_versioning (tree old_decl, tree new_decl, varray_type tree_map,
3320 bool update_clones)
3322 struct cgraph_node *old_version_node;
3323 struct cgraph_node *new_version_node;
3324 copy_body_data id;
3325 tree p;
3326 unsigned i;
3327 struct ipa_replace_map *replace_info;
3328 basic_block old_entry_block;
3329 tree t_step;
3330 tree old_current_function_decl = current_function_decl;
3332 gcc_assert (TREE_CODE (old_decl) == FUNCTION_DECL
3333 && TREE_CODE (new_decl) == FUNCTION_DECL);
3334 DECL_POSSIBLY_INLINED (old_decl) = 1;
3336 old_version_node = cgraph_node (old_decl);
3337 new_version_node = cgraph_node (new_decl);
3339 DECL_ARTIFICIAL (new_decl) = 1;
3340 DECL_ABSTRACT_ORIGIN (new_decl) = DECL_ORIGIN (old_decl);
3342 /* Prepare the data structures for the tree copy. */
3343 memset (&id, 0, sizeof (id));
3345 /* Generate a new name for the new version. */
3346 if (!update_clones)
3348 DECL_NAME (new_decl) = create_tmp_var_name (NULL);
3349 SET_DECL_ASSEMBLER_NAME (new_decl, DECL_NAME (new_decl));
3350 SET_DECL_RTL (new_decl, NULL_RTX);
3351 id.statements_to_fold = pointer_set_create ();
3354 id.decl_map = splay_tree_new (splay_tree_compare_pointers, NULL, NULL);
3355 id.src_fn = old_decl;
3356 id.dst_fn = new_decl;
3357 id.src_node = old_version_node;
3358 id.dst_node = new_version_node;
3359 id.src_cfun = DECL_STRUCT_FUNCTION (old_decl);
3361 id.copy_decl = copy_decl_no_change;
3362 id.transform_call_graph_edges
3363 = update_clones ? CB_CGE_MOVE_CLONES : CB_CGE_MOVE;
3364 id.transform_new_cfg = true;
3365 id.transform_return_to_modify = false;
3366 id.transform_lang_insert_block = false;
3368 current_function_decl = new_decl;
3369 old_entry_block = ENTRY_BLOCK_PTR_FOR_FUNCTION
3370 (DECL_STRUCT_FUNCTION (old_decl));
3371 initialize_cfun (new_decl, old_decl,
3372 old_entry_block->count,
3373 old_entry_block->frequency);
3374 push_cfun (DECL_STRUCT_FUNCTION (new_decl));
3376 /* Copy the function's static chain. */
3377 p = DECL_STRUCT_FUNCTION (old_decl)->static_chain_decl;
3378 if (p)
3379 DECL_STRUCT_FUNCTION (new_decl)->static_chain_decl =
3380 copy_static_chain (DECL_STRUCT_FUNCTION (old_decl)->static_chain_decl,
3381 &id);
3382 /* Copy the function's arguments. */
3383 if (DECL_ARGUMENTS (old_decl) != NULL_TREE)
3384 DECL_ARGUMENTS (new_decl) =
3385 copy_arguments_for_versioning (DECL_ARGUMENTS (old_decl), &id);
3387 /* If there's a tree_map, prepare for substitution. */
3388 if (tree_map)
3389 for (i = 0; i < VARRAY_ACTIVE_SIZE (tree_map); i++)
3391 replace_info = VARRAY_GENERIC_PTR (tree_map, i);
3392 if (replace_info->replace_p)
3393 insert_decl_map (&id, replace_info->old_tree,
3394 replace_info->new_tree);
3397 DECL_INITIAL (new_decl) = remap_blocks (DECL_INITIAL (id.src_fn), &id);
3399 /* Renumber the lexical scoping (non-code) blocks consecutively. */
3400 number_blocks (id.dst_fn);
3402 if (DECL_STRUCT_FUNCTION (old_decl)->unexpanded_var_list != NULL_TREE)
3403 /* Add local vars. */
3404 for (t_step = DECL_STRUCT_FUNCTION (old_decl)->unexpanded_var_list;
3405 t_step; t_step = TREE_CHAIN (t_step))
3407 tree var = TREE_VALUE (t_step);
3408 if (TREE_STATIC (var) && !TREE_ASM_WRITTEN (var))
3409 cfun->unexpanded_var_list = tree_cons (NULL_TREE, var,
3410 cfun->unexpanded_var_list);
3411 else
3412 cfun->unexpanded_var_list =
3413 tree_cons (NULL_TREE, remap_decl (var, &id),
3414 cfun->unexpanded_var_list);
3417 /* Copy the Function's body. */
3418 copy_body (&id, old_entry_block->count, old_entry_block->frequency, ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR);
3420 if (DECL_RESULT (old_decl) != NULL_TREE)
3422 tree *res_decl = &DECL_RESULT (old_decl);
3423 DECL_RESULT (new_decl) = remap_decl (*res_decl, &id);
3424 lang_hooks.dup_lang_specific_decl (DECL_RESULT (new_decl));
3427 /* Renumber the lexical scoping (non-code) blocks consecutively. */
3428 number_blocks (new_decl);
3430 /* Clean up. */
3431 splay_tree_delete (id.decl_map);
3432 if (!update_clones)
3434 fold_marked_statements (0, id.statements_to_fold);
3435 pointer_set_destroy (id.statements_to_fold);
3436 fold_cond_expr_cond ();
3438 if (gimple_in_ssa_p (cfun))
3440 free_dominance_info (CDI_DOMINATORS);
3441 free_dominance_info (CDI_POST_DOMINATORS);
3442 if (!update_clones)
3443 delete_unreachable_blocks ();
3444 update_ssa (TODO_update_ssa);
3445 if (!update_clones)
3447 fold_cond_expr_cond ();
3448 if (need_ssa_update_p ())
3449 update_ssa (TODO_update_ssa);
3452 free_dominance_info (CDI_DOMINATORS);
3453 free_dominance_info (CDI_POST_DOMINATORS);
3454 pop_cfun ();
3455 current_function_decl = old_current_function_decl;
3456 gcc_assert (!current_function_decl
3457 || DECL_STRUCT_FUNCTION (current_function_decl) == cfun);
3458 return;
3461 /* Duplicate a type, fields and all. */
3463 tree
3464 build_duplicate_type (tree type)
3466 struct copy_body_data id;
3468 memset (&id, 0, sizeof (id));
3469 id.src_fn = current_function_decl;
3470 id.dst_fn = current_function_decl;
3471 id.src_cfun = cfun;
3472 id.decl_map = splay_tree_new (splay_tree_compare_pointers, NULL, NULL);
3474 type = remap_type_1 (type, &id);
3476 splay_tree_delete (id.decl_map);
3478 return type;