/* Tree inlining.
   Copyright 2001, 2002, 2003, 2004, 2005, 2006 Free Software Foundation, Inc.
   Contributed by Alexandre Oliva <aoliva@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to
the Free Software Foundation, 51 Franklin Street, Fifth Floor,
Boston, MA 02110-1301, USA.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "toplev.h"
#include "tree.h"
#include "tree-inline.h"
#include "rtl.h"
#include "expr.h"
#include "flags.h"
#include "params.h"
#include "input.h"
#include "insn-config.h"
#include "varray.h"
#include "hashtab.h"
#include "langhooks.h"
#include "basic-block.h"
#include "tree-iterator.h"
#include "cgraph.h"
#include "intl.h"
#include "tree-mudflap.h"
#include "tree-flow.h"
#include "function.h"
#include "ggc.h"
#include "diagnostic.h"
#include "except.h"
#include "debug.h"
#include "pointer-set.h"
#include "ipa-prop.h"
#include "value-prof.h"
#include "tree-pass.h"

/* I'm not really happy about this, but we need to handle gimple and
   non-gimple trees.  */
#include "tree-gimple.h"

/* Inlining, Cloning, Versioning, Parallelization

   Inlining: a function body is duplicated, but the PARM_DECLs are
   remapped into VAR_DECLs, and non-void RETURN_EXPRs become
   GIMPLE_MODIFY_STMTs that store to a dedicated returned-value variable.
   The duplicated eh_region info of the copy will later be appended
   to the info for the caller; the eh_region info in copied throwing
   statements and RESX_EXPRs is adjusted accordingly.

   Cloning: (only in C++) We have one body for a con/de/structor, and
   multiple function decls, each with a unique parameter list.
   Duplicate the body, using the given splay tree; some parameters
   will become constants (like 0 or 1).

   Versioning: a function body is duplicated and the duplicate becomes
   a new function, rather than being inserted into the blocks of an
   existing function as with inlining.  Some parameters will become
   constants.

   Parallelization: a region of a function is duplicated resulting in
   a new function.  Variables may be replaced with complex expressions
   to enable shared variable semantics.

   All of these will simultaneously look up any callgraph edges.  If
   we're going to inline the duplicated function body, and the given
   function has some cloned callgraph nodes (one for each place this
   function will be inlined) those callgraph edges will be duplicated.
   If we're cloning the body, those callgraph edges will be
   updated to point into the new body.  (Note that the original
   callgraph node and edge list will not be altered.)

   See the CALL_EXPR handling case in copy_body_r ().  */

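/* As a purely illustrative sketch (not the output of any particular
   pass): inlining a callee such as

       int square (int x) { return x * x; }

   remaps the PARM_DECL `x' to a fresh local VAR_DECL and rewrites the
   RETURN_EXPR into an assignment to the variable created by
   declare_return_variable, roughly

       x.1 = arg;
       retval.0 = x.1 * x.1;

   after which control falls through to the code following the call
   site.  (The names x.1 and retval.0 are made up for this example.)  */
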
/* 0 if we should not perform inlining.
   1 if we should expand function calls inline at the tree level.
   2 if we should consider *all* functions to be inline
   candidates.  */

int flag_inline_trees = 0;

/* To Do:

   o In order to make inlining-on-trees work, we pessimized
     function-local static constants.  In particular, they are now
     always output, even when not addressed.  Fix this by treating
     function-local static constants just like global static
     constants; the back-end already knows not to output them if they
     are not needed.

   o Provide heuristics to clamp inlining of recursive template
     calls?  */

/* Weights that estimate_num_insns uses for heuristics in inlining.  */

eni_weights eni_inlining_weights;

/* Weights that estimate_num_insns uses to estimate the size of the
   produced code.  */

eni_weights eni_size_weights;

/* Weights that estimate_num_insns uses to estimate the time necessary
   to execute the produced code.  */

eni_weights eni_time_weights;

/* Prototypes.  */

static tree declare_return_variable (copy_body_data *, tree, tree, tree *);
static tree copy_generic_body (copy_body_data *);
static bool inlinable_function_p (tree);
static void remap_block (tree *, copy_body_data *);
static tree remap_decls (tree, copy_body_data *);
static void copy_bind_expr (tree *, int *, copy_body_data *);
static tree mark_local_for_remap_r (tree *, int *, void *);
static void unsave_expr_1 (tree);
static tree unsave_r (tree *, int *, void *);
static void declare_inline_vars (tree, tree);
static void remap_save_expr (tree *, void *, int *);
static void add_lexical_block (tree current_block, tree new_block);
static tree copy_decl_to_var (tree, copy_body_data *);
static tree copy_result_decl_to_var (tree, copy_body_data *);
static tree copy_decl_no_change (tree, copy_body_data *);
static tree copy_decl_maybe_to_var (tree, copy_body_data *);

/* Insert a tree->tree mapping for ID.  Although the name suggests
   that the trees should be variables, this mapping is used for more
   than that.  */

void
insert_decl_map (copy_body_data *id, tree key, tree value)
{
  splay_tree_insert (id->decl_map, (splay_tree_key) key,
		     (splay_tree_value) value);

  /* Always insert an identity map as well.  If we see this same new
     node again, we won't want to duplicate it a second time.  */
  if (key != value)
    splay_tree_insert (id->decl_map, (splay_tree_key) value,
		       (splay_tree_value) value);
}

/* Construct new SSA name for old NAME.  ID is the inline context.  */

static tree
remap_ssa_name (tree name, copy_body_data *id)
{
  tree new;
  splay_tree_node n;

  gcc_assert (TREE_CODE (name) == SSA_NAME);

  n = splay_tree_lookup (id->decl_map, (splay_tree_key) name);
  if (n)
    return (tree) n->value;

  /* Do not set DEF_STMT yet as statement is not copied yet.  We do that
     in copy_bb.  */
  new = remap_decl (SSA_NAME_VAR (name), id);
  /* We might've substituted a constant or another SSA_NAME for
     the variable.

     Replace the SSA name representing RESULT_DECL by the variable during
     inlining:  this saves us from the need to introduce a PHI node in the
     case the return value is only partly initialized.  */
  if ((TREE_CODE (new) == VAR_DECL || TREE_CODE (new) == PARM_DECL)
      && (TREE_CODE (SSA_NAME_VAR (name)) != RESULT_DECL
	  || !id->transform_return_to_modify))
    {
      new = make_ssa_name (new, NULL);
      insert_decl_map (id, name, new);
      if (IS_EMPTY_STMT (SSA_NAME_DEF_STMT (name)))
	{
	  SSA_NAME_DEF_STMT (new) = build_empty_stmt ();
	  if (gimple_default_def (id->src_cfun, SSA_NAME_VAR (name)) == name)
	    set_default_def (SSA_NAME_VAR (new), new);
	}
      SSA_NAME_OCCURS_IN_ABNORMAL_PHI (new)
	= SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name);
      TREE_TYPE (new) = TREE_TYPE (SSA_NAME_VAR (new));
    }
  else
    insert_decl_map (id, name, new);
  return new;
}

/* Remap DECL during the copying of the BLOCK tree for the function.  */

tree
remap_decl (tree decl, copy_body_data *id)
{
  splay_tree_node n;
  tree fn;

  /* We only remap local variables in the current function.  */
  fn = id->src_fn;

  /* See if we have remapped this declaration.  */

  n = splay_tree_lookup (id->decl_map, (splay_tree_key) decl);

  /* If we didn't already have an equivalent for this declaration,
     create one now.  */
  if (!n)
    {
      /* Make a copy of the variable or label.  */
      tree t = id->copy_decl (decl, id);

      /* Remember it, so that if we encounter this local entity again
	 we can reuse this copy.  Do this early because remap_type may
	 need this decl for TYPE_STUB_DECL.  */
      insert_decl_map (id, decl, t);

      if (!DECL_P (t))
	return t;

      /* Remap types, if necessary.  */
      TREE_TYPE (t) = remap_type (TREE_TYPE (t), id);
      if (TREE_CODE (t) == TYPE_DECL)
	DECL_ORIGINAL_TYPE (t) = remap_type (DECL_ORIGINAL_TYPE (t), id);

      /* Remap sizes as necessary.  */
      walk_tree (&DECL_SIZE (t), copy_body_r, id, NULL);
      walk_tree (&DECL_SIZE_UNIT (t), copy_body_r, id, NULL);

      /* If fields, do likewise for offset and qualifier.  */
      if (TREE_CODE (t) == FIELD_DECL)
	{
	  walk_tree (&DECL_FIELD_OFFSET (t), copy_body_r, id, NULL);
	  if (TREE_CODE (DECL_CONTEXT (t)) == QUAL_UNION_TYPE)
	    walk_tree (&DECL_QUALIFIER (t), copy_body_r, id, NULL);
	}

      if (cfun && gimple_in_ssa_p (cfun)
	  && (TREE_CODE (t) == VAR_DECL
	      || TREE_CODE (t) == RESULT_DECL || TREE_CODE (t) == PARM_DECL))
	{
	  tree def = gimple_default_def (id->src_cfun, decl);
	  get_var_ann (t);
	  if (TREE_CODE (decl) != PARM_DECL && def)
	    {
	      tree map = remap_ssa_name (def, id);
	      /* Watch out for RESULT_DECLs whose SSA names map directly
		 to them.  */
	      if (TREE_CODE (map) == SSA_NAME)
		set_default_def (t, map);
	    }
	  add_referenced_var (t);
	}
      return t;
    }

  return unshare_expr ((tree) n->value);
}

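/* Helper for remap_type.  TYPE is known to need remapping: build a copy,
   register it in ID->decl_map, and remap its component types and
   variants.  */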
static tree
remap_type_1 (tree type, copy_body_data *id)
{
  splay_tree_node node;
  tree new, t;

  if (type == NULL)
    return type;

  /* See if we have remapped this type.  */
  node = splay_tree_lookup (id->decl_map, (splay_tree_key) type);
  if (node)
    return (tree) node->value;

  /* The type only needs remapping if it's variably modified.  */
  if (! variably_modified_type_p (type, id->src_fn))
    {
      insert_decl_map (id, type, type);
      return type;
    }

  /* We do need a copy.  Build and register it now.  If this is a pointer or
     reference type, remap the designated type and make a new pointer or
     reference type.  */
  if (TREE_CODE (type) == POINTER_TYPE)
    {
      new = build_pointer_type_for_mode (remap_type (TREE_TYPE (type), id),
					 TYPE_MODE (type),
					 TYPE_REF_CAN_ALIAS_ALL (type));
      insert_decl_map (id, type, new);
      return new;
    }
  else if (TREE_CODE (type) == REFERENCE_TYPE)
    {
      new = build_reference_type_for_mode (remap_type (TREE_TYPE (type), id),
					   TYPE_MODE (type),
					   TYPE_REF_CAN_ALIAS_ALL (type));
      insert_decl_map (id, type, new);
      return new;
    }
  else
    new = copy_node (type);

  insert_decl_map (id, type, new);

  /* This is a new type, not a copy of an old type.  Need to reassociate
     variants.  We can handle everything except the main variant lazily.  */
  t = TYPE_MAIN_VARIANT (type);
  if (type != t)
    {
      t = remap_type (t, id);
      TYPE_MAIN_VARIANT (new) = t;
      TYPE_NEXT_VARIANT (new) = TYPE_MAIN_VARIANT (t);
      TYPE_NEXT_VARIANT (t) = new;
    }
  else
    {
      TYPE_MAIN_VARIANT (new) = new;
      TYPE_NEXT_VARIANT (new) = NULL;
    }

  if (TYPE_STUB_DECL (type))
    TYPE_STUB_DECL (new) = remap_decl (TYPE_STUB_DECL (type), id);

  /* Lazily create pointer and reference types.  */
  TYPE_POINTER_TO (new) = NULL;
  TYPE_REFERENCE_TO (new) = NULL;

  switch (TREE_CODE (new))
    {
    case INTEGER_TYPE:
    case REAL_TYPE:
    case ENUMERAL_TYPE:
    case BOOLEAN_TYPE:
      t = TYPE_MIN_VALUE (new);
      if (t && TREE_CODE (t) != INTEGER_CST)
	walk_tree (&TYPE_MIN_VALUE (new), copy_body_r, id, NULL);

      t = TYPE_MAX_VALUE (new);
      if (t && TREE_CODE (t) != INTEGER_CST)
	walk_tree (&TYPE_MAX_VALUE (new), copy_body_r, id, NULL);
      return new;

    case FUNCTION_TYPE:
      TREE_TYPE (new) = remap_type (TREE_TYPE (new), id);
      walk_tree (&TYPE_ARG_TYPES (new), copy_body_r, id, NULL);
      return new;

    case ARRAY_TYPE:
      TREE_TYPE (new) = remap_type (TREE_TYPE (new), id);
      TYPE_DOMAIN (new) = remap_type (TYPE_DOMAIN (new), id);
      break;

    case RECORD_TYPE:
    case UNION_TYPE:
    case QUAL_UNION_TYPE:
      {
	tree f, nf = NULL;

	for (f = TYPE_FIELDS (new); f ; f = TREE_CHAIN (f))
	  {
	    t = remap_decl (f, id);
	    DECL_CONTEXT (t) = new;
	    TREE_CHAIN (t) = nf;
	    nf = t;
	  }
	TYPE_FIELDS (new) = nreverse (nf);
      }
      break;

    case OFFSET_TYPE:
    default:
      /* Shouldn't have been thought variable sized.  */
      gcc_unreachable ();
    }

  walk_tree (&TYPE_SIZE (new), copy_body_r, id, NULL);
  walk_tree (&TYPE_SIZE_UNIT (new), copy_body_r, id, NULL);

  return new;
}

tree
remap_type (tree type, copy_body_data *id)
{
  splay_tree_node node;

  if (type == NULL)
    return type;

  /* See if we have remapped this type.  */
  node = splay_tree_lookup (id->decl_map, (splay_tree_key) type);
  if (node)
    return (tree) node->value;

  /* The type only needs remapping if it's variably modified.  */
  if (! variably_modified_type_p (type, id->src_fn))
    {
      insert_decl_map (id, type, type);
      return type;
    }

  return remap_type_1 (type, id);
}

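/* Remap the chain of declarations DECLS using the inline context ID and
   return the new chain.  Local statics are not remapped; they are moved
   onto the destination function's unexpanded_var_list instead.  */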
static tree
remap_decls (tree decls, copy_body_data *id)
{
  tree old_var;
  tree new_decls = NULL_TREE;

  /* Remap its variables.  */
  for (old_var = decls; old_var; old_var = TREE_CHAIN (old_var))
    {
      tree new_var;

      /* We cannot chain the local static declarations into the copied
	 body, as we must not duplicate them (the one-decl rule).
	 Instead, link them into the destination function's
	 unexpanded_var_list.  */
      if (!lang_hooks.tree_inlining.auto_var_in_fn_p (old_var, id->src_fn)
	  && !DECL_EXTERNAL (old_var))
	{
	  cfun->unexpanded_var_list = tree_cons (NULL_TREE, old_var,
						 cfun->unexpanded_var_list);
	  continue;
	}

      /* Remap the variable.  */
      new_var = remap_decl (old_var, id);

      /* If we didn't remap this variable, we can't mess with its
	 TREE_CHAIN.  If we remapped this variable to the return slot, it's
	 already declared somewhere else, so don't declare it here.  */
      if (!new_var || new_var == id->retvar)
	;
      else
	{
	  gcc_assert (DECL_P (new_var));
	  TREE_CHAIN (new_var) = new_decls;
	  new_decls = new_var;
	}
    }

  return nreverse (new_decls);
}

/* Copy the BLOCK to contain remapped versions of the variables
   therein.  And hook the new block into the block-tree.  */

static void
remap_block (tree *block, copy_body_data *id)
{
  tree old_block;
  tree new_block;
  tree fn;

  /* Make the new block.  */
  old_block = *block;
  new_block = make_node (BLOCK);
  TREE_USED (new_block) = TREE_USED (old_block);
  BLOCK_ABSTRACT_ORIGIN (new_block) = old_block;
  BLOCK_SOURCE_LOCATION (new_block) = BLOCK_SOURCE_LOCATION (old_block);
  *block = new_block;

  /* Remap its variables.  */
  BLOCK_VARS (new_block) = remap_decls (BLOCK_VARS (old_block), id);

  fn = id->dst_fn;

  if (id->transform_lang_insert_block)
    lang_hooks.decls.insert_block (new_block);

  /* Remember the remapped block.  */
  insert_decl_map (id, old_block, new_block);
}

/* Copy the whole block tree and root it in id->block.  */
static tree
remap_blocks (tree block, copy_body_data *id)
{
  tree t;
  tree new = block;

  if (!block)
    return NULL;

  remap_block (&new, id);
  gcc_assert (new != block);
  for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
    add_lexical_block (new, remap_blocks (t, id));
  return new;
}

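/* Replace the STATEMENT_LIST at *TP with a newly allocated list; the
   statements themselves are copied later, when the enclosing tree walk
   reaches them.  */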
static void
copy_statement_list (tree *tp)
{
  tree_stmt_iterator oi, ni;
  tree new;

  new = alloc_stmt_list ();
  ni = tsi_start (new);
  oi = tsi_start (*tp);
  *tp = new;

  for (; !tsi_end_p (oi); tsi_next (&oi))
    tsi_link_after (&ni, tsi_stmt (oi), TSI_NEW_STMT);
}

static void
copy_bind_expr (tree *tp, int *walk_subtrees, copy_body_data *id)
{
  tree block = BIND_EXPR_BLOCK (*tp);
  /* Copy (and replace) the statement.  */
  copy_tree_r (tp, walk_subtrees, NULL);
  if (block)
    {
      remap_block (&block, id);
      BIND_EXPR_BLOCK (*tp) = block;
    }

  if (BIND_EXPR_VARS (*tp))
    /* This will remap a lot of the same decls again, but this should be
       harmless.  */
    BIND_EXPR_VARS (*tp) = remap_decls (BIND_EXPR_VARS (*tp), id);
}

/* Called from copy_body via walk_tree.  DATA is really a
   `copy_body_data *'.  */

tree
copy_body_r (tree *tp, int *walk_subtrees, void *data)
{
  copy_body_data *id = (copy_body_data *) data;
  tree fn = id->src_fn;
  tree new_block;

  /* Begin by recognizing trees that we'll completely rewrite for the
     inlining context.  Our output for these trees is completely
     different from our input (e.g. RETURN_EXPR is deleted, and morphs
     into an edge).  Further down, we'll handle trees that get
     duplicated and/or tweaked.  */

  /* When requested, RETURN_EXPRs should be transformed to just the
     contained GIMPLE_MODIFY_STMT.  The branch semantics of the return will
     be handled elsewhere by manipulating the CFG rather than a statement.  */
  if (TREE_CODE (*tp) == RETURN_EXPR && id->transform_return_to_modify)
    {
      tree assignment = TREE_OPERAND (*tp, 0);

      /* If we're returning something, just turn that into an
	 assignment into the equivalent of the original RESULT_DECL.
	 If the "assignment" is just the result decl, the result
	 decl has already been set (e.g. a recent "foo (&result_decl,
	 ...)"); just toss the entire RETURN_EXPR.  */
      if (assignment && TREE_CODE (assignment) == GIMPLE_MODIFY_STMT)
	{
	  /* Replace the RETURN_EXPR with (a copy of) the
	     GIMPLE_MODIFY_STMT hanging underneath.  */
	  *tp = copy_node (assignment);
	}
      else /* Else the RETURN_EXPR returns no value.  */
	{
	  *tp = NULL;
	  return (tree) (void *)1;
	}
    }
  else if (TREE_CODE (*tp) == SSA_NAME)
    {
      *tp = remap_ssa_name (*tp, id);
      *walk_subtrees = 0;
      return NULL;
    }

  /* Local variables and labels need to be replaced by equivalent
     variables.  We don't want to copy static variables; there's only
     one of those, no matter how many times we inline the containing
     function.  Similarly for globals from an outer function.  */
  else if (lang_hooks.tree_inlining.auto_var_in_fn_p (*tp, fn))
    {
      tree new_decl;

      /* Remap the declaration.  */
      new_decl = remap_decl (*tp, id);
      gcc_assert (new_decl);
      /* Replace this variable with the copy.  */
      STRIP_TYPE_NOPS (new_decl);
      *tp = new_decl;
      *walk_subtrees = 0;
    }
  else if (TREE_CODE (*tp) == STATEMENT_LIST)
    copy_statement_list (tp);
  else if (TREE_CODE (*tp) == SAVE_EXPR)
    remap_save_expr (tp, id->decl_map, walk_subtrees);
  else if (TREE_CODE (*tp) == LABEL_DECL
	   && (! DECL_CONTEXT (*tp)
	       || decl_function_context (*tp) == id->src_fn))
    /* These may need to be remapped for EH handling.  */
    *tp = remap_decl (*tp, id);
  else if (TREE_CODE (*tp) == BIND_EXPR)
    copy_bind_expr (tp, walk_subtrees, id);
  /* Types may need remapping as well.  */
  else if (TYPE_P (*tp))
    *tp = remap_type (*tp, id);

  /* If this is a constant, we have to copy the node iff the type will be
     remapped.  copy_tree_r will not copy a constant.  */
  else if (CONSTANT_CLASS_P (*tp))
    {
      tree new_type = remap_type (TREE_TYPE (*tp), id);

      if (new_type == TREE_TYPE (*tp))
	*walk_subtrees = 0;

      else if (TREE_CODE (*tp) == INTEGER_CST)
	*tp = build_int_cst_wide (new_type, TREE_INT_CST_LOW (*tp),
				  TREE_INT_CST_HIGH (*tp));
      else
	{
	  *tp = copy_node (*tp);
	  TREE_TYPE (*tp) = new_type;
	}
    }

  /* Otherwise, just copy the node.  Note that copy_tree_r already
     knows not to copy VAR_DECLs, etc., so this is safe.  */
  else
    {
      /* Here we handle trees that are not completely rewritten.
	 First we detect some inlining-induced bogosities for
	 discarding.  */
      if (TREE_CODE (*tp) == GIMPLE_MODIFY_STMT
	  && GIMPLE_STMT_OPERAND (*tp, 0) == GIMPLE_STMT_OPERAND (*tp, 1)
	  && (lang_hooks.tree_inlining.auto_var_in_fn_p
	      (GIMPLE_STMT_OPERAND (*tp, 0), fn)))
	{
	  /* Some assignments VAR = VAR; don't generate any rtl code
	     and thus don't count as variable modification.  Avoid
	     keeping bogosities like 0 = 0.  */
	  tree decl = GIMPLE_STMT_OPERAND (*tp, 0), value;
	  splay_tree_node n;

	  n = splay_tree_lookup (id->decl_map, (splay_tree_key) decl);
	  if (n)
	    {
	      value = (tree) n->value;
	      STRIP_TYPE_NOPS (value);
	      if (TREE_CONSTANT (value) || TREE_READONLY_DECL_P (value))
		{
		  *tp = build_empty_stmt ();
		  return copy_body_r (tp, walk_subtrees, data);
		}
	    }
	}
      else if (TREE_CODE (*tp) == INDIRECT_REF)
	{
	  /* Get rid of *& from inline substitutions that can happen when a
	     pointer argument is an ADDR_EXPR.  */
	  tree decl = TREE_OPERAND (*tp, 0);
	  splay_tree_node n;

	  n = splay_tree_lookup (id->decl_map, (splay_tree_key) decl);
	  if (n)
	    {
	      tree new;
	      tree old;
	      /* If we happen to get an ADDR_EXPR in n->value, strip
		 it manually here as we'll eventually get ADDR_EXPRs
		 which lie about their types pointed to.  In this case
		 build_fold_indirect_ref wouldn't strip the INDIRECT_REF,
		 but we absolutely rely on that.  As fold_indirect_ref
		 does other useful transformations, try that first, though.  */
	      tree type = TREE_TYPE (TREE_TYPE ((tree)n->value));
	      new = unshare_expr ((tree)n->value);
	      old = *tp;
	      *tp = fold_indirect_ref_1 (type, new);
	      if (! *tp)
		{
		  if (TREE_CODE (new) == ADDR_EXPR)
		    *tp = TREE_OPERAND (new, 0);
		  else
		    {
		      *tp = build1 (INDIRECT_REF, type, new);
		      TREE_THIS_VOLATILE (*tp) = TREE_THIS_VOLATILE (old);
		    }
		}
	      *walk_subtrees = 0;
	      return NULL;
	    }
	}

      /* Here is the "usual case".  Copy this tree node, and then
	 tweak some special cases.  */
      copy_tree_r (tp, walk_subtrees, NULL);

      /* Global variables we haven't seen yet need to go into referenced
	 vars.  */
      if (gimple_in_ssa_p (cfun) && TREE_CODE (*tp) == VAR_DECL)
	add_referenced_var (*tp);

      /* If EXPR has a block defined, map it to the newly constructed block.
	 When inlining we want EXPRs without a block to appear in the block
	 of the function call.  */
      if (EXPR_P (*tp) || GIMPLE_STMT_P (*tp))
	{
	  new_block = id->block;
	  if (TREE_BLOCK (*tp))
	    {
	      splay_tree_node n;
	      n = splay_tree_lookup (id->decl_map,
				     (splay_tree_key) TREE_BLOCK (*tp));
	      gcc_assert (n);
	      new_block = (tree) n->value;
	    }
	  TREE_BLOCK (*tp) = new_block;
	}

      if (TREE_CODE (*tp) == RESX_EXPR && id->eh_region_offset)
	TREE_OPERAND (*tp, 0) =
	  build_int_cst
	    (NULL_TREE,
	     id->eh_region_offset + TREE_INT_CST_LOW (TREE_OPERAND (*tp, 0)));

      if (!GIMPLE_TUPLE_P (*tp))
	TREE_TYPE (*tp) = remap_type (TREE_TYPE (*tp), id);

      /* The copied TARGET_EXPR has never been expanded, even if the
	 original node was expanded already.  */
      if (TREE_CODE (*tp) == TARGET_EXPR && TREE_OPERAND (*tp, 3))
	{
	  TREE_OPERAND (*tp, 1) = TREE_OPERAND (*tp, 3);
	  TREE_OPERAND (*tp, 3) = NULL_TREE;
	}

      /* Variable substitution need not be simple.  In particular, the
	 INDIRECT_REF substitution above.  Make sure that TREE_CONSTANT
	 and friends are up-to-date.  */
      else if (TREE_CODE (*tp) == ADDR_EXPR)
	{
	  walk_tree (&TREE_OPERAND (*tp, 0), copy_body_r, id, NULL);
	  /* Handle the case where we substituted an INDIRECT_REF
	     into the operand of the ADDR_EXPR.  */
	  if (TREE_CODE (TREE_OPERAND (*tp, 0)) == INDIRECT_REF)
	    *tp = TREE_OPERAND (TREE_OPERAND (*tp, 0), 0);
	  else
	    recompute_tree_invariant_for_addr_expr (*tp);
	  *walk_subtrees = 0;
	}
    }

  /* Keep iterating.  */
  return NULL_TREE;
}

/* Copy basic block, scale profile accordingly.  Edges will be taken care of
   later.  */

static basic_block
copy_bb (copy_body_data *id, basic_block bb, int frequency_scale, int count_scale)
{
  block_stmt_iterator bsi, copy_bsi;
  basic_block copy_basic_block;

  /* create_basic_block() will append every new block to
     basic_block_info automatically.  */
  copy_basic_block = create_basic_block (NULL, (void *) 0,
					 (basic_block) bb->prev_bb->aux);
  copy_basic_block->count = bb->count * count_scale / REG_BR_PROB_BASE;

  /* We are going to rebuild frequencies from scratch.  These values
     have just small importance to drive canonicalize_loop_headers.  */
  copy_basic_block->frequency = ((gcov_type)bb->frequency
				 * frequency_scale / REG_BR_PROB_BASE);
  if (copy_basic_block->frequency > BB_FREQ_MAX)
    copy_basic_block->frequency = BB_FREQ_MAX;
  copy_bsi = bsi_start (copy_basic_block);

  for (bsi = bsi_start (bb);
       !bsi_end_p (bsi); bsi_next (&bsi))
    {
      tree stmt = bsi_stmt (bsi);
      tree orig_stmt = stmt;

      walk_tree (&stmt, copy_body_r, id, NULL);

      /* RETURN_EXPR might be removed,
	 this is signalled by making stmt pointer NULL.  */
      if (stmt)
	{
	  tree call, decl;

	  gimple_duplicate_stmt_histograms (cfun, stmt, id->src_cfun, orig_stmt);

	  /* With return slot optimization we can end up with
	     non-gimple (foo *)&this->m, fix that here.  */
	  if (TREE_CODE (stmt) == GIMPLE_MODIFY_STMT
	      && TREE_CODE (GIMPLE_STMT_OPERAND (stmt, 1)) == NOP_EXPR
	      && !is_gimple_val (TREE_OPERAND (GIMPLE_STMT_OPERAND (stmt, 1), 0)))
	    gimplify_stmt (&stmt);

	  bsi_insert_after (&copy_bsi, stmt, BSI_NEW_STMT);

	  /* Process new statement.  gimplify_stmt possibly turned the statement
	     into multiple statements, we need to process all of them.  */
	  while (!bsi_end_p (copy_bsi))
	    {
	      stmt = bsi_stmt (copy_bsi);
	      call = get_call_expr_in (stmt);

	      /* Statements produced by inlining can be unfolded, especially
		 when we constant propagated some operands.  We can't fold
		 them right now for two reasons:
		 1) folding requires SSA_NAME_DEF_STMTs to be correct
		 2) we can't change function calls to builtins.
		 So we just mark the statement for later folding.  We mark
		 all new statements, instead of just the statements that
		 changed by some nontrivial substitution, so that even
		 statements made foldable indirectly are updated.  If this
		 turns out to be expensive, copy_body can be told to watch
		 for nontrivial changes.  */
	      if (id->statements_to_fold)
		pointer_set_insert (id->statements_to_fold, stmt);
	      /* We're duplicating a CALL_EXPR.  Find any corresponding
		 callgraph edges and update or duplicate them.  */
	      if (call && (decl = get_callee_fndecl (call)))
		{
		  struct cgraph_node *node;
		  struct cgraph_edge *edge;

		  switch (id->transform_call_graph_edges)
		    {
		    case CB_CGE_DUPLICATE:
		      edge = cgraph_edge (id->src_node, orig_stmt);
		      if (edge)
			cgraph_clone_edge (edge, id->dst_node, stmt,
					   REG_BR_PROB_BASE, 1, edge->frequency, true);
		      break;

		    case CB_CGE_MOVE_CLONES:
		      for (node = id->dst_node->next_clone;
			   node;
			   node = node->next_clone)
			{
			  edge = cgraph_edge (node, orig_stmt);
			  gcc_assert (edge);
			  cgraph_set_call_stmt (edge, stmt);
			}
		      /* FALLTHRU */

		    case CB_CGE_MOVE:
		      edge = cgraph_edge (id->dst_node, orig_stmt);
		      if (edge)
			cgraph_set_call_stmt (edge, stmt);
		      break;

		    default:
		      gcc_unreachable ();
		    }
		}
	      /* If you think we can abort here, you are wrong.
		 There is no region 0 in tree land.  */
	      gcc_assert (lookup_stmt_eh_region_fn (id->src_cfun, orig_stmt)
			  != 0);

	      if (tree_could_throw_p (stmt))
		{
		  int region = lookup_stmt_eh_region_fn (id->src_cfun, orig_stmt);
		  /* Add an entry for the copied tree in the EH hashtable.
		     When cloning or versioning, use the hashtable in
		     cfun, and just copy the EH number.  When inlining, use the
		     hashtable in the caller, and adjust the region number.  */
		  if (region > 0)
		    add_stmt_to_eh_region (stmt, region + id->eh_region_offset);

		  /* If this tree doesn't have a region associated with it,
		     and there is a "current region,"
		     then associate this tree with the current region
		     and add edges associated with this region.  */
		  if ((lookup_stmt_eh_region_fn (id->src_cfun,
						 orig_stmt) <= 0
		       && id->eh_region > 0)
		      && tree_could_throw_p (stmt))
		    add_stmt_to_eh_region (stmt, id->eh_region);
		}
	      if (gimple_in_ssa_p (cfun))
		{
		  ssa_op_iter i;
		  tree def;

		  find_new_referenced_vars (bsi_stmt_ptr (copy_bsi));
		  FOR_EACH_SSA_TREE_OPERAND (def, stmt, i, SSA_OP_DEF)
		    if (TREE_CODE (def) == SSA_NAME)
		      SSA_NAME_DEF_STMT (def) = stmt;
		}
	      bsi_next (&copy_bsi);
	    }
	  copy_bsi = bsi_last (copy_basic_block);
	}
    }
  return copy_basic_block;
}

/* Inserting a Single Entry Multiple Exit region in SSA form into code
   already in SSA form is quite easy, since the dominator relationship
   for the old basic blocks does not change.

   There is, however, an exception: inlining may change the dominator
   relation across EH edges from basic blocks within the inlined function
   to landing pads in the function we inline into.

   This function marks the PHI_RESULT of such PHI nodes for renaming;
   this is safe because the EH edges are abnormal and
   SSA_NAME_OCCURS_IN_ABNORMAL_PHI must be set, which means there will be
   no overlapping live ranges for the underlying symbol.

   This might change in the future if we allow redirecting of EH edges;
   we might then want to change the way we build the CFG pre-inlining to
   include all the possible edges.  */
static void
update_ssa_across_eh_edges (basic_block bb)
{
  edge e;
  edge_iterator ei;

  FOR_EACH_EDGE (e, ei, bb->succs)
    if (!e->dest->aux
	|| ((basic_block)e->dest->aux)->index == ENTRY_BLOCK)
      {
	tree phi;

	gcc_assert (e->flags & EDGE_EH);
	for (phi = phi_nodes (e->dest); phi; phi = PHI_CHAIN (phi))
	  {
	    gcc_assert (SSA_NAME_OCCURS_IN_ABNORMAL_PHI
			(PHI_RESULT (phi)));
	    mark_sym_for_renaming
	      (SSA_NAME_VAR (PHI_RESULT (phi)));
	  }
      }
}

/* Copy edges from BB into its copy constructed earlier, scale profile
   accordingly.  Edges will be taken care of later.  Assume aux
   pointers to point to the copies of each BB.  */
static void
copy_edges_for_bb (basic_block bb, int count_scale)
{
  basic_block new_bb = (basic_block) bb->aux;
  edge_iterator ei;
  edge old_edge;
  block_stmt_iterator bsi;
  int flags;

  /* Use the indices from the original blocks to create edges for the
     new ones.  */
  FOR_EACH_EDGE (old_edge, ei, bb->succs)
    if (!(old_edge->flags & EDGE_EH))
      {
	edge new;

	flags = old_edge->flags;

	/* Return edges do get a FALLTHRU flag when they get inlined.  */
	if (old_edge->dest->index == EXIT_BLOCK && !old_edge->flags
	    && old_edge->dest->aux != EXIT_BLOCK_PTR)
	  flags |= EDGE_FALLTHRU;
	new = make_edge (new_bb, (basic_block) old_edge->dest->aux, flags);
	new->count = old_edge->count * count_scale / REG_BR_PROB_BASE;
	new->probability = old_edge->probability;
      }

  if (bb->index == ENTRY_BLOCK || bb->index == EXIT_BLOCK)
    return;

  for (bsi = bsi_start (new_bb); !bsi_end_p (bsi);)
    {
      tree copy_stmt;

      copy_stmt = bsi_stmt (bsi);
      update_stmt (copy_stmt);
      if (gimple_in_ssa_p (cfun))
	mark_symbols_for_renaming (copy_stmt);
      /* Do this before the possible split_block.  */
      bsi_next (&bsi);

      /* If this tree could throw an exception, there are two
	 cases where we need to add abnormal edge(s): the
	 tree wasn't in a region and there is a "current
	 region" in the caller; or the original tree had
	 EH edges.  In both cases split the block after the tree,
	 and add abnormal edge(s) as needed; we need both
	 those from the callee and the caller.
	 We check whether the copy can throw, because the const
	 propagation can change an INDIRECT_REF which throws
	 into a COMPONENT_REF which doesn't.  If the copy
	 can throw, the original could also throw.  */

      if (tree_can_throw_internal (copy_stmt))
	{
	  if (!bsi_end_p (bsi))
	    /* Note that bb's predecessor edges aren't necessarily
	       right at this point; split_block doesn't care.  */
	    {
	      edge e = split_block (new_bb, copy_stmt);

	      new_bb = e->dest;
	      new_bb->aux = e->src->aux;
	      bsi = bsi_start (new_bb);
	    }

	  make_eh_edges (copy_stmt);

	  if (gimple_in_ssa_p (cfun))
	    update_ssa_across_eh_edges (bb_for_stmt (copy_stmt));
	}
    }
}

/* Copy the PHIs.  All blocks and edges have been copied, and some blocks
   may have been split and new outgoing EH edges inserted.
   BB points to the block of the original function and AUX pointers link
   the original and newly copied blocks.  */

static void
copy_phis_for_bb (basic_block bb, copy_body_data *id)
{
  basic_block new_bb = bb->aux;
  edge_iterator ei;
  tree phi;

  for (phi = phi_nodes (bb); phi; phi = PHI_CHAIN (phi))
    {
      tree res = PHI_RESULT (phi);
      tree new_res = res;
      tree new_phi;
      edge new_edge;

      if (is_gimple_reg (res))
	{
	  walk_tree (&new_res, copy_body_r, id, NULL);
	  SSA_NAME_DEF_STMT (new_res)
	    = new_phi = create_phi_node (new_res, new_bb);
	  FOR_EACH_EDGE (new_edge, ei, new_bb->preds)
	    {
	      edge old_edge = find_edge (new_edge->src->aux, bb);
	      tree arg = PHI_ARG_DEF_FROM_EDGE (phi, old_edge);
	      tree new_arg = arg;

	      walk_tree (&new_arg, copy_body_r, id, NULL);
	      gcc_assert (new_arg);
	      add_phi_arg (new_phi, new_arg, new_edge);
	    }
	}
    }
}

/* Wrapper for remap_decl so it can be used as a callback.  */
static tree
remap_decl_1 (tree decl, void *data)
{
  return remap_decl (decl, (copy_body_data *) data);
}

/* Build the struct function and associated datastructures for the new clone
   NEW_FNDECL to be built.  CALLEE_FNDECL is the original.  */

static void
initialize_cfun (tree new_fndecl, tree callee_fndecl, gcov_type count,
		 int frequency)
{
  struct function *new_cfun
    = (struct function *) ggc_alloc_cleared (sizeof (struct function));
  struct function *src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
  int count_scale, frequency_scale;

  if (ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count)
    count_scale = (REG_BR_PROB_BASE * count
		   / ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count);
  else
    count_scale = 1;

  if (ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->frequency)
    frequency_scale = (REG_BR_PROB_BASE * frequency
		       /
		       ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->frequency);
  else
    frequency_scale = count_scale;

  /* Register specific tree functions.  */
  tree_register_cfg_hooks ();
  *new_cfun = *DECL_STRUCT_FUNCTION (callee_fndecl);
  new_cfun->funcdef_no = get_next_funcdef_no ();
  VALUE_HISTOGRAMS (new_cfun) = NULL;
  new_cfun->unexpanded_var_list = NULL;
  new_cfun->cfg = NULL;
  new_cfun->decl = new_fndecl /*= copy_node (callee_fndecl)*/;
  new_cfun->ib_boundaries_block = NULL;
  DECL_STRUCT_FUNCTION (new_fndecl) = new_cfun;
  push_cfun (new_cfun);
  init_empty_tree_cfg ();

  ENTRY_BLOCK_PTR->count =
    (ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count * count_scale /
     REG_BR_PROB_BASE);
  ENTRY_BLOCK_PTR->frequency =
    (ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->frequency *
     frequency_scale / REG_BR_PROB_BASE);
  EXIT_BLOCK_PTR->count =
    (EXIT_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count * count_scale /
     REG_BR_PROB_BASE);
  EXIT_BLOCK_PTR->frequency =
    (EXIT_BLOCK_PTR_FOR_FUNCTION (src_cfun)->frequency *
     frequency_scale / REG_BR_PROB_BASE);
  if (src_cfun->eh)
    init_eh_for_function ();

  if (src_cfun->gimple_df)
    {
      init_tree_ssa ();
      cfun->gimple_df->in_ssa_p = true;
      init_ssa_operands ();
    }
  pop_cfun ();
}

/* Make a copy of the body of FN so that it can be inserted inline in
   another function.  Walks FN via CFG, returns new fndecl.  */

static tree
copy_cfg_body (copy_body_data * id, gcov_type count, int frequency,
	       basic_block entry_block_map, basic_block exit_block_map)
{
  tree callee_fndecl = id->src_fn;
  /* Original cfun for the callee, doesn't change.  */
  struct function *src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
  struct function *cfun_to_copy;
  basic_block bb;
  tree new_fndecl = NULL;
  int count_scale, frequency_scale;
  int last;

  if (ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count)
    count_scale = (REG_BR_PROB_BASE * count
		   / ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count);
  else
    count_scale = 1;

  if (ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->frequency)
    frequency_scale = (REG_BR_PROB_BASE * frequency
		       /
		       ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->frequency);
  else
    frequency_scale = count_scale;

  /* Register specific tree functions.  */
  tree_register_cfg_hooks ();

  /* Must have a CFG here at this point.  */
  gcc_assert (ENTRY_BLOCK_PTR_FOR_FUNCTION
	      (DECL_STRUCT_FUNCTION (callee_fndecl)));

  cfun_to_copy = id->src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);

  ENTRY_BLOCK_PTR_FOR_FUNCTION (cfun_to_copy)->aux = entry_block_map;
  EXIT_BLOCK_PTR_FOR_FUNCTION (cfun_to_copy)->aux = exit_block_map;
  entry_block_map->aux = ENTRY_BLOCK_PTR_FOR_FUNCTION (cfun_to_copy);
  exit_block_map->aux = EXIT_BLOCK_PTR_FOR_FUNCTION (cfun_to_copy);

  /* Duplicate any exception-handling regions.  */
  if (cfun->eh)
    {
      id->eh_region_offset
	= duplicate_eh_regions (cfun_to_copy, remap_decl_1, id,
				0, id->eh_region);
    }
  /* Use aux pointers to map the original blocks to copy.  */
  FOR_EACH_BB_FN (bb, cfun_to_copy)
    {
      basic_block new = copy_bb (id, bb, frequency_scale, count_scale);
      bb->aux = new;
      new->aux = bb;
    }

  last = n_basic_blocks;
  /* Now that we've duplicated the blocks, duplicate their edges.  */
  FOR_ALL_BB_FN (bb, cfun_to_copy)
    copy_edges_for_bb (bb, count_scale);
  if (gimple_in_ssa_p (cfun))
    FOR_ALL_BB_FN (bb, cfun_to_copy)
      copy_phis_for_bb (bb, id);
  FOR_ALL_BB_FN (bb, cfun_to_copy)
    {
      ((basic_block)bb->aux)->aux = NULL;
      bb->aux = NULL;
    }
  /* Zero out AUX fields of newly created blocks during EH edge
     insertion.  */
  for (; last < n_basic_blocks; last++)
    BASIC_BLOCK (last)->aux = NULL;
  entry_block_map->aux = NULL;
  exit_block_map->aux = NULL;

  return new_fndecl;
}

/* Make a copy of the body of FN so that it can be inserted inline in
   another function.  */

static tree
copy_generic_body (copy_body_data *id)
{
  tree body;
  tree fndecl = id->src_fn;

  body = DECL_SAVED_TREE (fndecl);
  walk_tree (&body, copy_body_r, id, NULL);

  return body;
}

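/* Make a copy of the body of the function in ID so that it can be
   inserted inline in another function.  The body is expected to be in
   CFG form here; this is a thin wrapper around copy_cfg_body.  */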
static tree
copy_body (copy_body_data *id, gcov_type count, int frequency,
	   basic_block entry_block_map, basic_block exit_block_map)
{
  tree fndecl = id->src_fn;
  tree body;

  /* If this body has a CFG, walk CFG and copy.  */
  gcc_assert (ENTRY_BLOCK_PTR_FOR_FUNCTION (DECL_STRUCT_FUNCTION (fndecl)));
  body = copy_cfg_body (id, count, frequency, entry_block_map, exit_block_map);

  return body;
}

/* Return true if VALUE is an ADDR_EXPR of an automatic variable
   defined in function FN, or of a data member thereof.  */

static bool
self_inlining_addr_expr (tree value, tree fn)
{
  tree var;

  if (TREE_CODE (value) != ADDR_EXPR)
    return false;

  var = get_base_address (TREE_OPERAND (value, 0));

  return var && lang_hooks.tree_inlining.auto_var_in_fn_p (var, fn);
}

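/* Initialize parameter P of the inlined function FN from VALUE, the
   corresponding argument at the call site.  Initialization statements,
   if any, are emitted at the end of BB, and any new local variables are
   chained onto *VARS.  */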
static void
setup_one_parameter (copy_body_data *id, tree p, tree value, tree fn,
		     basic_block bb, tree *vars)
{
  tree init_stmt;
  tree var;
  tree var_sub;
  tree rhs = value ? fold_convert (TREE_TYPE (p), value) : NULL;
  tree def = (gimple_in_ssa_p (cfun)
	      ? gimple_default_def (id->src_cfun, p) : NULL);

  /* If the parameter is never assigned to and has no SSA_NAMEs created,
     we may not need to create a new variable here at all.  Instead, we
     may be able to just use the argument value.  */
  if (TREE_READONLY (p)
      && !TREE_ADDRESSABLE (p)
      && value && !TREE_SIDE_EFFECTS (value)
      && !def)
    {
      /* We may produce non-gimple trees by adding NOPs or introduce
	 invalid sharing when the operand is not really constant.
	 It is not a big deal to prohibit constant propagation here as
	 we will constant propagate in the DOM1 pass anyway.  */
      if (is_gimple_min_invariant (value)
	  && lang_hooks.types_compatible_p (TREE_TYPE (value), TREE_TYPE (p))
	  /* We have to be very careful about ADDR_EXPR.  Make sure
	     the base variable isn't a local variable of the inlined
	     function, e.g., when doing recursive inlining, direct or
	     mutually-recursive or whatever, which is why we don't
	     just test whether fn == current_function_decl.  */
	  && ! self_inlining_addr_expr (value, fn))
	{
	  insert_decl_map (id, p, value);
	  return;
	}
    }

  /* Make an equivalent VAR_DECL.  Note that we must NOT remap the type
     here since the type of this decl must be visible to the calling
     function.  */
  var = copy_decl_to_var (p, id);
  if (gimple_in_ssa_p (cfun) && TREE_CODE (var) == VAR_DECL)
    {
      get_var_ann (var);
      add_referenced_var (var);
    }

  /* See if the frontend wants to pass this by invisible reference.  If
     so, our new VAR_DECL will have REFERENCE_TYPE, and we need to
     replace uses of the PARM_DECL with dereferences.  */
  if (TREE_TYPE (var) != TREE_TYPE (p)
      && POINTER_TYPE_P (TREE_TYPE (var))
      && TREE_TYPE (TREE_TYPE (var)) == TREE_TYPE (p))
    {
      insert_decl_map (id, var, var);
      var_sub = build_fold_indirect_ref (var);
    }
  else
    var_sub = var;

  /* Register the VAR_DECL as the equivalent for the PARM_DECL;
     that way, when the PARM_DECL is encountered, it will be
     automatically replaced by the VAR_DECL.  */
  insert_decl_map (id, p, var_sub);

  /* Declare this new variable.  */
  TREE_CHAIN (var) = *vars;
  *vars = var;

  /* Make gimplifier happy about this variable.  */
  DECL_SEEN_IN_BIND_EXPR_P (var) = 1;

  /* Even if P was TREE_READONLY, the new VAR should not be.
     In the original code, we would have constructed a
     temporary, and then the function body would have never
     changed the value of P.  However, now, we will be
     constructing VAR directly.  The constructor body may
     change its value multiple times as it is being
     constructed.  Therefore, it must not be TREE_READONLY;
     the back-end assumes that TREE_READONLY variable is
     assigned to only once.  */
  if (TYPE_NEEDS_CONSTRUCTING (TREE_TYPE (p)))
    TREE_READONLY (var) = 0;

  /* If there is no setup required and we are in SSA, take the easy route
     replacing all SSA names representing the function parameter by the
     SSA name passed to the function.

     We need to construct a map for the variable anyway, as it might be
     used in different SSA names when the parameter is set in the function.

     FIXME: This usually kills the last connection in between the inlined
     function parameter and the actual value in debug info.  Can we do
     better here?  If we just inserted the statement, copy propagation
     would kill it anyway as it always did in older versions of GCC.

     We might want to introduce a notion that a single SSA_NAME might
     represent multiple variables for purposes of debugging.  */
  if (gimple_in_ssa_p (cfun) && rhs && def && is_gimple_reg (p)
      && (TREE_CODE (rhs) == SSA_NAME
	  || is_gimple_min_invariant (rhs))
      && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (def))
    {
      insert_decl_map (id, def, rhs);
      return;
    }

  /* Initialize this VAR_DECL from the equivalent argument.  Convert
     the argument to the proper type in case it was promoted.  */
  if (value)
    {
      block_stmt_iterator bsi = bsi_last (bb);

      if (rhs == error_mark_node)
	{
	  insert_decl_map (id, p, var_sub);
	  return;
	}

      STRIP_USELESS_TYPE_CONVERSION (rhs);

      /* We want to use GIMPLE_MODIFY_STMT, not INIT_EXPR here so that we
	 keep our trees in gimple form.  */
      if (def && gimple_in_ssa_p (cfun) && is_gimple_reg (p))
	{
	  def = remap_ssa_name (def, id);
	  init_stmt = build2 (GIMPLE_MODIFY_STMT, TREE_TYPE (var), def, rhs);
	  SSA_NAME_DEF_STMT (def) = init_stmt;
	  SSA_NAME_IS_DEFAULT_DEF (def) = 0;
	  set_default_def (var, NULL);
	}
      else
	init_stmt = build2 (GIMPLE_MODIFY_STMT, TREE_TYPE (var), var, rhs);

      /* If we did not create a gimple value and we did not create a gimple
	 cast of a gimple value, then we will need to gimplify INIT_STMTS
	 at the end.  Note that is_gimple_cast only checks the outer
	 tree code, not its operand.  Thus the explicit check that its
	 operand is a gimple value.  */
      if ((!is_gimple_val (rhs)
	   && (!is_gimple_cast (rhs)
	       || !is_gimple_val (TREE_OPERAND (rhs, 0))))
	  || !is_gimple_reg (var))
	{
	  tree_stmt_iterator i;

	  push_gimplify_context ();
	  gimplify_stmt (&init_stmt);
	  if (gimple_in_ssa_p (cfun)
	      && init_stmt && TREE_CODE (init_stmt) == STATEMENT_LIST)
	    {
	      /* The replacement can expose previously unreferenced
		 variables.  */
	      for (i = tsi_start (init_stmt); !tsi_end_p (i); tsi_next (&i))
		find_new_referenced_vars (tsi_stmt_ptr (i));
	    }
	  pop_gimplify_context (NULL);
	}

      /* If VAR represents a zero-sized variable, it's possible that the
	 assignment statement may result in no gimple statements.  */
      if (init_stmt)
	bsi_insert_after (&bsi, init_stmt, BSI_NEW_STMT);
      if (gimple_in_ssa_p (cfun))
	for (;!bsi_end_p (bsi); bsi_next (&bsi))
	  mark_symbols_for_renaming (bsi_stmt (bsi));
    }
}

/* Generate code to initialize the parameters of the function at the
   top of the stack in ID from the ARGS (presented as a TREE_LIST).  */

static void
initialize_inlined_parameters (copy_body_data *id, tree args, tree static_chain,
			       tree fn, basic_block bb)
{
  tree parms;
  tree a;
  tree p;
  tree vars = NULL_TREE;
  int argnum = 0;

  /* Figure out what the parameters are.  */
  parms = DECL_ARGUMENTS (fn);

  /* Loop through the parameter declarations, replacing each with an
     equivalent VAR_DECL, appropriately initialized.  */
  for (p = parms, a = args; p;
       a = a ? TREE_CHAIN (a) : a, p = TREE_CHAIN (p))
    {
      tree value;

      ++argnum;

      /* Find the initializer.  */
      value = lang_hooks.tree_inlining.convert_parm_for_inlining
	      (p, a ? TREE_VALUE (a) : NULL_TREE, fn, argnum);

      setup_one_parameter (id, p, value, fn, bb, &vars);
    }

  /* Initialize the static chain.  */
  p = DECL_STRUCT_FUNCTION (fn)->static_chain_decl;
  gcc_assert (fn != current_function_decl);
  if (p)
    {
      /* No static chain?  Seems like a bug in tree-nested.c.  */
      gcc_assert (static_chain);

      setup_one_parameter (id, p, static_chain, fn, bb, &vars);
    }

  declare_inline_vars (id->block, vars);
}

/* Declare a return variable to replace the RESULT_DECL for the
   function we are calling.  An appropriate DECL_STMT is returned.
   The USE_STMT is filled to contain a use of the declaration to
   indicate the return value of the function.

   RETURN_SLOT, if non-null, is the place where to store the result.  It
   is set only for CALL_EXPR_RETURN_SLOT_OPT.  MODIFY_DEST, if non-null,
   was the LHS of the GIMPLE_MODIFY_STMT to which this call is the RHS.

   The return value is a (possibly null) value that is the result of the
   function as seen by the callee.  *USE_P is a (possibly null) value that
   holds the result as seen by the caller.  */

static tree
declare_return_variable (copy_body_data *id, tree return_slot, tree modify_dest,
			 tree *use_p)
{
  tree callee = id->src_fn;
  tree caller = id->dst_fn;
  tree result = DECL_RESULT (callee);
  tree callee_type = TREE_TYPE (result);
  tree caller_type = TREE_TYPE (TREE_TYPE (callee));
  tree var, use;

  /* We don't need to do anything for functions that don't return
     anything.  */
  if (!result || VOID_TYPE_P (callee_type))
    {
      *use_p = NULL_TREE;
      return NULL_TREE;
    }

  /* If there was a return slot, then the return value is the
     dereferenced address of that object.  */
  if (return_slot)
    {
      /* The front end shouldn't have used both return_slot and
	 a modify expression.  */
      gcc_assert (!modify_dest);
      if (DECL_BY_REFERENCE (result))
	{
	  tree return_slot_addr = build_fold_addr_expr (return_slot);
	  STRIP_USELESS_TYPE_CONVERSION (return_slot_addr);

	  /* We are going to construct *&return_slot and we can't do that
	     for variables believed to be not addressable.

	     FIXME: This check possibly can match, because values returned
	     via return slot optimization are not believed to have address
	     taken by alias analysis.  */
	  gcc_assert (TREE_CODE (return_slot) != SSA_NAME);
	  if (gimple_in_ssa_p (cfun))
	    {
	      HOST_WIDE_INT bitsize;
	      HOST_WIDE_INT bitpos;
	      tree offset;
	      enum machine_mode mode;
	      int unsignedp;
	      int volatilep;
	      tree base;
	      base = get_inner_reference (return_slot, &bitsize, &bitpos,
					  &offset,
					  &mode, &unsignedp, &volatilep,
					  false);
	      if (TREE_CODE (base) == INDIRECT_REF)
		base = TREE_OPERAND (base, 0);
	      if (TREE_CODE (base) == SSA_NAME)
		base = SSA_NAME_VAR (base);
	      mark_sym_for_renaming (base);
	    }
	  var = return_slot_addr;
	}
      else
	{
	  var = return_slot;
	  gcc_assert (TREE_CODE (var) != SSA_NAME);
	}
      if ((TREE_CODE (TREE_TYPE (result)) == COMPLEX_TYPE
	   || TREE_CODE (TREE_TYPE (result)) == VECTOR_TYPE)
	  && !DECL_GIMPLE_REG_P (result)
	  && DECL_P (var))
	DECL_GIMPLE_REG_P (var) = 0;
      use = NULL;
      goto done;
    }

  /* All types requiring non-trivial constructors should have been handled.  */
  gcc_assert (!TREE_ADDRESSABLE (callee_type));

  /* Attempt to avoid creating a new temporary variable.  */
  if (modify_dest
      && TREE_CODE (modify_dest) != SSA_NAME)
    {
      bool use_it = false;

      /* We can't use MODIFY_DEST if there's type promotion involved.  */
      if (!lang_hooks.types_compatible_p (caller_type, callee_type))
	use_it = false;

      /* ??? If we're assigning to a variable sized type, then we must
	 reuse the destination variable, because we've no good way to
	 create variable sized temporaries at this point.  */
      else if (TREE_CODE (TYPE_SIZE_UNIT (caller_type)) != INTEGER_CST)
	use_it = true;

      /* If the callee cannot possibly modify MODIFY_DEST, then we can
	 reuse it as the result of the call directly.  Don't do this if
	 it would promote MODIFY_DEST to addressable.  */
      else if (TREE_ADDRESSABLE (result))
	use_it = false;
      else
	{
	  tree base_m = get_base_address (modify_dest);

	  /* If the base isn't a decl, then it's a pointer, and we don't
	     know where that's going to go.  */
	  if (!DECL_P (base_m))
	    use_it = false;
	  else if (is_global_var (base_m))
	    use_it = false;
	  else if ((TREE_CODE (TREE_TYPE (result)) == COMPLEX_TYPE
		    || TREE_CODE (TREE_TYPE (result)) == VECTOR_TYPE)
		   && !DECL_GIMPLE_REG_P (result)
		   && DECL_GIMPLE_REG_P (base_m))
	    use_it = false;
	  else if (!TREE_ADDRESSABLE (base_m))
	    use_it = true;
	}

      if (use_it)
	{
	  var = modify_dest;
	  use = NULL;
	  goto done;
	}
    }

  gcc_assert (TREE_CODE (TYPE_SIZE_UNIT (callee_type)) == INTEGER_CST);

  var = copy_result_decl_to_var (result, id);
  if (gimple_in_ssa_p (cfun))
    {
      get_var_ann (var);
      add_referenced_var (var);
    }

  DECL_SEEN_IN_BIND_EXPR_P (var) = 1;
  DECL_STRUCT_FUNCTION (caller)->unexpanded_var_list
    = tree_cons (NULL_TREE, var,
		 DECL_STRUCT_FUNCTION (caller)->unexpanded_var_list);

  /* Do not have the rest of GCC warn about this variable as it should
     not be visible to the user.  */
  TREE_NO_WARNING (var) = 1;

  declare_inline_vars (id->block, var);

  /* Build the use expr.  If the return type of the function was
     promoted, convert it back to the expected type.  */
  use = var;
  if (!lang_hooks.types_compatible_p (TREE_TYPE (var), caller_type))
    use = fold_convert (caller_type, var);

  STRIP_USELESS_TYPE_CONVERSION (use);

  if (DECL_BY_REFERENCE (result))
    var = build_fold_addr_expr (var);

 done:
  /* Register the VAR_DECL as the equivalent for the RESULT_DECL; that
     way, when the RESULT_DECL is encountered, it will be
     automatically replaced by the VAR_DECL.  */
  insert_decl_map (id, result, var);

  /* Remember this so we can ignore it in remap_decls.  */
  id->retvar = var;

  *use_p = use;
  return var;
}

/* Returns nonzero if a function can be inlined as a tree.  */

bool
tree_inlinable_function_p (tree fn)
{
  return inlinable_function_p (fn);
}

static const char *inline_forbidden_reason;

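/* Callback for walk_tree through inline_forbidden_p.  FNP is the function
   considered for inlining.  If the node *NODEP makes inlining impossible,
   record an explanation in inline_forbidden_reason and return the node;
   otherwise return NULL_TREE.  */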
static tree
inline_forbidden_p_1 (tree *nodep, int *walk_subtrees ATTRIBUTE_UNUSED,
		      void *fnp)
{
  tree node = *nodep;
  tree fn = (tree) fnp;
  tree t;

  switch (TREE_CODE (node))
    {
    case CALL_EXPR:
      /* Refuse to inline an alloca call unless the user explicitly forced
	 it, since this may change the program's memory overhead drastically
	 when the function using alloca is called in a loop.  In the GCC
	 sources present in SPEC2000, inlining into schedule_block caused it
	 to require 2GB of RAM instead of 256MB.  */
      if (alloca_call_p (node)
	  && !lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn)))
	{
	  inline_forbidden_reason
	    = G_("function %q+F can never be inlined because it uses "
		 "alloca (override using the always_inline attribute)");
	  return node;
	}
      t = get_callee_fndecl (node);
      if (! t)
	break;

      /* We cannot inline functions that call setjmp.  */
      if (setjmp_call_p (t))
	{
	  inline_forbidden_reason
	    = G_("function %q+F can never be inlined because it uses setjmp");
	  return node;
	}

      if (DECL_BUILT_IN_CLASS (t) == BUILT_IN_NORMAL)
	switch (DECL_FUNCTION_CODE (t))
	  {
	    /* We cannot inline functions that take a variable number of
	       arguments.  */
	  case BUILT_IN_VA_START:
	  case BUILT_IN_STDARG_START:
	  case BUILT_IN_NEXT_ARG:
	  case BUILT_IN_VA_END:
	    inline_forbidden_reason
	      = G_("function %q+F can never be inlined because it "
		   "uses variable argument lists");
	    return node;

	  case BUILT_IN_LONGJMP:
	    /* We can't inline functions that call __builtin_longjmp at
	       all.  The non-local goto machinery really requires the
	       destination be in a different function.  If we allow the
	       function calling __builtin_longjmp to be inlined into the
	       function calling __builtin_setjmp, Things will Go Awry.  */
	    inline_forbidden_reason
	      = G_("function %q+F can never be inlined because "
		   "it uses setjmp-longjmp exception handling");
	    return node;

	  case BUILT_IN_NONLOCAL_GOTO:
	    /* Similarly.  */
	    inline_forbidden_reason
	      = G_("function %q+F can never be inlined because "
		   "it uses non-local goto");
	    return node;

	  case BUILT_IN_RETURN:
	  case BUILT_IN_APPLY_ARGS:
	    /* If a __builtin_apply_args caller would be inlined,
	       it would be saving arguments of the function it has
	       been inlined into.  Similarly __builtin_return would
	       return from the function the inline has been inlined into.  */
	    inline_forbidden_reason
	      = G_("function %q+F can never be inlined because "
		   "it uses __builtin_return or __builtin_apply_args");
	    return node;

	  default:
	    break;
	  }
      break;

    case GOTO_EXPR:
      t = TREE_OPERAND (node, 0);

      /* We will not inline a function which uses computed goto.  The
	 addresses of its local labels, which may be tucked into
	 global storage, are of course not constant across
	 instantiations, which causes unexpected behavior.  */
      if (TREE_CODE (t) != LABEL_DECL)
	{
	  inline_forbidden_reason
	    = G_("function %q+F can never be inlined "
		 "because it contains a computed goto");
	  return node;
	}
      break;

    case LABEL_EXPR:
      t = TREE_OPERAND (node, 0);
      if (DECL_NONLOCAL (t))
	{
	  /* We cannot inline a function that receives a non-local goto
	     because we cannot remap the destination label used in the
	     function that is performing the non-local goto.  */
	  inline_forbidden_reason
	    = G_("function %q+F can never be inlined "
		 "because it receives a non-local goto");
	  return node;
	}
      break;

    case RECORD_TYPE:
    case UNION_TYPE:
      /* We cannot inline a function of the form

	   void F (int i) { struct S { int ar[i]; } s; }

	 Attempting to do so produces a catch-22.
	 If walk_tree examines the TYPE_FIELDS chain of RECORD_TYPE/
	 UNION_TYPE nodes, then it goes into infinite recursion on a
	 structure containing a pointer to its own type.  If it doesn't,
	 then the type node for S doesn't get adjusted properly when
	 F is inlined.

	 ??? This is likely no longer true, but it's too late in the 4.0
	 cycle to try to find out.  This should be checked for 4.1.  */
      for (t = TYPE_FIELDS (node); t; t = TREE_CHAIN (t))
	if (variably_modified_type_p (TREE_TYPE (t), NULL))
	  {
	    inline_forbidden_reason
	      = G_("function %q+F can never be inlined "
		   "because it uses variable sized variables");
	    return node;
	  }

    default:
      break;
    }

  return NULL_TREE;
}

/* Return subexpression representing possible alloca call, if any.  */
static tree
inline_forbidden_p (tree fndecl)
{
  location_t saved_loc = input_location;
  block_stmt_iterator bsi;
  basic_block bb;
  tree ret = NULL_TREE;

  FOR_EACH_BB_FN (bb, DECL_STRUCT_FUNCTION (fndecl))
    for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
      {
	ret = walk_tree_without_duplicates (bsi_stmt_ptr (bsi),
					    inline_forbidden_p_1, fndecl);
	if (ret)
	  goto egress;
      }

egress:
  input_location = saved_loc;
  return ret;
}

1837 /* Returns true if FN is a function that does not have any
1838 fundamental inline-blocking properties. */
1840 static bool
1841 inlinable_function_p (tree fn)
1843 bool inlinable = true;
1845 /* If we've already decided this function shouldn't be inlined,
1846 there's no need to check again. */
1847 if (DECL_UNINLINABLE (fn))
1848 return false;
1850 /* See if there is any language-specific reason it cannot be
1851 inlined. (It is important that this hook be called early because
1852 in C++ it may result in template instantiation.)
1853 If the function is not inlinable for language-specific reasons,
1854 it is left up to the langhook to explain why. */
1855 inlinable = !lang_hooks.tree_inlining.cannot_inline_tree_fn (&fn);
1857 /* If we don't have the function body available, we can't inline it.
1858 However, this should not be recorded since we also get here for
1859 forward declared inline functions. Therefore, return at once. */
1860 if (!DECL_SAVED_TREE (fn))
1861 return false;
1863 /* If we're not inlining at all, then we cannot inline this function. */
1864 else if (!flag_inline_trees)
1865 inlinable = false;
1867 /* Only try to inline functions if DECL_INLINE is set. This should be
1868 true for all functions declared `inline', and for all other functions
1869 as well with -finline-functions.
1871 Don't think of disregarding DECL_INLINE when flag_inline_trees == 2;
1872 it's the front-end that must set DECL_INLINE in this case, because
1873 dwarf2out loses if a function that does not have DECL_INLINE set is
1874 inlined anyway. That is why we have both DECL_INLINE and
1875 DECL_DECLARED_INLINE_P. */
1876 /* FIXME: When flag_inline_trees dies, the check for flag_unit_at_a_time
1877 here should be redundant. */
1878 else if (!DECL_INLINE (fn) && !flag_unit_at_a_time)
1879 inlinable = false;
1881 else if (inline_forbidden_p (fn))
1883 /* See if we should warn about uninlinable functions. Previously,
1884 some of these warnings would be issued while trying to expand
1885 the function inline, but that would cause multiple warnings
1886 about functions that would for example call alloca. But since
1887 this is a property of the function, just one warning is enough.
1888 As a bonus we can now give more details about the reason why a
1889 function is not inlinable.
1890 We only warn for functions declared `inline' by the user. */
1891 bool do_warning = (warn_inline
1892 && DECL_INLINE (fn)
1893 && DECL_DECLARED_INLINE_P (fn)
1894 && !DECL_IN_SYSTEM_HEADER (fn));
1896 if (lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn)))
1897 sorry (inline_forbidden_reason, fn);
1898 else if (do_warning)
1899 warning (OPT_Winline, inline_forbidden_reason, fn);
1901 inlinable = false;
1904 /* Squirrel away the result so that we don't have to check again. */
1905 DECL_UNINLINABLE (fn) = !inlinable;
1907 return inlinable;
1910 /* Estimate the cost of a memory move. Use machine dependent
1911 word size and take possible memcpy call into account. */
1913 static int
1914 estimate_move_cost (tree type)
1916 HOST_WIDE_INT size;
1918 size = int_size_in_bytes (type);
1920 if (size < 0 || size > MOVE_MAX_PIECES * MOVE_RATIO)
1921 /* Cost of a memcpy call, 3 arguments and the call. */
1922 return 4;
1923 else
1924 return ((size + MOVE_MAX_PIECES - 1) / MOVE_MAX_PIECES);
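/* A worked example of the computation above (a sketch, assuming a target
   with MOVE_MAX_PIECES == 8 and MOVE_RATIO == 4, i.e. a 32-byte memcpy
   threshold):

     int_size_in_bytes (type) == 24  ->  (24 + 7) / 8 == 3 piecewise moves
     int_size_in_bytes (type) == 48  ->  48 > 32, so cost 4 (memcpy call)
     int_size_in_bytes (type) == -1  ->  variable size, so cost 4 (memcpy)  */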
1927 /* Arguments for estimate_num_insns_1. */
1929 struct eni_data
1931 /* Used to return the number of insns. */
1932 int count;
1934 /* Weights of various constructs. */
1935 eni_weights *weights;
1938 /* Used by estimate_num_insns. Estimate number of instructions seen
1939 by given statement. */
1941 static tree
1942 estimate_num_insns_1 (tree *tp, int *walk_subtrees, void *data)
1944 struct eni_data *d = data;
1945 tree x = *tp;
1946 unsigned cost;
1948 if (IS_TYPE_OR_DECL_P (x))
1950 *walk_subtrees = 0;
1951 return NULL;
1953 /* Assume that constants and references count as nothing. Their cost
1954 should be dominated by the operations among them, which we do count
1955 later, and they are a common target of CSE and similar optimizations. */
1956 else if (CONSTANT_CLASS_P (x) || REFERENCE_CLASS_P (x))
1957 return NULL;
1959 switch (TREE_CODE (x))
1961 /* Containers have no cost. */
1962 case TREE_LIST:
1963 case TREE_VEC:
1964 case BLOCK:
1965 case COMPONENT_REF:
1966 case BIT_FIELD_REF:
1967 case INDIRECT_REF:
1968 case ALIGN_INDIRECT_REF:
1969 case MISALIGNED_INDIRECT_REF:
1970 case ARRAY_REF:
1971 case ARRAY_RANGE_REF:
1972 case OBJ_TYPE_REF:
1973 case EXC_PTR_EXPR: /* ??? */
1974 case FILTER_EXPR: /* ??? */
1975 case COMPOUND_EXPR:
1976 case BIND_EXPR:
1977 case WITH_CLEANUP_EXPR:
1978 case NOP_EXPR:
1979 case VIEW_CONVERT_EXPR:
1980 case SAVE_EXPR:
1981 case ADDR_EXPR:
1982 case COMPLEX_EXPR:
1983 case RANGE_EXPR:
1984 case CASE_LABEL_EXPR:
1985 case SSA_NAME:
1986 case CATCH_EXPR:
1987 case EH_FILTER_EXPR:
1988 case STATEMENT_LIST:
1989 case ERROR_MARK:
1990 case NON_LVALUE_EXPR:
1991 case FDESC_EXPR:
1992 case VA_ARG_EXPR:
1993 case TRY_CATCH_EXPR:
1994 case TRY_FINALLY_EXPR:
1995 case LABEL_EXPR:
1996 case GOTO_EXPR:
1997 case RETURN_EXPR:
1998 case EXIT_EXPR:
1999 case LOOP_EXPR:
2000 case PHI_NODE:
2001 case WITH_SIZE_EXPR:
2002 case OMP_CLAUSE:
2003 case OMP_RETURN:
2004 case OMP_CONTINUE:
2005 break;
2007 /* We don't account for constants for now. Assume that their cost is
2008 amortized by the operations that use them. We may reconsider this
2009 decision once we are able to optimize the tree before estimating its
2010 size and to break out static initializers. */
2011 case IDENTIFIER_NODE:
2012 case INTEGER_CST:
2013 case REAL_CST:
2014 case COMPLEX_CST:
2015 case VECTOR_CST:
2016 case STRING_CST:
2017 *walk_subtrees = 0;
2018 return NULL;
2020 /* Try to estimate the cost of assignments. We have three cases to
2021 deal with:
2022 1) Simple assignments to registers;
2023 2) Stores to things that must live in memory. This includes
2024 "normal" stores to scalars, but also assignments of large
2025 structures, or constructors of big arrays;
2026 3) TARGET_EXPRs.
2028 Let us look at the first two cases, assuming we have "a = b + C":
2029 <GIMPLE_MODIFY_STMT <var_decl "a">
2030 <plus_expr <var_decl "b"> <constant C>>
2031 If "a" is a GIMPLE register, the assignment to it is free on almost
2032 any target, because "a" usually ends up in a real register. Hence
2033 the only cost of this expression comes from the PLUS_EXPR, and we
2034 can ignore the GIMPLE_MODIFY_STMT.
2035 If "a" is not a GIMPLE register, the assignment to "a" will most
2036 likely be a real store, so the cost of the GIMPLE_MODIFY_STMT is the cost
2037 of moving something into "a", which we compute using the function
2038 estimate_move_cost.
2040 The third case deals with TARGET_EXPRs, for which the semantics are
2041 that a temporary is assigned, unless the TARGET_EXPR itself is being
2042 assigned to something else. In the latter case we do not need the
2043 temporary. E.g. in:
2044 <GIMPLE_MODIFY_STMT <var_decl "a"> <target_expr>>, the
2045 GIMPLE_MODIFY_STMT is free. */
2046 case INIT_EXPR:
2047 case GIMPLE_MODIFY_STMT:
2048 /* Is the right-hand side a TARGET_EXPR? */
2049 if (TREE_CODE (GENERIC_TREE_OPERAND (x, 1)) == TARGET_EXPR)
2050 break;
2051 /* ... fall through ... */
2053 case TARGET_EXPR:
2054 x = GENERIC_TREE_OPERAND (x, 0);
2055 /* Is this an assignment to a register? */
2056 if (is_gimple_reg (x))
2057 break;
2058 /* Otherwise it's a store, so fall through to compute the move cost. */
2060 case CONSTRUCTOR:
2061 d->count += estimate_move_cost (TREE_TYPE (x));
2062 break;
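/* A worked example of the assignment costs above (a sketch, assuming "a"
   and "b" are GIMPLE registers while "s.x" lives in memory):

     a = b + 1;     the PLUS_EXPR costs 1, the register store is free.
     s.x = b + 1;   the PLUS_EXPR costs 1, plus estimate_move_cost on
                    TREE_TYPE (s.x) for the real store.  */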
2064 /* Assign a cost of 1 to the usual operations.
2065 ??? We may consider mapping RTL costs to this. */
2066 case COND_EXPR:
2067 case VEC_COND_EXPR:
2069 case PLUS_EXPR:
2070 case MINUS_EXPR:
2071 case MULT_EXPR:
2073 case FIX_TRUNC_EXPR:
2075 case NEGATE_EXPR:
2076 case FLOAT_EXPR:
2077 case MIN_EXPR:
2078 case MAX_EXPR:
2079 case ABS_EXPR:
2081 case LSHIFT_EXPR:
2082 case RSHIFT_EXPR:
2083 case LROTATE_EXPR:
2084 case RROTATE_EXPR:
2085 case VEC_LSHIFT_EXPR:
2086 case VEC_RSHIFT_EXPR:
2088 case BIT_IOR_EXPR:
2089 case BIT_XOR_EXPR:
2090 case BIT_AND_EXPR:
2091 case BIT_NOT_EXPR:
2093 case TRUTH_ANDIF_EXPR:
2094 case TRUTH_ORIF_EXPR:
2095 case TRUTH_AND_EXPR:
2096 case TRUTH_OR_EXPR:
2097 case TRUTH_XOR_EXPR:
2098 case TRUTH_NOT_EXPR:
2100 case LT_EXPR:
2101 case LE_EXPR:
2102 case GT_EXPR:
2103 case GE_EXPR:
2104 case EQ_EXPR:
2105 case NE_EXPR:
2106 case ORDERED_EXPR:
2107 case UNORDERED_EXPR:
2109 case UNLT_EXPR:
2110 case UNLE_EXPR:
2111 case UNGT_EXPR:
2112 case UNGE_EXPR:
2113 case UNEQ_EXPR:
2114 case LTGT_EXPR:
2116 case CONVERT_EXPR:
2118 case CONJ_EXPR:
2120 case PREDECREMENT_EXPR:
2121 case PREINCREMENT_EXPR:
2122 case POSTDECREMENT_EXPR:
2123 case POSTINCREMENT_EXPR:
2125 case ASM_EXPR:
2127 case REALIGN_LOAD_EXPR:
2129 case REDUC_MAX_EXPR:
2130 case REDUC_MIN_EXPR:
2131 case REDUC_PLUS_EXPR:
2132 case WIDEN_SUM_EXPR:
2133 case DOT_PROD_EXPR:
2134 case VEC_WIDEN_MULT_HI_EXPR:
2135 case VEC_WIDEN_MULT_LO_EXPR:
2136 case VEC_UNPACK_HI_EXPR:
2137 case VEC_UNPACK_LO_EXPR:
2138 case VEC_PACK_MOD_EXPR:
2139 case VEC_PACK_SAT_EXPR:
2141 case WIDEN_MULT_EXPR:
2143 case VEC_EXTRACT_EVEN_EXPR:
2144 case VEC_EXTRACT_ODD_EXPR:
2145 case VEC_INTERLEAVE_HIGH_EXPR:
2146 case VEC_INTERLEAVE_LOW_EXPR:
2148 case RESX_EXPR:
2149 d->count += 1;
2150 break;
2152 case SWITCH_EXPR:
2153 /* TODO: Cost of a switch should be derived from the number of
2154 branches. */
2155 d->count += d->weights->switch_cost;
2156 break;
2158 /* A few special cases of expensive operations. This is useful
2159 for avoiding the inlining of functions that contain too many of these. */
2160 case TRUNC_DIV_EXPR:
2161 case CEIL_DIV_EXPR:
2162 case FLOOR_DIV_EXPR:
2163 case ROUND_DIV_EXPR:
2164 case EXACT_DIV_EXPR:
2165 case TRUNC_MOD_EXPR:
2166 case CEIL_MOD_EXPR:
2167 case FLOOR_MOD_EXPR:
2168 case ROUND_MOD_EXPR:
2169 case RDIV_EXPR:
2170 d->count += d->weights->div_mod_cost;
2171 break;
2172 case CALL_EXPR:
2174 tree decl = get_callee_fndecl (x);
2175 tree arg;
2177 cost = d->weights->call_cost;
2178 if (decl && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
2179 switch (DECL_FUNCTION_CODE (decl))
2181 case BUILT_IN_CONSTANT_P:
2182 *walk_subtrees = 0;
2183 return NULL_TREE;
2184 case BUILT_IN_EXPECT:
2185 return NULL_TREE;
2186 /* Prefetch instruction is not expensive. */
2187 case BUILT_IN_PREFETCH:
2188 cost = 1;
2189 break;
2190 default:
2191 break;
2194 /* Our cost must be kept in sync with cgraph_estimate_size_after_inlining,
2195 which uses the function declaration to figure out the arguments. */
2196 if (!decl)
2198 for (arg = TREE_OPERAND (x, 1); arg; arg = TREE_CHAIN (arg))
2199 d->count += estimate_move_cost (TREE_TYPE (TREE_VALUE (arg)));
2201 else
2203 for (arg = DECL_ARGUMENTS (decl); arg; arg = TREE_CHAIN (arg))
2204 d->count += estimate_move_cost (TREE_TYPE (arg));
2207 d->count += cost;
2208 break;
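/* A worked example of the call cost above (a sketch, assuming word-sized
   scalar arguments and eni_time_weights from init_inline_once below): a
   call such as "memset (p, 0, n)" with a known DECL costs
   call_cost (10) plus one move per formal argument, 10 + 1 + 1 + 1 == 13.  */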
2211 case OMP_PARALLEL:
2212 case OMP_FOR:
2213 case OMP_SECTIONS:
2214 case OMP_SINGLE:
2215 case OMP_SECTION:
2216 case OMP_MASTER:
2217 case OMP_ORDERED:
2218 case OMP_CRITICAL:
2219 case OMP_ATOMIC:
2220 /* OpenMP directives are generally very expensive. */
2221 d->count += d->weights->omp_cost;
2222 break;
2224 default:
2225 gcc_unreachable ();
2227 return NULL;
2230 /* Estimate number of instructions that will be created by expanding EXPR.
2231 WEIGHTS contains weights attributed to various constructs. */
2233 int
2234 estimate_num_insns (tree expr, eni_weights *weights)
2236 struct pointer_set_t *visited_nodes;
2237 basic_block bb;
2238 block_stmt_iterator bsi;
2239 struct function *my_function;
2240 struct eni_data data;
2242 data.count = 0;
2243 data.weights = weights;
2245 /* If we're given an entire function, walk the CFG. */
2246 if (TREE_CODE (expr) == FUNCTION_DECL)
2248 my_function = DECL_STRUCT_FUNCTION (expr);
2249 gcc_assert (my_function && my_function->cfg);
2250 visited_nodes = pointer_set_create ();
2251 FOR_EACH_BB_FN (bb, my_function)
2253 for (bsi = bsi_start (bb);
2254 !bsi_end_p (bsi);
2255 bsi_next (&bsi))
2257 walk_tree (bsi_stmt_ptr (bsi), estimate_num_insns_1,
2258 &data, visited_nodes);
2261 pointer_set_destroy (visited_nodes);
2263 else
2264 walk_tree_without_duplicates (&expr, estimate_num_insns_1, &data);
2266 return data.count;
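/* A hypothetical caller of the estimator above (a sketch; it assumes
   FNDECL has a gimplified body with a CFG, as asserted above):

     int size = estimate_num_insns (fndecl, &eni_size_weights);
     int time = estimate_num_insns (fndecl, &eni_time_weights);

   The same statements thus yield different totals depending on which
   weight set the client cares about.  */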
2269 /* Initializes weights used by estimate_num_insns. */
2271 void
2272 init_inline_once (void)
2274 eni_inlining_weights.call_cost = PARAM_VALUE (PARAM_INLINE_CALL_COST);
2275 eni_inlining_weights.div_mod_cost = 10;
2276 eni_inlining_weights.switch_cost = 1;
2277 eni_inlining_weights.omp_cost = 40;
2279 eni_size_weights.call_cost = 1;
2280 eni_size_weights.div_mod_cost = 1;
2281 eni_size_weights.switch_cost = 10;
2282 eni_size_weights.omp_cost = 40;
2284 /* Estimating the time for a call is difficult, since we have no idea
2285 what the called function does. In the current uses of eni_time_weights,
2286 underestimating the cost does less harm than overestimating it, so
2287 we choose a rather small value here. */
2288 eni_time_weights.call_cost = 10;
2289 eni_time_weights.div_mod_cost = 10;
2290 eni_time_weights.switch_cost = 4;
2291 eni_time_weights.omp_cost = 40;
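/* Summary of the defaults installed above; the call cost of the inlining
   weights comes from the inline-call-cost --param:

                         call   div/mod   switch   omp
     inlining weights    param     10        1      40
     size weights            1      1       10      40
     time weights           10     10        4      40  */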
2294 typedef struct function *function_p;
2296 DEF_VEC_P(function_p);
2297 DEF_VEC_ALLOC_P(function_p,heap);
2299 /* Initialized with NOGC, making this poisonous to the garbage collector. */
2300 static VEC(function_p,heap) *cfun_stack;
2302 void
2303 push_cfun (struct function *new_cfun)
2305 VEC_safe_push (function_p, heap, cfun_stack, cfun);
2306 cfun = new_cfun;
2309 void
2310 pop_cfun (void)
2312 cfun = VEC_pop (function_p, cfun_stack);
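/* Typical usage of the pair above (a sketch; tree_function_versioning
   below does exactly this to make the new copy the current function):

     push_cfun (DECL_STRUCT_FUNCTION (new_decl));
     ... operate with cfun pointing at the new function ...
     pop_cfun ();  */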
2315 /* Install new lexical TREE_BLOCK underneath 'current_block'. */
2316 static void
2317 add_lexical_block (tree current_block, tree new_block)
2319 tree *blk_p;
2321 /* Walk to the last sub-block. */
2322 for (blk_p = &BLOCK_SUBBLOCKS (current_block);
2323 *blk_p;
2324 blk_p = &TREE_CHAIN (*blk_p))
2326 *blk_p = new_block;
2327 BLOCK_SUPERCONTEXT (new_block) = current_block;
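/* E.g. (a sketch): if CURRENT_BLOCK already has sub-blocks B1 -> B2, the
   walk above stops at B2's NULL chain, giving B1 -> B2 -> NEW_BLOCK with
   BLOCK_SUPERCONTEXT (NEW_BLOCK) == CURRENT_BLOCK.  */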
2330 /* If *TP is a CALL_EXPR, replace it with its inline expansion. */
2332 static bool
2333 expand_call_inline (basic_block bb, tree stmt, tree *tp, void *data)
2335 copy_body_data *id;
2336 tree t;
2337 tree use_retvar;
2338 tree fn;
2339 splay_tree st;
2340 tree args;
2341 tree return_slot;
2342 tree modify_dest;
2343 location_t saved_location;
2344 struct cgraph_edge *cg_edge;
2345 const char *reason;
2346 basic_block return_block;
2347 edge e;
2348 block_stmt_iterator bsi, stmt_bsi;
2349 bool successfully_inlined = FALSE;
2350 bool purge_dead_abnormal_edges;
2351 tree t_step;
2352 tree var;
2354 /* See what we've got. */
2355 id = (copy_body_data *) data;
2356 t = *tp;
2358 /* Set input_location here so we get the right instantiation context
2359 if we call instantiate_decl from inlinable_function_p. */
2360 saved_location = input_location;
2361 if (EXPR_HAS_LOCATION (t))
2362 input_location = EXPR_LOCATION (t);
2364 /* From here on, we're only interested in CALL_EXPRs. */
2365 if (TREE_CODE (t) != CALL_EXPR)
2366 goto egress;
2368 /* First, see if we can figure out what function is being called.
2369 If we cannot, then there is no hope of inlining the function. */
2370 fn = get_callee_fndecl (t);
2371 if (!fn)
2372 goto egress;
2374 /* Turn forward declarations into real ones. */
2375 fn = cgraph_node (fn)->decl;
2377 /* If fn is a declaration of a function in a nested scope that was
2378 globally declared inline, we don't set its DECL_INITIAL.
2379 However, we can't blindly follow DECL_ABSTRACT_ORIGIN because the
2380 C++ front-end uses it for cdtors to refer to their internal
2381 declarations, that are not real functions. Fortunately those
2382 don't have trees to be saved, so we can tell by checking their
2383 DECL_SAVED_TREE. */
2384 if (! DECL_INITIAL (fn)
2385 && DECL_ABSTRACT_ORIGIN (fn)
2386 && DECL_SAVED_TREE (DECL_ABSTRACT_ORIGIN (fn)))
2387 fn = DECL_ABSTRACT_ORIGIN (fn);
2389 /* Objective-C and Fortran still call tree_rest_of_compilation directly.
2390 Kill this check once this is fixed. */
2391 if (!id->dst_node->analyzed)
2392 goto egress;
2394 cg_edge = cgraph_edge (id->dst_node, stmt);
2396 /* Constant propagation on arguments done during previous inlining
2397 may create new direct calls. Produce an edge for it. */
2398 if (!cg_edge)
2400 struct cgraph_node *dest = cgraph_node (fn);
2402 /* We have a missing edge in the callgraph. This can happen when
2403 previous inlining turned an indirect call into a direct call by
2404 constant propagating arguments. In all other cases we hit a bug
2405 (incorrect node sharing is the most common reason for missing edges). */
2406 gcc_assert (dest->needed || !flag_unit_at_a_time);
2407 cgraph_create_edge (id->dst_node, dest, stmt,
2408 bb->count, CGRAPH_FREQ_BASE,
2409 bb->loop_depth)->inline_failed
2410 = N_("originally indirect function call not considered for inlining");
2411 if (dump_file)
2413 fprintf (dump_file, "Created new direct edge to %s",
2414 cgraph_node_name (dest));
2416 goto egress;
2419 /* Don't try to inline functions that are not well-suited to
2420 inlining. */
2421 if (!cgraph_inline_p (cg_edge, &reason))
2423 if (lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn))
2424 /* Avoid warnings during early inline pass. */
2425 && (!flag_unit_at_a_time || cgraph_global_info_ready))
2427 sorry ("inlining failed in call to %q+F: %s", fn, reason);
2428 sorry ("called from here");
2430 else if (warn_inline && DECL_DECLARED_INLINE_P (fn)
2431 && !DECL_IN_SYSTEM_HEADER (fn)
2432 && strlen (reason)
2433 && !lookup_attribute ("noinline", DECL_ATTRIBUTES (fn))
2434 /* Avoid warnings during early inline pass. */
2435 && (!flag_unit_at_a_time || cgraph_global_info_ready))
2437 warning (OPT_Winline, "inlining failed in call to %q+F: %s",
2438 fn, reason);
2439 warning (OPT_Winline, "called from here");
2441 goto egress;
2443 fn = cg_edge->callee->decl;
2445 #ifdef ENABLE_CHECKING
2446 if (cg_edge->callee->decl != id->dst_node->decl)
2447 verify_cgraph_node (cg_edge->callee);
2448 #endif
2450 /* We will be inlining this callee. */
2451 id->eh_region = lookup_stmt_eh_region (stmt);
2453 /* Split the block holding the CALL_EXPR. */
2454 e = split_block (bb, stmt);
2455 bb = e->src;
2456 return_block = e->dest;
2457 remove_edge (e);
2459 /* split_block splits after the statement; work around this by
2460 moving the call into the second block manually. Not pretty,
2461 but seems easier than doing the CFG manipulation by hand
2462 when the CALL_EXPR is in the last statement of BB. */
2463 stmt_bsi = bsi_last (bb);
2464 bsi_remove (&stmt_bsi, false);
2466 /* If the CALL_EXPR was in the last statement of BB, it may have
2467 been the source of abnormal edges. In this case, schedule
2468 the removal of dead abnormal edges. */
2469 bsi = bsi_start (return_block);
2470 if (bsi_end_p (bsi))
2472 bsi_insert_after (&bsi, stmt, BSI_NEW_STMT);
2473 purge_dead_abnormal_edges = true;
2475 else
2477 bsi_insert_before (&bsi, stmt, BSI_NEW_STMT);
2478 purge_dead_abnormal_edges = false;
2481 stmt_bsi = bsi_start (return_block);
2483 /* Build a block containing code to initialize the arguments, the
2484 actual inline expansion of the body, and a label for the return
2485 statements within the function to jump to. The type of the
2486 statement expression is the return type of the function call. */
2487 id->block = make_node (BLOCK);
2488 BLOCK_ABSTRACT_ORIGIN (id->block) = fn;
2489 BLOCK_SOURCE_LOCATION (id->block) = input_location;
2490 add_lexical_block (TREE_BLOCK (stmt), id->block);
2492 /* Local declarations will be replaced by their equivalents in this
2493 map. */
2494 st = id->decl_map;
2495 id->decl_map = splay_tree_new (splay_tree_compare_pointers,
2496 NULL, NULL);
2498 /* Initialize the parameters. */
2499 args = TREE_OPERAND (t, 1);
2501 /* Record the function we are about to inline. */
2502 id->src_fn = fn;
2503 id->src_node = cg_edge->callee;
2504 id->src_cfun = DECL_STRUCT_FUNCTION (fn);
2506 initialize_inlined_parameters (id, args, TREE_OPERAND (t, 2), fn, bb);
2508 if (DECL_INITIAL (fn))
2509 add_lexical_block (id->block, remap_blocks (DECL_INITIAL (fn), id));
2511 /* Return statements in the function body will be replaced by jumps
2512 to the RET_LABEL. */
2514 gcc_assert (DECL_INITIAL (fn));
2515 gcc_assert (TREE_CODE (DECL_INITIAL (fn)) == BLOCK);
2517 /* Find the lhs to which the result of this call is assigned. */
2518 return_slot = NULL;
2519 if (TREE_CODE (stmt) == GIMPLE_MODIFY_STMT)
2521 modify_dest = GIMPLE_STMT_OPERAND (stmt, 0);
2523 /* The function which we are inlining might not return a value,
2524 in which case we should issue a warning that the function
2525 does not return a value. In that case the optimizers will
2526 see that the variable to which the value is assigned was not
2527 initialized. We do not want to issue a warning about that
2528 uninitialized variable. */
2529 if (DECL_P (modify_dest))
2530 TREE_NO_WARNING (modify_dest) = 1;
2531 if (CALL_EXPR_RETURN_SLOT_OPT (t))
2533 return_slot = modify_dest;
2534 modify_dest = NULL;
2537 else
2538 modify_dest = NULL;
2540 /* Declare the return variable for the function. */
2541 declare_return_variable (id, return_slot,
2542 modify_dest, &use_retvar);
2544 /* This is it. Duplicate the callee body. Assume callee is
2545 pre-gimplified. Note that we must not alter the caller
2546 function in any way before this point, as this CALL_EXPR may be
2547 a self-referential call; if we're calling ourselves, we need to
2548 duplicate our body before altering anything. */
2549 copy_body (id, bb->count, bb->frequency, bb, return_block);
2551 /* Add local vars in this inlined callee to caller. */
2552 t_step = id->src_cfun->unexpanded_var_list;
2553 for (; t_step; t_step = TREE_CHAIN (t_step))
2555 var = TREE_VALUE (t_step);
2556 if (TREE_STATIC (var) && !TREE_ASM_WRITTEN (var))
2557 cfun->unexpanded_var_list = tree_cons (NULL_TREE, var,
2558 cfun->unexpanded_var_list);
2559 else
2560 cfun->unexpanded_var_list = tree_cons (NULL_TREE, remap_decl (var, id),
2561 cfun->unexpanded_var_list);
2564 /* Clean up. */
2565 splay_tree_delete (id->decl_map);
2566 id->decl_map = st;
2568 /* If the inlined function returns a result that we care about,
2569 clobber the CALL_EXPR with a reference to the return variable. */
2570 if (use_retvar && (TREE_CODE (bsi_stmt (stmt_bsi)) != CALL_EXPR))
2572 *tp = use_retvar;
2573 if (gimple_in_ssa_p (cfun))
2575 update_stmt (stmt);
2576 mark_symbols_for_renaming (stmt);
2578 maybe_clean_or_replace_eh_stmt (stmt, stmt);
2580 else
2581 /* We're modifying a TSI owned by gimple_expand_calls_inline();
2582 tsi_delink() will leave the iterator in a sane state. */
2584 /* Handle the case of inlining a function that is missing a return
2585 statement, so the return value becomes undefined. */
2586 if (TREE_CODE (stmt) == GIMPLE_MODIFY_STMT
2587 && TREE_CODE (GIMPLE_STMT_OPERAND (stmt, 0)) == SSA_NAME)
2589 tree name = TREE_OPERAND (stmt, 0);
2590 tree var = SSA_NAME_VAR (TREE_OPERAND (stmt, 0));
2591 tree def = gimple_default_def (cfun, var);
2593 /* If the variable is used undefined, make this name undefined via
2594 a move. */
2595 if (def)
2597 TREE_OPERAND (stmt, 1) = def;
2598 update_stmt (stmt);
2600 /* Otherwise make this variable undefined. */
2601 else
2603 bsi_remove (&stmt_bsi, true);
2604 set_default_def (var, name);
2605 SSA_NAME_DEF_STMT (name) = build_empty_stmt ();
2608 else
2609 bsi_remove (&stmt_bsi, true);
2612 if (purge_dead_abnormal_edges)
2613 tree_purge_dead_abnormal_call_edges (return_block);
2615 /* If the value of the new expression is ignored, that's OK. We
2616 don't warn about this for CALL_EXPRs, so we shouldn't warn about
2617 the equivalent inlined version either. */
2618 TREE_USED (*tp) = 1;
2620 /* Output the inlining info for this abstract function, since it has been
2621 inlined. If we don't do this now, we can lose the information about the
2622 variables in the function when the blocks get blown away as soon as we
2623 remove the cgraph node. */
2624 (*debug_hooks->outlining_inline_function) (cg_edge->callee->decl);
2626 /* Update callgraph if needed. */
2627 cgraph_remove_node (cg_edge->callee);
2629 id->block = NULL_TREE;
2630 successfully_inlined = TRUE;
2632 egress:
2633 input_location = saved_location;
2634 return successfully_inlined;
2637 /* Expand call statements reachable from STMT_P.
2638 We can only have CALL_EXPRs as the "toplevel" tree code or nested
2639 in a GIMPLE_MODIFY_STMT. See tree-gimple.c:get_call_expr_in().
2640 Unfortunately, we cannot use that function here because we need a
2641 pointer to the CALL_EXPR, not the tree itself. */
2643 static bool
2644 gimple_expand_calls_inline (basic_block bb, copy_body_data *id)
2646 block_stmt_iterator bsi;
2648 /* Register specific tree functions. */
2649 tree_register_cfg_hooks ();
2650 for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
2652 tree *expr_p = bsi_stmt_ptr (bsi);
2653 tree stmt = *expr_p;
2655 if (TREE_CODE (*expr_p) == GIMPLE_MODIFY_STMT)
2656 expr_p = &GIMPLE_STMT_OPERAND (*expr_p, 1);
2657 if (TREE_CODE (*expr_p) == WITH_SIZE_EXPR)
2658 expr_p = &TREE_OPERAND (*expr_p, 0);
2659 if (TREE_CODE (*expr_p) == CALL_EXPR)
2660 if (expand_call_inline (bb, stmt, expr_p, id))
2661 return true;
2663 return false;
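/* The three statement shapes unwrapped by the loop above (a sketch):

     foo (x);                               CALL_EXPR at the top level
     lhs = foo (x);                         nested in a GIMPLE_MODIFY_STMT
     lhs = WITH_SIZE_EXPR <foo (x), n>;     additionally wrapped  */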
2666 /* Walk all basic blocks created after FIRST and try to fold every statement
2667 in the STATEMENTS pointer set. */
2668 static void
2669 fold_marked_statements (int first, struct pointer_set_t *statements)
2671 for (;first < n_basic_blocks;first++)
2672 if (BASIC_BLOCK (first))
2674 block_stmt_iterator bsi;
2675 for (bsi = bsi_start (BASIC_BLOCK (first));
2676 !bsi_end_p (bsi); bsi_next (&bsi))
2677 if (pointer_set_contains (statements, bsi_stmt (bsi)))
2679 tree old_stmt = bsi_stmt (bsi);
2680 if (fold_stmt (bsi_stmt_ptr (bsi)))
2682 update_stmt (bsi_stmt (bsi));
2683 if (maybe_clean_or_replace_eh_stmt (old_stmt, bsi_stmt (bsi)))
2684 tree_purge_dead_eh_edges (BASIC_BLOCK (first));
2690 /* Return true if BB has at least one abnormal outgoing edge. */
2692 static inline bool
2693 has_abnormal_outgoing_edge_p (basic_block bb)
2695 edge e;
2696 edge_iterator ei;
2698 FOR_EACH_EDGE (e, ei, bb->succs)
2699 if (e->flags & EDGE_ABNORMAL)
2700 return true;
2702 return false;
2705 /* When a block from the inlined function contains a call with side-effects
2706 in its middle and gets inlined into a function with non-local labels, the
2707 call becomes a potential non-local goto, so we need to add the appropriate edges. */
2709 static void
2710 make_nonlocal_label_edges (void)
2712 block_stmt_iterator bsi;
2713 basic_block bb;
2715 FOR_EACH_BB (bb)
2717 for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
2719 tree stmt = bsi_stmt (bsi);
2720 if (tree_can_make_abnormal_goto (stmt))
2722 if (stmt == bsi_stmt (bsi_last (bb)))
2724 if (!has_abnormal_outgoing_edge_p (bb))
2725 make_abnormal_goto_edges (bb, true);
2727 else
2729 edge e = split_block (bb, stmt);
2730 bb = e->src;
2731 make_abnormal_goto_edges (bb, true);
2733 break;
2736 /* Update PHIs on nonlocal goto receivers we (possibly)
2737 just created new edges into. */
2738 if (TREE_CODE (stmt) == LABEL_EXPR
2739 && gimple_in_ssa_p (cfun))
2741 tree target = LABEL_EXPR_LABEL (stmt);
2742 if (DECL_NONLOCAL (target))
2744 tree phi;
2746 for (phi = phi_nodes (bb); phi; phi = PHI_CHAIN (phi))
2748 gcc_assert (SSA_NAME_OCCURS_IN_ABNORMAL_PHI
2749 (PHI_RESULT (phi)));
2750 mark_sym_for_renaming
2751 (SSA_NAME_VAR (PHI_RESULT (phi)));
2759 /* Expand calls to inline functions in the body of FN. */
2761 unsigned int
2762 optimize_inline_calls (tree fn)
2764 copy_body_data id;
2765 tree prev_fn;
2766 basic_block bb;
2767 int last = n_basic_blocks;
2768 /* There is no point in performing inlining if errors have already
2769 occurred -- and we might crash if we try to inline invalid
2770 code. */
2771 if (errorcount || sorrycount)
2772 return 0;
2774 /* Clear out ID. */
2775 memset (&id, 0, sizeof (id));
2777 id.src_node = id.dst_node = cgraph_node (fn);
2778 id.dst_fn = fn;
2779 /* Or any functions that aren't finished yet. */
2780 prev_fn = NULL_TREE;
2781 if (current_function_decl)
2783 id.dst_fn = current_function_decl;
2784 prev_fn = current_function_decl;
2787 id.copy_decl = copy_decl_maybe_to_var;
2788 id.transform_call_graph_edges = CB_CGE_DUPLICATE;
2789 id.transform_new_cfg = false;
2790 id.transform_return_to_modify = true;
2791 id.transform_lang_insert_block = false;
2792 id.statements_to_fold = pointer_set_create ();
2794 push_gimplify_context ();
2796 /* Reach the trees by walking over the CFG, and note the
2797 enclosing basic-blocks in the call edges. */
2798 /* We walk the blocks going forward, because inlined function bodies
2799 will split id->current_basic_block, and the new blocks will
2800 follow it; we'll trudge through them, processing their CALL_EXPRs
2801 along the way. */
2802 FOR_EACH_BB (bb)
2803 gimple_expand_calls_inline (bb, &id);
2805 pop_gimplify_context (NULL);
2806 /* Renumber the (code) basic_blocks consecutively. */
2807 compact_blocks ();
2808 /* Renumber the lexical scoping (non-code) blocks consecutively. */
2809 number_blocks (fn);
2811 #ifdef ENABLE_CHECKING
2813 struct cgraph_edge *e;
2815 verify_cgraph_node (id.dst_node);
2817 /* Double check that we inlined everything we are supposed to inline. */
2818 for (e = id.dst_node->callees; e; e = e->next_callee)
2819 gcc_assert (e->inline_failed);
2821 #endif
2823 /* We are not going to keep the cgraph edges up to date.
2824 Kill them so they won't confuse us. */
2825 cgraph_node_remove_callees (id.dst_node);
2827 fold_marked_statements (last, id.statements_to_fold);
2828 pointer_set_destroy (id.statements_to_fold);
2829 fold_cond_expr_cond ();
2830 if (current_function_has_nonlocal_label)
2831 make_nonlocal_label_edges ();
2832 /* We make no attempts to keep dominance info up-to-date. */
2833 free_dominance_info (CDI_DOMINATORS);
2834 free_dominance_info (CDI_POST_DOMINATORS);
2835 /* It would be nice to check SSA/CFG/statement consistency here, but it is
2836 not possible yet - the IPA passes might make various functions non-throwing,
2837 and they don't bother to proactively update local EH info. This is done
2838 later in the fixup_cfg pass, which also executes the verification. */
2839 return (TODO_update_ssa | TODO_cleanup_cfg
2840 | (gimple_in_ssa_p (cfun) ? TODO_remove_unused_locals : 0)
2841 | (profile_status != PROFILE_ABSENT ? TODO_rebuild_frequencies : 0));
2844 /* FN is a function that has a complete body, and CLONE is a function whose
2845 body is to be set to a copy of FN, mapping argument declarations according
2846 to the ARG_MAP splay_tree. */
2848 void
2849 clone_body (tree clone, tree fn, void *arg_map)
2851 copy_body_data id;
2853 /* Clone the body, as if we were making an inline call. But, remap the
2854 parameters in the callee to the parameters of caller. */
2855 memset (&id, 0, sizeof (id));
2856 id.src_fn = fn;
2857 id.dst_fn = clone;
2858 id.src_cfun = DECL_STRUCT_FUNCTION (fn);
2859 id.decl_map = (splay_tree)arg_map;
2861 id.copy_decl = copy_decl_no_change;
2862 id.transform_call_graph_edges = CB_CGE_DUPLICATE;
2863 id.transform_new_cfg = true;
2864 id.transform_return_to_modify = false;
2865 id.transform_lang_insert_block = true;
2867 /* We're not inside any EH region. */
2868 id.eh_region = -1;
2870 /* Actually copy the body. */
2871 append_to_statement_list_force (copy_generic_body (&id), &DECL_SAVED_TREE (clone));
2874 /* Passed to walk_tree. Copies the node pointed to, if appropriate. */
2876 tree
2877 copy_tree_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
2879 enum tree_code code = TREE_CODE (*tp);
2880 enum tree_code_class cl = TREE_CODE_CLASS (code);
2882 /* We make copies of most nodes. */
2883 if (IS_EXPR_CODE_CLASS (cl)
2884 || IS_GIMPLE_STMT_CODE_CLASS (cl)
2885 || code == TREE_LIST
2886 || code == TREE_VEC
2887 || code == TYPE_DECL
2888 || code == OMP_CLAUSE)
2890 /* Because the chain gets clobbered when we make a copy, we save it
2891 here. */
2892 tree chain = NULL_TREE, new;
2894 if (!GIMPLE_TUPLE_P (*tp))
2895 chain = TREE_CHAIN (*tp);
2897 /* Copy the node. */
2898 new = copy_node (*tp);
2900 /* Propagate mudflap marked-ness. */
2901 if (flag_mudflap && mf_marked_p (*tp))
2902 mf_mark (new);
2904 *tp = new;
2906 /* Now, restore the chain, if appropriate. That will cause
2907 walk_tree to walk into the chain as well. */
2908 if (code == PARM_DECL
2909 || code == TREE_LIST
2910 || code == OMP_CLAUSE)
2911 TREE_CHAIN (*tp) = chain;
2913 /* For now, we don't update BLOCKs when we make copies. So, we
2914 have to nullify all BIND_EXPRs. */
2915 if (TREE_CODE (*tp) == BIND_EXPR)
2916 BIND_EXPR_BLOCK (*tp) = NULL_TREE;
2918 else if (code == CONSTRUCTOR)
2920 /* CONSTRUCTOR nodes need special handling because
2921 we need to duplicate the vector of elements. */
2922 tree new;
2924 new = copy_node (*tp);
2926 /* Propagate mudflap marked-ness. */
2927 if (flag_mudflap && mf_marked_p (*tp))
2928 mf_mark (new);
2930 CONSTRUCTOR_ELTS (new) = VEC_copy (constructor_elt, gc,
2931 CONSTRUCTOR_ELTS (*tp));
2932 *tp = new;
2934 else if (TREE_CODE_CLASS (code) == tcc_type)
2935 *walk_subtrees = 0;
2936 else if (TREE_CODE_CLASS (code) == tcc_declaration)
2937 *walk_subtrees = 0;
2938 else if (TREE_CODE_CLASS (code) == tcc_constant)
2939 *walk_subtrees = 0;
2940 else
2941 gcc_assert (code != STATEMENT_LIST);
2942 return NULL_TREE;
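/* A hypothetical deep copy using the callback above (a sketch; unsave_r
   below does essentially this, plus declaration remapping):

     tree copy = expr;
     walk_tree (&copy, copy_tree_r, NULL, NULL);

   EXPR itself is left untouched; COPY ends up pointing at fresh nodes.  */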
2945 /* The SAVE_EXPR pointed to by TP is being copied. If ST contains
2946 information indicating to what new SAVE_EXPR this one should be mapped,
2947 use that one. Otherwise, create a new node and enter it in ST. FN is
2948 the function into which the copy will be placed. */
2950 static void
2951 remap_save_expr (tree *tp, void *st_, int *walk_subtrees)
2953 splay_tree st = (splay_tree) st_;
2954 splay_tree_node n;
2955 tree t;
2957 /* See if we already encountered this SAVE_EXPR. */
2958 n = splay_tree_lookup (st, (splay_tree_key) *tp);
2960 /* If we didn't already remap this SAVE_EXPR, do so now. */
2961 if (!n)
2963 t = copy_node (*tp);
2965 /* Remember this SAVE_EXPR. */
2966 splay_tree_insert (st, (splay_tree_key) *tp, (splay_tree_value) t);
2967 /* Make sure we don't remap an already-remapped SAVE_EXPR. */
2968 splay_tree_insert (st, (splay_tree_key) t, (splay_tree_value) t);
2970 else
2972 /* We've already walked into this SAVE_EXPR; don't do it again. */
2973 *walk_subtrees = 0;
2974 t = (tree) n->value;
2977 /* Replace this SAVE_EXPR with the copy. */
2978 *tp = t;
2981 /* Called via walk_tree. If *TP points to a LABEL_EXPR for a local label,
2982 copies the declaration and enters it in the splay_tree in DATA (which is
2983 really a `copy_body_data *'). */
2985 static tree
2986 mark_local_for_remap_r (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED,
2987 void *data)
2989 copy_body_data *id = (copy_body_data *) data;
2991 /* Don't walk into types. */
2992 if (TYPE_P (*tp))
2993 *walk_subtrees = 0;
2995 else if (TREE_CODE (*tp) == LABEL_EXPR)
2997 tree decl = TREE_OPERAND (*tp, 0);
2999 /* Copy the decl and remember the copy. */
3000 insert_decl_map (id, decl, id->copy_decl (decl, id));
3003 return NULL_TREE;
3006 /* Perform any modifications to EXPR required when it is unsaved. Does
3007 not recurse into EXPR's subtrees. */
3009 static void
3010 unsave_expr_1 (tree expr)
3012 switch (TREE_CODE (expr))
3014 case TARGET_EXPR:
3015 /* Don't mess with a TARGET_EXPR that hasn't been expanded.
3016 It's OK for this to happen if it was part of a subtree that
3017 isn't immediately expanded, such as operand 2 of another
3018 TARGET_EXPR. */
3019 if (TREE_OPERAND (expr, 1))
3020 break;
3022 TREE_OPERAND (expr, 1) = TREE_OPERAND (expr, 3);
3023 TREE_OPERAND (expr, 3) = NULL_TREE;
3024 break;
3026 default:
3027 break;
3031 /* Called via walk_tree when an expression is unsaved. Using the
3032 splay_tree pointed to by ST (the decl_map of the `copy_body_data' in
3033 DATA), remaps all local declarations to appropriate replacements. */
3035 static tree
3036 unsave_r (tree *tp, int *walk_subtrees, void *data)
3038 copy_body_data *id = (copy_body_data *) data;
3039 splay_tree st = id->decl_map;
3040 splay_tree_node n;
3042 /* Only a local declaration (variable or label). */
3043 if ((TREE_CODE (*tp) == VAR_DECL && !TREE_STATIC (*tp))
3044 || TREE_CODE (*tp) == LABEL_DECL)
3046 /* Lookup the declaration. */
3047 n = splay_tree_lookup (st, (splay_tree_key) *tp);
3049 /* If it's there, remap it. */
3050 if (n)
3051 *tp = (tree) n->value;
3054 else if (TREE_CODE (*tp) == STATEMENT_LIST)
3055 copy_statement_list (tp);
3056 else if (TREE_CODE (*tp) == BIND_EXPR)
3057 copy_bind_expr (tp, walk_subtrees, id);
3058 else if (TREE_CODE (*tp) == SAVE_EXPR)
3059 remap_save_expr (tp, st, walk_subtrees);
3060 else
3062 copy_tree_r (tp, walk_subtrees, NULL);
3064 /* Do whatever unsaving is required. */
3065 unsave_expr_1 (*tp);
3068 /* Keep iterating. */
3069 return NULL_TREE;
3072 /* Copies everything in EXPR and replaces variables, labels
3073 and SAVE_EXPRs local to EXPR. */
3075 tree
3076 unsave_expr_now (tree expr)
3078 copy_body_data id;
3080 /* There's nothing to do for NULL_TREE. */
3081 if (expr == 0)
3082 return expr;
3084 /* Set up ID. */
3085 memset (&id, 0, sizeof (id));
3086 id.src_fn = current_function_decl;
3087 id.dst_fn = current_function_decl;
3088 id.decl_map = splay_tree_new (splay_tree_compare_pointers, NULL, NULL);
3090 id.copy_decl = copy_decl_no_change;
3091 id.transform_call_graph_edges = CB_CGE_DUPLICATE;
3092 id.transform_new_cfg = false;
3093 id.transform_return_to_modify = false;
3094 id.transform_lang_insert_block = false;
3096 /* Walk the tree once to find local labels. */
3097 walk_tree_without_duplicates (&expr, mark_local_for_remap_r, &id);
3099 /* Walk the tree again, copying, remapping, and unsaving. */
3100 walk_tree (&expr, unsave_r, &id, NULL);
3102 /* Clean up. */
3103 splay_tree_delete (id.decl_map);
3105 return expr;
3108 /* Allow someone to determine if SEARCH is a child of TOP from gdb. */
3110 static tree
3111 debug_find_tree_1 (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED, void *data)
3113 if (*tp == data)
3114 return (tree) data;
3115 else
3116 return NULL;
3119 bool
3120 debug_find_tree (tree top, tree search)
3122 return walk_tree_without_duplicates (&top, debug_find_tree_1, search) != 0;
3126 /* Declare the variables created by the inliner. Add all the variables
3127 in VARS to BLOCK and to the function's unexpanded_var_list. */
3129 static void
3130 declare_inline_vars (tree block, tree vars)
3132 tree t;
3133 for (t = vars; t; t = TREE_CHAIN (t))
3135 DECL_SEEN_IN_BIND_EXPR_P (t) = 1;
3136 gcc_assert (!TREE_STATIC (t) && !TREE_ASM_WRITTEN (t));
3137 cfun->unexpanded_var_list =
3138 tree_cons (NULL_TREE, t,
3139 cfun->unexpanded_var_list);
3142 if (block)
3143 BLOCK_VARS (block) = chainon (BLOCK_VARS (block), vars);
3147 /* Finish up the copy COPY of DECL. DECL originally was in ID->src_fn,
3148 but the copy will belong to ID->dst_fn. Shared by the PARM_DECL to
3149 VAR_DECL translation helpers below. */
3151 static tree
3152 copy_decl_for_dup_finish (copy_body_data *id, tree decl, tree copy)
3154 /* Don't generate debug information for the copy if we wouldn't have
3155 generated it for the original either. */
3156 DECL_ARTIFICIAL (copy) = DECL_ARTIFICIAL (decl);
3157 DECL_IGNORED_P (copy) = DECL_IGNORED_P (decl);
3159 /* Set the DECL_ABSTRACT_ORIGIN so the debugging routines know what
3160 declaration inspired this copy. */
3161 DECL_ABSTRACT_ORIGIN (copy) = DECL_ORIGIN (decl);
3163 /* The new variable/label has no RTL, yet. */
3164 if (CODE_CONTAINS_STRUCT (TREE_CODE (copy), TS_DECL_WRTL)
3165 && !TREE_STATIC (copy) && !DECL_EXTERNAL (copy))
3166 SET_DECL_RTL (copy, NULL_RTX);
3168 /* These args would always appear unused, if not for this. */
3169 TREE_USED (copy) = 1;
3171 /* Set the context for the new declaration. */
3172 if (!DECL_CONTEXT (decl))
3173 /* Globals stay global. */
3175 else if (DECL_CONTEXT (decl) != id->src_fn)
3176 /* Things that weren't in the scope of the function we're inlining
3177 from aren't in the scope we're inlining to, either. */
3179 else if (TREE_STATIC (decl))
3180 /* Function-scoped static variables should stay in the original
3181 function. */
3183 else
3184 /* Ordinary automatic local variables are now in the scope of the
3185 new function. */
3186 DECL_CONTEXT (copy) = id->dst_fn;
3188 return copy;
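/* Examples of the context rules above (a sketch):

     DECL_CONTEXT (decl) == NULL_TREE   a global; it stays global
     DECL_CONTEXT (decl) != id->src_fn  foreign scope; left alone
     TREE_STATIC (decl)                 function-local static; stays put
     otherwise                          DECL_CONTEXT (copy) = id->dst_fn  */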
3191 static tree
3192 copy_decl_to_var (tree decl, copy_body_data *id)
3194 tree copy, type;
3196 gcc_assert (TREE_CODE (decl) == PARM_DECL
3197 || TREE_CODE (decl) == RESULT_DECL);
3199 type = TREE_TYPE (decl);
3201 copy = build_decl (VAR_DECL, DECL_NAME (decl), type);
3202 TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (decl);
3203 TREE_READONLY (copy) = TREE_READONLY (decl);
3204 TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (decl);
3205 DECL_GIMPLE_REG_P (copy) = DECL_GIMPLE_REG_P (decl);
3207 return copy_decl_for_dup_finish (id, decl, copy);
3210 /* Like copy_decl_to_var, but create a return slot object instead of a
3211 pointer variable for return by invisible reference. */
3213 static tree
3214 copy_result_decl_to_var (tree decl, copy_body_data *id)
3216 tree copy, type;
3218 gcc_assert (TREE_CODE (decl) == PARM_DECL
3219 || TREE_CODE (decl) == RESULT_DECL);
3221 type = TREE_TYPE (decl);
3222 if (DECL_BY_REFERENCE (decl))
3223 type = TREE_TYPE (type);
3225 copy = build_decl (VAR_DECL, DECL_NAME (decl), type);
3226 TREE_READONLY (copy) = TREE_READONLY (decl);
3227 TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (decl);
3228 if (!DECL_BY_REFERENCE (decl))
3230 TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (decl);
3231 DECL_GIMPLE_REG_P (copy) = DECL_GIMPLE_REG_P (decl);
3234 return copy_decl_for_dup_finish (id, decl, copy);
3238 static tree
3239 copy_decl_no_change (tree decl, copy_body_data *id)
3241 tree copy;
3243 copy = copy_node (decl);
3245 /* The COPY is not abstract; it will be generated in DST_FN. */
3246 DECL_ABSTRACT (copy) = 0;
3247 lang_hooks.dup_lang_specific_decl (copy);
3249 /* TREE_ADDRESSABLE isn't used to indicate that a label's address has
3250 been taken; it's for internal bookkeeping in expand_goto_internal. */
3251 if (TREE_CODE (copy) == LABEL_DECL)
3253 TREE_ADDRESSABLE (copy) = 0;
3254 LABEL_DECL_UID (copy) = -1;
3257 return copy_decl_for_dup_finish (id, decl, copy);
3260 static tree
3261 copy_decl_maybe_to_var (tree decl, copy_body_data *id)
3263 if (TREE_CODE (decl) == PARM_DECL || TREE_CODE (decl) == RESULT_DECL)
3264 return copy_decl_to_var (decl, id);
3265 else
3266 return copy_decl_no_change (decl, id);
3269 /* Return a copy of the function's argument tree. */
3270 static tree
3271 copy_arguments_for_versioning (tree orig_parm, copy_body_data * id)
3273 tree *arg_copy, *parg;
3275 arg_copy = &orig_parm;
3276 for (parg = arg_copy; *parg; parg = &TREE_CHAIN (*parg))
3278 tree new = remap_decl (*parg, id);
3279 lang_hooks.dup_lang_specific_decl (new);
3280 TREE_CHAIN (new) = TREE_CHAIN (*parg);
3281 *parg = new;
3283 return orig_parm;
3286 /* Return a copy of the function's static chain. */
3287 static tree
3288 copy_static_chain (tree static_chain, copy_body_data * id)
3290 tree *chain_copy, *pvar;
3292 chain_copy = &static_chain;
3293 for (pvar = chain_copy; *pvar; pvar = &TREE_CHAIN (*pvar))
3295 tree new = remap_decl (*pvar, id);
3296 lang_hooks.dup_lang_specific_decl (new);
3297 TREE_CHAIN (new) = TREE_CHAIN (*pvar);
3298 *pvar = new;
3300 return static_chain;
3303 /* Return true if the function is allowed to be versioned.
3304 This is a guard for the versioning functionality. */
3305 bool
3306 tree_versionable_function_p (tree fndecl)
3308 if (fndecl == NULL_TREE)
3309 return false;
3310 /* ??? There are cases where a function is
3311 uninlinable but can be versioned. */
3312 if (!tree_inlinable_function_p (fndecl))
3313 return false;
3315 return true;
3318 /* Create a copy of a function's tree.
3319 OLD_DECL and NEW_DECL are FUNCTION_DECL tree nodes
3320 of the original function and the new copied function
3321 respectively. In case we want to replace a DECL
3322 tree with another tree while duplicating the function's
3323 body, TREE_MAP represents the mapping between these
3324 trees. If UPDATE_CLONES is set, the call_stmt fields
3325 of edges of clones of the function will be updated. */
3326 void
3327 tree_function_versioning (tree old_decl, tree new_decl, varray_type tree_map,
3328 bool update_clones)
3330 struct cgraph_node *old_version_node;
3331 struct cgraph_node *new_version_node;
3332 copy_body_data id;
3333 tree p;
3334 unsigned i;
3335 struct ipa_replace_map *replace_info;
3336 basic_block old_entry_block;
3337 tree t_step;
3338 tree old_current_function_decl = current_function_decl;
3340 gcc_assert (TREE_CODE (old_decl) == FUNCTION_DECL
3341 && TREE_CODE (new_decl) == FUNCTION_DECL);
3342 DECL_POSSIBLY_INLINED (old_decl) = 1;
3344 old_version_node = cgraph_node (old_decl);
3345 new_version_node = cgraph_node (new_decl);
3347 DECL_ARTIFICIAL (new_decl) = 1;
3348 DECL_ABSTRACT_ORIGIN (new_decl) = DECL_ORIGIN (old_decl);
3350 /* Prepare the data structures for the tree copy. */
3351 memset (&id, 0, sizeof (id));
3353 /* Generate a new name for the new version. */
3354 if (!update_clones)
3356 DECL_NAME (new_decl) = create_tmp_var_name (NULL);
3357 SET_DECL_ASSEMBLER_NAME (new_decl, DECL_NAME (new_decl));
3358 SET_DECL_RTL (new_decl, NULL_RTX);
3359 id.statements_to_fold = pointer_set_create ();
3362 id.decl_map = splay_tree_new (splay_tree_compare_pointers, NULL, NULL);
3363 id.src_fn = old_decl;
3364 id.dst_fn = new_decl;
3365 id.src_node = old_version_node;
3366 id.dst_node = new_version_node;
3367 id.src_cfun = DECL_STRUCT_FUNCTION (old_decl);
3369 id.copy_decl = copy_decl_no_change;
3370 id.transform_call_graph_edges
3371 = update_clones ? CB_CGE_MOVE_CLONES : CB_CGE_MOVE;
3372 id.transform_new_cfg = true;
3373 id.transform_return_to_modify = false;
3374 id.transform_lang_insert_block = false;
3376 current_function_decl = new_decl;
3377 old_entry_block = ENTRY_BLOCK_PTR_FOR_FUNCTION
3378 (DECL_STRUCT_FUNCTION (old_decl));
3379 initialize_cfun (new_decl, old_decl,
3380 old_entry_block->count,
3381 old_entry_block->frequency);
3382 push_cfun (DECL_STRUCT_FUNCTION (new_decl));
3384 /* Copy the function's static chain. */
3385 p = DECL_STRUCT_FUNCTION (old_decl)->static_chain_decl;
3386 if (p)
3387 DECL_STRUCT_FUNCTION (new_decl)->static_chain_decl =
3388 copy_static_chain (DECL_STRUCT_FUNCTION (old_decl)->static_chain_decl,
3389 &id);
3390 /* Copy the function's arguments. */
3391 if (DECL_ARGUMENTS (old_decl) != NULL_TREE)
3392 DECL_ARGUMENTS (new_decl) =
3393 copy_arguments_for_versioning (DECL_ARGUMENTS (old_decl), &id);
3395 /* If there's a tree_map, prepare for substitution. */
3396 if (tree_map)
3397 for (i = 0; i < VARRAY_ACTIVE_SIZE (tree_map); i++)
3399 replace_info = VARRAY_GENERIC_PTR (tree_map, i);
3400 if (replace_info->replace_p)
3401 insert_decl_map (&id, replace_info->old_tree,
3402 replace_info->new_tree);
3405 DECL_INITIAL (new_decl) = remap_blocks (DECL_INITIAL (id.src_fn), &id);
3407 /* Renumber the lexical scoping (non-code) blocks consecutively. */
3408 number_blocks (id.dst_fn);
3410 if (DECL_STRUCT_FUNCTION (old_decl)->unexpanded_var_list != NULL_TREE)
3411 /* Add local vars. */
3412 for (t_step = DECL_STRUCT_FUNCTION (old_decl)->unexpanded_var_list;
3413 t_step; t_step = TREE_CHAIN (t_step))
3415 tree var = TREE_VALUE (t_step);
3416 if (TREE_STATIC (var) && !TREE_ASM_WRITTEN (var))
3417 cfun->unexpanded_var_list = tree_cons (NULL_TREE, var,
3418 cfun->unexpanded_var_list);
3419 else
3420 cfun->unexpanded_var_list =
3421 tree_cons (NULL_TREE, remap_decl (var, &id),
3422 cfun->unexpanded_var_list);
3425 /* Copy the function's body. */
3426 copy_body (&id, old_entry_block->count, old_entry_block->frequency, ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR);
3428 if (DECL_RESULT (old_decl) != NULL_TREE)
3430 tree *res_decl = &DECL_RESULT (old_decl);
3431 DECL_RESULT (new_decl) = remap_decl (*res_decl, &id);
3432 lang_hooks.dup_lang_specific_decl (DECL_RESULT (new_decl));
3435 /* Renumber the lexical scoping (non-code) blocks consecutively. */
3436 number_blocks (new_decl);
3438 /* Clean up. */
3439 splay_tree_delete (id.decl_map);
3440 if (!update_clones)
3442 fold_marked_statements (0, id.statements_to_fold);
3443 pointer_set_destroy (id.statements_to_fold);
3444 fold_cond_expr_cond ();
3446 if (gimple_in_ssa_p (cfun))
3448 free_dominance_info (CDI_DOMINATORS);
3449 free_dominance_info (CDI_POST_DOMINATORS);
3450 if (!update_clones)
3451 delete_unreachable_blocks ();
3452 update_ssa (TODO_update_ssa);
3453 if (!update_clones)
3455 fold_cond_expr_cond ();
3456 if (need_ssa_update_p ())
3457 update_ssa (TODO_update_ssa);
3460 free_dominance_info (CDI_DOMINATORS);
3461 free_dominance_info (CDI_POST_DOMINATORS);
3462 pop_cfun ();
3463 current_function_decl = old_current_function_decl;
3464 gcc_assert (!current_function_decl
3465 || DECL_STRUCT_FUNCTION (current_function_decl) == cfun);
3466 return;
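/* A hypothetical TREE_MAP entry for the function above (a sketch): ask
   the new version to replace parameter OLD_PARM with the constant zero:

     struct ipa_replace_map *map = ...;
     map->old_tree = old_parm;
     map->new_tree = integer_zero_node;
     map->replace_p = true;  */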
3469 /* Duplicate a type, fields and all. */
3471 tree
3472 build_duplicate_type (tree type)
3474 struct copy_body_data id;
3476 memset (&id, 0, sizeof (id));
3477 id.src_fn = current_function_decl;
3478 id.dst_fn = current_function_decl;
3479 id.src_cfun = cfun;
3480 id.decl_map = splay_tree_new (splay_tree_compare_pointers, NULL, NULL);
3482 type = remap_type_1 (type, &id);
3484 splay_tree_delete (id.decl_map);
3486 return type;