gcc/tree-inline.c
1 /* Tree inlining.
2 Copyright 2001, 2002, 2003, 2004, 2005, 2006 Free Software Foundation, Inc.
3 Contributed by Alexandre Oliva <aoliva@redhat.com>
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 2, or (at your option)
10 any later version.
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to
19 the Free Software Foundation, 51 Franklin Street, Fifth Floor,
20 Boston, MA 02110-1301, USA. */
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tm.h"
26 #include "toplev.h"
27 #include "tree.h"
28 #include "tree-inline.h"
29 #include "rtl.h"
30 #include "expr.h"
31 #include "flags.h"
32 #include "params.h"
33 #include "input.h"
34 #include "insn-config.h"
35 #include "varray.h"
36 #include "hashtab.h"
37 #include "langhooks.h"
38 #include "basic-block.h"
39 #include "tree-iterator.h"
40 #include "cgraph.h"
41 #include "intl.h"
42 #include "tree-mudflap.h"
43 #include "tree-flow.h"
44 #include "function.h"
45 #include "ggc.h"
46 #include "tree-flow.h"
47 #include "diagnostic.h"
48 #include "except.h"
49 #include "debug.h"
50 #include "pointer-set.h"
51 #include "ipa-prop.h"
52 #include "value-prof.h"
53 #include "tree-pass.h"
55 /* I'm not really happy about this, but we need to handle gimple and
56 non-gimple trees. */
57 #include "tree-gimple.h"
59 /* Inlining, Cloning, Versioning, Parallelization
61 Inlining: a function body is duplicated, but the PARM_DECLs are
62 remapped into VAR_DECLs, and non-void RETURN_EXPRs become
63 GIMPLE_MODIFY_STMTs that store to a dedicated returned-value variable.
64 The duplicated eh_region info of the copy will later be appended
65 to the info for the caller; the eh_region info in copied throwing
66 statements and RESX_EXPRs is adjusted accordingly.
68 Cloning: (only in C++) We have one body for a con/de/structor, and
69 multiple function decls, each with a unique parameter list.
70 Duplicate the body, using the given splay tree; some parameters
71 will become constants (like 0 or 1).
73 Versioning: a function body is duplicated and becomes the body of a new
74 function, rather than being copied into blocks of an existing function
75 as with inlining. Some parameters will become constants.
77 Parallelization: a region of a function is duplicated resulting in
78 a new function. Variables may be replaced with complex expressions
79 to enable shared variable semantics.
81 All of these will simultaneously look up any callgraph edges. If
82 we're going to inline the duplicated function body, and the given
83 function has some cloned callgraph nodes (one for each place this
84 function will be inlined) those callgraph edges will be duplicated.
85 If we're cloning the body, those callgraph edges will be
86 updated to point into the new body. (Note that the original
87 callgraph node and edge list will not be altered.)
89 See the CALL_EXPR handling case in copy_body_r (). */
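/* For intuition, a minimal sketch of the inlining transformation (names
   invented for illustration, not taken from real GCC output):

       int add (int a, int b) { return a + b; }
       ...
       x = add (i, 1);

   conceptually becomes, at the call site,

       a.1 = i;
       b.2 = 1;
       retval.3 = a.1 + b.2;   (the RETURN_EXPR became a GIMPLE_MODIFY_STMT)
       x = retval.3;

   where a.1, b.2 and retval.3 are the VAR_DECLs that the PARM_DECLs and
   the RESULT_DECL were remapped to.  */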
91 /* 0 if we should not perform inlining.
92 1 if we should expand function calls inline at the tree level.
93 2 if we should consider *all* functions to be inline
94 candidates. */
96 int flag_inline_trees = 0;
98 /* To Do:
100 o In order to make inlining-on-trees work, we pessimized
101 function-local static constants. In particular, they are now
102 always output, even when not addressed. Fix this by treating
103 function-local static constants just like global static
104 constants; the back-end already knows not to output them if they
105 are not needed.
107 o Provide heuristics to clamp inlining of recursive template
108 calls? */
111 /* Weights that estimate_num_insns uses for heuristics in inlining. */
113 eni_weights eni_inlining_weights;
115 /* Weights that estimate_num_insns uses to estimate the size of the
116 produced code. */
118 eni_weights eni_size_weights;
120 /* Weights that estimate_num_insns uses to estimate the time necessary
121 to execute the produced code. */
123 eni_weights eni_time_weights;
125 /* Prototypes. */
127 static tree declare_return_variable (copy_body_data *, tree, tree, tree *);
128 static tree copy_generic_body (copy_body_data *);
129 static bool inlinable_function_p (tree);
130 static void remap_block (tree *, copy_body_data *);
131 static tree remap_decls (tree, copy_body_data *);
132 static void copy_bind_expr (tree *, int *, copy_body_data *);
133 static tree mark_local_for_remap_r (tree *, int *, void *);
134 static void unsave_expr_1 (tree);
135 static tree unsave_r (tree *, int *, void *);
136 static void declare_inline_vars (tree, tree);
137 static void remap_save_expr (tree *, void *, int *);
138 static void add_lexical_block (tree current_block, tree new_block);
139 static tree copy_decl_to_var (tree, copy_body_data *);
140 static tree copy_result_decl_to_var (tree, copy_body_data *);
141 static tree copy_decl_no_change (tree, copy_body_data *);
142 static tree copy_decl_maybe_to_var (tree, copy_body_data *);
144 /* Insert a tree->tree mapping into ID. Although the name suggests
145 that the trees should be variables, it is used for more than that. */
147 void
148 insert_decl_map (copy_body_data *id, tree key, tree value)
150 splay_tree_insert (id->decl_map, (splay_tree_key) key,
151 (splay_tree_value) value);
153 /* Always insert an identity map as well. If we see this same new
154 node again, we won't want to duplicate it a second time. */
155 if (key != value)
156 splay_tree_insert (id->decl_map, (splay_tree_key) value,
157 (splay_tree_value) value);
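/* Illustrative example (decl names invented): after remapping an old decl
   A to its copy A', the map holds both A -> A' and A' -> A', so that if a
   later walk encounters A' itself it maps to itself instead of being
   duplicated again.  */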
160 /* Construct new SSA name for old NAME. ID is the inline context. */
162 static tree
163 remap_ssa_name (tree name, copy_body_data *id)
165 tree new;
166 splay_tree_node n;
168 gcc_assert (TREE_CODE (name) == SSA_NAME);
170 n = splay_tree_lookup (id->decl_map, (splay_tree_key) name);
171 if (n)
172 return (tree) n->value;
174 /* Do not set DEF_STMT yet, as the statement is not copied yet. We do
175 that in copy_bb. */
176 new = remap_decl (SSA_NAME_VAR (name), id);
177 /* We might've substituted a constant or another SSA_NAME for
178 the variable.
180 Replace the SSA name representing the RESULT_DECL with a variable
181 during inlining: this saves us from needing to introduce a PHI node
182 when the return value is only partly initialized. */
183 if ((TREE_CODE (new) == VAR_DECL || TREE_CODE (new) == PARM_DECL)
184 && (TREE_CODE (SSA_NAME_VAR (name)) != RESULT_DECL
185 || !id->transform_return_to_modify))
187 new = make_ssa_name (new, NULL);
188 insert_decl_map (id, name, new);
189 if (IS_EMPTY_STMT (SSA_NAME_DEF_STMT (name)))
191 SSA_NAME_DEF_STMT (new) = build_empty_stmt ();
192 if (gimple_default_def (id->src_cfun, SSA_NAME_VAR (name)) == name)
193 set_default_def (SSA_NAME_VAR (new), new);
195 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (new)
196 = SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name);
197 TREE_TYPE (new) = TREE_TYPE (SSA_NAME_VAR (new));
199 else
200 insert_decl_map (id, name, new);
201 return new;
204 /* Remap DECL during the copying of the BLOCK tree for the function. */
206 tree
207 remap_decl (tree decl, copy_body_data *id)
209 splay_tree_node n;
210 tree fn;
212 /* We only remap local variables in the current function. */
213 fn = id->src_fn;
215 /* See if we have remapped this declaration. */
217 n = splay_tree_lookup (id->decl_map, (splay_tree_key) decl);
219 /* If we didn't already have an equivalent for this declaration,
220 create one now. */
221 if (!n)
223 /* Make a copy of the variable or label. */
224 tree t = id->copy_decl (decl, id);
226 /* Remember it, so that if we encounter this local entity again
227 we can reuse this copy. Do this early because remap_type may
228 need this decl for TYPE_STUB_DECL. */
229 insert_decl_map (id, decl, t);
231 if (!DECL_P (t))
232 return t;
234 /* Remap types, if necessary. */
235 TREE_TYPE (t) = remap_type (TREE_TYPE (t), id);
236 if (TREE_CODE (t) == TYPE_DECL)
237 DECL_ORIGINAL_TYPE (t) = remap_type (DECL_ORIGINAL_TYPE (t), id);
239 /* Remap sizes as necessary. */
240 walk_tree (&DECL_SIZE (t), copy_body_r, id, NULL);
241 walk_tree (&DECL_SIZE_UNIT (t), copy_body_r, id, NULL);
243 /* If this is a FIELD_DECL, do likewise for its offset and qualifier. */
244 if (TREE_CODE (t) == FIELD_DECL)
246 walk_tree (&DECL_FIELD_OFFSET (t), copy_body_r, id, NULL);
247 if (TREE_CODE (DECL_CONTEXT (t)) == QUAL_UNION_TYPE)
248 walk_tree (&DECL_QUALIFIER (t), copy_body_r, id, NULL);
251 if (cfun && gimple_in_ssa_p (cfun)
252 && (TREE_CODE (t) == VAR_DECL
253 || TREE_CODE (t) == RESULT_DECL || TREE_CODE (t) == PARM_DECL))
255 tree def = gimple_default_def (id->src_cfun, decl);
256 get_var_ann (t);
257 if (TREE_CODE (decl) != PARM_DECL && def)
259 tree map = remap_ssa_name (def, id);
260 /* Watch out for RESULT_DECLs whose SSA names map directly
261 to them. */
262 if (TREE_CODE (map) == SSA_NAME)
263 set_default_def (t, map);
265 add_referenced_var (t);
267 return t;
270 return unshare_expr ((tree) n->value);
273 static tree
274 remap_type_1 (tree type, copy_body_data *id)
276 splay_tree_node node;
277 tree new, t;
279 if (type == NULL)
280 return type;
282 /* See if we have remapped this type. */
283 node = splay_tree_lookup (id->decl_map, (splay_tree_key) type);
284 if (node)
285 return (tree) node->value;
287 /* The type only needs remapping if it's variably modified. */
288 if (! variably_modified_type_p (type, id->src_fn))
290 insert_decl_map (id, type, type);
291 return type;
294 /* We do need a copy. Build and register it now. If this is a pointer or
295 reference type, remap the designated type and make a new pointer or
296 reference type. */
297 if (TREE_CODE (type) == POINTER_TYPE)
299 new = build_pointer_type_for_mode (remap_type (TREE_TYPE (type), id),
300 TYPE_MODE (type),
301 TYPE_REF_CAN_ALIAS_ALL (type));
302 insert_decl_map (id, type, new);
303 return new;
305 else if (TREE_CODE (type) == REFERENCE_TYPE)
307 new = build_reference_type_for_mode (remap_type (TREE_TYPE (type), id),
308 TYPE_MODE (type),
309 TYPE_REF_CAN_ALIAS_ALL (type));
310 insert_decl_map (id, type, new);
311 return new;
313 else
314 new = copy_node (type);
316 insert_decl_map (id, type, new);
318 /* This is a new type, not a copy of an old type. Need to reassociate
319 variants. We can handle everything except the main variant lazily. */
320 t = TYPE_MAIN_VARIANT (type);
321 if (type != t)
323 t = remap_type (t, id);
324 TYPE_MAIN_VARIANT (new) = t;
325 TYPE_NEXT_VARIANT (new) = TYPE_MAIN_VARIANT (t);
326 TYPE_NEXT_VARIANT (t) = new;
328 else
330 TYPE_MAIN_VARIANT (new) = new;
331 TYPE_NEXT_VARIANT (new) = NULL;
334 if (TYPE_STUB_DECL (type))
335 TYPE_STUB_DECL (new) = remap_decl (TYPE_STUB_DECL (type), id);
337 /* Lazily create pointer and reference types. */
338 TYPE_POINTER_TO (new) = NULL;
339 TYPE_REFERENCE_TO (new) = NULL;
341 switch (TREE_CODE (new))
343 case INTEGER_TYPE:
344 case REAL_TYPE:
345 case ENUMERAL_TYPE:
346 case BOOLEAN_TYPE:
347 t = TYPE_MIN_VALUE (new);
348 if (t && TREE_CODE (t) != INTEGER_CST)
349 walk_tree (&TYPE_MIN_VALUE (new), copy_body_r, id, NULL);
351 t = TYPE_MAX_VALUE (new);
352 if (t && TREE_CODE (t) != INTEGER_CST)
353 walk_tree (&TYPE_MAX_VALUE (new), copy_body_r, id, NULL);
354 return new;
356 case FUNCTION_TYPE:
357 TREE_TYPE (new) = remap_type (TREE_TYPE (new), id);
358 walk_tree (&TYPE_ARG_TYPES (new), copy_body_r, id, NULL);
359 return new;
361 case ARRAY_TYPE:
362 TREE_TYPE (new) = remap_type (TREE_TYPE (new), id);
363 TYPE_DOMAIN (new) = remap_type (TYPE_DOMAIN (new), id);
364 break;
366 case RECORD_TYPE:
367 case UNION_TYPE:
368 case QUAL_UNION_TYPE:
370 tree f, nf = NULL;
372 for (f = TYPE_FIELDS (new); f ; f = TREE_CHAIN (f))
374 t = remap_decl (f, id);
375 DECL_CONTEXT (t) = new;
376 TREE_CHAIN (t) = nf;
377 nf = t;
379 TYPE_FIELDS (new) = nreverse (nf);
381 break;
383 case OFFSET_TYPE:
384 default:
385 /* Shouldn't have been thought variable sized. */
386 gcc_unreachable ();
389 walk_tree (&TYPE_SIZE (new), copy_body_r, id, NULL);
390 walk_tree (&TYPE_SIZE_UNIT (new), copy_body_r, id, NULL);
392 return new;
395 tree
396 remap_type (tree type, copy_body_data *id)
398 splay_tree_node node;
400 if (type == NULL)
401 return type;
403 /* See if we have remapped this type. */
404 node = splay_tree_lookup (id->decl_map, (splay_tree_key) type);
405 if (node)
406 return (tree) node->value;
408 /* The type only needs remapping if it's variably modified. */
409 if (! variably_modified_type_p (type, id->src_fn))
411 insert_decl_map (id, type, type);
412 return type;
415 return remap_type_1 (type, id);
418 static tree
419 remap_decls (tree decls, copy_body_data *id)
421 tree old_var;
422 tree new_decls = NULL_TREE;
424 /* Remap its variables. */
425 for (old_var = decls; old_var; old_var = TREE_CHAIN (old_var))
427 tree new_var;
429 /* Local static declarations cannot be remapped: we cannot duplicate
430 them, and each decl may be chained into only one list. Just link the
431 original decls into the new function's unexpanded_var_list. */
432 if (!lang_hooks.tree_inlining.auto_var_in_fn_p (old_var, id->src_fn)
433 && !DECL_EXTERNAL (old_var))
435 cfun->unexpanded_var_list = tree_cons (NULL_TREE, old_var,
436 cfun->unexpanded_var_list);
437 continue;
440 /* Remap the variable. */
441 new_var = remap_decl (old_var, id);
443 /* If we didn't remap this variable, we can't mess with its
444 TREE_CHAIN. If we remapped this variable to the return slot, it's
445 already declared somewhere else, so don't declare it here. */
446 if (!new_var || new_var == id->retvar)
448 else
450 gcc_assert (DECL_P (new_var));
451 TREE_CHAIN (new_var) = new_decls;
452 new_decls = new_var;
456 return nreverse (new_decls);
459 /* Copy the BLOCK to contain remapped versions of the variables
460 therein. And hook the new block into the block-tree. */
462 static void
463 remap_block (tree *block, copy_body_data *id)
465 tree old_block;
466 tree new_block;
467 tree fn;
469 /* Make the new block. */
470 old_block = *block;
471 new_block = make_node (BLOCK);
472 TREE_USED (new_block) = TREE_USED (old_block);
473 BLOCK_ABSTRACT_ORIGIN (new_block) = old_block;
474 BLOCK_SOURCE_LOCATION (new_block) = BLOCK_SOURCE_LOCATION (old_block);
475 *block = new_block;
477 /* Remap its variables. */
478 BLOCK_VARS (new_block) = remap_decls (BLOCK_VARS (old_block), id);
480 fn = id->dst_fn;
482 if (id->transform_lang_insert_block)
483 lang_hooks.decls.insert_block (new_block);
485 /* Remember the remapped block. */
486 insert_decl_map (id, old_block, new_block);
489 /* Copy the whole block tree and root it in id->block. */
490 static tree
491 remap_blocks (tree block, copy_body_data *id)
493 tree t;
494 tree new = block;
496 if (!block)
497 return NULL;
499 remap_block (&new, id);
500 gcc_assert (new != block);
501 for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
502 add_lexical_block (new, remap_blocks (t, id));
503 return new;
506 static void
507 copy_statement_list (tree *tp)
509 tree_stmt_iterator oi, ni;
510 tree new;
512 new = alloc_stmt_list ();
513 ni = tsi_start (new);
514 oi = tsi_start (*tp);
515 *tp = new;
517 for (; !tsi_end_p (oi); tsi_next (&oi))
518 tsi_link_after (&ni, tsi_stmt (oi), TSI_NEW_STMT);
521 static void
522 copy_bind_expr (tree *tp, int *walk_subtrees, copy_body_data *id)
524 tree block = BIND_EXPR_BLOCK (*tp);
525 /* Copy (and replace) the statement. */
526 copy_tree_r (tp, walk_subtrees, NULL);
527 if (block)
529 remap_block (&block, id);
530 BIND_EXPR_BLOCK (*tp) = block;
533 if (BIND_EXPR_VARS (*tp))
534 /* This will remap a lot of the same decls again, but this should be
535 harmless. */
536 BIND_EXPR_VARS (*tp) = remap_decls (BIND_EXPR_VARS (*tp), id);
539 /* Called from copy_body via walk_tree. DATA is really a
540 `copy_body_data *'. */
542 tree
543 copy_body_r (tree *tp, int *walk_subtrees, void *data)
545 copy_body_data *id = (copy_body_data *) data;
546 tree fn = id->src_fn;
547 tree new_block;
549 /* Begin by recognizing trees that we'll completely rewrite for the
550 inlining context. Our output for these trees is completely
551 different from our input (e.g. RETURN_EXPR is deleted, and morphs
552 into an edge). Further down, we'll handle trees that get
553 duplicated and/or tweaked. */
555 /* When requested, RETURN_EXPRs should be transformed to just the
556 contained GIMPLE_MODIFY_STMT. The branch semantics of the return will
557 be handled elsewhere by manipulating the CFG rather than a statement. */
558 if (TREE_CODE (*tp) == RETURN_EXPR && id->transform_return_to_modify)
560 tree assignment = TREE_OPERAND (*tp, 0);
562 /* If we're returning something, just turn that into an
563 assignment into the equivalent of the original RESULT_DECL.
564 If the "assignment" is just the result decl, the result
565 decl has already been set (e.g. a recent "foo (&result_decl,
566 ...)"); just toss the entire RETURN_EXPR. */
567 if (assignment && TREE_CODE (assignment) == GIMPLE_MODIFY_STMT)
569 /* Replace the RETURN_EXPR with (a copy of) the
570 GIMPLE_MODIFY_STMT hanging underneath. */
571 *tp = copy_node (assignment);
573 else /* Else the RETURN_EXPR returns no value. */
575 *tp = NULL;
576 return (tree) (void *)1;
579 else if (TREE_CODE (*tp) == SSA_NAME)
581 *tp = remap_ssa_name (*tp, id);
582 *walk_subtrees = 0;
583 return NULL;
586 /* Local variables and labels need to be replaced by equivalent
587 variables. We don't want to copy static variables; there's only
588 one of those, no matter how many times we inline the containing
589 function. Similarly for globals from an outer function. */
590 else if (lang_hooks.tree_inlining.auto_var_in_fn_p (*tp, fn))
592 tree new_decl;
594 /* Remap the declaration. */
595 new_decl = remap_decl (*tp, id);
596 gcc_assert (new_decl);
597 /* Replace this variable with the copy. */
598 STRIP_TYPE_NOPS (new_decl);
599 *tp = new_decl;
600 *walk_subtrees = 0;
602 else if (TREE_CODE (*tp) == STATEMENT_LIST)
603 copy_statement_list (tp);
604 else if (TREE_CODE (*tp) == SAVE_EXPR)
605 remap_save_expr (tp, id->decl_map, walk_subtrees);
606 else if (TREE_CODE (*tp) == LABEL_DECL
607 && (! DECL_CONTEXT (*tp)
608 || decl_function_context (*tp) == id->src_fn))
609 /* These may need to be remapped for EH handling. */
610 *tp = remap_decl (*tp, id);
611 else if (TREE_CODE (*tp) == BIND_EXPR)
612 copy_bind_expr (tp, walk_subtrees, id);
613 /* Types may need remapping as well. */
614 else if (TYPE_P (*tp))
615 *tp = remap_type (*tp, id);
617 /* If this is a constant, we have to copy the node iff the type will be
618 remapped. copy_tree_r will not copy a constant. */
619 else if (CONSTANT_CLASS_P (*tp))
621 tree new_type = remap_type (TREE_TYPE (*tp), id);
623 if (new_type == TREE_TYPE (*tp))
624 *walk_subtrees = 0;
626 else if (TREE_CODE (*tp) == INTEGER_CST)
627 *tp = build_int_cst_wide (new_type, TREE_INT_CST_LOW (*tp),
628 TREE_INT_CST_HIGH (*tp));
629 else
631 *tp = copy_node (*tp);
632 TREE_TYPE (*tp) = new_type;
636 /* Otherwise, just copy the node. Note that copy_tree_r already
637 knows not to copy VAR_DECLs, etc., so this is safe. */
638 else
640 /* Here we handle trees that are not completely rewritten.
641 First we detect some inlining-induced bogosities for
642 discarding. */
643 if (TREE_CODE (*tp) == GIMPLE_MODIFY_STMT
644 && GIMPLE_STMT_OPERAND (*tp, 0) == GIMPLE_STMT_OPERAND (*tp, 1)
645 && (lang_hooks.tree_inlining.auto_var_in_fn_p
646 (GIMPLE_STMT_OPERAND (*tp, 0), fn)))
648 /* Some assignments VAR = VAR; don't generate any rtl code
649 and thus don't count as variable modification. Avoid
650 keeping bogosities like 0 = 0. */
651 tree decl = GIMPLE_STMT_OPERAND (*tp, 0), value;
652 splay_tree_node n;
654 n = splay_tree_lookup (id->decl_map, (splay_tree_key) decl);
655 if (n)
657 value = (tree) n->value;
658 STRIP_TYPE_NOPS (value);
659 if (TREE_CONSTANT (value) || TREE_READONLY_DECL_P (value))
661 *tp = build_empty_stmt ();
662 return copy_body_r (tp, walk_subtrees, data);
666 else if (TREE_CODE (*tp) == INDIRECT_REF)
668 /* Get rid of *& from inline substitutions that can happen when a
669 pointer argument is an ADDR_EXPR. */
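/* Illustrative example (not from this file): if the callee's parameter is
   "int *p" and the call site passes "&a", the parameter map sends p to
   ADDR_EXPR <a>, so the copied body momentarily reads *&a; the code below
   folds that back into a plain use of a.  */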
670 tree decl = TREE_OPERAND (*tp, 0);
671 splay_tree_node n;
673 n = splay_tree_lookup (id->decl_map, (splay_tree_key) decl);
674 if (n)
676 tree new;
677 tree old;
678 /* If we happen to get an ADDR_EXPR in n->value, strip
679 it manually here, as we'll eventually get ADDR_EXPRs
680 which lie about the type they point to. In that case
681 build_fold_indirect_ref wouldn't strip the INDIRECT_REF,
682 but we absolutely rely on that. As fold_indirect_ref
683 does other useful transformations, try that first, though. */
684 tree type = TREE_TYPE (TREE_TYPE ((tree)n->value));
685 new = unshare_expr ((tree)n->value);
686 old = *tp;
687 *tp = fold_indirect_ref_1 (type, new);
688 if (! *tp)
690 if (TREE_CODE (new) == ADDR_EXPR)
691 *tp = TREE_OPERAND (new, 0);
692 else
694 *tp = build1 (INDIRECT_REF, type, new);
695 TREE_THIS_VOLATILE (*tp) = TREE_THIS_VOLATILE (old);
698 *walk_subtrees = 0;
699 return NULL;
703 /* Here is the "usual case". Copy this tree node, and then
704 tweak some special cases. */
705 copy_tree_r (tp, walk_subtrees, NULL);
707 /* Global variables we haven't seen yet need to go into referenced
708 vars. */
709 if (gimple_in_ssa_p (cfun) && TREE_CODE (*tp) == VAR_DECL)
710 add_referenced_var (*tp);
712 /* If EXPR has a block defined, map it to the newly constructed block.
713 When inlining, we want EXPRs without a block to appear in the block
714 of the function call. */
715 if (EXPR_P (*tp) || GIMPLE_STMT_P (*tp))
717 new_block = id->block;
718 if (TREE_BLOCK (*tp))
720 splay_tree_node n;
721 n = splay_tree_lookup (id->decl_map,
722 (splay_tree_key) TREE_BLOCK (*tp));
723 gcc_assert (n);
724 new_block = (tree) n->value;
726 TREE_BLOCK (*tp) = new_block;
729 if (TREE_CODE (*tp) == RESX_EXPR && id->eh_region_offset)
730 TREE_OPERAND (*tp, 0) =
731 build_int_cst
732 (NULL_TREE,
733 id->eh_region_offset + TREE_INT_CST_LOW (TREE_OPERAND (*tp, 0)));
735 if (!GIMPLE_TUPLE_P (*tp))
736 TREE_TYPE (*tp) = remap_type (TREE_TYPE (*tp), id);
738 /* The copied TARGET_EXPR has never been expanded, even if the
739 original node was expanded already. */
740 if (TREE_CODE (*tp) == TARGET_EXPR && TREE_OPERAND (*tp, 3))
742 TREE_OPERAND (*tp, 1) = TREE_OPERAND (*tp, 3);
743 TREE_OPERAND (*tp, 3) = NULL_TREE;
746 /* Variable substitution need not be simple; consider, in particular,
747 the INDIRECT_REF substitution above. Make sure that TREE_CONSTANT
748 and friends are up to date. */
749 else if (TREE_CODE (*tp) == ADDR_EXPR)
751 walk_tree (&TREE_OPERAND (*tp, 0), copy_body_r, id, NULL);
752 /* Handle the case where we substituted an INDIRECT_REF
753 into the operand of the ADDR_EXPR. */
754 if (TREE_CODE (TREE_OPERAND (*tp, 0)) == INDIRECT_REF)
755 *tp = TREE_OPERAND (TREE_OPERAND (*tp, 0), 0);
756 else
757 recompute_tree_invariant_for_addr_expr (*tp);
758 *walk_subtrees = 0;
762 /* Keep iterating. */
763 return NULL_TREE;
766 /* Copy a basic block, scaling its profile accordingly. Edges will be
767 taken care of later. */
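/* Worked example with invented numbers: if the callee's entry block count
   is 1000 and the call site count is 250, count_scale is about
   REG_BR_PROB_BASE / 4, so each copied block below receives roughly a
   quarter of the counts of its original.  */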
769 static basic_block
770 copy_bb (copy_body_data *id, basic_block bb, int frequency_scale, int count_scale)
772 block_stmt_iterator bsi, copy_bsi;
773 basic_block copy_basic_block;
775 /* create_basic_block() will append every new block to
776 basic_block_info automatically. */
777 copy_basic_block = create_basic_block (NULL, (void *) 0,
778 (basic_block) bb->prev_bb->aux);
779 copy_basic_block->count = bb->count * count_scale / REG_BR_PROB_BASE;
781 /* We are going to rebuild frequencies from scratch. These values only
782 matter for driving canonicalize_loop_headers. */
783 copy_basic_block->frequency = ((gcov_type)bb->frequency
784 * frequency_scale / REG_BR_PROB_BASE);
785 if (copy_basic_block->frequency > BB_FREQ_MAX)
786 copy_basic_block->frequency = BB_FREQ_MAX;
787 copy_bsi = bsi_start (copy_basic_block);
789 for (bsi = bsi_start (bb);
790 !bsi_end_p (bsi); bsi_next (&bsi))
792 tree stmt = bsi_stmt (bsi);
793 tree orig_stmt = stmt;
795 walk_tree (&stmt, copy_body_r, id, NULL);
797 /* The RETURN_EXPR might have been removed; this is signalled by the
798 stmt pointer being NULL. */
799 if (stmt)
801 tree call, decl;
803 gimple_duplicate_stmt_histograms (cfun, stmt, id->src_cfun, orig_stmt);
805 /* With return slot optimization we can end up with
806 non-gimple (foo *)&this->m, fix that here. */
807 if (TREE_CODE (stmt) == GIMPLE_MODIFY_STMT
808 && TREE_CODE (GIMPLE_STMT_OPERAND (stmt, 1)) == NOP_EXPR
809 && !is_gimple_val (TREE_OPERAND (GIMPLE_STMT_OPERAND (stmt, 1), 0)))
810 gimplify_stmt (&stmt);
812 bsi_insert_after (&copy_bsi, stmt, BSI_NEW_STMT);
814 /* Process new statement. gimplify_stmt possibly turned statement
815 into multiple statements, we need to process all of them. */
816 while (!bsi_end_p (copy_bsi))
818 stmt = bsi_stmt (copy_bsi);
819 call = get_call_expr_in (stmt);
821 /* Statements produced by inlining can be unfolded, especially
822 when we have constant propagated some operands. We can't fold
823 them right now for two reasons:
824 1) folding requires SSA_NAME_DEF_STMTs to be correct
825 2) we can't change function calls to builtins.
826 So we just mark the statement for later folding. We mark
827 all new statements, instead of just the statements that changed
828 through some nontrivial substitution, so that even statements made
829 foldable indirectly are updated. If this turns out to be
830 expensive, copy_body can be told to watch for nontrivial
831 changes. */
832 if (id->statements_to_fold)
833 pointer_set_insert (id->statements_to_fold, stmt);
834 /* We're duplicating a CALL_EXPR. Find any corresponding
835 callgraph edges and update or duplicate them. */
836 if (call && (decl = get_callee_fndecl (call)))
838 struct cgraph_node *node;
839 struct cgraph_edge *edge;
841 switch (id->transform_call_graph_edges)
843 case CB_CGE_DUPLICATE:
844 edge = cgraph_edge (id->src_node, orig_stmt);
845 if (edge)
846 cgraph_clone_edge (edge, id->dst_node, stmt,
847 REG_BR_PROB_BASE, 1, edge->frequency, true);
848 break;
850 case CB_CGE_MOVE_CLONES:
851 for (node = id->dst_node->next_clone;
852 node;
853 node = node->next_clone)
855 edge = cgraph_edge (node, orig_stmt);
856 gcc_assert (edge);
857 cgraph_set_call_stmt (edge, stmt);
859 /* FALLTHRU */
861 case CB_CGE_MOVE:
862 edge = cgraph_edge (id->dst_node, orig_stmt);
863 if (edge)
864 cgraph_set_call_stmt (edge, stmt);
865 break;
867 default:
868 gcc_unreachable ();
871 /* If you think we can abort here, you are wrong.
872 There is no region 0 in tree land. */
873 gcc_assert (lookup_stmt_eh_region_fn (id->src_cfun, orig_stmt)
874 != 0);
876 if (tree_could_throw_p (stmt)
877 /* When we are cloning for inlining, we are supposed to
878 construct a clone that calls precisely the same functions
879 as the original. However, IPA optimizers might have earlier
880 proved some function calls non-trapping, and that can render
881 some basic blocks dead and eventually
882 unreachable.
884 We can't update SSA with unreachable blocks in the CFG, and thus
885 we prevent that scenario by preserving even the "dead" EH
886 edges until the point they are later removed by the
887 fixup_cfg pass. */
888 || (id->transform_call_graph_edges == CB_CGE_MOVE_CLONES
889 && lookup_stmt_eh_region_fn (id->src_cfun, orig_stmt) > 0))
891 int region = lookup_stmt_eh_region_fn (id->src_cfun, orig_stmt);
892 /* Add an entry for the copied tree in the EH hashtable.
893 When cloning or versioning, use the hashtable in
894 cfun, and just copy the EH number. When inlining, use the
895 hashtable in the caller, and adjust the region number. */
896 if (region > 0)
897 add_stmt_to_eh_region (stmt, region + id->eh_region_offset);
899 /* If this tree doesn't have a region associated with it,
900 and there is a "current region,"
901 then associate this tree with the current region
902 and add edges associated with this region. */
903 if ((lookup_stmt_eh_region_fn (id->src_cfun,
904 orig_stmt) <= 0
905 && id->eh_region > 0)
906 && tree_could_throw_p (stmt))
907 add_stmt_to_eh_region (stmt, id->eh_region);
909 if (gimple_in_ssa_p (cfun))
911 ssa_op_iter i;
912 tree def;
914 find_new_referenced_vars (bsi_stmt_ptr (copy_bsi));
915 FOR_EACH_SSA_TREE_OPERAND (def, stmt, i, SSA_OP_DEF)
916 if (TREE_CODE (def) == SSA_NAME)
917 SSA_NAME_DEF_STMT (def) = stmt;
919 bsi_next (&copy_bsi);
921 copy_bsi = bsi_last (copy_basic_block);
924 return copy_basic_block;
927 /* Inserting a Single Entry Multiple Exit region in SSA form into code in SSA
928 form is quite easy, since the dominator relationship for the old basic
929 blocks does not change.
931 There is however an exception: inlining might change the dominator relation
932 across EH edges from basic blocks within the inlined function leading
933 to landing pads in the function we inline into.
935 This function marks the PHI_RESULT of such PHI nodes for renaming; this is
936 safe because the EH edges are abnormal and SSA_NAME_OCCURS_IN_ABNORMAL_PHI
937 must be set. This means that there will be no overlapping live ranges
938 for the underlying symbol.
940 This might change in the future if we allow redirecting of EH edges and
941 we then might want to change the way we build the CFG pre-inlining to
942 include all the possible edges. */
943 static void
944 update_ssa_across_eh_edges (basic_block bb)
946 edge e;
947 edge_iterator ei;
949 FOR_EACH_EDGE (e, ei, bb->succs)
950 if (!e->dest->aux
951 || ((basic_block)e->dest->aux)->index == ENTRY_BLOCK)
953 tree phi;
955 gcc_assert (e->flags & EDGE_EH);
956 for (phi = phi_nodes (e->dest); phi; phi = PHI_CHAIN (phi))
958 gcc_assert (SSA_NAME_OCCURS_IN_ABNORMAL_PHI
959 (PHI_RESULT (phi)));
960 mark_sym_for_renaming
961 (SSA_NAME_VAR (PHI_RESULT (phi)));
966 /* Copy edges from BB into its copy constructed earlier, scaling the profile
967 accordingly. Edges will be taken care of later. Assume the aux
968 pointers point to the copies of each BB. */
969 static void
970 copy_edges_for_bb (basic_block bb, int count_scale)
972 basic_block new_bb = (basic_block) bb->aux;
973 edge_iterator ei;
974 edge old_edge;
975 block_stmt_iterator bsi;
976 int flags;
978 /* Use the indices from the original blocks to create edges for the
979 new ones. */
980 FOR_EACH_EDGE (old_edge, ei, bb->succs)
981 if (!(old_edge->flags & EDGE_EH))
983 edge new;
985 flags = old_edge->flags;
987 /* Return edges do get a FALLTHRU flag when they get inlined. */
988 if (old_edge->dest->index == EXIT_BLOCK && !old_edge->flags
989 && old_edge->dest->aux != EXIT_BLOCK_PTR)
990 flags |= EDGE_FALLTHRU;
991 new = make_edge (new_bb, (basic_block) old_edge->dest->aux, flags);
992 new->count = old_edge->count * count_scale / REG_BR_PROB_BASE;
993 new->probability = old_edge->probability;
996 if (bb->index == ENTRY_BLOCK || bb->index == EXIT_BLOCK)
997 return;
999 for (bsi = bsi_start (new_bb); !bsi_end_p (bsi);)
1001 tree copy_stmt;
1003 copy_stmt = bsi_stmt (bsi);
1004 update_stmt (copy_stmt);
1005 if (gimple_in_ssa_p (cfun))
1006 mark_symbols_for_renaming (copy_stmt);
1007 /* Do this before the possible split_block. */
1008 bsi_next (&bsi);
1010 /* If this tree could throw an exception, there are two
1011 cases where we need to add abnormal edge(s): the
1012 tree wasn't in a region and there is a "current
1013 region" in the caller; or the original tree had
1014 EH edges. In both cases split the block after the tree,
1015 and add abnormal edge(s) as needed; we need both
1016 those from the callee and the caller.
1017 We check whether the copy can throw, because the const
1018 propagation can change an INDIRECT_REF which throws
1019 into a COMPONENT_REF which doesn't. If the copy
1020 can throw, the original could also throw. */
1022 if (tree_can_throw_internal (copy_stmt))
1024 if (!bsi_end_p (bsi))
1025 /* Note that bb's predecessor edges aren't necessarily
1026 right at this point; split_block doesn't care. */
1028 edge e = split_block (new_bb, copy_stmt);
1030 new_bb = e->dest;
1031 new_bb->aux = e->src->aux;
1032 bsi = bsi_start (new_bb);
1035 make_eh_edges (copy_stmt);
1037 if (gimple_in_ssa_p (cfun))
1038 update_ssa_across_eh_edges (bb_for_stmt (copy_stmt));
1043 /* Copy the PHIs. All blocks and edges are copied; some blocks
1044 may have been split and new outgoing EH edges inserted.
1045 BB points to the block of the original function, and AUX pointers link
1046 the original and newly copied blocks. */
1048 static void
1049 copy_phis_for_bb (basic_block bb, copy_body_data *id)
1051 basic_block new_bb = bb->aux;
1052 edge_iterator ei;
1053 tree phi;
1055 for (phi = phi_nodes (bb); phi; phi = PHI_CHAIN (phi))
1057 tree res = PHI_RESULT (phi);
1058 tree new_res = res;
1059 tree new_phi;
1060 edge new_edge;
1062 if (is_gimple_reg (res))
1064 walk_tree (&new_res, copy_body_r, id, NULL);
1065 SSA_NAME_DEF_STMT (new_res)
1066 = new_phi = create_phi_node (new_res, new_bb);
1067 FOR_EACH_EDGE (new_edge, ei, new_bb->preds)
1069 edge old_edge = find_edge (new_edge->src->aux, bb);
1070 tree arg = PHI_ARG_DEF_FROM_EDGE (phi, old_edge);
1071 tree new_arg = arg;
1073 walk_tree (&new_arg, copy_body_r, id, NULL);
1074 gcc_assert (new_arg);
1075 add_phi_arg (new_phi, new_arg, new_edge);
1081 /* Wrapper for remap_decl so it can be used as a callback. */
1082 static tree
1083 remap_decl_1 (tree decl, void *data)
1085 return remap_decl (decl, (copy_body_data *) data);
1088 /* Build the struct function and associated data structures for the new
1089 clone NEW_FNDECL to be built. CALLEE_FNDECL is the original. */
1091 static void
1092 initialize_cfun (tree new_fndecl, tree callee_fndecl, gcov_type count,
1093 int frequency)
1095 struct function *new_cfun
1096 = (struct function *) ggc_alloc_cleared (sizeof (struct function));
1097 struct function *src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
1098 int count_scale, frequency_scale;
1100 if (ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count)
1101 count_scale = (REG_BR_PROB_BASE * count
1102 / ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count);
1103 else
1104 count_scale = 1;
1106 if (ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->frequency)
1107 frequency_scale = (REG_BR_PROB_BASE * frequency
1109 / ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->frequency);
1110 else
1111 frequency_scale = count_scale;
1113 /* Register specific tree functions. */
1114 tree_register_cfg_hooks ();
1115 *new_cfun = *DECL_STRUCT_FUNCTION (callee_fndecl);
1116 new_cfun->funcdef_no = get_next_funcdef_no ();
1117 VALUE_HISTOGRAMS (new_cfun) = NULL;
1118 new_cfun->unexpanded_var_list = NULL;
1119 new_cfun->cfg = NULL;
1120 new_cfun->decl = new_fndecl /*= copy_node (callee_fndecl)*/;
1121 new_cfun->ib_boundaries_block = NULL;
1122 DECL_STRUCT_FUNCTION (new_fndecl) = new_cfun;
1123 push_cfun (new_cfun);
1124 init_empty_tree_cfg ();
1126 ENTRY_BLOCK_PTR->count =
1127 (ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count * count_scale /
1128 REG_BR_PROB_BASE);
1129 ENTRY_BLOCK_PTR->frequency =
1130 (ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->frequency *
1131 frequency_scale / REG_BR_PROB_BASE);
1132 EXIT_BLOCK_PTR->count =
1133 (EXIT_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count * count_scale /
1134 REG_BR_PROB_BASE);
1135 EXIT_BLOCK_PTR->frequency =
1136 (EXIT_BLOCK_PTR_FOR_FUNCTION (src_cfun)->frequency *
1137 frequency_scale / REG_BR_PROB_BASE);
1138 if (src_cfun->eh)
1139 init_eh_for_function ();
1141 if (src_cfun->gimple_df)
1143 init_tree_ssa ();
1144 cfun->gimple_df->in_ssa_p = true;
1145 init_ssa_operands ();
1147 pop_cfun ();
1150 /* Make a copy of the body of FN so that it can be inserted inline in
1151 another function. Walks FN via CFG, returns new fndecl. */
1153 static tree
1154 copy_cfg_body (copy_body_data * id, gcov_type count, int frequency,
1155 basic_block entry_block_map, basic_block exit_block_map)
1157 tree callee_fndecl = id->src_fn;
1158 /* Original cfun for the callee, doesn't change. */
1159 struct function *src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
1160 struct function *cfun_to_copy;
1161 basic_block bb;
1162 tree new_fndecl = NULL;
1163 int count_scale, frequency_scale;
1164 int last;
1166 if (ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count)
1167 count_scale = (REG_BR_PROB_BASE * count
1168 / ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count);
1169 else
1170 count_scale = 1;
1172 if (ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->frequency)
1173 frequency_scale = (REG_BR_PROB_BASE * frequency
1175 / ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->frequency);
1176 else
1177 frequency_scale = count_scale;
1179 /* Register specific tree functions. */
1180 tree_register_cfg_hooks ();
1182 /* Must have a CFG here at this point. */
1183 gcc_assert (ENTRY_BLOCK_PTR_FOR_FUNCTION
1184 (DECL_STRUCT_FUNCTION (callee_fndecl)));
1186 cfun_to_copy = id->src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
1189 ENTRY_BLOCK_PTR_FOR_FUNCTION (cfun_to_copy)->aux = entry_block_map;
1190 EXIT_BLOCK_PTR_FOR_FUNCTION (cfun_to_copy)->aux = exit_block_map;
1191 entry_block_map->aux = ENTRY_BLOCK_PTR_FOR_FUNCTION (cfun_to_copy);
1192 exit_block_map->aux = EXIT_BLOCK_PTR_FOR_FUNCTION (cfun_to_copy);
1194 /* Duplicate any exception-handling regions. */
1195 if (cfun->eh)
1197 id->eh_region_offset
1198 = duplicate_eh_regions (cfun_to_copy, remap_decl_1, id,
1199 0, id->eh_region);
1201 /* Use aux pointers to map the original blocks to their copies. */
1202 FOR_EACH_BB_FN (bb, cfun_to_copy)
1204 basic_block new = copy_bb (id, bb, frequency_scale, count_scale);
1205 bb->aux = new;
1206 new->aux = bb;
1209 last = n_basic_blocks;
1210 /* Now that we've duplicated the blocks, duplicate their edges. */
1211 FOR_ALL_BB_FN (bb, cfun_to_copy)
1212 copy_edges_for_bb (bb, count_scale);
1213 if (gimple_in_ssa_p (cfun))
1214 FOR_ALL_BB_FN (bb, cfun_to_copy)
1215 copy_phis_for_bb (bb, id);
1216 FOR_ALL_BB_FN (bb, cfun_to_copy)
1218 ((basic_block)bb->aux)->aux = NULL;
1219 bb->aux = NULL;
1221 /* Zero out AUX fields of blocks newly created during EH edge
1222 insertion. */
1223 for (; last < n_basic_blocks; last++)
1224 BASIC_BLOCK (last)->aux = NULL;
1225 entry_block_map->aux = NULL;
1226 exit_block_map->aux = NULL;
1228 return new_fndecl;
1231 /* Make a copy of the body of FN so that it can be inserted inline in
1232 another function. */
1234 static tree
1235 copy_generic_body (copy_body_data *id)
1237 tree body;
1238 tree fndecl = id->src_fn;
1240 body = DECL_SAVED_TREE (fndecl);
1241 walk_tree (&body, copy_body_r, id, NULL);
1243 return body;
1246 static tree
1247 copy_body (copy_body_data *id, gcov_type count, int frequency,
1248 basic_block entry_block_map, basic_block exit_block_map)
1250 tree fndecl = id->src_fn;
1251 tree body;
1253 /* If this body has a CFG, walk CFG and copy. */
1254 gcc_assert (ENTRY_BLOCK_PTR_FOR_FUNCTION (DECL_STRUCT_FUNCTION (fndecl)));
1255 body = copy_cfg_body (id, count, frequency, entry_block_map, exit_block_map);
1257 return body;
1260 /* Return true if VALUE is an ADDR_EXPR of an automatic variable
1261 defined in function FN, or of a data member thereof. */
1263 static bool
1264 self_inlining_addr_expr (tree value, tree fn)
1266 tree var;
1268 if (TREE_CODE (value) != ADDR_EXPR)
1269 return false;
1271 var = get_base_address (TREE_OPERAND (value, 0));
1273 return var && lang_hooks.tree_inlining.auto_var_in_fn_p (var, fn);
1276 static void
1277 setup_one_parameter (copy_body_data *id, tree p, tree value, tree fn,
1278 basic_block bb, tree *vars)
1280 tree init_stmt;
1281 tree var;
1282 tree var_sub;
1283 tree rhs = value ? fold_convert (TREE_TYPE (p), value) : NULL;
1284 tree def = (gimple_in_ssa_p (cfun)
1285 ? gimple_default_def (id->src_cfun, p) : NULL);
1287 /* If the parameter is never assigned to and has no SSA_NAMEs created,
1288 we may not need to create a new variable here at all. Instead, we may
1289 be able to just use the argument value. */
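/* Hypothetical example: when inlining "foo (42)" and foo's parameter P is
   TREE_READONLY, never addressed and never assigned, the map can send P
   straight to the constant 42 and no local variable is created at all.  */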
1290 if (TREE_READONLY (p)
1291 && !TREE_ADDRESSABLE (p)
1292 && value && !TREE_SIDE_EFFECTS (value)
1293 && !def)
1295 /* We may produce non-gimple trees by adding NOPs, or introduce
1296 invalid sharing when the operand is not really constant.
1297 It is not a big deal to prohibit constant propagation here, as
1298 we will constant propagate in the DOM1 pass anyway. */
1299 if (is_gimple_min_invariant (value)
1300 && lang_hooks.types_compatible_p (TREE_TYPE (value), TREE_TYPE (p))
1301 /* We have to be very careful about ADDR_EXPR. Make sure
1302 the base variable isn't a local variable of the inlined
1303 function, e.g., when doing recursive inlining, direct or
1304 mutually-recursive or whatever, which is why we don't
1305 just test whether fn == current_function_decl. */
1306 && ! self_inlining_addr_expr (value, fn))
1308 insert_decl_map (id, p, value);
1309 return;
1313 /* Make an equivalent VAR_DECL. Note that we must NOT remap the type
1314 here since the type of this decl must be visible to the calling
1315 function. */
1316 var = copy_decl_to_var (p, id);
1317 if (gimple_in_ssa_p (cfun) && TREE_CODE (var) == VAR_DECL)
1319 get_var_ann (var);
1320 add_referenced_var (var);
1323 /* See if the frontend wants to pass this by invisible reference. If
1324 so, our new VAR_DECL will have REFERENCE_TYPE, and we need to
1325 replace uses of the PARM_DECL with dereferences. */
1326 if (TREE_TYPE (var) != TREE_TYPE (p)
1327 && POINTER_TYPE_P (TREE_TYPE (var))
1328 && TREE_TYPE (TREE_TYPE (var)) == TREE_TYPE (p))
1330 insert_decl_map (id, var, var);
1331 var_sub = build_fold_indirect_ref (var);
1333 else
1334 var_sub = var;
1336 /* Register the VAR_DECL as the equivalent for the PARM_DECL;
1337 that way, when the PARM_DECL is encountered, it will be
1338 automatically replaced by the VAR_DECL. */
1339 insert_decl_map (id, p, var_sub);
1341 /* Declare this new variable. */
1342 TREE_CHAIN (var) = *vars;
1343 *vars = var;
1345 /* Make gimplifier happy about this variable. */
1346 DECL_SEEN_IN_BIND_EXPR_P (var) = 1;
1348 /* Even if P was TREE_READONLY, the new VAR should not be.
1349 In the original code, we would have constructed a
1350 temporary, and then the function body would have never
1351 changed the value of P. However, now, we will be
1352 constructing VAR directly. The constructor body may
1353 change its value multiple times as it is being
1354 constructed. Therefore, it must not be TREE_READONLY;
1355 the back-end assumes that a TREE_READONLY variable is
1356 assigned to only once. */
1357 if (TYPE_NEEDS_CONSTRUCTING (TREE_TYPE (p)))
1358 TREE_READONLY (var) = 0;
1360 /* If there is no setup required and we are in SSA, take the easy route,
1361 replacing all SSA names representing the function parameter by the
1362 SSA name passed to the function.
1364 We need to construct a map for the variable anyway, as it might be used
1365 in different SSA names when the parameter is set in the function.
1367 FIXME: This usually kills the last connection between the inlined
1368 function parameter and the actual value in debug info. Can we do
1369 better here? If we just inserted the statement, copy propagation
1370 would kill it anyway, as it always did in older versions of GCC.
1372 We might want to introduce a notion that a single SSA_NAME might
1373 represent multiple variables for purposes of debugging. */
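/* Illustrative example: if the caller passes the SSA name j_5 (or the
   constant 7) for parameter P, every SSA name of P in the copied body is
   mapped directly to j_5 (or 7) and no initialization statement is
   emitted.  */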
1374 if (gimple_in_ssa_p (cfun) && rhs && def && is_gimple_reg (p)
1375 && (TREE_CODE (rhs) == SSA_NAME
1376 || is_gimple_min_invariant (rhs))
1377 && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (def))
1379 insert_decl_map (id, def, rhs);
1380 return;
1383 /* Initialize this VAR_DECL from the equivalent argument. Convert
1384 the argument to the proper type in case it was promoted. */
1385 if (value)
1387 block_stmt_iterator bsi = bsi_last (bb);
1389 if (rhs == error_mark_node)
1391 insert_decl_map (id, p, var_sub);
1392 return;
1395 STRIP_USELESS_TYPE_CONVERSION (rhs);
1397 /* We want to use GIMPLE_MODIFY_STMT, not INIT_EXPR here so that we
1398 keep our trees in gimple form. */
1399 if (def && gimple_in_ssa_p (cfun) && is_gimple_reg (p))
1401 def = remap_ssa_name (def, id);
1402 init_stmt = build2 (GIMPLE_MODIFY_STMT, TREE_TYPE (var), def, rhs);
1403 SSA_NAME_DEF_STMT (def) = init_stmt;
1404 SSA_NAME_IS_DEFAULT_DEF (def) = 0;
1405 set_default_def (var, NULL);
1407 else
1408 init_stmt = build2 (GIMPLE_MODIFY_STMT, TREE_TYPE (var), var, rhs);
1410 /* If we did not create a gimple value and we did not create a gimple
1411 cast of a gimple value, then we will need to gimplify INIT_STMTS
1412 at the end. Note that is_gimple_cast only checks the outer
1413 tree code, not its operand. Thus the explicit check that its
1414 operand is a gimple value. */
1415 if ((!is_gimple_val (rhs)
1416 && (!is_gimple_cast (rhs)
1417 || !is_gimple_val (TREE_OPERAND (rhs, 0))))
1418 || !is_gimple_reg (var))
1420 tree_stmt_iterator i;
1422 push_gimplify_context ();
1423 gimplify_stmt (&init_stmt);
1424 if (gimple_in_ssa_p (cfun)
1425 && init_stmt && TREE_CODE (init_stmt) == STATEMENT_LIST)
1427 /* The replacement can expose previously unreferenced
1428 variables. */
1429 for (i = tsi_start (init_stmt); !tsi_end_p (i); tsi_next (&i))
1430 find_new_referenced_vars (tsi_stmt_ptr (i));
1432 pop_gimplify_context (NULL);
1435 /* If VAR represents a zero-sized variable, it's possible that the
1436 assignment statement may result in no gimple statements. */
1437 if (init_stmt)
1438 bsi_insert_after (&bsi, init_stmt, BSI_NEW_STMT);
1439 if (gimple_in_ssa_p (cfun))
1440 for (;!bsi_end_p (bsi); bsi_next (&bsi))
1441 mark_symbols_for_renaming (bsi_stmt (bsi));
1445 /* Generate code to initialize the parameters of the function at the
1446 top of the stack in ID from the CALL_EXPR EXP. */
1448 static void
1449 initialize_inlined_parameters (copy_body_data *id, tree exp,
1450 tree fn, basic_block bb)
1452 tree parms;
1453 tree a;
1454 tree p;
1455 tree vars = NULL_TREE;
1456 int argnum = 0;
1457 call_expr_arg_iterator iter;
1458 tree static_chain = CALL_EXPR_STATIC_CHAIN (exp);
1460 /* Figure out what the parameters are. */
1461 parms = DECL_ARGUMENTS (fn);
1463 /* Loop through the parameter declarations, replacing each with an
1464 equivalent VAR_DECL, appropriately initialized. */
1465 for (p = parms, a = first_call_expr_arg (exp, &iter); p;
1466 a = next_call_expr_arg (&iter), p = TREE_CHAIN (p))
1468 tree value;
1470 ++argnum;
1472 /* Find the initializer. */
1473 value = lang_hooks.tree_inlining.convert_parm_for_inlining
1474 (p, a, fn, argnum);
1476 setup_one_parameter (id, p, value, fn, bb, &vars);
1479 /* Initialize the static chain. */
1480 p = DECL_STRUCT_FUNCTION (fn)->static_chain_decl;
1481 gcc_assert (fn != current_function_decl);
1482 if (p)
1484 /* No static chain? Seems like a bug in tree-nested.c. */
1485 gcc_assert (static_chain);
1487 setup_one_parameter (id, p, static_chain, fn, bb, &vars);
1490 declare_inline_vars (id->block, vars);
1493 /* Declare a return variable to replace the RESULT_DECL for the
1494 function we are calling. An appropriate DECL_STMT is returned.
1495 The USE_STMT is filled to contain a use of the declaration to
1496 indicate the return value of the function.
1498 RETURN_SLOT, if non-null, is the place where the result is to be stored. It
1499 is set only for CALL_EXPR_RETURN_SLOT_OPT. MODIFY_DEST, if non-null,
1500 was the LHS of the GIMPLE_MODIFY_STMT to which this call is the RHS.
1502 The return value is a (possibly null) value that is the result of the
1503 function as seen by the callee. *USE_P is a (possibly null) value that
1504 holds the result as seen by the caller. */
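/* Illustrative cases (names invented): for "s = foo ();" MODIFY_DEST is
   the caller's variable s and the inlined body may store into s directly;
   with the return slot optimization, RETURN_SLOT is the caller-provided
   object and the callee's RESULT_DECL is mapped to it (or to its address,
   when the function returns by reference).  */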
1506 static tree
1507 declare_return_variable (copy_body_data *id, tree return_slot, tree modify_dest,
1508 tree *use_p)
1510 tree callee = id->src_fn;
1511 tree caller = id->dst_fn;
1512 tree result = DECL_RESULT (callee);
1513 tree callee_type = TREE_TYPE (result);
1514 tree caller_type = TREE_TYPE (TREE_TYPE (callee));
1515 tree var, use;
1517 /* We don't need to do anything for functions that don't return
1518 anything. */
1519 if (!result || VOID_TYPE_P (callee_type))
1521 *use_p = NULL_TREE;
1522 return NULL_TREE;
1525 /* If there was a return slot, then the return value is the
1526 dereferenced address of that object. */
1527 if (return_slot)
1529 /* The front end shouldn't have used both return_slot and
1530 a modify expression. */
1531 gcc_assert (!modify_dest);
1532 if (DECL_BY_REFERENCE (result))
1534 tree return_slot_addr = build_fold_addr_expr (return_slot);
1535 STRIP_USELESS_TYPE_CONVERSION (return_slot_addr);
1537 /* We are going to construct *&return_slot and we can't do that
1538 for variables believed not to be addressable.
1540 FIXME: This check can possibly match, because values returned
1541 via the return slot optimization are not believed to have their
1542 address taken by alias analysis. */
1543 gcc_assert (TREE_CODE (return_slot) != SSA_NAME);
1544 if (gimple_in_ssa_p (cfun))
1546 HOST_WIDE_INT bitsize;
1547 HOST_WIDE_INT bitpos;
1548 tree offset;
1549 enum machine_mode mode;
1550 int unsignedp;
1551 int volatilep;
1552 tree base;
1553 base = get_inner_reference (return_slot, &bitsize, &bitpos,
1554 &offset,
1555 &mode, &unsignedp, &volatilep,
1556 false);
1557 if (TREE_CODE (base) == INDIRECT_REF)
1558 base = TREE_OPERAND (base, 0);
1559 if (TREE_CODE (base) == SSA_NAME)
1560 base = SSA_NAME_VAR (base);
1561 mark_sym_for_renaming (base);
1563 var = return_slot_addr;
1565 else
1567 var = return_slot;
1568 gcc_assert (TREE_CODE (var) != SSA_NAME);
1570 if ((TREE_CODE (TREE_TYPE (result)) == COMPLEX_TYPE
1571 || TREE_CODE (TREE_TYPE (result)) == VECTOR_TYPE)
1572 && !DECL_GIMPLE_REG_P (result)
1573 && DECL_P (var))
1574 DECL_GIMPLE_REG_P (var) = 0;
1575 use = NULL;
1576 goto done;
1579 /* All types requiring non-trivial constructors should have been handled. */
1580 gcc_assert (!TREE_ADDRESSABLE (callee_type));
1582 /* Attempt to avoid creating a new temporary variable. */
1583 if (modify_dest
1584 && TREE_CODE (modify_dest) != SSA_NAME)
1586 bool use_it = false;
1588 /* We can't use MODIFY_DEST if there's type promotion involved. */
1589 if (!lang_hooks.types_compatible_p (caller_type, callee_type))
1590 use_it = false;
1592 /* ??? If we're assigning to a variable sized type, then we must
1593 reuse the destination variable, because we've no good way to
1594 create variable sized temporaries at this point. */
1595 else if (TREE_CODE (TYPE_SIZE_UNIT (caller_type)) != INTEGER_CST)
1596 use_it = true;
1598 /* If the callee cannot possibly modify MODIFY_DEST, then we can
1599 reuse it as the result of the call directly. Don't do this if
1600 it would promote MODIFY_DEST to addressable. */
1601 else if (TREE_ADDRESSABLE (result))
1602 use_it = false;
1603 else
1605 tree base_m = get_base_address (modify_dest);
1607 /* If the base isn't a decl, then it's a pointer, and we don't
1608 know where that's going to go. */
1609 if (!DECL_P (base_m))
1610 use_it = false;
1611 else if (is_global_var (base_m))
1612 use_it = false;
1613 else if ((TREE_CODE (TREE_TYPE (result)) == COMPLEX_TYPE
1614 || TREE_CODE (TREE_TYPE (result)) == VECTOR_TYPE)
1615 && !DECL_GIMPLE_REG_P (result)
1616 && DECL_GIMPLE_REG_P (base_m))
1617 use_it = false;
1618 else if (!TREE_ADDRESSABLE (base_m))
1619 use_it = true;
1622 if (use_it)
1624 var = modify_dest;
1625 use = NULL;
1626 goto done;
1630 gcc_assert (TREE_CODE (TYPE_SIZE_UNIT (callee_type)) == INTEGER_CST);
1632 var = copy_result_decl_to_var (result, id);
1633 if (gimple_in_ssa_p (cfun))
1635 get_var_ann (var);
1636 add_referenced_var (var);
1639 DECL_SEEN_IN_BIND_EXPR_P (var) = 1;
1640 DECL_STRUCT_FUNCTION (caller)->unexpanded_var_list
1641 = tree_cons (NULL_TREE, var,
1642 DECL_STRUCT_FUNCTION (caller)->unexpanded_var_list);
1644 /* Do not have the rest of GCC warn about this variable as it should
1645 not be visible to the user. */
1646 TREE_NO_WARNING (var) = 1;
1648 declare_inline_vars (id->block, var);
1650 /* Build the use expr. If the return type of the function was
1651 promoted, convert it back to the expected type. */
1652 use = var;
1653 if (!lang_hooks.types_compatible_p (TREE_TYPE (var), caller_type))
1654 use = fold_convert (caller_type, var);
1656 STRIP_USELESS_TYPE_CONVERSION (use);
1658 if (DECL_BY_REFERENCE (result))
1659 var = build_fold_addr_expr (var);
1661 done:
1662 /* Register the VAR_DECL as the equivalent for the RESULT_DECL; that
1663 way, when the RESULT_DECL is encountered, it will be
1664 automatically replaced by the VAR_DECL. */
1665 insert_decl_map (id, result, var);
1667 /* Remember this so we can ignore it in remap_decls. */
1668 id->retvar = var;
1670 *use_p = use;
1671 return var;
1674 /* Returns nonzero if a function can be inlined as a tree. */
1676 bool
1677 tree_inlinable_function_p (tree fn)
1679 return inlinable_function_p (fn);
1682 static const char *inline_forbidden_reason;
1684 static tree
1685 inline_forbidden_p_1 (tree *nodep, int *walk_subtrees ATTRIBUTE_UNUSED,
1686 void *fnp)
1688 tree node = *nodep;
1689 tree fn = (tree) fnp;
1690 tree t;
1692 switch (TREE_CODE (node))
1694 case CALL_EXPR:
1695 /* Refuse to inline an alloca call unless the user explicitly forced it, as
1696 this may change the program's memory overhead drastically when the
1697 function using alloca is called in a loop. In the GCC present in
1698 SPEC2000, inlining into schedule_block caused it to require 2GB of
1699 RAM instead of 256MB. */
1700 if (alloca_call_p (node)
1701 && !lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn)))
1703 inline_forbidden_reason
1704 = G_("function %q+F can never be inlined because it uses "
1705 "alloca (override using the always_inline attribute)");
1706 return node;
1708 t = get_callee_fndecl (node);
1709 if (! t)
1710 break;
1712 /* We cannot inline functions that call setjmp. */
1713 if (setjmp_call_p (t))
1715 inline_forbidden_reason
1716 = G_("function %q+F can never be inlined because it uses setjmp");
1717 return node;
1720 if (DECL_BUILT_IN_CLASS (t) == BUILT_IN_NORMAL)
1721 switch (DECL_FUNCTION_CODE (t))
1723 /* We cannot inline functions that take a variable number of
1724 arguments. */
1725 case BUILT_IN_VA_START:
1726 case BUILT_IN_STDARG_START:
1727 case BUILT_IN_NEXT_ARG:
1728 case BUILT_IN_VA_END:
1729 inline_forbidden_reason
1730 = G_("function %q+F can never be inlined because it "
1731 "uses variable argument lists");
1732 return node;
1734 case BUILT_IN_LONGJMP:
1735 /* We can't inline functions that call __builtin_longjmp at
1736 all. The non-local goto machinery really requires the
1737 destination be in a different function. If we allow the
1738 function calling __builtin_longjmp to be inlined into the
1739 function calling __builtin_setjmp, Things will Go Awry. */
1740 inline_forbidden_reason
1741 = G_("function %q+F can never be inlined because "
1742 "it uses setjmp-longjmp exception handling");
1743 return node;
1745 case BUILT_IN_NONLOCAL_GOTO:
1746 /* Similarly. */
1747 inline_forbidden_reason
1748 = G_("function %q+F can never be inlined because "
1749 "it uses non-local goto");
1750 return node;
1752 case BUILT_IN_RETURN:
1753 case BUILT_IN_APPLY_ARGS:
1754 /* If a __builtin_apply_args caller would be inlined,
1755 it would be saving arguments of the function it has
1756 been inlined into. Similarly __builtin_return would
1757 return from the function the inline has been inlined into. */
1758 inline_forbidden_reason
1759 = G_("function %q+F can never be inlined because "
1760 "it uses __builtin_return or __builtin_apply_args");
1761 return node;
1763 default:
1764 break;
1766 break;
1768 case GOTO_EXPR:
1769 t = TREE_OPERAND (node, 0);
1771 /* We will not inline a function which uses computed goto. The
1772 addresses of its local labels, which may be tucked into
1773 global storage, are of course not constant across
1774 instantiations, which causes unexpected behavior. */
1775 if (TREE_CODE (t) != LABEL_DECL)
1777 inline_forbidden_reason
1778 = G_("function %q+F can never be inlined "
1779 "because it contains a computed goto");
1780 return node;
1782 break;
1784 case LABEL_EXPR:
1785 t = TREE_OPERAND (node, 0);
1786 if (DECL_NONLOCAL (t))
1788 /* We cannot inline a function that receives a non-local goto
1789 because we cannot remap the destination label used in the
1790 function that is performing the non-local goto. */
1791 inline_forbidden_reason
1792 = G_("function %q+F can never be inlined "
1793 "because it receives a non-local goto");
1794 return node;
1796 break;
1798 case RECORD_TYPE:
1799 case UNION_TYPE:
1800 /* We cannot inline a function of the form
1802 void F (int i) { struct S { int ar[i]; } s; }
1804 Attempting to do so produces a catch-22.
1805 If walk_tree examines the TYPE_FIELDS chain of RECORD_TYPE/
1806 UNION_TYPE nodes, then it goes into infinite recursion on a
1807 structure containing a pointer to its own type. If it doesn't,
1808 then the type node for S doesn't get adjusted properly when
1809 F is inlined.
1811 ??? This is likely no longer true, but it's too late in the 4.0
1812 cycle to try to find out. This should be checked for 4.1. */
1813 for (t = TYPE_FIELDS (node); t; t = TREE_CHAIN (t))
1814 if (variably_modified_type_p (TREE_TYPE (t), NULL))
1816 inline_forbidden_reason
1817 = G_("function %q+F can never be inlined "
1818 "because it uses variable sized variables");
1819 return node;
1822 default:
1823 break;
1826 return NULL_TREE;
1829 /* Return a subexpression of FNDECL that forbids it from being inlined, if any. */
1830 static tree
1831 inline_forbidden_p (tree fndecl)
1833 location_t saved_loc = input_location;
1834 block_stmt_iterator bsi;
1835 basic_block bb;
1836 tree ret = NULL_TREE;
1838 FOR_EACH_BB_FN (bb, DECL_STRUCT_FUNCTION (fndecl))
1839 for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
1841 ret = walk_tree_without_duplicates (bsi_stmt_ptr (bsi),
1842 inline_forbidden_p_1, fndecl);
1843 if (ret)
1844 goto egress;
1847 egress:
1848 input_location = saved_loc;
1849 return ret;
1852 /* Returns true if FN is a function that does not have any
1853 fundamental inline-blocking properties. */
1855 static bool
1856 inlinable_function_p (tree fn)
1858 bool inlinable = true;
1860 /* If we've already decided this function shouldn't be inlined,
1861 there's no need to check again. */
1862 if (DECL_UNINLINABLE (fn))
1863 return false;
1865 /* See if there is any language-specific reason it cannot be
1866 inlined. (It is important that this hook be called early because
1867 in C++ it may result in template instantiation.)
1868 If the function is not inlinable for language-specific reasons,
1869 it is left up to the langhook to explain why. */
1870 inlinable = !lang_hooks.tree_inlining.cannot_inline_tree_fn (&fn);
1872 /* If we don't have the function body available, we can't inline it.
1873 However, this should not be recorded since we also get here for
1874 forward declared inline functions. Therefore, return at once. */
1875 if (!DECL_SAVED_TREE (fn))
1876 return false;
1878 /* If we're not inlining at all, then we cannot inline this function. */
1879 else if (!flag_inline_trees)
1880 inlinable = false;
1882 /* Only try to inline functions if DECL_INLINE is set. This should be
1883 true for all functions declared `inline', and for all other functions
1884 as well with -finline-functions.
1886 Don't think of disregarding DECL_INLINE when flag_inline_trees == 2;
1887 it's the front-end that must set DECL_INLINE in this case, because
1888 dwarf2out loses if a function that does not have DECL_INLINE set is
1889 inlined anyway. That is why we have both DECL_INLINE and
1890 DECL_DECLARED_INLINE_P. */
1891 /* FIXME: When flag_inline_trees dies, the check for flag_unit_at_a_time
1892 here should be redundant. */
1893 else if (!DECL_INLINE (fn) && !flag_unit_at_a_time)
1894 inlinable = false;
1896 else if (inline_forbidden_p (fn))
1898 /* See if we should warn about uninlinable functions. Previously,
1899 some of these warnings would be issued while trying to expand
1900 the function inline, but that would cause multiple warnings
1901 about functions that would for example call alloca. But since
1902 this is a property of the function, just one warning is enough.
1903 As a bonus we can now give more details about the reason why a
1904 function is not inlinable.
1905 We only warn for functions declared `inline' by the user. */
1906 bool do_warning = (warn_inline
1907 && DECL_INLINE (fn)
1908 && DECL_DECLARED_INLINE_P (fn)
1909 && !DECL_IN_SYSTEM_HEADER (fn));
1911 if (lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn)))
1912 sorry (inline_forbidden_reason, fn);
1913 else if (do_warning)
1914 warning (OPT_Winline, inline_forbidden_reason, fn);
1916 inlinable = false;
1919 /* Squirrel away the result so that we don't have to check again. */
1920 DECL_UNINLINABLE (fn) = !inlinable;
1922 return inlinable;
1925 /* Estimate the cost of a memory move. Use machine dependent
1926 word size and take a possible memcpy call into account. */
1928 static int
1929 estimate_move_cost (tree type)
1931 HOST_WIDE_INT size;
1933 size = int_size_in_bytes (type);
1935 if (size < 0 || size > MOVE_MAX_PIECES * MOVE_RATIO)
1936 /* Cost of a memcpy call, 3 arguments and the call. */
1937 return 4;
1938 else
1939 return ((size + MOVE_MAX_PIECES - 1) / MOVE_MAX_PIECES);
1942 /* Arguments for estimate_num_insns_1. */
1944 struct eni_data
1946 /* Used to return the number of insns. */
1947 int count;
1949 /* Weights of various constructs. */
1950 eni_weights *weights;
1953 /* Used by estimate_num_insns. Estimate number of instructions seen
1954 by given statement. */
1956 static tree
1957 estimate_num_insns_1 (tree *tp, int *walk_subtrees, void *data)
1959 struct eni_data *d = data;
1960 tree x = *tp;
1961 unsigned cost;
1963 if (IS_TYPE_OR_DECL_P (x))
1965 *walk_subtrees = 0;
1966 return NULL;
1968 /* Assume that constants and references count as nothing. Their cost
1969 should be dominated by the operations performed on them, which we do
1970 count, and they are a common target of CSE and similar optimizations. */
1971 else if (CONSTANT_CLASS_P (x) || REFERENCE_CLASS_P (x))
1972 return NULL;
1974 switch (TREE_CODE (x))
1976 /* Containers have no cost. */
1977 case TREE_LIST:
1978 case TREE_VEC:
1979 case BLOCK:
1980 case COMPONENT_REF:
1981 case BIT_FIELD_REF:
1982 case INDIRECT_REF:
1983 case ALIGN_INDIRECT_REF:
1984 case MISALIGNED_INDIRECT_REF:
1985 case ARRAY_REF:
1986 case ARRAY_RANGE_REF:
1987 case OBJ_TYPE_REF:
1988 case EXC_PTR_EXPR: /* ??? */
1989 case FILTER_EXPR: /* ??? */
1990 case COMPOUND_EXPR:
1991 case BIND_EXPR:
1992 case WITH_CLEANUP_EXPR:
1993 case NOP_EXPR:
1994 case VIEW_CONVERT_EXPR:
1995 case SAVE_EXPR:
1996 case ADDR_EXPR:
1997 case COMPLEX_EXPR:
1998 case RANGE_EXPR:
1999 case CASE_LABEL_EXPR:
2000 case SSA_NAME:
2001 case CATCH_EXPR:
2002 case EH_FILTER_EXPR:
2003 case STATEMENT_LIST:
2004 case ERROR_MARK:
2005 case NON_LVALUE_EXPR:
2006 case FDESC_EXPR:
2007 case VA_ARG_EXPR:
2008 case TRY_CATCH_EXPR:
2009 case TRY_FINALLY_EXPR:
2010 case LABEL_EXPR:
2011 case GOTO_EXPR:
2012 case RETURN_EXPR:
2013 case EXIT_EXPR:
2014 case LOOP_EXPR:
2015 case PHI_NODE:
2016 case WITH_SIZE_EXPR:
2017 case OMP_CLAUSE:
2018 case OMP_RETURN:
2019 case OMP_CONTINUE:
2020 break;
2022 /* We don't account for constants for now. Assume that their cost is
2023 amortized by the operations that use them. We may reconsider this
2024 decision once we are able to optimize the tree before estimating its
2025 size and break out static initializers. */
2026 case IDENTIFIER_NODE:
2027 case INTEGER_CST:
2028 case REAL_CST:
2029 case COMPLEX_CST:
2030 case VECTOR_CST:
2031 case STRING_CST:
2032 *walk_subtrees = 0;
2033 return NULL;
2035 /* Try to estimate the cost of assignments. We have three cases to
2036 deal with:
2037 1) Simple assignments to registers;
2038 2) Stores to things that must live in memory. This includes
2039 "normal" stores to scalars, but also assignments of large
2040 structures, or constructors of big arrays;
2041 3) TARGET_EXPRs.
2043 Let us look at the first two cases, assuming we have "a = b + C":
2044 <GIMPLE_MODIFY_STMT <var_decl "a">
2045 <plus_expr <var_decl "b"> <constant C>>
2046 If "a" is a GIMPLE register, the assignment to it is free on almost
2047 any target, because "a" usually ends up in a real register. Hence
2048 the only cost of this expression comes from the PLUS_EXPR, and we
2049 can ignore the GIMPLE_MODIFY_STMT.
2050 If "a" is not a GIMPLE register, the assignment to "a" will most
2051 likely be a real store, so the cost of the GIMPLE_MODIFY_STMT is the cost
2052 of moving something into "a", which we compute using the function
2053 estimate_move_cost.
2055 The third case deals with TARGET_EXPRs, for which the semantics are
2056 that a temporary is assigned, unless the TARGET_EXPR itself is being
2057 assigned to something else. In the latter case we do not need the
2058 temporary. E.g. in:
2059 <GIMPLE_MODIFY_STMT <var_decl "a"> <target_expr>>, the
2060 GIMPLE_MODIFY_STMT is free. */
2061 case INIT_EXPR:
2062 case GIMPLE_MODIFY_STMT:
2063 /* Is the right-hand side a TARGET_EXPR? */
2064 if (TREE_CODE (GENERIC_TREE_OPERAND (x, 1)) == TARGET_EXPR)
2065 break;
2066 /* ... fall through ... */
2068 case TARGET_EXPR:
2069 x = GENERIC_TREE_OPERAND (x, 0);
2070 /* Is this an assignment to a register? */
2071 if (is_gimple_reg (x))
2072 break;
2073 /* Otherwise it's a store, so fall through to compute the move cost. */
2075 case CONSTRUCTOR:
2076 d->count += estimate_move_cost (TREE_TYPE (x));
2077 break;
2079 /* Assign cost of 1 to usual operations.
2080 ??? We may consider mapping RTL costs to this. */
2081 case COND_EXPR:
2082 case VEC_COND_EXPR:
2084 case PLUS_EXPR:
2085 case MINUS_EXPR:
2086 case MULT_EXPR:
2088 case FIX_TRUNC_EXPR:
2090 case NEGATE_EXPR:
2091 case FLOAT_EXPR:
2092 case MIN_EXPR:
2093 case MAX_EXPR:
2094 case ABS_EXPR:
2096 case LSHIFT_EXPR:
2097 case RSHIFT_EXPR:
2098 case LROTATE_EXPR:
2099 case RROTATE_EXPR:
2100 case VEC_LSHIFT_EXPR:
2101 case VEC_RSHIFT_EXPR:
2103 case BIT_IOR_EXPR:
2104 case BIT_XOR_EXPR:
2105 case BIT_AND_EXPR:
2106 case BIT_NOT_EXPR:
2108 case TRUTH_ANDIF_EXPR:
2109 case TRUTH_ORIF_EXPR:
2110 case TRUTH_AND_EXPR:
2111 case TRUTH_OR_EXPR:
2112 case TRUTH_XOR_EXPR:
2113 case TRUTH_NOT_EXPR:
2115 case LT_EXPR:
2116 case LE_EXPR:
2117 case GT_EXPR:
2118 case GE_EXPR:
2119 case EQ_EXPR:
2120 case NE_EXPR:
2121 case ORDERED_EXPR:
2122 case UNORDERED_EXPR:
2124 case UNLT_EXPR:
2125 case UNLE_EXPR:
2126 case UNGT_EXPR:
2127 case UNGE_EXPR:
2128 case UNEQ_EXPR:
2129 case LTGT_EXPR:
2131 case CONVERT_EXPR:
2133 case CONJ_EXPR:
2135 case PREDECREMENT_EXPR:
2136 case PREINCREMENT_EXPR:
2137 case POSTDECREMENT_EXPR:
2138 case POSTINCREMENT_EXPR:
2140 case ASM_EXPR:
2142 case REALIGN_LOAD_EXPR:
2144 case REDUC_MAX_EXPR:
2145 case REDUC_MIN_EXPR:
2146 case REDUC_PLUS_EXPR:
2147 case WIDEN_SUM_EXPR:
2148 case DOT_PROD_EXPR:
2149 case VEC_WIDEN_MULT_HI_EXPR:
2150 case VEC_WIDEN_MULT_LO_EXPR:
2151 case VEC_UNPACK_HI_EXPR:
2152 case VEC_UNPACK_LO_EXPR:
2153 case VEC_PACK_MOD_EXPR:
2154 case VEC_PACK_SAT_EXPR:
2156 case WIDEN_MULT_EXPR:
2158 case VEC_EXTRACT_EVEN_EXPR:
2159 case VEC_EXTRACT_ODD_EXPR:
2160 case VEC_INTERLEAVE_HIGH_EXPR:
2161 case VEC_INTERLEAVE_LOW_EXPR:
2163 case RESX_EXPR:
2164 d->count += 1;
2165 break;
2167 case SWITCH_EXPR:
2168 /* TODO: Cost of a switch should be derived from the number of
2169 branches. */
2170 d->count += d->weights->switch_cost;
2171 break;
2173 /* A few special cases of expensive operations. This is useful
2174 for avoiding inlining of functions that contain too many of these. */
2175 case TRUNC_DIV_EXPR:
2176 case CEIL_DIV_EXPR:
2177 case FLOOR_DIV_EXPR:
2178 case ROUND_DIV_EXPR:
2179 case EXACT_DIV_EXPR:
2180 case TRUNC_MOD_EXPR:
2181 case CEIL_MOD_EXPR:
2182 case FLOOR_MOD_EXPR:
2183 case ROUND_MOD_EXPR:
2184 case RDIV_EXPR:
2185 d->count += d->weights->div_mod_cost;
2186 break;
2187 case CALL_EXPR:
2189 tree decl = get_callee_fndecl (x);
2191 cost = d->weights->call_cost;
2192 if (decl && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
2193 switch (DECL_FUNCTION_CODE (decl))
2195 case BUILT_IN_CONSTANT_P:
2196 *walk_subtrees = 0;
2197 return NULL_TREE;
2198 case BUILT_IN_EXPECT:
2199 return NULL_TREE;
2200 /* Prefetch instruction is not expensive. */
2201 case BUILT_IN_PREFETCH:
2202 cost = 1;
2203 break;
2204 default:
2205 break;
2208 /* Our cost must be kept in sync with cgraph_estimate_size_after_inlining,
2209 which uses the function declaration to figure out the arguments. */
2210 if (!decl)
2212 tree a;
2213 call_expr_arg_iterator iter;
2214 FOR_EACH_CALL_EXPR_ARG (a, iter, x)
2215 d->count += estimate_move_cost (TREE_TYPE (a));
2217 else
2219 tree arg;
2220 for (arg = DECL_ARGUMENTS (decl); arg; arg = TREE_CHAIN (arg))
2221 d->count += estimate_move_cost (TREE_TYPE (arg));
2224 d->count += cost;
2225 break;
2228 case OMP_PARALLEL:
2229 case OMP_FOR:
2230 case OMP_SECTIONS:
2231 case OMP_SINGLE:
2232 case OMP_SECTION:
2233 case OMP_MASTER:
2234 case OMP_ORDERED:
2235 case OMP_CRITICAL:
2236 case OMP_ATOMIC:
2237 /* OpenMP directives are generally very expensive. */
2238 d->count += d->weights->omp_cost;
2239 break;
2241 default:
2242 gcc_unreachable ();
2244 return NULL;
2247 /* Estimate number of instructions that will be created by expanding EXPR.
2248 WEIGHTS contains weights attributed to various constructs. */
2250 int
2251 estimate_num_insns (tree expr, eni_weights *weights)
2253 struct pointer_set_t *visited_nodes;
2254 basic_block bb;
2255 block_stmt_iterator bsi;
2256 struct function *my_function;
2257 struct eni_data data;
2259 data.count = 0;
2260 data.weights = weights;
2262 /* If we're given an entire function, walk the CFG. */
2263 if (TREE_CODE (expr) == FUNCTION_DECL)
2265 my_function = DECL_STRUCT_FUNCTION (expr);
2266 gcc_assert (my_function && my_function->cfg);
2267 visited_nodes = pointer_set_create ();
2268 FOR_EACH_BB_FN (bb, my_function)
2270 for (bsi = bsi_start (bb);
2271 !bsi_end_p (bsi);
2272 bsi_next (&bsi))
2274 walk_tree (bsi_stmt_ptr (bsi), estimate_num_insns_1,
2275 &data, visited_nodes);
2278 pointer_set_destroy (visited_nodes);
2280 else
2281 walk_tree_without_duplicates (&expr, estimate_num_insns_1, &data);
2283 return data.count;
2286 /* Initializes weights used by estimate_num_insns. */
2288 void
2289 init_inline_once (void)
2291 eni_inlining_weights.call_cost = PARAM_VALUE (PARAM_INLINE_CALL_COST);
2292 eni_inlining_weights.div_mod_cost = 10;
2293 eni_inlining_weights.switch_cost = 1;
2294 eni_inlining_weights.omp_cost = 40;
2296 eni_size_weights.call_cost = 1;
2297 eni_size_weights.div_mod_cost = 1;
2298 eni_size_weights.switch_cost = 10;
2299 eni_size_weights.omp_cost = 40;
2301 /* Estimating the time for a call is difficult, since we have no idea what the
2302 called function does. In the current uses of eni_time_weights,
2303 underestimating the cost does less harm than overestimating it, so
2304 we choose a rather small value here. */
2305 eni_time_weights.call_cost = 10;
2306 eni_time_weights.div_mod_cost = 10;
2307 eni_time_weights.switch_cost = 4;
2308 eni_time_weights.omp_cost = 40;
2311 typedef struct function *function_p;
2313 DEF_VEC_P(function_p);
2314 DEF_VEC_ALLOC_P(function_p,heap);
2316 /* Initialized with NOGC, making this poisonous to the garbage collector. */
2317 static VEC(function_p,heap) *cfun_stack;
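2318 /* Make NEW_CFUN the current function, saving the previous one on CFUN_STACK. */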
2319 void
2320 push_cfun (struct function *new_cfun)
2322 VEC_safe_push (function_p, heap, cfun_stack, cfun);
2323 cfun = new_cfun;
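2325 /* Restore the function saved by the matching push_cfun as the current one. */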
2326 void
2327 pop_cfun (void)
2329 cfun = VEC_pop (function_p, cfun_stack);
2332 /* Install new lexical TREE_BLOCK underneath 'current_block'. */
2333 static void
2334 add_lexical_block (tree current_block, tree new_block)
2336 tree *blk_p;
2338 /* Walk to the last sub-block. */
2339 for (blk_p = &BLOCK_SUBBLOCKS (current_block);
2340 *blk_p;
2341 blk_p = &TREE_CHAIN (*blk_p))
2343 *blk_p = new_block;
2344 BLOCK_SUPERCONTEXT (new_block) = current_block;
2347 /* If *TP is a CALL_EXPR, replace it with its inline expansion. */
2349 static bool
2350 expand_call_inline (basic_block bb, tree stmt, tree *tp, void *data)
2352 copy_body_data *id;
2353 tree t;
2354 tree use_retvar;
2355 tree fn;
2356 splay_tree st;
2357 tree return_slot;
2358 tree modify_dest;
2359 location_t saved_location;
2360 struct cgraph_edge *cg_edge;
2361 const char *reason;
2362 basic_block return_block;
2363 edge e;
2364 block_stmt_iterator bsi, stmt_bsi;
2365 bool successfully_inlined = FALSE;
2366 bool purge_dead_abnormal_edges;
2367 tree t_step;
2368 tree var;
2370 /* See what we've got. */
2371 id = (copy_body_data *) data;
2372 t = *tp;
2374 /* Set input_location here so we get the right instantiation context
2375 if we call instantiate_decl from inlinable_function_p. */
2376 saved_location = input_location;
2377 if (EXPR_HAS_LOCATION (t))
2378 input_location = EXPR_LOCATION (t);
2380 /* From here on, we're only interested in CALL_EXPRs. */
2381 if (TREE_CODE (t) != CALL_EXPR)
2382 goto egress;
2384 /* First, see if we can figure out what function is being called.
2385 If we cannot, then there is no hope of inlining the function. */
2386 fn = get_callee_fndecl (t);
2387 if (!fn)
2388 goto egress;
2390 /* Turn forward declarations into real ones. */
2391 fn = cgraph_node (fn)->decl;
2393 /* If fn is a declaration of a function in a nested scope that was
2394 globally declared inline, we don't set its DECL_INITIAL.
2395 However, we can't blindly follow DECL_ABSTRACT_ORIGIN because the
2396 C++ front-end uses it for cdtors to refer to their internal
2397 declarations, which are not real functions. Fortunately those
2398 don't have trees to be saved, so we can tell by checking their
2399 DECL_SAVED_TREE. */
2400 if (! DECL_INITIAL (fn)
2401 && DECL_ABSTRACT_ORIGIN (fn)
2402 && DECL_SAVED_TREE (DECL_ABSTRACT_ORIGIN (fn)))
2403 fn = DECL_ABSTRACT_ORIGIN (fn);
2405 /* Objective-C and Fortran still call tree_rest_of_compilation directly.
2406 Remove this check once that is fixed. */
2407 if (!id->dst_node->analyzed)
2408 goto egress;
2410 cg_edge = cgraph_edge (id->dst_node, stmt);
2412 /* Constant propagation on arguments during previous inlining
2413 may have created a new direct call. Produce an edge for it. */
2414 if (!cg_edge)
2416 struct cgraph_node *dest = cgraph_node (fn);
2418 /* We have a missing edge in the callgraph. This can happen when
2419 previous inlining turned an indirect call into a direct call by
2420 constant-propagating arguments. In all other cases we hit a bug
2421 (incorrect node sharing is the most common reason for missing edges). */
2422 gcc_assert (dest->needed || !flag_unit_at_a_time);
2423 cgraph_create_edge (id->dst_node, dest, stmt,
2424 bb->count, CGRAPH_FREQ_BASE,
2425 bb->loop_depth)->inline_failed
2426 = N_("originally indirect function call not considered for inlining");
2427 if (dump_file)
2429 fprintf (dump_file, "Created new direct edge to %s",
2430 cgraph_node_name (dest));
2432 goto egress;
2435 /* Don't try to inline functions that are not well-suited to
2436 inlining. */
2437 if (!cgraph_inline_p (cg_edge, &reason))
2439 if (lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn))
2440 /* Avoid warnings during early inline pass. */
2441 && (!flag_unit_at_a_time || cgraph_global_info_ready))
2443 sorry ("inlining failed in call to %q+F: %s", fn, reason);
2444 sorry ("called from here");
2446 else if (warn_inline && DECL_DECLARED_INLINE_P (fn)
2447 && !DECL_IN_SYSTEM_HEADER (fn)
2448 && strlen (reason)
2449 && !lookup_attribute ("noinline", DECL_ATTRIBUTES (fn))
2450 /* Avoid warnings during early inline pass. */
2451 && (!flag_unit_at_a_time || cgraph_global_info_ready))
2453 warning (OPT_Winline, "inlining failed in call to %q+F: %s",
2454 fn, reason);
2455 warning (OPT_Winline, "called from here");
2457 goto egress;
2459 fn = cg_edge->callee->decl;
2461 #ifdef ENABLE_CHECKING
2462 if (cg_edge->callee->decl != id->dst_node->decl)
2463 verify_cgraph_node (cg_edge->callee);
2464 #endif
2466 /* We will be inlining this callee. */
2467 id->eh_region = lookup_stmt_eh_region (stmt);
2469 /* Split the block holding the CALL_EXPR. */
2470 e = split_block (bb, stmt);
2471 bb = e->src;
2472 return_block = e->dest;
2473 remove_edge (e);
2475 /* split_block splits after the statement; work around this by
2476 moving the call into the second block manually. Not pretty,
2477 but seems easier than doing the CFG manipulation by hand
2478 when the CALL_EXPR is in the last statement of BB. */
2479 stmt_bsi = bsi_last (bb);
2480 bsi_remove (&stmt_bsi, false);
2482 /* If the CALL_EXPR was in the last statement of BB, it may have
2483 been the source of abnormal edges. In this case, schedule
2484 the removal of dead abnormal edges. */
2485 bsi = bsi_start (return_block);
2486 if (bsi_end_p (bsi))
2488 bsi_insert_after (&bsi, stmt, BSI_NEW_STMT);
2489 purge_dead_abnormal_edges = true;
2491 else
2493 bsi_insert_before (&bsi, stmt, BSI_NEW_STMT);
2494 purge_dead_abnormal_edges = false;
2497 stmt_bsi = bsi_start (return_block);
2499 /* Build a block containing code to initialize the arguments, the
2500 actual inline expansion of the body, and a label for the return
2501 statements within the function to jump to. The type of the
2502 statement expression is the return type of the function call. */
2503 id->block = make_node (BLOCK);
2504 BLOCK_ABSTRACT_ORIGIN (id->block) = fn;
2505 BLOCK_SOURCE_LOCATION (id->block) = input_location;
2506 add_lexical_block (TREE_BLOCK (stmt), id->block);
2508 /* Local declarations will be replaced by their equivalents in this
2509 map. */
2510 st = id->decl_map;
2511 id->decl_map = splay_tree_new (splay_tree_compare_pointers,
2512 NULL, NULL);
2514 /* Record the function we are about to inline. */
2515 id->src_fn = fn;
2516 id->src_node = cg_edge->callee;
2517 id->src_cfun = DECL_STRUCT_FUNCTION (fn);
2519 initialize_inlined_parameters (id, t, fn, bb);
2521 if (DECL_INITIAL (fn))
2522 add_lexical_block (id->block, remap_blocks (DECL_INITIAL (fn), id));
2524 /* Return statements in the function body will be replaced by jumps
2525 to the RET_LABEL. */
2527 gcc_assert (DECL_INITIAL (fn));
2528 gcc_assert (TREE_CODE (DECL_INITIAL (fn)) == BLOCK);
2530 /* Find the lhs to which the result of this call is assigned. */
2531 return_slot = NULL;
2532 if (TREE_CODE (stmt) == GIMPLE_MODIFY_STMT)
2534 modify_dest = GIMPLE_STMT_OPERAND (stmt, 0);
2536 /* The function which we are inlining might not return a value,
2537 in which case we should issue a warning that the function
2538 does not return a value. In that case the optimizers will
2539 see that the variable to which the value is assigned was not
2540 initialized. We do not want to issue a warning about that
2541 uninitialized variable. */
2542 if (DECL_P (modify_dest))
2543 TREE_NO_WARNING (modify_dest) = 1;
2544 if (CALL_EXPR_RETURN_SLOT_OPT (t))
2546 return_slot = modify_dest;
2547 modify_dest = NULL;
2550 else
2551 modify_dest = NULL;
2553 /* Declare the return variable for the function. */
2554 declare_return_variable (id, return_slot,
2555 modify_dest, &use_retvar);
2557 /* This is it. Duplicate the callee body. Assume callee is
2558 pre-gimplified. Note that we must not alter the caller
2559 function in any way before this point, as this CALL_EXPR may be
2560 a self-referential call; if we're calling ourselves, we need to
2561 duplicate our body before altering anything. */
2562 copy_body (id, bb->count, bb->frequency, bb, return_block);
2564 /* Add local vars in this inlined callee to caller. */
2565 t_step = id->src_cfun->unexpanded_var_list;
2566 for (; t_step; t_step = TREE_CHAIN (t_step))
2568 var = TREE_VALUE (t_step);
2569 if (TREE_STATIC (var) && !TREE_ASM_WRITTEN (var))
2570 cfun->unexpanded_var_list = tree_cons (NULL_TREE, var,
2571 cfun->unexpanded_var_list);
2572 else
2573 cfun->unexpanded_var_list = tree_cons (NULL_TREE, remap_decl (var, id),
2574 cfun->unexpanded_var_list);
2577 /* Clean up. */
2578 splay_tree_delete (id->decl_map);
2579 id->decl_map = st;
2581 /* If the inlined function returns a result that we care about,
2582 clobber the CALL_EXPR with a reference to the return variable. */
2583 if (use_retvar && (TREE_CODE (bsi_stmt (stmt_bsi)) != CALL_EXPR))
2585 *tp = use_retvar;
2586 if (gimple_in_ssa_p (cfun))
2588 update_stmt (stmt);
2589 mark_symbols_for_renaming (stmt);
2591 maybe_clean_or_replace_eh_stmt (stmt, stmt);
2593 else
2594 /* We're modifying a TSI owned by gimple_expand_calls_inline();
2595 tsi_delink() will leave the iterator in a sane state. */
2597 /* Handle the case of inlining a function that is missing a return
2598 statement, so that its return value becomes undefined. */
2599 if (TREE_CODE (stmt) == GIMPLE_MODIFY_STMT
2600 && TREE_CODE (GIMPLE_STMT_OPERAND (stmt, 0)) == SSA_NAME)
2602 tree name = TREE_OPERAND (stmt, 0);
2603 tree var = SSA_NAME_VAR (TREE_OPERAND (stmt, 0));
2604 tree def = gimple_default_def (cfun, var);
2606 /* If the variable already has a default definition (i.e. it is used
2607 uninitialized elsewhere), make this name undefined via a move from it. */
2608 if (def)
2610 TREE_OPERAND (stmt, 1) = def;
2611 update_stmt (stmt);
2613 /* Otherwise make this variable undefined. */
2614 else
2616 bsi_remove (&stmt_bsi, true);
2617 set_default_def (var, name);
2618 SSA_NAME_DEF_STMT (name) = build_empty_stmt ();
2621 else
2622 bsi_remove (&stmt_bsi, true);
2625 if (purge_dead_abnormal_edges)
2626 tree_purge_dead_abnormal_call_edges (return_block);
2628 /* If the value of the new expression is ignored, that's OK. We
2629 don't warn about this for CALL_EXPRs, so we shouldn't warn about
2630 the equivalent inlined version either. */
2631 TREE_USED (*tp) = 1;
2633 /* Output the inlining info for this abstract function, since it has been
2634 inlined. If we don't do this now, we can lose the information about the
2635 variables in the function when the blocks get blown away as soon as we
2636 remove the cgraph node. */
2637 (*debug_hooks->outlining_inline_function) (cg_edge->callee->decl);
2639 /* Update callgraph if needed. */
2640 cgraph_remove_node (cg_edge->callee);
2642 id->block = NULL_TREE;
2643 successfully_inlined = TRUE;
2645 egress:
2646 input_location = saved_location;
2647 return successfully_inlined;
2650 /* Expand call statements reachable from STMT_P.
2651 We can only have CALL_EXPRs as the "toplevel" tree code or nested
2652 in a GIMPLE_MODIFY_STMT. See tree-gimple.c:get_call_expr_in(). Unfortunately,
2653 we cannot use that function here because we need a pointer
2654 to the CALL_EXPR, not the tree itself. */
2656 static bool
2657 gimple_expand_calls_inline (basic_block bb, copy_body_data *id)
2659 block_stmt_iterator bsi;
2661 /* Register specific tree functions. */
2662 tree_register_cfg_hooks ();
2663 for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
2665 tree *expr_p = bsi_stmt_ptr (bsi);
2666 tree stmt = *expr_p;
2668 if (TREE_CODE (*expr_p) == GIMPLE_MODIFY_STMT)
2669 expr_p = &GIMPLE_STMT_OPERAND (*expr_p, 1);
2670 if (TREE_CODE (*expr_p) == WITH_SIZE_EXPR)
2671 expr_p = &TREE_OPERAND (*expr_p, 0);
2672 if (TREE_CODE (*expr_p) == CALL_EXPR)
2673 if (expand_call_inline (bb, stmt, expr_p, id))
2674 return true;
2676 return false;
2679 /* Walk all basic blocks created after FIRST and try to fold every statement
2680 in the STATEMENTS pointer set. */
2681 static void
2682 fold_marked_statements (int first, struct pointer_set_t *statements)
2684 for (;first < n_basic_blocks;first++)
2685 if (BASIC_BLOCK (first))
2687 block_stmt_iterator bsi;
2688 for (bsi = bsi_start (BASIC_BLOCK (first));
2689 !bsi_end_p (bsi); bsi_next (&bsi))
2690 if (pointer_set_contains (statements, bsi_stmt (bsi)))
2692 tree old_stmt = bsi_stmt (bsi);
2693 if (fold_stmt (bsi_stmt_ptr (bsi)))
2695 update_stmt (bsi_stmt (bsi));
2696 if (maybe_clean_or_replace_eh_stmt (old_stmt, bsi_stmt (bsi)))
2697 tree_purge_dead_eh_edges (BASIC_BLOCK (first));
2703 /* Return true if BB has at least one abnormal outgoing edge. */
2705 static inline bool
2706 has_abnormal_outgoing_edge_p (basic_block bb)
2708 edge e;
2709 edge_iterator ei;
2711 FOR_EACH_EDGE (e, ei, bb->succs)
2712 if (e->flags & EDGE_ABNORMAL)
2713 return true;
2715 return false;
2718 /* When a block of the inlined function containing a call with side effects
2719 in its middle is inlined into a function with non-local labels, the call
2720 becomes a potential non-local goto, so we need to add the appropriate edges. */
2722 static void
2723 make_nonlocal_label_edges (void)
2725 block_stmt_iterator bsi;
2726 basic_block bb;
2728 FOR_EACH_BB (bb)
2730 for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
2732 tree stmt = bsi_stmt (bsi);
2733 if (tree_can_make_abnormal_goto (stmt))
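2734 /* Add abnormal-goto edges from the block containing the call; if the call is not the block's last statement, split the block after the call first. */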
2735 if (stmt == bsi_stmt (bsi_last (bb)))
2737 if (!has_abnormal_outgoing_edge_p (bb))
2738 make_abnormal_goto_edges (bb, true);
2740 else
2742 edge e = split_block (bb, stmt);
2743 bb = e->src;
2744 make_abnormal_goto_edges (bb, true);
2746 break;
2749 /* Update PHIs on nonlocal goto receivers we (possibly)
2750 just created new edges into. */
2751 if (TREE_CODE (stmt) == LABEL_EXPR
2752 && gimple_in_ssa_p (cfun))
2754 tree target = LABEL_EXPR_LABEL (stmt);
2755 if (DECL_NONLOCAL (target))
2757 tree phi;
2759 for (phi = phi_nodes (bb); phi; phi = PHI_CHAIN (phi))
2761 gcc_assert (SSA_NAME_OCCURS_IN_ABNORMAL_PHI
2762 (PHI_RESULT (phi)));
2763 mark_sym_for_renaming
2764 (SSA_NAME_VAR (PHI_RESULT (phi)));
2772 /* Expand calls to inline functions in the body of FN. */
2774 unsigned int
2775 optimize_inline_calls (tree fn)
2777 copy_body_data id;
2778 tree prev_fn;
2779 basic_block bb;
2780 int last = n_basic_blocks;
2781 /* There is no point in performing inlining if errors have already
2782 occurred -- and we might crash if we try to inline invalid
2783 code. */
2784 if (errorcount || sorrycount)
2785 return 0;
2787 /* Clear out ID. */
2788 memset (&id, 0, sizeof (id));
2790 id.src_node = id.dst_node = cgraph_node (fn);
2791 id.dst_fn = fn;
2792 /* Or any functions that aren't finished yet. */
2793 prev_fn = NULL_TREE;
2794 if (current_function_decl)
2796 id.dst_fn = current_function_decl;
2797 prev_fn = current_function_decl;
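2799 /* Set up the copy_body_data callbacks used while inlining into this function. */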
2800 id.copy_decl = copy_decl_maybe_to_var;
2801 id.transform_call_graph_edges = CB_CGE_DUPLICATE;
2802 id.transform_new_cfg = false;
2803 id.transform_return_to_modify = true;
2804 id.transform_lang_insert_block = false;
2805 id.statements_to_fold = pointer_set_create ();
2807 push_gimplify_context ();
2809 /* Reach the trees by walking over the CFG, and note the
2810 enclosing basic-blocks in the call edges. */
2811 /* We walk the blocks going forward, because inlined function bodies
2812 will split id->current_basic_block, and the new blocks will
2813 follow it; we'll trudge through them, processing their CALL_EXPRs
2814 along the way. */
2815 FOR_EACH_BB (bb)
2816 gimple_expand_calls_inline (bb, &id);
2818 pop_gimplify_context (NULL);
2819 /* Renumber the (code) basic_blocks consecutively. */
2820 compact_blocks ();
2821 /* Renumber the lexical scoping (non-code) blocks consecutively. */
2822 number_blocks (fn);
2824 #ifdef ENABLE_CHECKING
2826 struct cgraph_edge *e;
2828 verify_cgraph_node (id.dst_node);
2830 /* Double check that we inlined everything we are supposed to inline. */
2831 for (e = id.dst_node->callees; e; e = e->next_callee)
2832 gcc_assert (e->inline_failed);
2834 #endif
2836 /* We are not going to maintain the cgraph edges up to date.
2837 Kill it so it won't confuse us. */
2838 cgraph_node_remove_callees (id.dst_node);
2840 fold_marked_statements (last, id.statements_to_fold);
2841 pointer_set_destroy (id.statements_to_fold);
2842 fold_cond_expr_cond ();
2843 if (current_function_has_nonlocal_label)
2844 make_nonlocal_label_edges ();
2845 /* We make no attempts to keep dominance info up-to-date. */
2846 free_dominance_info (CDI_DOMINATORS);
2847 free_dominance_info (CDI_POST_DOMINATORS);
2848 /* It would be nice to check SSA/CFG/statement consistency here, but it is
2849 not possible yet - the IPA passes might make various functions non-throwing,
2850 and they do not proactively update local EH info. This is done later in
2851 the fixup_cfg pass, which also executes the verification. */
2852 return (TODO_update_ssa | TODO_cleanup_cfg
2853 | (gimple_in_ssa_p (cfun) ? TODO_remove_unused_locals : 0)
2854 | (profile_status != PROFILE_ABSENT ? TODO_rebuild_frequencies : 0));
2857 /* FN is a function that has a complete body, and CLONE is a function whose
2858 body is to be set to a copy of FN, mapping argument declarations according
2859 to the ARG_MAP splay_tree. */
2861 void
2862 clone_body (tree clone, tree fn, void *arg_map)
2864 copy_body_data id;
2866 /* Clone the body, as if we were making an inline call. But, remap the
2867 parameters in the callee to the parameters of caller. */
2868 memset (&id, 0, sizeof (id));
2869 id.src_fn = fn;
2870 id.dst_fn = clone;
2871 id.src_cfun = DECL_STRUCT_FUNCTION (fn);
2872 id.decl_map = (splay_tree)arg_map;
2874 id.copy_decl = copy_decl_no_change;
2875 id.transform_call_graph_edges = CB_CGE_DUPLICATE;
2876 id.transform_new_cfg = true;
2877 id.transform_return_to_modify = false;
2878 id.transform_lang_insert_block = true;
2880 /* We're not inside any EH region. */
2881 id.eh_region = -1;
2883 /* Actually copy the body. */
2884 append_to_statement_list_force (copy_generic_body (&id), &DECL_SAVED_TREE (clone));
2887 /* Passed to walk_tree. Copies the node pointed to, if appropriate. */
2889 tree
2890 copy_tree_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
2892 enum tree_code code = TREE_CODE (*tp);
2893 enum tree_code_class cl = TREE_CODE_CLASS (code);
2895 /* We make copies of most nodes. */
2896 if (IS_EXPR_CODE_CLASS (cl)
2897 || IS_GIMPLE_STMT_CODE_CLASS (cl)
2898 || code == TREE_LIST
2899 || code == TREE_VEC
2900 || code == TYPE_DECL
2901 || code == OMP_CLAUSE)
2903 /* Because the chain gets clobbered when we make a copy, we save it
2904 here. */
2905 tree chain = NULL_TREE, new;
2907 if (!GIMPLE_TUPLE_P (*tp))
2908 chain = TREE_CHAIN (*tp);
2910 /* Copy the node. */
2911 new = copy_node (*tp);
2913 /* Propagate mudflap marked-ness. */
2914 if (flag_mudflap && mf_marked_p (*tp))
2915 mf_mark (new);
2917 *tp = new;
2919 /* Now, restore the chain, if appropriate. That will cause
2920 walk_tree to walk into the chain as well. */
2921 if (code == PARM_DECL
2922 || code == TREE_LIST
2923 || code == OMP_CLAUSE)
2924 TREE_CHAIN (*tp) = chain;
2926 /* For now, we don't update BLOCKs when we make copies. So, we
2927 have to nullify all BIND_EXPRs. */
2928 if (TREE_CODE (*tp) == BIND_EXPR)
2929 BIND_EXPR_BLOCK (*tp) = NULL_TREE;
2931 else if (code == CONSTRUCTOR)
2933 /* CONSTRUCTOR nodes need special handling because
2934 we need to duplicate the vector of elements. */
2935 tree new;
2937 new = copy_node (*tp);
2939 /* Propagate mudflap marked-ness. */
2940 if (flag_mudflap && mf_marked_p (*tp))
2941 mf_mark (new);
2943 CONSTRUCTOR_ELTS (new) = VEC_copy (constructor_elt, gc,
2944 CONSTRUCTOR_ELTS (*tp));
2945 *tp = new;
2947 else if (TREE_CODE_CLASS (code) == tcc_type)
2948 *walk_subtrees = 0;
2949 else if (TREE_CODE_CLASS (code) == tcc_declaration)
2950 *walk_subtrees = 0;
2951 else if (TREE_CODE_CLASS (code) == tcc_constant)
2952 *walk_subtrees = 0;
2953 else
2954 gcc_assert (code != STATEMENT_LIST);
2955 return NULL_TREE;
2958 /* The SAVE_EXPR pointed to by TP is being copied. If ST contains
2959 information indicating to what new SAVE_EXPR this one should be mapped,
2960 use that one. Otherwise, create a new node and enter it in ST. */
2963 static void
2964 remap_save_expr (tree *tp, void *st_, int *walk_subtrees)
2966 splay_tree st = (splay_tree) st_;
2967 splay_tree_node n;
2968 tree t;
2970 /* See if we already encountered this SAVE_EXPR. */
2971 n = splay_tree_lookup (st, (splay_tree_key) *tp);
2973 /* If we didn't already remap this SAVE_EXPR, do so now. */
2974 if (!n)
2976 t = copy_node (*tp);
2978 /* Remember this SAVE_EXPR. */
2979 splay_tree_insert (st, (splay_tree_key) *tp, (splay_tree_value) t);
2980 /* Make sure we don't remap an already-remapped SAVE_EXPR. */
2981 splay_tree_insert (st, (splay_tree_key) t, (splay_tree_value) t);
2983 else
2985 /* We've already walked into this SAVE_EXPR; don't do it again. */
2986 *walk_subtrees = 0;
2987 t = (tree) n->value;
2990 /* Replace this SAVE_EXPR with the copy. */
2991 *tp = t;
2994 /* Called via walk_tree. If *TP points to a LABEL_EXPR for a local label,
2995 copies the label's declaration and enters it in the decl map in DATA
2996 (which is really a `copy_body_data *'). */
2998 static tree
2999 mark_local_for_remap_r (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED,
3000 void *data)
3002 copy_body_data *id = (copy_body_data *) data;
3004 /* Don't walk into types. */
3005 if (TYPE_P (*tp))
3006 *walk_subtrees = 0;
3008 else if (TREE_CODE (*tp) == LABEL_EXPR)
3010 tree decl = TREE_OPERAND (*tp, 0);
3012 /* Copy the decl and remember the copy. */
3013 insert_decl_map (id, decl, id->copy_decl (decl, id));
3016 return NULL_TREE;
3019 /* Perform any modifications to EXPR required when it is unsaved. Does
3020 not recurse into EXPR's subtrees. */
3022 static void
3023 unsave_expr_1 (tree expr)
3025 switch (TREE_CODE (expr))
3027 case TARGET_EXPR:
3028 /* Don't mess with a TARGET_EXPR that hasn't been expanded.
3029 It's OK for this to happen if it was part of a subtree that
3030 isn't immediately expanded, such as operand 2 of another
3031 TARGET_EXPR. */
3032 if (TREE_OPERAND (expr, 1))
3033 break;
3035 TREE_OPERAND (expr, 1) = TREE_OPERAND (expr, 3);
3036 TREE_OPERAND (expr, 3) = NULL_TREE;
3037 break;
3039 default:
3040 break;
3044 /* Called via walk_tree when an expression is unsaved. Using the
3045 decl map of the `copy_body_data' pointed to by DATA, remaps all
3046 local declarations to their appropriate replacements. */
3048 static tree
3049 unsave_r (tree *tp, int *walk_subtrees, void *data)
3051 copy_body_data *id = (copy_body_data *) data;
3052 splay_tree st = id->decl_map;
3053 splay_tree_node n;
3055 /* Only a local declaration (variable or label). */
3056 if ((TREE_CODE (*tp) == VAR_DECL && !TREE_STATIC (*tp))
3057 || TREE_CODE (*tp) == LABEL_DECL)
3059 /* Lookup the declaration. */
3060 n = splay_tree_lookup (st, (splay_tree_key) *tp);
3062 /* If it's there, remap it. */
3063 if (n)
3064 *tp = (tree) n->value;
3067 else if (TREE_CODE (*tp) == STATEMENT_LIST)
3068 copy_statement_list (tp);
3069 else if (TREE_CODE (*tp) == BIND_EXPR)
3070 copy_bind_expr (tp, walk_subtrees, id);
3071 else if (TREE_CODE (*tp) == SAVE_EXPR)
3072 remap_save_expr (tp, st, walk_subtrees);
3073 else
3075 copy_tree_r (tp, walk_subtrees, NULL);
3077 /* Do whatever unsaving is required. */
3078 unsave_expr_1 (*tp);
3081 /* Keep iterating. */
3082 return NULL_TREE;
3085 /* Copies everything in EXPR and replaces variables, labels
3086 and SAVE_EXPRs local to EXPR. */
3088 tree
3089 unsave_expr_now (tree expr)
3091 copy_body_data id;
3093 /* There's nothing to do for NULL_TREE. */
3094 if (expr == 0)
3095 return expr;
3097 /* Set up ID. */
3098 memset (&id, 0, sizeof (id));
3099 id.src_fn = current_function_decl;
3100 id.dst_fn = current_function_decl;
3101 id.decl_map = splay_tree_new (splay_tree_compare_pointers, NULL, NULL);
3103 id.copy_decl = copy_decl_no_change;
3104 id.transform_call_graph_edges = CB_CGE_DUPLICATE;
3105 id.transform_new_cfg = false;
3106 id.transform_return_to_modify = false;
3107 id.transform_lang_insert_block = false;
3109 /* Walk the tree once to find local labels. */
3110 walk_tree_without_duplicates (&expr, mark_local_for_remap_r, &id);
3112 /* Walk the tree again, copying, remapping, and unsaving. */
3113 walk_tree (&expr, unsave_r, &id, NULL);
3115 /* Clean up. */
3116 splay_tree_delete (id.decl_map);
3118 return expr;
3121 /* Allow someone to determine if SEARCH is a child of TOP from gdb. */
3123 static tree
3124 debug_find_tree_1 (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED, void *data)
3126 if (*tp == data)
3127 return (tree) data;
3128 else
3129 return NULL;
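3131 /* Return true if the tree SEARCH appears as a subtree of TOP. */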
3132 bool
3133 debug_find_tree (tree top, tree search)
3135 return walk_tree_without_duplicates (&top, debug_find_tree_1, search) != 0;
3139 /* Declare the variables created by the inliner. Add all the variables in
3140 VARS to BLOCK and to the function's unexpanded_var_list. */
3142 static void
3143 declare_inline_vars (tree block, tree vars)
3145 tree t;
3146 for (t = vars; t; t = TREE_CHAIN (t))
3148 DECL_SEEN_IN_BIND_EXPR_P (t) = 1;
3149 gcc_assert (!TREE_STATIC (t) && !TREE_ASM_WRITTEN (t));
3150 cfun->unexpanded_var_list =
3151 tree_cons (NULL_TREE, t,
3152 cfun->unexpanded_var_list);
3155 if (block)
3156 BLOCK_VARS (block) = chainon (BLOCK_VARS (block), vars);
3160 /* Finish up the copying of DECL into COPY. The DECL originally was in
3161 ID->src_fn, but the copy will live in ID->dst_fn. Set up the attributes
3162 and context that every duplicated declaration needs. */
3164 static tree
3165 copy_decl_for_dup_finish (copy_body_data *id, tree decl, tree copy)
3167 /* Don't generate debug information for the copy if we wouldn't have
3168 generated it for the original either. */
3169 DECL_ARTIFICIAL (copy) = DECL_ARTIFICIAL (decl);
3170 DECL_IGNORED_P (copy) = DECL_IGNORED_P (decl);
3172 /* Set the DECL_ABSTRACT_ORIGIN so the debugging routines know what
3173 declaration inspired this copy. */
3174 DECL_ABSTRACT_ORIGIN (copy) = DECL_ORIGIN (decl);
3176 /* The new variable/label has no RTL, yet. */
3177 if (CODE_CONTAINS_STRUCT (TREE_CODE (copy), TS_DECL_WRTL)
3178 && !TREE_STATIC (copy) && !DECL_EXTERNAL (copy))
3179 SET_DECL_RTL (copy, NULL_RTX);
3181 /* These args would always appear unused, if not for this. */
3182 TREE_USED (copy) = 1;
3184 /* Set the context for the new declaration. */
3185 if (!DECL_CONTEXT (decl))
3186 /* Globals stay global. */
3188 else if (DECL_CONTEXT (decl) != id->src_fn)
3189 /* Things that weren't in the scope of the function we're inlining
3190 from aren't in the scope we're inlining to, either. */
3192 else if (TREE_STATIC (decl))
3193 /* Function-scoped static variables should stay in the original
3194 function. */
3196 else
3197 /* Ordinary automatic local variables are now in the scope of the
3198 new function. */
3199 DECL_CONTEXT (copy) = id->dst_fn;
3201 return copy;
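3202 /* Create a VAR_DECL copy of DECL (a PARM_DECL or RESULT_DECL) for use
3203 in the function we are copying into. */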
3204 static tree
3205 copy_decl_to_var (tree decl, copy_body_data *id)
3207 tree copy, type;
3209 gcc_assert (TREE_CODE (decl) == PARM_DECL
3210 || TREE_CODE (decl) == RESULT_DECL);
3212 type = TREE_TYPE (decl);
3214 copy = build_decl (VAR_DECL, DECL_NAME (decl), type);
3215 TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (decl);
3216 TREE_READONLY (copy) = TREE_READONLY (decl);
3217 TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (decl);
3218 DECL_GIMPLE_REG_P (copy) = DECL_GIMPLE_REG_P (decl);
3220 return copy_decl_for_dup_finish (id, decl, copy);
3223 /* Like copy_decl_to_var, but create a return slot object instead of a
3224 pointer variable for return by invisible reference. */
3226 static tree
3227 copy_result_decl_to_var (tree decl, copy_body_data *id)
3229 tree copy, type;
3231 gcc_assert (TREE_CODE (decl) == PARM_DECL
3232 || TREE_CODE (decl) == RESULT_DECL);
3234 type = TREE_TYPE (decl);
3235 if (DECL_BY_REFERENCE (decl))
3236 type = TREE_TYPE (type);
3238 copy = build_decl (VAR_DECL, DECL_NAME (decl), type);
3239 TREE_READONLY (copy) = TREE_READONLY (decl);
3240 TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (decl);
3241 if (!DECL_BY_REFERENCE (decl))
3243 TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (decl);
3244 DECL_GIMPLE_REG_P (copy) = DECL_GIMPLE_REG_P (decl);
3247 return copy_decl_for_dup_finish (id, decl, copy);
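3249 /* Create a copy of DECL for the destination function without changing
3250 its kind; used for labels and other local declarations. */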
3251 static tree
3252 copy_decl_no_change (tree decl, copy_body_data *id)
3254 tree copy;
3256 copy = copy_node (decl);
3258 /* The COPY is not abstract; it will be generated in DST_FN. */
3259 DECL_ABSTRACT (copy) = 0;
3260 lang_hooks.dup_lang_specific_decl (copy);
3262 /* TREE_ADDRESSABLE isn't used to indicate that a label's address has
3263 been taken; it's for internal bookkeeping in expand_goto_internal. */
3264 if (TREE_CODE (copy) == LABEL_DECL)
3266 TREE_ADDRESSABLE (copy) = 0;
3267 LABEL_DECL_UID (copy) = -1;
3270 return copy_decl_for_dup_finish (id, decl, copy);
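3271 /* Copy DECL, turning PARM_DECLs and RESULT_DECLs into VAR_DECLs and
3272 leaving all other declarations unchanged. */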
3273 static tree
3274 copy_decl_maybe_to_var (tree decl, copy_body_data *id)
3276 if (TREE_CODE (decl) == PARM_DECL || TREE_CODE (decl) == RESULT_DECL)
3277 return copy_decl_to_var (decl, id);
3278 else
3279 return copy_decl_no_change (decl, id);
3282 /* Return a copy of the function's argument tree. */
3283 static tree
3284 copy_arguments_for_versioning (tree orig_parm, copy_body_data * id)
3286 tree *arg_copy, *parg;
3288 arg_copy = &orig_parm;
3289 for (parg = arg_copy; *parg; parg = &TREE_CHAIN (*parg))
3291 tree new = remap_decl (*parg, id);
3292 lang_hooks.dup_lang_specific_decl (new);
3293 TREE_CHAIN (new) = TREE_CHAIN (*parg);
3294 *parg = new;
3296 return orig_parm;
3299 /* Return a copy of the function's static chain. */
3300 static tree
3301 copy_static_chain (tree static_chain, copy_body_data * id)
3303 tree *chain_copy, *pvar;
3305 chain_copy = &static_chain;
3306 for (pvar = chain_copy; *pvar; pvar = &TREE_CHAIN (*pvar))
3308 tree new = remap_decl (*pvar, id);
3309 lang_hooks.dup_lang_specific_decl (new);
3310 TREE_CHAIN (new) = TREE_CHAIN (*pvar);
3311 *pvar = new;
3313 return static_chain;
3316 /* Return true if the function is allowed to be versioned.
3317 This is a guard for the versioning functionality. */
3318 bool
3319 tree_versionable_function_p (tree fndecl)
3321 if (fndecl == NULL_TREE)
3322 return false;
3323 /* ??? There are cases where a function is
3324 uninlinable but can be versioned. */
3325 if (!tree_inlinable_function_p (fndecl))
3326 return false;
3328 return true;
3331 /* Create a copy of a function's tree.
3332 OLD_DECL and NEW_DECL are FUNCTION_DECL tree nodes
3333 of the original function and the new copied function
3334 respectively. In case we want to replace a DECL
3335 tree with another tree while duplicating the function's
3336 body, TREE_MAP represents the mapping between these
3337 trees. If UPDATE_CLONES is set, the call_stmt fields
3338 of edges of clones of the function will be updated. */
3339 void
3340 tree_function_versioning (tree old_decl, tree new_decl, varray_type tree_map,
3341 bool update_clones)
3343 struct cgraph_node *old_version_node;
3344 struct cgraph_node *new_version_node;
3345 copy_body_data id;
3346 tree p;
3347 unsigned i;
3348 struct ipa_replace_map *replace_info;
3349 basic_block old_entry_block;
3350 tree t_step;
3351 tree old_current_function_decl = current_function_decl;
3353 gcc_assert (TREE_CODE (old_decl) == FUNCTION_DECL
3354 && TREE_CODE (new_decl) == FUNCTION_DECL);
3355 DECL_POSSIBLY_INLINED (old_decl) = 1;
3357 old_version_node = cgraph_node (old_decl);
3358 new_version_node = cgraph_node (new_decl);
3360 DECL_ARTIFICIAL (new_decl) = 1;
3361 DECL_ABSTRACT_ORIGIN (new_decl) = DECL_ORIGIN (old_decl);
3363 /* Prepare the data structures for the tree copy. */
3364 memset (&id, 0, sizeof (id));
3366 /* Generate a new name for the new version. */
3367 if (!update_clones)
3369 DECL_NAME (new_decl) = create_tmp_var_name (NULL);
3370 SET_DECL_ASSEMBLER_NAME (new_decl, DECL_NAME (new_decl));
3371 SET_DECL_RTL (new_decl, NULL_RTX);
3372 id.statements_to_fold = pointer_set_create ();
3375 id.decl_map = splay_tree_new (splay_tree_compare_pointers, NULL, NULL);
3376 id.src_fn = old_decl;
3377 id.dst_fn = new_decl;
3378 id.src_node = old_version_node;
3379 id.dst_node = new_version_node;
3380 id.src_cfun = DECL_STRUCT_FUNCTION (old_decl);
3382 id.copy_decl = copy_decl_no_change;
3383 id.transform_call_graph_edges
3384 = update_clones ? CB_CGE_MOVE_CLONES : CB_CGE_MOVE;
3385 id.transform_new_cfg = true;
3386 id.transform_return_to_modify = false;
3387 id.transform_lang_insert_block = false;
3389 current_function_decl = new_decl;
3390 old_entry_block = ENTRY_BLOCK_PTR_FOR_FUNCTION
3391 (DECL_STRUCT_FUNCTION (old_decl));
3392 initialize_cfun (new_decl, old_decl,
3393 old_entry_block->count,
3394 old_entry_block->frequency);
3395 push_cfun (DECL_STRUCT_FUNCTION (new_decl));
3397 /* Copy the function's static chain. */
3398 p = DECL_STRUCT_FUNCTION (old_decl)->static_chain_decl;
3399 if (p)
3400 DECL_STRUCT_FUNCTION (new_decl)->static_chain_decl =
3401 copy_static_chain (DECL_STRUCT_FUNCTION (old_decl)->static_chain_decl,
3402 &id);
3403 /* Copy the function's arguments. */
3404 if (DECL_ARGUMENTS (old_decl) != NULL_TREE)
3405 DECL_ARGUMENTS (new_decl) =
3406 copy_arguments_for_versioning (DECL_ARGUMENTS (old_decl), &id);
3408 /* If there's a tree_map, prepare for substitution. */
3409 if (tree_map)
3410 for (i = 0; i < VARRAY_ACTIVE_SIZE (tree_map); i++)
3412 replace_info = VARRAY_GENERIC_PTR (tree_map, i);
3413 if (replace_info->replace_p)
3414 insert_decl_map (&id, replace_info->old_tree,
3415 replace_info->new_tree);
3418 DECL_INITIAL (new_decl) = remap_blocks (DECL_INITIAL (id.src_fn), &id);
3420 /* Renumber the lexical scoping (non-code) blocks consecutively. */
3421 number_blocks (id.dst_fn);
3423 if (DECL_STRUCT_FUNCTION (old_decl)->unexpanded_var_list != NULL_TREE)
3424 /* Add local vars. */
3425 for (t_step = DECL_STRUCT_FUNCTION (old_decl)->unexpanded_var_list;
3426 t_step; t_step = TREE_CHAIN (t_step))
3428 tree var = TREE_VALUE (t_step);
3429 if (TREE_STATIC (var) && !TREE_ASM_WRITTEN (var))
3430 cfun->unexpanded_var_list = tree_cons (NULL_TREE, var,
3431 cfun->unexpanded_var_list);
3432 else
3433 cfun->unexpanded_var_list =
3434 tree_cons (NULL_TREE, remap_decl (var, &id),
3435 cfun->unexpanded_var_list);
3439 /* Copy the function's body. */
3439 copy_body (&id, old_entry_block->count, old_entry_block->frequency, ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR);
3441 if (DECL_RESULT (old_decl) != NULL_TREE)
3443 tree *res_decl = &DECL_RESULT (old_decl);
3444 DECL_RESULT (new_decl) = remap_decl (*res_decl, &id);
3445 lang_hooks.dup_lang_specific_decl (DECL_RESULT (new_decl));
3448 /* Renumber the lexical scoping (non-code) blocks consecutively. */
3449 number_blocks (new_decl);
3451 /* Clean up. */
3452 splay_tree_delete (id.decl_map);
3453 if (!update_clones)
3455 fold_marked_statements (0, id.statements_to_fold);
3456 pointer_set_destroy (id.statements_to_fold);
3457 fold_cond_expr_cond ();
3459 if (gimple_in_ssa_p (cfun))
3461 free_dominance_info (CDI_DOMINATORS);
3462 free_dominance_info (CDI_POST_DOMINATORS);
3463 if (!update_clones)
3464 delete_unreachable_blocks ();
3465 update_ssa (TODO_update_ssa);
3466 if (!update_clones)
3468 fold_cond_expr_cond ();
3469 if (need_ssa_update_p ())
3470 update_ssa (TODO_update_ssa);
3473 free_dominance_info (CDI_DOMINATORS);
3474 free_dominance_info (CDI_POST_DOMINATORS);
3475 pop_cfun ();
3476 current_function_decl = old_current_function_decl;
3477 gcc_assert (!current_function_decl
3478 || DECL_STRUCT_FUNCTION (current_function_decl) == cfun);
3479 return;
3482 /* Duplicate a type, fields and all. */
3484 tree
3485 build_duplicate_type (tree type)
3487 struct copy_body_data id;
3489 memset (&id, 0, sizeof (id));
3490 id.src_fn = current_function_decl;
3491 id.dst_fn = current_function_decl;
3492 id.src_cfun = cfun;
3493 id.decl_map = splay_tree_new (splay_tree_compare_pointers, NULL, NULL);
3495 type = remap_type_1 (type, &id);
3497 splay_tree_delete (id.decl_map);
3499 return type;