1 /* Tree inlining.
2 Copyright 2001, 2002, 2003, 2004, 2005, 2006, 2007
3 Free Software Foundation, Inc.
4 Contributed by Alexandre Oliva <aoliva@redhat.com>
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify
9 it under the terms of the GNU General Public License as published by
10 the Free Software Foundation; either version 2, or (at your option)
11 any later version.
13 GCC is distributed in the hope that it will be useful,
14 but WITHOUT ANY WARRANTY; without even the implied warranty of
15 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16 GNU General Public License for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING. If not, write to
20 the Free Software Foundation, 51 Franklin Street, Fifth Floor,
21 Boston, MA 02110-1301, USA. */
23 #include "config.h"
24 #include "system.h"
25 #include "coretypes.h"
26 #include "tm.h"
27 #include "toplev.h"
28 #include "tree.h"
29 #include "tree-inline.h"
30 #include "rtl.h"
31 #include "expr.h"
32 #include "flags.h"
33 #include "params.h"
34 #include "input.h"
35 #include "insn-config.h"
36 #include "varray.h"
37 #include "hashtab.h"
38 #include "langhooks.h"
39 #include "basic-block.h"
40 #include "tree-iterator.h"
41 #include "cgraph.h"
42 #include "intl.h"
43 #include "tree-mudflap.h"
44 #include "tree-flow.h"
45 #include "function.h"
46 #include "ggc.h"
47 #include "tree-flow.h"
48 #include "diagnostic.h"
49 #include "except.h"
50 #include "debug.h"
51 #include "pointer-set.h"
52 #include "ipa-prop.h"
53 #include "value-prof.h"
54 #include "tree-pass.h"
56 /* I'm not really happy about this, but we need to handle gimple and
57 non-gimple trees. */
58 #include "tree-gimple.h"
60 /* Inlining, Cloning, Versioning, Parallelization
62 Inlining: a function body is duplicated, but the PARM_DECLs are
63 remapped into VAR_DECLs, and non-void RETURN_EXPRs become
64 GIMPLE_MODIFY_STMTs that store to a dedicated returned-value variable.
65 The duplicated eh_region info of the copy will later be appended
66 to the info for the caller; the eh_region info in copied throwing
67 statements and RESX_EXPRs is adjusted accordingly.
69 Cloning: (only in C++) We have one body for a con/de/structor, and
70 multiple function decls, each with a unique parameter list.
71 Duplicate the body, using the given splay tree; some parameters
72 will become constants (like 0 or 1).
74 Versioning: a function body is duplicated and the result is a new
75 function, rather than being inserted into the blocks of an existing
76 function as with inlining. Some parameters will become constants.
78 Parallelization: a region of a function is duplicated resulting in
79 a new function. Variables may be replaced with complex expressions
80 to enable shared variable semantics.
82 All of these will simultaneously look up any callgraph edges. If
83 we're going to inline the duplicated function body, and the given
84 function has some cloned callgraph nodes (one for each place this
85 function will be inlined) those callgraph edges will be duplicated.
86 If we're cloning the body, those callgraph edges will be
87 updated to point into the new body. (Note that the original
88 callgraph node and edge list will not be altered.)
90 See the CALL_EXPR handling case in copy_body_r (). */
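/* As a concrete illustration of the inlining transformation described
   above (the function, call and temporaries below are invented purely
   for exposition):

     static int inc (int x) { return x + 1; }
     ...
     y = inc (a);

   After inlining, the PARM_DECL x is remapped to a new local VAR_DECL,
   the RETURN_EXPR becomes a GIMPLE_MODIFY_STMT storing into the
   returned-value variable, and the branch back to the caller is
   represented by a CFG edge rather than a statement:

     x.1 = a;
     retval.2 = x.1 + 1;
     y = retval.2;  */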
92 /* 0 if we should not perform inlining.
93 1 if we should expand functions calls inline at the tree level.
94 2 if we should consider *all* functions to be inline
95 candidates. */
97 int flag_inline_trees = 0;
99 /* To Do:
101 o In order to make inlining-on-trees work, we pessimized
102 function-local static constants. In particular, they are now
103 always output, even when not addressed. Fix this by treating
104 function-local static constants just like global static
105 constants; the back-end already knows not to output them if they
106 are not needed.
108 o Provide heuristics to clamp inlining of recursive template
109 calls? */
112 /* Weights that estimate_num_insns uses for heuristics in inlining. */
114 eni_weights eni_inlining_weights;
116 /* Weights that estimate_num_insns uses to estimate the size of the
117 produced code. */
119 eni_weights eni_size_weights;
121 /* Weights that estimate_num_insns uses to estimate the time necessary
122 to execute the produced code. */
124 eni_weights eni_time_weights;
126 /* Prototypes. */
128 static tree declare_return_variable (copy_body_data *, tree, tree, tree *);
129 static tree copy_generic_body (copy_body_data *);
130 static bool inlinable_function_p (tree);
131 static void remap_block (tree *, copy_body_data *);
132 static tree remap_decls (tree, copy_body_data *);
133 static void copy_bind_expr (tree *, int *, copy_body_data *);
134 static tree mark_local_for_remap_r (tree *, int *, void *);
135 static void unsave_expr_1 (tree);
136 static tree unsave_r (tree *, int *, void *);
137 static void declare_inline_vars (tree, tree);
138 static void remap_save_expr (tree *, void *, int *);
139 static void add_lexical_block (tree current_block, tree new_block);
140 static tree copy_decl_to_var (tree, copy_body_data *);
141 static tree copy_result_decl_to_var (tree, copy_body_data *);
142 static tree copy_decl_no_change (tree, copy_body_data *);
143 static tree copy_decl_maybe_to_var (tree, copy_body_data *);
145 /* Insert a tree->tree mapping for ID. Although the name suggests
146 that the trees should be variables, it is used for more than that. */
148 void
149 insert_decl_map (copy_body_data *id, tree key, tree value)
151 splay_tree_insert (id->decl_map, (splay_tree_key) key,
152 (splay_tree_value) value);
154 /* Always insert an identity map as well. If we see this same new
155 node again, we won't want to duplicate it a second time. */
156 if (key != value)
157 splay_tree_insert (id->decl_map, (splay_tree_key) value,
158 (splay_tree_value) value);
161 /* Construct new SSA name for old NAME. ID is the inline context. */
163 static tree
164 remap_ssa_name (tree name, copy_body_data *id)
166 tree new;
167 splay_tree_node n;
169 gcc_assert (TREE_CODE (name) == SSA_NAME);
171 n = splay_tree_lookup (id->decl_map, (splay_tree_key) name);
172 if (n)
173 return (tree) n->value;
175 /* Do not set DEF_STMT yet as statement is not copied yet. We do that
176 in copy_bb. */
177 new = remap_decl (SSA_NAME_VAR (name), id);
178 /* We might've substituted a constant or another SSA_NAME for
179 the variable.
181 Replace the SSA name representing the RESULT_DECL by the variable during
182 inlining: this saves us from the need to introduce a PHI node when the
183 return value is only partly initialized. */
184 if ((TREE_CODE (new) == VAR_DECL || TREE_CODE (new) == PARM_DECL)
185 && (TREE_CODE (SSA_NAME_VAR (name)) != RESULT_DECL
186 || !id->transform_return_to_modify))
188 new = make_ssa_name (new, NULL);
189 insert_decl_map (id, name, new);
190 if (IS_EMPTY_STMT (SSA_NAME_DEF_STMT (name)))
192 SSA_NAME_DEF_STMT (new) = build_empty_stmt ();
193 if (gimple_default_def (id->src_cfun, SSA_NAME_VAR (name)) == name)
194 set_default_def (SSA_NAME_VAR (new), new);
196 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (new)
197 = SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name);
198 TREE_TYPE (new) = TREE_TYPE (SSA_NAME_VAR (new));
200 else
201 insert_decl_map (id, name, new);
202 return new;
205 /* Remap DECL during the copying of the BLOCK tree for the function. */
207 tree
208 remap_decl (tree decl, copy_body_data *id)
210 splay_tree_node n;
211 tree fn;
213 /* We only remap local variables in the current function. */
214 fn = id->src_fn;
216 /* See if we have remapped this declaration. */
218 n = splay_tree_lookup (id->decl_map, (splay_tree_key) decl);
220 /* If we didn't already have an equivalent for this declaration,
221 create one now. */
222 if (!n)
224 /* Make a copy of the variable or label. */
225 tree t = id->copy_decl (decl, id);
227 /* Remember it, so that if we encounter this local entity again
228 we can reuse this copy. Do this early because remap_type may
229 need this decl for TYPE_STUB_DECL. */
230 insert_decl_map (id, decl, t);
232 if (!DECL_P (t))
233 return t;
235 /* Remap types, if necessary. */
236 TREE_TYPE (t) = remap_type (TREE_TYPE (t), id);
237 if (TREE_CODE (t) == TYPE_DECL)
238 DECL_ORIGINAL_TYPE (t) = remap_type (DECL_ORIGINAL_TYPE (t), id);
240 /* Remap sizes as necessary. */
241 walk_tree (&DECL_SIZE (t), copy_body_r, id, NULL);
242 walk_tree (&DECL_SIZE_UNIT (t), copy_body_r, id, NULL);
244 /* If fields, do likewise for offset and qualifier. */
245 if (TREE_CODE (t) == FIELD_DECL)
247 walk_tree (&DECL_FIELD_OFFSET (t), copy_body_r, id, NULL);
248 if (TREE_CODE (DECL_CONTEXT (t)) == QUAL_UNION_TYPE)
249 walk_tree (&DECL_QUALIFIER (t), copy_body_r, id, NULL);
252 if (cfun && gimple_in_ssa_p (cfun)
253 && (TREE_CODE (t) == VAR_DECL
254 || TREE_CODE (t) == RESULT_DECL || TREE_CODE (t) == PARM_DECL))
256 tree def = gimple_default_def (id->src_cfun, decl);
257 get_var_ann (t);
258 if (TREE_CODE (decl) != PARM_DECL && def)
260 tree map = remap_ssa_name (def, id);
261 /* Watch out for RESULT_DECLs whose SSA names map directly
262 to them. */
263 if (TREE_CODE (map) == SSA_NAME)
264 set_default_def (t, map);
266 add_referenced_var (t);
268 return t;
271 return unshare_expr ((tree) n->value);
274 static tree
275 remap_type_1 (tree type, copy_body_data *id)
277 splay_tree_node node;
278 tree new, t;
280 if (type == NULL)
281 return type;
283 /* See if we have remapped this type. */
284 node = splay_tree_lookup (id->decl_map, (splay_tree_key) type);
285 if (node)
286 return (tree) node->value;
288 /* The type only needs remapping if it's variably modified. */
289 if (! variably_modified_type_p (type, id->src_fn))
291 insert_decl_map (id, type, type);
292 return type;
295 /* We do need a copy. Build and register it now. If this is a pointer or
296 reference type, remap the designated type and make a new pointer or
297 reference type. */
298 if (TREE_CODE (type) == POINTER_TYPE)
300 new = build_pointer_type_for_mode (remap_type (TREE_TYPE (type), id),
301 TYPE_MODE (type),
302 TYPE_REF_CAN_ALIAS_ALL (type));
303 insert_decl_map (id, type, new);
304 return new;
306 else if (TREE_CODE (type) == REFERENCE_TYPE)
308 new = build_reference_type_for_mode (remap_type (TREE_TYPE (type), id),
309 TYPE_MODE (type),
310 TYPE_REF_CAN_ALIAS_ALL (type));
311 insert_decl_map (id, type, new);
312 return new;
314 else
315 new = copy_node (type);
317 insert_decl_map (id, type, new);
319 /* This is a new type, not a copy of an old type. Need to reassociate
320 variants. We can handle everything except the main variant lazily. */
321 t = TYPE_MAIN_VARIANT (type);
322 if (type != t)
324 t = remap_type (t, id);
325 TYPE_MAIN_VARIANT (new) = t;
326 TYPE_NEXT_VARIANT (new) = TYPE_MAIN_VARIANT (t);
327 TYPE_NEXT_VARIANT (t) = new;
329 else
331 TYPE_MAIN_VARIANT (new) = new;
332 TYPE_NEXT_VARIANT (new) = NULL;
335 if (TYPE_STUB_DECL (type))
336 TYPE_STUB_DECL (new) = remap_decl (TYPE_STUB_DECL (type), id);
338 /* Lazily create pointer and reference types. */
339 TYPE_POINTER_TO (new) = NULL;
340 TYPE_REFERENCE_TO (new) = NULL;
342 switch (TREE_CODE (new))
344 case INTEGER_TYPE:
345 case REAL_TYPE:
346 case ENUMERAL_TYPE:
347 case BOOLEAN_TYPE:
348 t = TYPE_MIN_VALUE (new);
349 if (t && TREE_CODE (t) != INTEGER_CST)
350 walk_tree (&TYPE_MIN_VALUE (new), copy_body_r, id, NULL);
352 t = TYPE_MAX_VALUE (new);
353 if (t && TREE_CODE (t) != INTEGER_CST)
354 walk_tree (&TYPE_MAX_VALUE (new), copy_body_r, id, NULL);
355 return new;
357 case FUNCTION_TYPE:
358 TREE_TYPE (new) = remap_type (TREE_TYPE (new), id);
359 walk_tree (&TYPE_ARG_TYPES (new), copy_body_r, id, NULL);
360 return new;
362 case ARRAY_TYPE:
363 TREE_TYPE (new) = remap_type (TREE_TYPE (new), id);
364 TYPE_DOMAIN (new) = remap_type (TYPE_DOMAIN (new), id);
365 break;
367 case RECORD_TYPE:
368 case UNION_TYPE:
369 case QUAL_UNION_TYPE:
371 tree f, nf = NULL;
373 for (f = TYPE_FIELDS (new); f ; f = TREE_CHAIN (f))
375 t = remap_decl (f, id);
376 DECL_CONTEXT (t) = new;
377 TREE_CHAIN (t) = nf;
378 nf = t;
380 TYPE_FIELDS (new) = nreverse (nf);
382 break;
384 case OFFSET_TYPE:
385 default:
386 /* Shouldn't have been thought variable sized. */
387 gcc_unreachable ();
390 walk_tree (&TYPE_SIZE (new), copy_body_r, id, NULL);
391 walk_tree (&TYPE_SIZE_UNIT (new), copy_body_r, id, NULL);
393 return new;
396 tree
397 remap_type (tree type, copy_body_data *id)
399 splay_tree_node node;
401 if (type == NULL)
402 return type;
404 /* See if we have remapped this type. */
405 node = splay_tree_lookup (id->decl_map, (splay_tree_key) type);
406 if (node)
407 return (tree) node->value;
409 /* The type only needs remapping if it's variably modified. */
410 if (! variably_modified_type_p (type, id->src_fn))
412 insert_decl_map (id, type, type);
413 return type;
416 return remap_type_1 (type, id);
419 static tree
420 remap_decls (tree decls, copy_body_data *id)
422 tree old_var;
423 tree new_decls = NULL_TREE;
425 /* Remap its variables. */
426 for (old_var = decls; old_var; old_var = TREE_CHAIN (old_var))
428 tree new_var;
430 /* We cannot remap local static declarations into the copy, since we
431 can't duplicate them without breaking the one-decl rule. Instead,
432 link the original decls into cfun's unexpanded_var_list. */
433 if (!lang_hooks.tree_inlining.auto_var_in_fn_p (old_var, id->src_fn)
434 && !DECL_EXTERNAL (old_var))
436 cfun->unexpanded_var_list = tree_cons (NULL_TREE, old_var,
437 cfun->unexpanded_var_list);
438 continue;
441 /* Remap the variable. */
442 new_var = remap_decl (old_var, id);
444 /* If we didn't remap this variable, we can't mess with its
445 TREE_CHAIN. If we remapped this variable to the return slot, it's
446 already declared somewhere else, so don't declare it here. */
447 if (!new_var || new_var == id->retvar)
449 else
451 gcc_assert (DECL_P (new_var));
452 TREE_CHAIN (new_var) = new_decls;
453 new_decls = new_var;
457 return nreverse (new_decls);
460 /* Copy the BLOCK to contain remapped versions of the variables
461 therein. And hook the new block into the block-tree. */
463 static void
464 remap_block (tree *block, copy_body_data *id)
466 tree old_block;
467 tree new_block;
468 tree fn;
470 /* Make the new block. */
471 old_block = *block;
472 new_block = make_node (BLOCK);
473 TREE_USED (new_block) = TREE_USED (old_block);
474 BLOCK_ABSTRACT_ORIGIN (new_block) = old_block;
475 BLOCK_SOURCE_LOCATION (new_block) = BLOCK_SOURCE_LOCATION (old_block);
476 *block = new_block;
478 /* Remap its variables. */
479 BLOCK_VARS (new_block) = remap_decls (BLOCK_VARS (old_block), id);
481 fn = id->dst_fn;
483 if (id->transform_lang_insert_block)
484 lang_hooks.decls.insert_block (new_block);
486 /* Remember the remapped block. */
487 insert_decl_map (id, old_block, new_block);
490 /* Copy the whole block tree and root it in id->block. */
491 static tree
492 remap_blocks (tree block, copy_body_data *id)
494 tree t;
495 tree new = block;
497 if (!block)
498 return NULL;
500 remap_block (&new, id);
501 gcc_assert (new != block);
502 for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
503 add_lexical_block (new, remap_blocks (t, id));
504 return new;
507 static void
508 copy_statement_list (tree *tp)
510 tree_stmt_iterator oi, ni;
511 tree new;
513 new = alloc_stmt_list ();
514 ni = tsi_start (new);
515 oi = tsi_start (*tp);
516 *tp = new;
518 for (; !tsi_end_p (oi); tsi_next (&oi))
519 tsi_link_after (&ni, tsi_stmt (oi), TSI_NEW_STMT);
522 static void
523 copy_bind_expr (tree *tp, int *walk_subtrees, copy_body_data *id)
525 tree block = BIND_EXPR_BLOCK (*tp);
526 /* Copy (and replace) the statement. */
527 copy_tree_r (tp, walk_subtrees, NULL);
528 if (block)
530 remap_block (&block, id);
531 BIND_EXPR_BLOCK (*tp) = block;
534 if (BIND_EXPR_VARS (*tp))
535 /* This will remap a lot of the same decls again, but this should be
536 harmless. */
537 BIND_EXPR_VARS (*tp) = remap_decls (BIND_EXPR_VARS (*tp), id);
540 /* Called from copy_body_id via walk_tree. DATA is really a
541 `copy_body_data *'. */
543 tree
544 copy_body_r (tree *tp, int *walk_subtrees, void *data)
546 copy_body_data *id = (copy_body_data *) data;
547 tree fn = id->src_fn;
548 tree new_block;
550 /* Begin by recognizing trees that we'll completely rewrite for the
551 inlining context. Our output for these trees is completely
552 different from our input (e.g. RETURN_EXPR is deleted, and morphs
553 into an edge). Further down, we'll handle trees that get
554 duplicated and/or tweaked. */
556 /* When requested, RETURN_EXPRs should be transformed to just the
557 contained GIMPLE_MODIFY_STMT. The branch semantics of the return will
558 be handled elsewhere by manipulating the CFG rather than a statement. */
559 if (TREE_CODE (*tp) == RETURN_EXPR && id->transform_return_to_modify)
561 tree assignment = TREE_OPERAND (*tp, 0);
563 /* If we're returning something, just turn that into an
564 assignment into the equivalent of the original RESULT_DECL.
565 If the "assignment" is just the result decl, the result
566 decl has already been set (e.g. a recent "foo (&result_decl,
567 ...)"); just toss the entire RETURN_EXPR. */
568 if (assignment && TREE_CODE (assignment) == GIMPLE_MODIFY_STMT)
570 /* Replace the RETURN_EXPR with (a copy of) the
571 GIMPLE_MODIFY_STMT hanging underneath. */
572 *tp = copy_node (assignment);
574 else /* Else the RETURN_EXPR returns no value. */
576 *tp = NULL;
577 return (tree) (void *)1;
580 else if (TREE_CODE (*tp) == SSA_NAME)
582 *tp = remap_ssa_name (*tp, id);
583 *walk_subtrees = 0;
584 return NULL;
587 /* Local variables and labels need to be replaced by equivalent
588 variables. We don't want to copy static variables; there's only
589 one of those, no matter how many times we inline the containing
590 function. Similarly for globals from an outer function. */
591 else if (lang_hooks.tree_inlining.auto_var_in_fn_p (*tp, fn))
593 tree new_decl;
595 /* Remap the declaration. */
596 new_decl = remap_decl (*tp, id);
597 gcc_assert (new_decl);
598 /* Replace this variable with the copy. */
599 STRIP_TYPE_NOPS (new_decl);
600 *tp = new_decl;
601 *walk_subtrees = 0;
603 else if (TREE_CODE (*tp) == STATEMENT_LIST)
604 copy_statement_list (tp);
605 else if (TREE_CODE (*tp) == SAVE_EXPR)
606 remap_save_expr (tp, id->decl_map, walk_subtrees);
607 else if (TREE_CODE (*tp) == LABEL_DECL
608 && (! DECL_CONTEXT (*tp)
609 || decl_function_context (*tp) == id->src_fn))
610 /* These may need to be remapped for EH handling. */
611 *tp = remap_decl (*tp, id);
612 else if (TREE_CODE (*tp) == BIND_EXPR)
613 copy_bind_expr (tp, walk_subtrees, id);
614 /* Types may need remapping as well. */
615 else if (TYPE_P (*tp))
616 *tp = remap_type (*tp, id);
618 /* If this is a constant, we have to copy the node iff the type will be
619 remapped. copy_tree_r will not copy a constant. */
620 else if (CONSTANT_CLASS_P (*tp))
622 tree new_type = remap_type (TREE_TYPE (*tp), id);
624 if (new_type == TREE_TYPE (*tp))
625 *walk_subtrees = 0;
627 else if (TREE_CODE (*tp) == INTEGER_CST)
628 *tp = build_int_cst_wide (new_type, TREE_INT_CST_LOW (*tp),
629 TREE_INT_CST_HIGH (*tp));
630 else
632 *tp = copy_node (*tp);
633 TREE_TYPE (*tp) = new_type;
637 /* Otherwise, just copy the node. Note that copy_tree_r already
638 knows not to copy VAR_DECLs, etc., so this is safe. */
639 else
641 /* Here we handle trees that are not completely rewritten.
642 First we detect some inlining-induced bogosities for
643 discarding. */
644 if (TREE_CODE (*tp) == GIMPLE_MODIFY_STMT
645 && GIMPLE_STMT_OPERAND (*tp, 0) == GIMPLE_STMT_OPERAND (*tp, 1)
646 && (lang_hooks.tree_inlining.auto_var_in_fn_p
647 (GIMPLE_STMT_OPERAND (*tp, 0), fn)))
649 /* Some assignments VAR = VAR; don't generate any rtl code
650 and thus don't count as variable modification. Avoid
651 keeping bogosities like 0 = 0. */
652 tree decl = GIMPLE_STMT_OPERAND (*tp, 0), value;
653 splay_tree_node n;
655 n = splay_tree_lookup (id->decl_map, (splay_tree_key) decl);
656 if (n)
658 value = (tree) n->value;
659 STRIP_TYPE_NOPS (value);
660 if (TREE_CONSTANT (value) || TREE_READONLY_DECL_P (value))
662 *tp = build_empty_stmt ();
663 return copy_body_r (tp, walk_subtrees, data);
667 else if (TREE_CODE (*tp) == INDIRECT_REF)
669 /* Get rid of *& from inline substitutions that can happen when a
670 pointer argument is an ADDR_EXPR. */
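/* For example (caller and variable names invented for illustration):
   if the caller passes &obj for a pointer parameter p, a use of *p in
   the inlined body would otherwise become *&obj; the code below folds
   that back to plain obj.  */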
671 tree decl = TREE_OPERAND (*tp, 0);
672 splay_tree_node n;
674 n = splay_tree_lookup (id->decl_map, (splay_tree_key) decl);
675 if (n)
677 tree new;
678 tree old;
679 /* If we happen to get an ADDR_EXPR in n->value, strip
680 it manually here as we'll eventually get ADDR_EXPRs
681 which lie about their types pointed to. In this case
682 build_fold_indirect_ref wouldn't strip the INDIRECT_REF,
683 but we absolutely rely on that. As fold_indirect_ref
684 does other useful transformations, try that first, though. */
685 tree type = TREE_TYPE (TREE_TYPE ((tree)n->value));
686 new = unshare_expr ((tree)n->value);
687 old = *tp;
688 *tp = fold_indirect_ref_1 (type, new);
689 if (! *tp)
691 if (TREE_CODE (new) == ADDR_EXPR)
692 *tp = TREE_OPERAND (new, 0);
693 else
695 *tp = build1 (INDIRECT_REF, type, new);
696 TREE_THIS_VOLATILE (*tp) = TREE_THIS_VOLATILE (old);
699 *walk_subtrees = 0;
700 return NULL;
704 /* Here is the "usual case". Copy this tree node, and then
705 tweak some special cases. */
706 copy_tree_r (tp, walk_subtrees, NULL);
708 /* Global variables we haven't seen yet need to go into referenced
709 vars. */
710 if (gimple_in_ssa_p (cfun) && TREE_CODE (*tp) == VAR_DECL)
711 add_referenced_var (*tp);
713 /* If EXPR has a block defined, map it to the newly constructed block.
714 When inlining, we want EXPRs without a block to appear in the block
715 of the function call. */
716 if (EXPR_P (*tp) || GIMPLE_STMT_P (*tp))
718 new_block = id->block;
719 if (TREE_BLOCK (*tp))
721 splay_tree_node n;
722 n = splay_tree_lookup (id->decl_map,
723 (splay_tree_key) TREE_BLOCK (*tp));
724 gcc_assert (n);
725 new_block = (tree) n->value;
727 TREE_BLOCK (*tp) = new_block;
730 if (TREE_CODE (*tp) == RESX_EXPR && id->eh_region_offset)
731 TREE_OPERAND (*tp, 0) =
732 build_int_cst
733 (NULL_TREE,
734 id->eh_region_offset + TREE_INT_CST_LOW (TREE_OPERAND (*tp, 0)));
736 if (!GIMPLE_TUPLE_P (*tp))
737 TREE_TYPE (*tp) = remap_type (TREE_TYPE (*tp), id);
739 /* The copied TARGET_EXPR has never been expanded, even if the
740 original node was expanded already. */
741 if (TREE_CODE (*tp) == TARGET_EXPR && TREE_OPERAND (*tp, 3))
743 TREE_OPERAND (*tp, 1) = TREE_OPERAND (*tp, 3);
744 TREE_OPERAND (*tp, 3) = NULL_TREE;
747 /* Variable substitution need not be simple; in particular, consider
748 the INDIRECT_REF substitution above. Make sure that TREE_CONSTANT
749 and friends are up-to-date. */
750 else if (TREE_CODE (*tp) == ADDR_EXPR)
752 walk_tree (&TREE_OPERAND (*tp, 0), copy_body_r, id, NULL);
753 /* Handle the case where we substituted an INDIRECT_REF
754 into the operand of the ADDR_EXPR. */
755 if (TREE_CODE (TREE_OPERAND (*tp, 0)) == INDIRECT_REF)
756 *tp = TREE_OPERAND (TREE_OPERAND (*tp, 0), 0);
757 else
758 recompute_tree_invariant_for_addr_expr (*tp);
759 *walk_subtrees = 0;
763 /* Keep iterating. */
764 return NULL_TREE;
767 /* Copy basic block, scale profile accordingly. Edges will be taken care of
768 later. */
770 static basic_block
771 copy_bb (copy_body_data *id, basic_block bb, int frequency_scale, int count_scale)
773 block_stmt_iterator bsi, copy_bsi;
774 basic_block copy_basic_block;
776 /* create_basic_block() will append every new block to
777 basic_block_info automatically. */
778 copy_basic_block = create_basic_block (NULL, (void *) 0,
779 (basic_block) bb->prev_bb->aux);
780 copy_basic_block->count = bb->count * count_scale / REG_BR_PROB_BASE;
782 /* We are going to rebuild frequencies from scratch. These values have only
783 minor importance for driving canonicalize_loop_headers. */
784 copy_basic_block->frequency = ((gcov_type)bb->frequency
785 * frequency_scale / REG_BR_PROB_BASE);
786 if (copy_basic_block->frequency > BB_FREQ_MAX)
787 copy_basic_block->frequency = BB_FREQ_MAX;
788 copy_bsi = bsi_start (copy_basic_block);
790 for (bsi = bsi_start (bb);
791 !bsi_end_p (bsi); bsi_next (&bsi))
793 tree stmt = bsi_stmt (bsi);
794 tree orig_stmt = stmt;
796 walk_tree (&stmt, copy_body_r, id, NULL);
798 /* A RETURN_EXPR might be removed; this is signalled by
799 the stmt pointer being made NULL. */
800 if (stmt)
802 tree call, decl;
804 gimple_duplicate_stmt_histograms (cfun, stmt, id->src_cfun, orig_stmt);
806 /* With return slot optimization we can end up with
807 non-gimple (foo *)&this->m; fix that here. */
808 if (TREE_CODE (stmt) == GIMPLE_MODIFY_STMT
809 && TREE_CODE (GIMPLE_STMT_OPERAND (stmt, 1)) == NOP_EXPR
810 && !is_gimple_val (TREE_OPERAND (GIMPLE_STMT_OPERAND (stmt, 1), 0)))
811 gimplify_stmt (&stmt);
813 bsi_insert_after (&copy_bsi, stmt, BSI_NEW_STMT);
815 /* Process the new statement. gimplify_stmt possibly turned the statement
816 into multiple statements; we need to process all of them. */
817 while (!bsi_end_p (copy_bsi))
819 stmt = bsi_stmt (copy_bsi);
820 call = get_call_expr_in (stmt);
822 /* Statements produced by inlining can be unfolded, especially
823 when we have constant propagated some operands. We can't fold
824 them right now for two reasons:
825 1) folding requires SSA_NAME_DEF_STMTs to be correct
826 2) we can't change function calls to builtins.
827 So we just mark the statement for later folding. We mark
828 all new statements, instead of just the statements that have changed
829 by some nontrivial substitution, so even statements made
830 foldable indirectly are updated. If this turns out to be
831 expensive, copy_body can be told to watch for nontrivial
832 changes. */
833 if (id->statements_to_fold)
834 pointer_set_insert (id->statements_to_fold, stmt);
835 /* We're duplicating a CALL_EXPR. Find any corresponding
836 callgraph edges and update or duplicate them. */
837 if (call && (decl = get_callee_fndecl (call)))
839 struct cgraph_node *node;
840 struct cgraph_edge *edge;
842 switch (id->transform_call_graph_edges)
844 case CB_CGE_DUPLICATE:
845 edge = cgraph_edge (id->src_node, orig_stmt);
846 if (edge)
847 cgraph_clone_edge (edge, id->dst_node, stmt,
848 REG_BR_PROB_BASE, 1, edge->frequency, true);
849 break;
851 case CB_CGE_MOVE_CLONES:
852 for (node = id->dst_node->next_clone;
853 node;
854 node = node->next_clone)
856 edge = cgraph_edge (node, orig_stmt);
857 gcc_assert (edge);
858 cgraph_set_call_stmt (edge, stmt);
860 /* FALLTHRU */
862 case CB_CGE_MOVE:
863 edge = cgraph_edge (id->dst_node, orig_stmt);
864 if (edge)
865 cgraph_set_call_stmt (edge, stmt);
866 break;
868 default:
869 gcc_unreachable ();
872 /* If you think we can abort here, you are wrong.
873 There is no region 0 in tree land. */
874 gcc_assert (lookup_stmt_eh_region_fn (id->src_cfun, orig_stmt)
875 != 0);
877 if (tree_could_throw_p (stmt)
878 /* When we are cloning for inlining, we are supposed to
879 construct a clone that calls precisely the same functions
880 as the original. However, IPA optimizers might've earlier
881 proved some function calls to be non-trapping, which can
882 render some basic blocks dead and therefore
883 unreachable.
885 We can't update SSA with unreachable blocks in the CFG, and thus
886 we prevent the scenario by preserving even the "dead" EH
887 edges until the point they are later removed by the
888 fixup_cfg pass. */
889 || (id->transform_call_graph_edges == CB_CGE_MOVE_CLONES
890 && lookup_stmt_eh_region_fn (id->src_cfun, orig_stmt) > 0))
892 int region = lookup_stmt_eh_region_fn (id->src_cfun, orig_stmt);
893 /* Add an entry for the copied tree in the EH hashtable.
894 When cloning or versioning, use the hashtable in
895 cfun, and just copy the EH number. When inlining, use the
896 hashtable in the caller, and adjust the region number. */
897 if (region > 0)
898 add_stmt_to_eh_region (stmt, region + id->eh_region_offset);
900 /* If this tree doesn't have a region associated with it,
901 and there is a "current region,"
902 then associate this tree with the current region
903 and add edges associated with this region. */
904 if ((lookup_stmt_eh_region_fn (id->src_cfun,
905 orig_stmt) <= 0
906 && id->eh_region > 0)
907 && tree_could_throw_p (stmt))
908 add_stmt_to_eh_region (stmt, id->eh_region);
910 if (gimple_in_ssa_p (cfun))
912 ssa_op_iter i;
913 tree def;
915 find_new_referenced_vars (bsi_stmt_ptr (copy_bsi));
916 FOR_EACH_SSA_TREE_OPERAND (def, stmt, i, SSA_OP_DEF)
917 if (TREE_CODE (def) == SSA_NAME)
918 SSA_NAME_DEF_STMT (def) = stmt;
920 bsi_next (&copy_bsi);
922 copy_bsi = bsi_last (copy_basic_block);
925 return copy_basic_block;
928 /* Inserting a Single Entry Multiple Exit region in SSA form into code in SSA
929 form is quite easy, since the dominator relationship for the old basic blocks
930 does not change.
932 There is however an exception: inlining might change the dominator relation
933 across EH edges from basic blocks within the inlined function that lead
934 to landing pads in the function we inline into.
936 This function marks the PHI_RESULT of such PHI nodes for renaming; this is
937 safe because the EH edges are abnormal and SSA_NAME_OCCURS_IN_ABNORMAL_PHI
938 must be set. This means that there will be no overlapping live ranges
939 for the underlying symbol.
941 This might change in the future if we allow redirecting of EH edges, in
942 which case we might want to change the way the CFG is built pre-inlining
943 to include all the possible edges. */
944 static void
945 update_ssa_across_eh_edges (basic_block bb)
947 edge e;
948 edge_iterator ei;
950 FOR_EACH_EDGE (e, ei, bb->succs)
951 if (!e->dest->aux
952 || ((basic_block)e->dest->aux)->index == ENTRY_BLOCK)
954 tree phi;
956 gcc_assert (e->flags & EDGE_EH);
957 for (phi = phi_nodes (e->dest); phi; phi = PHI_CHAIN (phi))
959 gcc_assert (SSA_NAME_OCCURS_IN_ABNORMAL_PHI
960 (PHI_RESULT (phi)));
961 mark_sym_for_renaming
962 (SSA_NAME_VAR (PHI_RESULT (phi)));
967 /* Copy edges from BB into its copy constructed earlier, scale profile
968 accordingly. Edges will be taken care of later. Assume aux
969 pointers point to the copies of each BB. */
970 static void
971 copy_edges_for_bb (basic_block bb, int count_scale)
973 basic_block new_bb = (basic_block) bb->aux;
974 edge_iterator ei;
975 edge old_edge;
976 block_stmt_iterator bsi;
977 int flags;
979 /* Use the indices from the original blocks to create edges for the
980 new ones. */
981 FOR_EACH_EDGE (old_edge, ei, bb->succs)
982 if (!(old_edge->flags & EDGE_EH))
984 edge new;
986 flags = old_edge->flags;
988 /* Return edges do get a FALLTHRU flag when they get inlined. */
989 if (old_edge->dest->index == EXIT_BLOCK && !old_edge->flags
990 && old_edge->dest->aux != EXIT_BLOCK_PTR)
991 flags |= EDGE_FALLTHRU;
992 new = make_edge (new_bb, (basic_block) old_edge->dest->aux, flags);
993 new->count = old_edge->count * count_scale / REG_BR_PROB_BASE;
994 new->probability = old_edge->probability;
997 if (bb->index == ENTRY_BLOCK || bb->index == EXIT_BLOCK)
998 return;
1000 for (bsi = bsi_start (new_bb); !bsi_end_p (bsi);)
1002 tree copy_stmt;
1004 copy_stmt = bsi_stmt (bsi);
1005 update_stmt (copy_stmt);
1006 if (gimple_in_ssa_p (cfun))
1007 mark_symbols_for_renaming (copy_stmt);
1008 /* Do this before the possible split_block. */
1009 bsi_next (&bsi);
1011 /* If this tree could throw an exception, there are two
1012 cases where we need to add abnormal edge(s): the
1013 tree wasn't in a region and there is a "current
1014 region" in the caller; or the original tree had
1015 EH edges. In both cases split the block after the tree,
1016 and add abnormal edge(s) as needed; we need both
1017 those from the callee and the caller.
1018 We check whether the copy can throw, because the const
1019 propagation can change an INDIRECT_REF which throws
1020 into a COMPONENT_REF which doesn't. If the copy
1021 can throw, the original could also throw. */
1023 if (tree_can_throw_internal (copy_stmt))
1025 if (!bsi_end_p (bsi))
1026 /* Note that bb's predecessor edges aren't necessarily
1027 right at this point; split_block doesn't care. */
1029 edge e = split_block (new_bb, copy_stmt);
1031 new_bb = e->dest;
1032 new_bb->aux = e->src->aux;
1033 bsi = bsi_start (new_bb);
1036 make_eh_edges (copy_stmt);
1038 if (gimple_in_ssa_p (cfun))
1039 update_ssa_across_eh_edges (bb_for_stmt (copy_stmt));
1044 /* Copy the PHIs. All blocks and edges are copied, some blocks
1045 were possibly split and new outgoing EH edges inserted.
1046 BB points to the block of the original function and AUX pointers link
1047 the original and newly copied blocks. */
1049 static void
1050 copy_phis_for_bb (basic_block bb, copy_body_data *id)
1052 basic_block new_bb = bb->aux;
1053 edge_iterator ei;
1054 tree phi;
1056 for (phi = phi_nodes (bb); phi; phi = PHI_CHAIN (phi))
1058 tree res = PHI_RESULT (phi);
1059 tree new_res = res;
1060 tree new_phi;
1061 edge new_edge;
1063 if (is_gimple_reg (res))
1065 walk_tree (&new_res, copy_body_r, id, NULL);
1066 SSA_NAME_DEF_STMT (new_res)
1067 = new_phi = create_phi_node (new_res, new_bb);
1068 FOR_EACH_EDGE (new_edge, ei, new_bb->preds)
1070 edge old_edge = find_edge (new_edge->src->aux, bb);
1071 tree arg = PHI_ARG_DEF_FROM_EDGE (phi, old_edge);
1072 tree new_arg = arg;
1074 walk_tree (&new_arg, copy_body_r, id, NULL);
1075 gcc_assert (new_arg);
1076 add_phi_arg (new_phi, new_arg, new_edge);
1082 /* Wrapper for remap_decl so it can be used as a callback. */
1083 static tree
1084 remap_decl_1 (tree decl, void *data)
1086 return remap_decl (decl, (copy_body_data *) data);
1089 /* Build the struct function and associated data structures for the new clone
1090 NEW_FNDECL to be built. CALLEE_FNDECL is the original. */
1092 static void
1093 initialize_cfun (tree new_fndecl, tree callee_fndecl, gcov_type count,
1094 int frequency)
1096 struct function *new_cfun
1097 = (struct function *) ggc_alloc_cleared (sizeof (struct function));
1098 struct function *src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
1099 int count_scale, frequency_scale;
1101 if (ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count)
1102 count_scale = (REG_BR_PROB_BASE * count
1103 / ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count);
1104 else
1105 count_scale = 1;
1107 if (ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->frequency)
1108 frequency_scale = (REG_BR_PROB_BASE * frequency
1109 /
1110 ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->frequency);
1111 else
1112 frequency_scale = count_scale;
1114 /* Register specific tree functions. */
1115 tree_register_cfg_hooks ();
1116 *new_cfun = *DECL_STRUCT_FUNCTION (callee_fndecl);
1117 new_cfun->funcdef_no = get_next_funcdef_no ();
1118 VALUE_HISTOGRAMS (new_cfun) = NULL;
1119 new_cfun->unexpanded_var_list = NULL;
1120 new_cfun->cfg = NULL;
1121 new_cfun->decl = new_fndecl /*= copy_node (callee_fndecl)*/;
1122 new_cfun->ib_boundaries_block = NULL;
1123 DECL_STRUCT_FUNCTION (new_fndecl) = new_cfun;
1124 push_cfun (new_cfun);
1125 init_empty_tree_cfg ();
1127 ENTRY_BLOCK_PTR->count =
1128 (ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count * count_scale /
1129 REG_BR_PROB_BASE);
1130 ENTRY_BLOCK_PTR->frequency =
1131 (ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->frequency *
1132 frequency_scale / REG_BR_PROB_BASE);
1133 EXIT_BLOCK_PTR->count =
1134 (EXIT_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count * count_scale /
1135 REG_BR_PROB_BASE);
1136 EXIT_BLOCK_PTR->frequency =
1137 (EXIT_BLOCK_PTR_FOR_FUNCTION (src_cfun)->frequency *
1138 frequency_scale / REG_BR_PROB_BASE);
1139 if (src_cfun->eh)
1140 init_eh_for_function ();
1142 if (src_cfun->gimple_df)
1144 init_tree_ssa ();
1145 cfun->gimple_df->in_ssa_p = true;
1146 init_ssa_operands ();
1148 pop_cfun ();
1151 /* Make a copy of the body of FN so that it can be inserted inline in
1152 another function. Walks FN via CFG, returns new fndecl. */
1154 static tree
1155 copy_cfg_body (copy_body_data * id, gcov_type count, int frequency,
1156 basic_block entry_block_map, basic_block exit_block_map)
1158 tree callee_fndecl = id->src_fn;
1159 /* Original cfun for the callee, doesn't change. */
1160 struct function *src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
1161 struct function *cfun_to_copy;
1162 basic_block bb;
1163 tree new_fndecl = NULL;
1164 int count_scale, frequency_scale;
1165 int last;
1167 if (ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count)
1168 count_scale = (REG_BR_PROB_BASE * count
1169 / ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count);
1170 else
1171 count_scale = 1;
1173 if (ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->frequency)
1174 frequency_scale = (REG_BR_PROB_BASE * frequency
1175 /
1176 ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->frequency);
1177 else
1178 frequency_scale = count_scale;
1180 /* Register specific tree functions. */
1181 tree_register_cfg_hooks ();
1183 /* Must have a CFG here at this point. */
1184 gcc_assert (ENTRY_BLOCK_PTR_FOR_FUNCTION
1185 (DECL_STRUCT_FUNCTION (callee_fndecl)));
1187 cfun_to_copy = id->src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
1190 ENTRY_BLOCK_PTR_FOR_FUNCTION (cfun_to_copy)->aux = entry_block_map;
1191 EXIT_BLOCK_PTR_FOR_FUNCTION (cfun_to_copy)->aux = exit_block_map;
1192 entry_block_map->aux = ENTRY_BLOCK_PTR_FOR_FUNCTION (cfun_to_copy);
1193 exit_block_map->aux = EXIT_BLOCK_PTR_FOR_FUNCTION (cfun_to_copy);
1195 /* Duplicate any exception-handling regions. */
1196 if (cfun->eh)
1198 id->eh_region_offset
1199 = duplicate_eh_regions (cfun_to_copy, remap_decl_1, id,
1200 0, id->eh_region);
1202 /* Use aux pointers to map the original blocks to their copies. */
1203 FOR_EACH_BB_FN (bb, cfun_to_copy)
1205 basic_block new = copy_bb (id, bb, frequency_scale, count_scale);
1206 bb->aux = new;
1207 new->aux = bb;
1210 last = n_basic_blocks;
1211 /* Now that we've duplicated the blocks, duplicate their edges. */
1212 FOR_ALL_BB_FN (bb, cfun_to_copy)
1213 copy_edges_for_bb (bb, count_scale);
1214 if (gimple_in_ssa_p (cfun))
1215 FOR_ALL_BB_FN (bb, cfun_to_copy)
1216 copy_phis_for_bb (bb, id);
1217 FOR_ALL_BB_FN (bb, cfun_to_copy)
1219 ((basic_block)bb->aux)->aux = NULL;
1220 bb->aux = NULL;
1222 /* Zero out AUX fields of blocks newly created during EH edge
1223 insertion. */
1224 for (; last < n_basic_blocks; last++)
1225 BASIC_BLOCK (last)->aux = NULL;
1226 entry_block_map->aux = NULL;
1227 exit_block_map->aux = NULL;
1229 return new_fndecl;
1232 /* Make a copy of the body of FN so that it can be inserted inline in
1233 another function. */
1235 static tree
1236 copy_generic_body (copy_body_data *id)
1238 tree body;
1239 tree fndecl = id->src_fn;
1241 body = DECL_SAVED_TREE (fndecl);
1242 walk_tree (&body, copy_body_r, id, NULL);
1244 return body;
1247 static tree
1248 copy_body (copy_body_data *id, gcov_type count, int frequency,
1249 basic_block entry_block_map, basic_block exit_block_map)
1251 tree fndecl = id->src_fn;
1252 tree body;
1254 /* If this body has a CFG, walk CFG and copy. */
1255 gcc_assert (ENTRY_BLOCK_PTR_FOR_FUNCTION (DECL_STRUCT_FUNCTION (fndecl)));
1256 body = copy_cfg_body (id, count, frequency, entry_block_map, exit_block_map);
1258 return body;
1261 /* Return true if VALUE is an ADDR_EXPR of an automatic variable
1262 defined in function FN, or of a data member thereof. */
1264 static bool
1265 self_inlining_addr_expr (tree value, tree fn)
1267 tree var;
1269 if (TREE_CODE (value) != ADDR_EXPR)
1270 return false;
1272 var = get_base_address (TREE_OPERAND (value, 0));
1274 return var && lang_hooks.tree_inlining.auto_var_in_fn_p (var, fn);
1277 static void
1278 setup_one_parameter (copy_body_data *id, tree p, tree value, tree fn,
1279 basic_block bb, tree *vars)
1281 tree init_stmt;
1282 tree var;
1283 tree var_sub;
1284 tree rhs = value ? fold_convert (TREE_TYPE (p), value) : NULL;
1285 tree def = (gimple_in_ssa_p (cfun)
1286 ? gimple_default_def (id->src_cfun, p) : NULL);
1288 /* If the parameter is never assigned to and has no SSA_NAMEs created,
1289 we may not need to create a new variable here at all. Instead, we may
1290 be able to just use the argument value. */
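/* For example (argument value invented for illustration): for a call
   foo (42) where the callee's parameter p is TREE_READONLY, not
   addressable and never given an SSA name, we can simply record the
   mapping p -> 42 in the decl map instead of creating a local copy
   of p.  */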
1291 if (TREE_READONLY (p)
1292 && !TREE_ADDRESSABLE (p)
1293 && value && !TREE_SIDE_EFFECTS (value)
1294 && !def)
1296 /* We may produce non-gimple trees by adding NOPs or introduce
1297 invalid sharing when the operand is not really constant.
1298 It is not a big deal to prohibit constant propagation here as
1299 we will constant propagate in the DOM1 pass anyway. */
1300 if (is_gimple_min_invariant (value)
1301 && lang_hooks.types_compatible_p (TREE_TYPE (value), TREE_TYPE (p))
1302 /* We have to be very careful about ADDR_EXPR. Make sure
1303 the base variable isn't a local variable of the inlined
1304 function, e.g., when doing recursive inlining, direct or
1305 mutually-recursive or whatever, which is why we don't
1306 just test whether fn == current_function_decl. */
1307 && ! self_inlining_addr_expr (value, fn))
1309 insert_decl_map (id, p, value);
1310 return;
1314 /* Make an equivalent VAR_DECL. Note that we must NOT remap the type
1315 here since the type of this decl must be visible to the calling
1316 function. */
1317 var = copy_decl_to_var (p, id);
1318 if (gimple_in_ssa_p (cfun) && TREE_CODE (var) == VAR_DECL)
1320 get_var_ann (var);
1321 add_referenced_var (var);
1324 /* See if the frontend wants to pass this by invisible reference. If
1325 so, our new VAR_DECL will have REFERENCE_TYPE, and we need to
1326 replace uses of the PARM_DECL with dereferences. */
1327 if (TREE_TYPE (var) != TREE_TYPE (p)
1328 && POINTER_TYPE_P (TREE_TYPE (var))
1329 && TREE_TYPE (TREE_TYPE (var)) == TREE_TYPE (p))
1331 insert_decl_map (id, var, var);
1332 var_sub = build_fold_indirect_ref (var);
1334 else
1335 var_sub = var;
1337 /* Register the VAR_DECL as the equivalent for the PARM_DECL;
1338 that way, when the PARM_DECL is encountered, it will be
1339 automatically replaced by the VAR_DECL. */
1340 insert_decl_map (id, p, var_sub);
1342 /* Declare this new variable. */
1343 TREE_CHAIN (var) = *vars;
1344 *vars = var;
1346 /* Make gimplifier happy about this variable. */
1347 DECL_SEEN_IN_BIND_EXPR_P (var) = 1;
1349 /* Even if P was TREE_READONLY, the new VAR should not be.
1350 In the original code, we would have constructed a
1351 temporary, and then the function body would have never
1352 changed the value of P. However, now, we will be
1353 constructing VAR directly. The constructor body may
1354 change its value multiple times as it is being
1355 constructed. Therefore, it must not be TREE_READONLY;
1356 the back-end assumes that TREE_READONLY variable is
1357 assigned to only once. */
1358 if (TYPE_NEEDS_CONSTRUCTING (TREE_TYPE (p)))
1359 TREE_READONLY (var) = 0;
1361 /* If there is no setup required and we are in SSA, take the easy route
1362 and replace all SSA names representing the function parameter by the
1363 SSA name passed to the function.
1365 We need to construct a map for the variable anyway, as it might be used
1366 in different SSA names when the parameter is set in the function.
1368 FIXME: This usually kills the last connection between the inlined
1369 function parameter and the actual value in debug info. Can we do
1370 better here? If we just inserted the statement, copy propagation
1371 would kill it anyway, as it always did in older versions of GCC.
1373 We might want to introduce a notion that a single SSA_NAME might
1374 represent multiple variables for debugging purposes. */
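/* For instance (SSA names invented for illustration): if the caller
   passes a_5 for parameter p, whose default definition in the callee
   is p_1, we record the mapping p_1 -> a_5 directly instead of
   emitting an assignment and renaming.  */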
1375 if (gimple_in_ssa_p (cfun) && rhs && def && is_gimple_reg (p)
1376 && (TREE_CODE (rhs) == SSA_NAME
1377 || is_gimple_min_invariant (rhs))
1378 && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (def))
1380 insert_decl_map (id, def, rhs);
1381 return;
1384 /* Initialize this VAR_DECL from the equivalent argument. Convert
1385 the argument to the proper type in case it was promoted. */
1386 if (value)
1388 block_stmt_iterator bsi = bsi_last (bb);
1390 if (rhs == error_mark_node)
1392 insert_decl_map (id, p, var_sub);
1393 return;
1396 STRIP_USELESS_TYPE_CONVERSION (rhs);
1398 /* We want to use GIMPLE_MODIFY_STMT, not INIT_EXPR here so that we
1399 keep our trees in gimple form. */
1400 if (def && gimple_in_ssa_p (cfun) && is_gimple_reg (p))
1402 def = remap_ssa_name (def, id);
1403 init_stmt = build_gimple_modify_stmt (def, rhs);
1404 SSA_NAME_DEF_STMT (def) = init_stmt;
1405 SSA_NAME_IS_DEFAULT_DEF (def) = 0;
1406 set_default_def (var, NULL);
1408 else
1409 init_stmt = build_gimple_modify_stmt (var, rhs);
1411 /* If we did not create a gimple value and we did not create a gimple
1412 cast of a gimple value, then we will need to gimplify INIT_STMTS
1413 at the end. Note that is_gimple_cast only checks the outer
1414 tree code, not its operand. Thus the explicit check that its
1415 operand is a gimple value. */
1416 if ((!is_gimple_val (rhs)
1417 && (!is_gimple_cast (rhs)
1418 || !is_gimple_val (TREE_OPERAND (rhs, 0))))
1419 || !is_gimple_reg (var))
1421 tree_stmt_iterator i;
1423 push_gimplify_context ();
1424 gimplify_stmt (&init_stmt);
1425 if (gimple_in_ssa_p (cfun)
1426 && init_stmt && TREE_CODE (init_stmt) == STATEMENT_LIST)
1428 /* The replacement can expose previously unreferenced
1429 variables. */
1430 for (i = tsi_start (init_stmt); !tsi_end_p (i); tsi_next (&i))
1431 find_new_referenced_vars (tsi_stmt_ptr (i));
1433 pop_gimplify_context (NULL);
1436 /* If VAR represents a zero-sized variable, it's possible that the
1437 assignment statement may result in no gimple statements. */
1438 if (init_stmt)
1439 bsi_insert_after (&bsi, init_stmt, BSI_NEW_STMT);
1440 if (gimple_in_ssa_p (cfun))
1441 for (;!bsi_end_p (bsi); bsi_next (&bsi))
1442 mark_symbols_for_renaming (bsi_stmt (bsi));
1446 /* Generate code to initialize the parameters of the function at the
1447 top of the stack in ID from the CALL_EXPR EXP. */
1449 static void
1450 initialize_inlined_parameters (copy_body_data *id, tree exp,
1451 tree fn, basic_block bb)
1453 tree parms;
1454 tree a;
1455 tree p;
1456 tree vars = NULL_TREE;
1457 int argnum = 0;
1458 call_expr_arg_iterator iter;
1459 tree static_chain = CALL_EXPR_STATIC_CHAIN (exp);
1461 /* Figure out what the parameters are. */
1462 parms = DECL_ARGUMENTS (fn);
1464 /* Loop through the parameter declarations, replacing each with an
1465 equivalent VAR_DECL, appropriately initialized. */
1466 for (p = parms, a = first_call_expr_arg (exp, &iter); p;
1467 a = next_call_expr_arg (&iter), p = TREE_CHAIN (p))
1469 tree value;
1471 ++argnum;
1473 /* Find the initializer. */
1474 value = lang_hooks.tree_inlining.convert_parm_for_inlining
1475 (p, a, fn, argnum);
1477 setup_one_parameter (id, p, value, fn, bb, &vars);
1480 /* Initialize the static chain. */
1481 p = DECL_STRUCT_FUNCTION (fn)->static_chain_decl;
1482 gcc_assert (fn != current_function_decl);
1483 if (p)
1485 /* No static chain? Seems like a bug in tree-nested.c. */
1486 gcc_assert (static_chain);
1488 setup_one_parameter (id, p, static_chain, fn, bb, &vars);
1491 declare_inline_vars (id->block, vars);
1494 /* Declare a return variable to replace the RESULT_DECL for the
1495 function we are calling. An appropriate DECL_STMT is returned.
1496 The USE_STMT is filled to contain a use of the declaration to
1497 indicate the return value of the function.
1499 RETURN_SLOT, if non-null, is the place where the result is to be stored. It
1500 is set only for CALL_EXPR_RETURN_SLOT_OPT. MODIFY_DEST, if non-null,
1501 was the LHS of the GIMPLE_MODIFY_STMT to which this call is the RHS.
1503 The return value is a (possibly null) value that is the result of the
1504 function as seen by the callee. *USE_P is a (possibly null) value that
1505 holds the result as seen by the caller. */
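/* For example (caller statement invented for illustration): for
   "d = f ();" MODIFY_DEST is the decl d, and if it is safe to do so
   the callee's RESULT_DECL may simply be mapped to d; for a call
   using the return slot optimization (CALL_EXPR_RETURN_SLOT_OPT),
   RETURN_SLOT is the caller's object and the result is constructed
   in it directly.  */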
1507 static tree
1508 declare_return_variable (copy_body_data *id, tree return_slot, tree modify_dest,
1509 tree *use_p)
1511 tree callee = id->src_fn;
1512 tree caller = id->dst_fn;
1513 tree result = DECL_RESULT (callee);
1514 tree callee_type = TREE_TYPE (result);
1515 tree caller_type = TREE_TYPE (TREE_TYPE (callee));
1516 tree var, use;
1518 /* We don't need to do anything for functions that don't return
1519 anything. */
1520 if (!result || VOID_TYPE_P (callee_type))
1522 *use_p = NULL_TREE;
1523 return NULL_TREE;
1526 /* If there was a return slot, then the return value is the
1527 dereferenced address of that object. */
1528 if (return_slot)
1530 /* The front end shouldn't have used both return_slot and
1531 a modify expression. */
1532 gcc_assert (!modify_dest);
1533 if (DECL_BY_REFERENCE (result))
1535 tree return_slot_addr = build_fold_addr_expr (return_slot);
1536 STRIP_USELESS_TYPE_CONVERSION (return_slot_addr);
1538 /* We are going to construct *&return_slot and we can't do that
1539 for variables believed to be not addressable.
1541 FIXME: This check possibly can match, because values returned
1542 via return slot optimization are not believed to have address
1543 taken by alias analysis. */
1544 gcc_assert (TREE_CODE (return_slot) != SSA_NAME);
1545 if (gimple_in_ssa_p (cfun))
1547 HOST_WIDE_INT bitsize;
1548 HOST_WIDE_INT bitpos;
1549 tree offset;
1550 enum machine_mode mode;
1551 int unsignedp;
1552 int volatilep;
1553 tree base;
1554 base = get_inner_reference (return_slot, &bitsize, &bitpos,
1555 &offset,
1556 &mode, &unsignedp, &volatilep,
1557 false);
1558 if (TREE_CODE (base) == INDIRECT_REF)
1559 base = TREE_OPERAND (base, 0);
1560 if (TREE_CODE (base) == SSA_NAME)
1561 base = SSA_NAME_VAR (base);
1562 mark_sym_for_renaming (base);
1564 var = return_slot_addr;
1566 else
1568 var = return_slot;
1569 gcc_assert (TREE_CODE (var) != SSA_NAME);
1571 if ((TREE_CODE (TREE_TYPE (result)) == COMPLEX_TYPE
1572 || TREE_CODE (TREE_TYPE (result)) == VECTOR_TYPE)
1573 && !DECL_GIMPLE_REG_P (result)
1574 && DECL_P (var))
1575 DECL_GIMPLE_REG_P (var) = 0;
1576 use = NULL;
1577 goto done;
1580 /* All types requiring non-trivial constructors should have been handled. */
1581 gcc_assert (!TREE_ADDRESSABLE (callee_type));
1583 /* Attempt to avoid creating a new temporary variable. */
1584 if (modify_dest
1585 && TREE_CODE (modify_dest) != SSA_NAME)
1587 bool use_it = false;
1589 /* We can't use MODIFY_DEST if there's type promotion involved. */
1590 if (!lang_hooks.types_compatible_p (caller_type, callee_type))
1591 use_it = false;
1593 /* ??? If we're assigning to a variable sized type, then we must
1594 reuse the destination variable, because we've no good way to
1595 create variable sized temporaries at this point. */
1596 else if (TREE_CODE (TYPE_SIZE_UNIT (caller_type)) != INTEGER_CST)
1597 use_it = true;
1599 /* If the callee cannot possibly modify MODIFY_DEST, then we can
1600 reuse it as the result of the call directly. Don't do this if
1601 it would promote MODIFY_DEST to addressable. */
1602 else if (TREE_ADDRESSABLE (result))
1603 use_it = false;
1604 else
1606 tree base_m = get_base_address (modify_dest);
1608 /* If the base isn't a decl, then it's a pointer, and we don't
1609 know where that's going to go. */
1610 if (!DECL_P (base_m))
1611 use_it = false;
1612 else if (is_global_var (base_m))
1613 use_it = false;
1614 else if ((TREE_CODE (TREE_TYPE (result)) == COMPLEX_TYPE
1615 || TREE_CODE (TREE_TYPE (result)) == VECTOR_TYPE)
1616 && !DECL_GIMPLE_REG_P (result)
1617 && DECL_GIMPLE_REG_P (base_m))
1618 use_it = false;
1619 else if (!TREE_ADDRESSABLE (base_m))
1620 use_it = true;
1623 if (use_it)
1625 var = modify_dest;
1626 use = NULL;
1627 goto done;
1631 gcc_assert (TREE_CODE (TYPE_SIZE_UNIT (callee_type)) == INTEGER_CST);
1633 var = copy_result_decl_to_var (result, id);
1634 if (gimple_in_ssa_p (cfun))
1636 get_var_ann (var);
1637 add_referenced_var (var);
1640 DECL_SEEN_IN_BIND_EXPR_P (var) = 1;
1641 DECL_STRUCT_FUNCTION (caller)->unexpanded_var_list
1642 = tree_cons (NULL_TREE, var,
1643 DECL_STRUCT_FUNCTION (caller)->unexpanded_var_list);
1645 /* Do not have the rest of GCC warn about this variable as it should
1646 not be visible to the user. */
1647 TREE_NO_WARNING (var) = 1;
1649 declare_inline_vars (id->block, var);
1651 /* Build the use expr. If the return type of the function was
1652 promoted, convert it back to the expected type. */
1653 use = var;
1654 if (!lang_hooks.types_compatible_p (TREE_TYPE (var), caller_type))
1655 use = fold_convert (caller_type, var);
1657 STRIP_USELESS_TYPE_CONVERSION (use);
1659 if (DECL_BY_REFERENCE (result))
1660 var = build_fold_addr_expr (var);
1662 done:
1663 /* Register the VAR_DECL as the equivalent for the RESULT_DECL; that
1664 way, when the RESULT_DECL is encountered, it will be
1665 automatically replaced by the VAR_DECL. */
1666 insert_decl_map (id, result, var);
1668 /* Remember this so we can ignore it in remap_decls. */
1669 id->retvar = var;
1671 *use_p = use;
1672 return var;
1675 /* Returns nonzero if a function can be inlined as a tree. */
1677 bool
1678 tree_inlinable_function_p (tree fn)
1680 return inlinable_function_p (fn);
1683 static const char *inline_forbidden_reason;
1685 static tree
1686 inline_forbidden_p_1 (tree *nodep, int *walk_subtrees ATTRIBUTE_UNUSED,
1687 void *fnp)
1689 tree node = *nodep;
1690 tree fn = (tree) fnp;
1691 tree t;
1693 switch (TREE_CODE (node))
1695 case CALL_EXPR:
1696 /* Refuse to inline an alloca call unless the user explicitly forced it,
1697 as this may drastically change the program's memory overhead when the
1698 function using alloca is called in a loop. For the GCC present in
1699 SPEC2000, inlining into schedule_block caused it to require 2GB of
1700 RAM instead of 256MB. */
1701 if (alloca_call_p (node)
1702 && !lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn)))
1704 inline_forbidden_reason
1705 = G_("function %q+F can never be inlined because it uses "
1706 "alloca (override using the always_inline attribute)");
1707 return node;
1709 t = get_callee_fndecl (node);
1710 if (! t)
1711 break;
1713 /* We cannot inline functions that call setjmp. */
1714 if (setjmp_call_p (t))
1716 inline_forbidden_reason
1717 = G_("function %q+F can never be inlined because it uses setjmp");
1718 return node;
1721 if (DECL_BUILT_IN_CLASS (t) == BUILT_IN_NORMAL)
1722 switch (DECL_FUNCTION_CODE (t))
1724 /* We cannot inline functions that take a variable number of
1725 arguments. */
1726 case BUILT_IN_VA_START:
1727 case BUILT_IN_STDARG_START:
1728 case BUILT_IN_NEXT_ARG:
1729 case BUILT_IN_VA_END:
1730 inline_forbidden_reason
1731 = G_("function %q+F can never be inlined because it "
1732 "uses variable argument lists");
1733 return node;
1735 case BUILT_IN_LONGJMP:
1736 /* We can't inline functions that call __builtin_longjmp at
1737 all. The non-local goto machinery really requires the
1738 destination be in a different function. If we allow the
1739 function calling __builtin_longjmp to be inlined into the
1740 function calling __builtin_setjmp, Things will Go Awry. */
1741 inline_forbidden_reason
1742 = G_("function %q+F can never be inlined because "
1743 "it uses setjmp-longjmp exception handling");
1744 return node;
1746 case BUILT_IN_NONLOCAL_GOTO:
1747 /* Similarly. */
1748 inline_forbidden_reason
1749 = G_("function %q+F can never be inlined because "
1750 "it uses non-local goto");
1751 return node;
1753 case BUILT_IN_RETURN:
1754 case BUILT_IN_APPLY_ARGS:
1755 /* If a __builtin_apply_args caller would be inlined,
1756 it would be saving arguments of the function it has
1757 been inlined into. Similarly __builtin_return would
1758 return from the function the inline has been inlined into. */
1759 inline_forbidden_reason
1760 = G_("function %q+F can never be inlined because "
1761 "it uses __builtin_return or __builtin_apply_args");
1762 return node;
1764 default:
1765 break;
1767 break;
1769 case GOTO_EXPR:
1770 t = TREE_OPERAND (node, 0);
1772 /* We will not inline a function which uses computed goto. The
1773 addresses of its local labels, which may be tucked into
1774 global storage, are of course not constant across
1775 instantiations, which causes unexpected behavior. */
1776 if (TREE_CODE (t) != LABEL_DECL)
1778 inline_forbidden_reason
1779 = G_("function %q+F can never be inlined "
1780 "because it contains a computed goto");
1781 return node;
1783 break;
1785 case LABEL_EXPR:
1786 t = TREE_OPERAND (node, 0);
1787 if (DECL_NONLOCAL (t))
1789 /* We cannot inline a function that receives a non-local goto
1790 because we cannot remap the destination label used in the
1791 function that is performing the non-local goto. */
1792 inline_forbidden_reason
1793 = G_("function %q+F can never be inlined "
1794 "because it receives a non-local goto");
1795 return node;
1797 break;
1799 case RECORD_TYPE:
1800 case UNION_TYPE:
1801 /* We cannot inline a function of the form
1803 void F (int i) { struct S { int ar[i]; } s; }
1805 Attempting to do so produces a catch-22.
1806 If walk_tree examines the TYPE_FIELDS chain of RECORD_TYPE/
1807 UNION_TYPE nodes, then it goes into infinite recursion on a
1808 structure containing a pointer to its own type. If it doesn't,
1809 then the type node for S doesn't get adjusted properly when
1810 F is inlined.
1812 ??? This is likely no longer true, but it's too late in the 4.0
1813 cycle to try to find out. This should be checked for 4.1. */
1814 for (t = TYPE_FIELDS (node); t; t = TREE_CHAIN (t))
1815 if (variably_modified_type_p (TREE_TYPE (t), NULL))
1817 inline_forbidden_reason
1818 = G_("function %q+F can never be inlined "
1819 "because it uses variable sized variables");
1820 return node;
1823 default:
1824 break;
1827 return NULL_TREE;
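/* Illustrative examples (not from the original sources; `use' and the
   label names are made up): source code along the lines of

     void f (int n) { void *p = __builtin_alloca (n); use (p); }

   or, for the computed-goto case,

     void g (int i) { static void *tab[] = { &&a, &&b };
                      goto *tab[i]; a:; b:; }

   would be caught by the walker above, which records the reason in
   inline_forbidden_reason and returns the offending node.  */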
1830 /* Return a subexpression of FNDECL that forbids inlining (such as a
     possible alloca call), if any; otherwise return NULL_TREE. */
1831 static tree
1832 inline_forbidden_p (tree fndecl)
1834 location_t saved_loc = input_location;
1835 block_stmt_iterator bsi;
1836 basic_block bb;
1837 tree ret = NULL_TREE;
1839 FOR_EACH_BB_FN (bb, DECL_STRUCT_FUNCTION (fndecl))
1840 for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
1842 ret = walk_tree_without_duplicates (bsi_stmt_ptr (bsi),
1843 inline_forbidden_p_1, fndecl);
1844 if (ret)
1845 goto egress;
1848 egress:
1849 input_location = saved_loc;
1850 return ret;
1853 /* Return true if FN is a function that does not have any
1854 fundamental inline-blocking properties. */
1856 static bool
1857 inlinable_function_p (tree fn)
1859 bool inlinable = true;
1861 /* If we've already decided this function shouldn't be inlined,
1862 there's no need to check again. */
1863 if (DECL_UNINLINABLE (fn))
1864 return false;
1866 /* See if there is any language-specific reason it cannot be
1867 inlined. (It is important that this hook be called early because
1868 in C++ it may result in template instantiation.)
1869 If the function is not inlinable for language-specific reasons,
1870 it is left up to the langhook to explain why. */
1871 inlinable = !lang_hooks.tree_inlining.cannot_inline_tree_fn (&fn);
1873 /* If we don't have the function body available, we can't inline it.
1874 However, this should not be recorded since we also get here for
1875 forward declared inline functions. Therefore, return at once. */
1876 if (!DECL_SAVED_TREE (fn))
1877 return false;
1879 /* If we're not inlining at all, then we cannot inline this function. */
1880 else if (!flag_inline_trees)
1881 inlinable = false;
1883 /* Only try to inline functions if DECL_INLINE is set. This should be
1884 true for all functions declared `inline', and for all other functions
1885 as well with -finline-functions.
1887 Don't think of disregarding DECL_INLINE when flag_inline_trees == 2;
1888 it's the front-end that must set DECL_INLINE in this case, because
1889 dwarf2out loses if a function that does not have DECL_INLINE set is
1890 inlined anyway. That is why we have both DECL_INLINE and
1891 DECL_DECLARED_INLINE_P. */
1892 /* FIXME: When flag_inline_trees dies, the check for flag_unit_at_a_time
1893 here should be redundant. */
1894 else if (!DECL_INLINE (fn) && !flag_unit_at_a_time)
1895 inlinable = false;
1897 else if (inline_forbidden_p (fn))
1899 /* See if we should warn about uninlinable functions. Previously,
1900 some of these warnings would be issued while trying to expand
1901 the function inline, but that would cause multiple warnings
1902 about functions that would for example call alloca. But since
1903 this is a property of the function, just one warning is enough.
1904 As a bonus we can now give more details about the reason why a
1905 function is not inlinable.
1906 We only warn for functions declared `inline' by the user. */
1907 bool do_warning = (warn_inline
1908 && DECL_INLINE (fn)
1909 && DECL_DECLARED_INLINE_P (fn)
1910 && !DECL_IN_SYSTEM_HEADER (fn));
1912 if (lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn)))
1913 sorry (inline_forbidden_reason, fn);
1914 else if (do_warning)
1915 warning (OPT_Winline, inline_forbidden_reason, fn);
1917 inlinable = false;
1920 /* Squirrel away the result so that we don't have to check again. */
1921 DECL_UNINLINABLE (fn) = !inlinable;
1923 return inlinable;
1926 /* Estimate the cost of a memory move. Use the machine-dependent
1927 word size and take a possible memcpy call into account. */
1929 static int
1930 estimate_move_cost (tree type)
1932 HOST_WIDE_INT size;
1934 size = int_size_in_bytes (type);
1936 if (size < 0 || size > MOVE_MAX_PIECES * MOVE_RATIO)
1937 /* Cost of a memcpy call, 3 arguments and the call. */
1938 return 4;
1939 else
1940 return ((size + MOVE_MAX_PIECES - 1) / MOVE_MAX_PIECES);
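/* Worked example (hypothetical target values, for illustration only):
   with MOVE_MAX_PIECES == 8 and MOVE_RATIO == 4, a 24-byte structure
   costs (24 + 8 - 1) / 8 == 3, whereas a 64-byte structure exceeds
   8 * 4 == 32 bytes and is charged the flat memcpy cost of 4.  */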
1943 /* Arguments for estimate_num_insns_1. */
1945 struct eni_data
1947 /* Used to return the number of insns. */
1948 int count;
1950 /* Weights of various constructs. */
1951 eni_weights *weights;
1954 /* Used by estimate_num_insns. Estimate number of instructions seen
1955 by given statement. */
1957 static tree
1958 estimate_num_insns_1 (tree *tp, int *walk_subtrees, void *data)
1960 struct eni_data *d = data;
1961 tree x = *tp;
1962 unsigned cost;
1964 if (IS_TYPE_OR_DECL_P (x))
1966 *walk_subtrees = 0;
1967 return NULL;
1969 /* Assume that constants and references count as nothing. Their cost
1970 should be dominated by the operations that use them, which we do count,
1971 and they are common targets of CSE and similar optimizations. */
1972 else if (CONSTANT_CLASS_P (x) || REFERENCE_CLASS_P (x))
1973 return NULL;
1975 switch (TREE_CODE (x))
1977 /* Containers have no cost. */
1978 case TREE_LIST:
1979 case TREE_VEC:
1980 case BLOCK:
1981 case COMPONENT_REF:
1982 case BIT_FIELD_REF:
1983 case INDIRECT_REF:
1984 case ALIGN_INDIRECT_REF:
1985 case MISALIGNED_INDIRECT_REF:
1986 case ARRAY_REF:
1987 case ARRAY_RANGE_REF:
1988 case OBJ_TYPE_REF:
1989 case EXC_PTR_EXPR: /* ??? */
1990 case FILTER_EXPR: /* ??? */
1991 case COMPOUND_EXPR:
1992 case BIND_EXPR:
1993 case WITH_CLEANUP_EXPR:
1994 case NOP_EXPR:
1995 case VIEW_CONVERT_EXPR:
1996 case SAVE_EXPR:
1997 case ADDR_EXPR:
1998 case COMPLEX_EXPR:
1999 case RANGE_EXPR:
2000 case CASE_LABEL_EXPR:
2001 case SSA_NAME:
2002 case CATCH_EXPR:
2003 case EH_FILTER_EXPR:
2004 case STATEMENT_LIST:
2005 case ERROR_MARK:
2006 case NON_LVALUE_EXPR:
2007 case FDESC_EXPR:
2008 case VA_ARG_EXPR:
2009 case TRY_CATCH_EXPR:
2010 case TRY_FINALLY_EXPR:
2011 case LABEL_EXPR:
2012 case GOTO_EXPR:
2013 case RETURN_EXPR:
2014 case EXIT_EXPR:
2015 case LOOP_EXPR:
2016 case PHI_NODE:
2017 case WITH_SIZE_EXPR:
2018 case OMP_CLAUSE:
2019 case OMP_RETURN:
2020 case OMP_CONTINUE:
2021 break;
2023 /* We don't account for constants for now. Assume that their cost is
2024 amortized by the operations that use them. We may reconsider this
2025 decision once we are able to optimize the tree before estimating its
2026 size and to break out static initializers. */
2027 case IDENTIFIER_NODE:
2028 case INTEGER_CST:
2029 case REAL_CST:
2030 case COMPLEX_CST:
2031 case VECTOR_CST:
2032 case STRING_CST:
2033 *walk_subtrees = 0;
2034 return NULL;
2036 /* Try to estimate the cost of assignments. We have three cases to
2037 deal with:
2038 1) Simple assignments to registers;
2039 2) Stores to things that must live in memory. This includes
2040 "normal" stores to scalars, but also assignments of large
2041 structures, or constructors of big arrays;
2042 3) TARGET_EXPRs.
2044 Let us look at the first two cases, assuming we have "a = b + C":
2045 <GIMPLE_MODIFY_STMT <var_decl "a">
2046 <plus_expr <var_decl "b"> <constant C>>
2047 If "a" is a GIMPLE register, the assignment to it is free on almost
2048 any target, because "a" usually ends up in a real register. Hence
2049 the only cost of this expression comes from the PLUS_EXPR, and we
2050 can ignore the GIMPLE_MODIFY_STMT.
2051 If "a" is not a GIMPLE register, the assignment to "a" will most
2052 likely be a real store, so the cost of the GIMPLE_MODIFY_STMT is the cost
2053 of moving something into "a", which we compute using the function
2054 estimate_move_cost.
2056 The third case deals with TARGET_EXPRs, for which the semantics are
2057 that a temporary is assigned, unless the TARGET_EXPR itself is being
2058 assigned to something else. In the latter case we do not need the
2059 temporary. E.g. in:
2060 <GIMPLE_MODIFY_STMT <var_decl "a"> <target_expr>>, the
2061 GIMPLE_MODIFY_STMT is free. */
2062 case INIT_EXPR:
2063 case GIMPLE_MODIFY_STMT:
2064 /* Is the right-hand side a TARGET_EXPR? */
2065 if (TREE_CODE (GENERIC_TREE_OPERAND (x, 1)) == TARGET_EXPR)
2066 break;
2067 /* ... fall through ... */
2069 case TARGET_EXPR:
2070 x = GENERIC_TREE_OPERAND (x, 0);
2071 /* Is this an assignment to a register? */
2072 if (is_gimple_reg (x))
2073 break;
2074 /* Otherwise it's a store, so fall through to compute the move cost. */
2076 case CONSTRUCTOR:
2077 d->count += estimate_move_cost (TREE_TYPE (x));
2078 break;
2080 /* Assign cost of 1 to usual operations.
2081 ??? We may consider mapping RTL costs to this. */
2082 case COND_EXPR:
2083 case VEC_COND_EXPR:
2085 case PLUS_EXPR:
2086 case MINUS_EXPR:
2087 case MULT_EXPR:
2089 case FIX_TRUNC_EXPR:
2091 case NEGATE_EXPR:
2092 case FLOAT_EXPR:
2093 case MIN_EXPR:
2094 case MAX_EXPR:
2095 case ABS_EXPR:
2097 case LSHIFT_EXPR:
2098 case RSHIFT_EXPR:
2099 case LROTATE_EXPR:
2100 case RROTATE_EXPR:
2101 case VEC_LSHIFT_EXPR:
2102 case VEC_RSHIFT_EXPR:
2104 case BIT_IOR_EXPR:
2105 case BIT_XOR_EXPR:
2106 case BIT_AND_EXPR:
2107 case BIT_NOT_EXPR:
2109 case TRUTH_ANDIF_EXPR:
2110 case TRUTH_ORIF_EXPR:
2111 case TRUTH_AND_EXPR:
2112 case TRUTH_OR_EXPR:
2113 case TRUTH_XOR_EXPR:
2114 case TRUTH_NOT_EXPR:
2116 case LT_EXPR:
2117 case LE_EXPR:
2118 case GT_EXPR:
2119 case GE_EXPR:
2120 case EQ_EXPR:
2121 case NE_EXPR:
2122 case ORDERED_EXPR:
2123 case UNORDERED_EXPR:
2125 case UNLT_EXPR:
2126 case UNLE_EXPR:
2127 case UNGT_EXPR:
2128 case UNGE_EXPR:
2129 case UNEQ_EXPR:
2130 case LTGT_EXPR:
2132 case CONVERT_EXPR:
2134 case CONJ_EXPR:
2136 case PREDECREMENT_EXPR:
2137 case PREINCREMENT_EXPR:
2138 case POSTDECREMENT_EXPR:
2139 case POSTINCREMENT_EXPR:
2141 case ASM_EXPR:
2143 case REALIGN_LOAD_EXPR:
2145 case REDUC_MAX_EXPR:
2146 case REDUC_MIN_EXPR:
2147 case REDUC_PLUS_EXPR:
2148 case WIDEN_SUM_EXPR:
2149 case DOT_PROD_EXPR:
2150 case VEC_WIDEN_MULT_HI_EXPR:
2151 case VEC_WIDEN_MULT_LO_EXPR:
2152 case VEC_UNPACK_HI_EXPR:
2153 case VEC_UNPACK_LO_EXPR:
2154 case VEC_PACK_MOD_EXPR:
2155 case VEC_PACK_SAT_EXPR:
2157 case WIDEN_MULT_EXPR:
2159 case VEC_EXTRACT_EVEN_EXPR:
2160 case VEC_EXTRACT_ODD_EXPR:
2161 case VEC_INTERLEAVE_HIGH_EXPR:
2162 case VEC_INTERLEAVE_LOW_EXPR:
2164 case RESX_EXPR:
2165 d->count += 1;
2166 break;
2168 case SWITCH_EXPR:
2169 /* TODO: Cost of a switch should be derived from the number of
2170 branches. */
2171 d->count += d->weights->switch_cost;
2172 break;
2174 /* A few special cases of expensive operations. This is useful
2175 for avoiding the inlining of functions that have too many of these. */
2176 case TRUNC_DIV_EXPR:
2177 case CEIL_DIV_EXPR:
2178 case FLOOR_DIV_EXPR:
2179 case ROUND_DIV_EXPR:
2180 case EXACT_DIV_EXPR:
2181 case TRUNC_MOD_EXPR:
2182 case CEIL_MOD_EXPR:
2183 case FLOOR_MOD_EXPR:
2184 case ROUND_MOD_EXPR:
2185 case RDIV_EXPR:
2186 d->count += d->weights->div_mod_cost;
2187 break;
2188 case CALL_EXPR:
2190 tree decl = get_callee_fndecl (x);
2192 cost = d->weights->call_cost;
2193 if (decl && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
2194 switch (DECL_FUNCTION_CODE (decl))
2196 case BUILT_IN_CONSTANT_P:
2197 *walk_subtrees = 0;
2198 return NULL_TREE;
2199 case BUILT_IN_EXPECT:
2200 return NULL_TREE;
2201 /* Prefetch instruction is not expensive. */
2202 case BUILT_IN_PREFETCH:
2203 cost = 1;
2204 break;
2205 default:
2206 break;
2209 /* Our cost must be kept in sync with cgraph_estimate_size_after_inlining,
2210 which uses the function declaration to figure out the arguments. */
2211 if (!decl)
2213 tree a;
2214 call_expr_arg_iterator iter;
2215 FOR_EACH_CALL_EXPR_ARG (a, iter, x)
2216 d->count += estimate_move_cost (TREE_TYPE (a));
2218 else
2220 tree arg;
2221 for (arg = DECL_ARGUMENTS (decl); arg; arg = TREE_CHAIN (arg))
2222 d->count += estimate_move_cost (TREE_TYPE (arg));
2225 d->count += cost;
2226 break;
2229 case OMP_PARALLEL:
2230 case OMP_FOR:
2231 case OMP_SECTIONS:
2232 case OMP_SINGLE:
2233 case OMP_SECTION:
2234 case OMP_MASTER:
2235 case OMP_ORDERED:
2236 case OMP_CRITICAL:
2237 case OMP_ATOMIC:
2238 /* OpenMP directives are generally very expensive. */
2239 d->count += d->weights->omp_cost;
2240 break;
2242 default:
2243 gcc_unreachable ();
2245 return NULL;
2248 /* Estimate number of instructions that will be created by expanding EXPR.
2249 WEIGHTS contains weights attributed to various constructs. */
2251 int
2252 estimate_num_insns (tree expr, eni_weights *weights)
2254 struct pointer_set_t *visited_nodes;
2255 basic_block bb;
2256 block_stmt_iterator bsi;
2257 struct function *my_function;
2258 struct eni_data data;
2260 data.count = 0;
2261 data.weights = weights;
2263 /* If we're given an entire function, walk the CFG. */
2264 if (TREE_CODE (expr) == FUNCTION_DECL)
2266 my_function = DECL_STRUCT_FUNCTION (expr);
2267 gcc_assert (my_function && my_function->cfg);
2268 visited_nodes = pointer_set_create ();
2269 FOR_EACH_BB_FN (bb, my_function)
2271 for (bsi = bsi_start (bb);
2272 !bsi_end_p (bsi);
2273 bsi_next (&bsi))
2275 walk_tree (bsi_stmt_ptr (bsi), estimate_num_insns_1,
2276 &data, visited_nodes);
2279 pointer_set_destroy (visited_nodes);
2281 else
2282 walk_tree_without_duplicates (&expr, estimate_num_insns_1, &data);
2284 return data.count;
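/* Illustrative use (a sketch, not part of the original file): a caller
   would typically evaluate a candidate FNDECL both for size and for
   time, using the weight sets initialized in init_inline_once below.  */
#if 0
  int size_estimate = estimate_num_insns (fndecl, &eni_size_weights);
  int time_estimate = estimate_num_insns (fndecl, &eni_time_weights);
#endif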
2287 /* Initializes weights used by estimate_num_insns. */
2289 void
2290 init_inline_once (void)
2292 eni_inlining_weights.call_cost = PARAM_VALUE (PARAM_INLINE_CALL_COST);
2293 eni_inlining_weights.div_mod_cost = 10;
2294 eni_inlining_weights.switch_cost = 1;
2295 eni_inlining_weights.omp_cost = 40;
2297 eni_size_weights.call_cost = 1;
2298 eni_size_weights.div_mod_cost = 1;
2299 eni_size_weights.switch_cost = 10;
2300 eni_size_weights.omp_cost = 40;
2302 /* Estimating the time for a call is difficult, since we have no idea what the
2303 called function does. In the current uses of eni_time_weights,
2304 underestimating the cost does less harm than overestimating it, so
2305 we choose a rather small value here. */
2306 eni_time_weights.call_cost = 10;
2307 eni_time_weights.div_mod_cost = 10;
2308 eni_time_weights.switch_cost = 4;
2309 eni_time_weights.omp_cost = 40;
2312 typedef struct function *function_p;
2314 DEF_VEC_P(function_p);
2315 DEF_VEC_ALLOC_P(function_p,heap);
2317 /* Initialized with NOGC, making this poisonous to the garbage collector. */
2318 static VEC(function_p,heap) *cfun_stack;
2320 void
2321 push_cfun (struct function *new_cfun)
2323 VEC_safe_push (function_p, heap, cfun_stack, cfun);
2324 cfun = new_cfun;
2327 void
2328 pop_cfun (void)
2330 cfun = VEC_pop (function_p, cfun_stack);
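/* Illustrative use of the pair above (a sketch, not part of the original
   file; OTHER_FNDECL is a made-up name): temporarily make another
   function current so its CFG can be examined through `cfun'.  */
#if 0
  push_cfun (DECL_STRUCT_FUNCTION (other_fndecl));
  /* ... work on that function's CFG via cfun ...  */
  pop_cfun ();
#endif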
2333 /* Install new lexical TREE_BLOCK underneath 'current_block'. */
2334 static void
2335 add_lexical_block (tree current_block, tree new_block)
2337 tree *blk_p;
2339 /* Walk to the last sub-block. */
2340 for (blk_p = &BLOCK_SUBBLOCKS (current_block);
2341 *blk_p;
2342 blk_p = &TREE_CHAIN (*blk_p))
2344 *blk_p = new_block;
2345 BLOCK_SUPERCONTEXT (new_block) = current_block;
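/* For illustration: if CURRENT_BLOCK already has sub-blocks B1 and B2
   chained as B1 -> B2, the loop above walks that TREE_CHAIN to its end
   and appends NEW_BLOCK, giving B1 -> B2 -> NEW_BLOCK, with NEW_BLOCK's
   BLOCK_SUPERCONTEXT pointing back at CURRENT_BLOCK.  */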
2348 /* If *TP is a CALL_EXPR, replace it with its inline expansion. */
2350 static bool
2351 expand_call_inline (basic_block bb, tree stmt, tree *tp, void *data)
2353 copy_body_data *id;
2354 tree t;
2355 tree use_retvar;
2356 tree fn;
2357 splay_tree st;
2358 tree return_slot;
2359 tree modify_dest;
2360 location_t saved_location;
2361 struct cgraph_edge *cg_edge;
2362 const char *reason;
2363 basic_block return_block;
2364 edge e;
2365 block_stmt_iterator bsi, stmt_bsi;
2366 bool successfully_inlined = FALSE;
2367 bool purge_dead_abnormal_edges;
2368 tree t_step;
2369 tree var;
2371 /* See what we've got. */
2372 id = (copy_body_data *) data;
2373 t = *tp;
2375 /* Set input_location here so we get the right instantiation context
2376 if we call instantiate_decl from inlinable_function_p. */
2377 saved_location = input_location;
2378 if (EXPR_HAS_LOCATION (t))
2379 input_location = EXPR_LOCATION (t);
2381 /* From here on, we're only interested in CALL_EXPRs. */
2382 if (TREE_CODE (t) != CALL_EXPR)
2383 goto egress;
2385 /* First, see if we can figure out what function is being called.
2386 If we cannot, then there is no hope of inlining the function. */
2387 fn = get_callee_fndecl (t);
2388 if (!fn)
2389 goto egress;
2391 /* Turn forward declarations into real ones. */
2392 fn = cgraph_node (fn)->decl;
2394 /* If fn is a declaration of a function in a nested scope that was
2395 globally declared inline, we don't set its DECL_INITIAL.
2396 However, we can't blindly follow DECL_ABSTRACT_ORIGIN because the
2397 C++ front-end uses it for cdtors to refer to their internal
2398 declarations, which are not real functions. Fortunately those
2399 don't have trees to be saved, so we can tell by checking their
2400 DECL_SAVED_TREE. */
2401 if (! DECL_INITIAL (fn)
2402 && DECL_ABSTRACT_ORIGIN (fn)
2403 && DECL_SAVED_TREE (DECL_ABSTRACT_ORIGIN (fn)))
2404 fn = DECL_ABSTRACT_ORIGIN (fn);
2406 /* Objective-C and Fortran still call tree_rest_of_compilation directly.
2407 Kill this check once that is fixed. */
2408 if (!id->dst_node->analyzed)
2409 goto egress;
2411 cg_edge = cgraph_edge (id->dst_node, stmt);
2413 /* Constant propagation on arguments done during previous inlining
2414 may create a new direct call. Produce an edge for it. */
2415 if (!cg_edge)
2417 struct cgraph_node *dest = cgraph_node (fn);
2419 /* We have a missing edge in the callgraph. This can happen when previous
2420 inlining turned an indirect call into a direct call by constant
2421 propagating arguments. In all other cases we hit a bug (incorrect node
2422 sharing is the most common reason for missing edges). */
2423 gcc_assert (dest->needed || !flag_unit_at_a_time);
2424 cgraph_create_edge (id->dst_node, dest, stmt,
2425 bb->count, CGRAPH_FREQ_BASE,
2426 bb->loop_depth)->inline_failed
2427 = N_("originally indirect function call not considered for inlining");
2428 if (dump_file)
2430 fprintf (dump_file, "Created new direct edge to %s",
2431 cgraph_node_name (dest));
2433 goto egress;
2436 /* Don't try to inline functions that are not well-suited to
2437 inlining. */
2438 if (!cgraph_inline_p (cg_edge, &reason))
2440 if (lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn))
2441 /* Avoid warnings during early inline pass. */
2442 && (!flag_unit_at_a_time || cgraph_global_info_ready))
2444 sorry ("inlining failed in call to %q+F: %s", fn, reason);
2445 sorry ("called from here");
2447 else if (warn_inline && DECL_DECLARED_INLINE_P (fn)
2448 && !DECL_IN_SYSTEM_HEADER (fn)
2449 && strlen (reason)
2450 && !lookup_attribute ("noinline", DECL_ATTRIBUTES (fn))
2451 /* Avoid warnings during early inline pass. */
2452 && (!flag_unit_at_a_time || cgraph_global_info_ready))
2454 warning (OPT_Winline, "inlining failed in call to %q+F: %s",
2455 fn, reason);
2456 warning (OPT_Winline, "called from here");
2458 goto egress;
2460 fn = cg_edge->callee->decl;
2462 #ifdef ENABLE_CHECKING
2463 if (cg_edge->callee->decl != id->dst_node->decl)
2464 verify_cgraph_node (cg_edge->callee);
2465 #endif
2467 /* We will be inlining this callee. */
2468 id->eh_region = lookup_stmt_eh_region (stmt);
2470 /* Split the block holding the CALL_EXPR. */
2471 e = split_block (bb, stmt);
2472 bb = e->src;
2473 return_block = e->dest;
2474 remove_edge (e);
2476 /* split_block splits after the statement; work around this by
2477 moving the call into the second block manually. Not pretty,
2478 but seems easier than doing the CFG manipulation by hand
2479 when the CALL_EXPR is in the last statement of BB. */
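/* Schematically (`foo' is a made-up name):
     before:             BB: s1; s2; foo ();
     after split_block:  BB: s1; s2; foo ();   RETURN_BLOCK: <empty>
     after the fixup:    BB: s1; s2;           RETURN_BLOCK: foo ();  */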
2480 stmt_bsi = bsi_last (bb);
2481 bsi_remove (&stmt_bsi, false);
2483 /* If the CALL_EXPR was in the last statement of BB, it may have
2484 been the source of abnormal edges. In this case, schedule
2485 the removal of dead abnormal edges. */
2486 bsi = bsi_start (return_block);
2487 if (bsi_end_p (bsi))
2489 bsi_insert_after (&bsi, stmt, BSI_NEW_STMT);
2490 purge_dead_abnormal_edges = true;
2492 else
2494 bsi_insert_before (&bsi, stmt, BSI_NEW_STMT);
2495 purge_dead_abnormal_edges = false;
2498 stmt_bsi = bsi_start (return_block);
2500 /* Build a block containing code to initialize the arguments, the
2501 actual inline expansion of the body, and a label for the return
2502 statements within the function to jump to. The type of the
2503 statement expression is the return type of the function call. */
2504 id->block = make_node (BLOCK);
2505 BLOCK_ABSTRACT_ORIGIN (id->block) = fn;
2506 BLOCK_SOURCE_LOCATION (id->block) = input_location;
2507 add_lexical_block (TREE_BLOCK (stmt), id->block);
2509 /* Local declarations will be replaced by their equivalents in this
2510 map. */
2511 st = id->decl_map;
2512 id->decl_map = splay_tree_new (splay_tree_compare_pointers,
2513 NULL, NULL);
2515 /* Record the function we are about to inline. */
2516 id->src_fn = fn;
2517 id->src_node = cg_edge->callee;
2518 id->src_cfun = DECL_STRUCT_FUNCTION (fn);
2520 initialize_inlined_parameters (id, t, fn, bb);
2522 if (DECL_INITIAL (fn))
2523 add_lexical_block (id->block, remap_blocks (DECL_INITIAL (fn), id));
2525 /* Return statements in the function body will be replaced by jumps
2526 to the RET_LABEL. */
2528 gcc_assert (DECL_INITIAL (fn));
2529 gcc_assert (TREE_CODE (DECL_INITIAL (fn)) == BLOCK);
2531 /* Find the lhs to which the result of this call is assigned. */
2532 return_slot = NULL;
2533 if (TREE_CODE (stmt) == GIMPLE_MODIFY_STMT)
2535 modify_dest = GIMPLE_STMT_OPERAND (stmt, 0);
2537 /* The function which we are inlining might not return a value,
2538 in which case we should issue a warning that the function
2539 does not return a value. In that case the optimizers will
2540 see that the variable to which the value is assigned was not
2541 initialized. We do not want to issue a warning about that
2542 uninitialized variable. */
2543 if (DECL_P (modify_dest))
2544 TREE_NO_WARNING (modify_dest) = 1;
2545 if (CALL_EXPR_RETURN_SLOT_OPT (t))
2547 return_slot = modify_dest;
2548 modify_dest = NULL;
2551 else
2552 modify_dest = NULL;
2554 /* Declare the return variable for the function. */
2555 declare_return_variable (id, return_slot,
2556 modify_dest, &use_retvar);
2558 /* This is it. Duplicate the callee body. Assume callee is
2559 pre-gimplified. Note that we must not alter the caller
2560 function in any way before this point, as this CALL_EXPR may be
2561 a self-referential call; if we're calling ourselves, we need to
2562 duplicate our body before altering anything. */
2563 copy_body (id, bb->count, bb->frequency, bb, return_block);
2565 /* Add local vars in this inlined callee to caller. */
2566 t_step = id->src_cfun->unexpanded_var_list;
2567 for (; t_step; t_step = TREE_CHAIN (t_step))
2569 var = TREE_VALUE (t_step);
2570 if (TREE_STATIC (var) && !TREE_ASM_WRITTEN (var))
2571 cfun->unexpanded_var_list = tree_cons (NULL_TREE, var,
2572 cfun->unexpanded_var_list);
2573 else
2574 cfun->unexpanded_var_list = tree_cons (NULL_TREE, remap_decl (var, id),
2575 cfun->unexpanded_var_list);
2578 /* Clean up. */
2579 splay_tree_delete (id->decl_map);
2580 id->decl_map = st;
2582 /* If the inlined function returns a result that we care about,
2583 clobber the CALL_EXPR with a reference to the return variable. */
2584 if (use_retvar && (TREE_CODE (bsi_stmt (stmt_bsi)) != CALL_EXPR))
2586 *tp = use_retvar;
2587 if (gimple_in_ssa_p (cfun))
2589 update_stmt (stmt);
2590 mark_symbols_for_renaming (stmt);
2592 maybe_clean_or_replace_eh_stmt (stmt, stmt);
2594 else
2595 /* We're modifying a TSI owned by gimple_expand_calls_inline();
2596 tsi_delink() will leave the iterator in a sane state. */
2598 /* Handle the case of inlining a function that lacks a return statement,
2599 so the return value becomes undefined. */
2600 if (TREE_CODE (stmt) == GIMPLE_MODIFY_STMT
2601 && TREE_CODE (GIMPLE_STMT_OPERAND (stmt, 0)) == SSA_NAME)
2603 tree name = TREE_OPERAND (stmt, 0);
2604 tree var = SSA_NAME_VAR (TREE_OPERAND (stmt, 0));
2605 tree def = gimple_default_def (cfun, var);
2607 /* If the variable is already used undefined, make this name undefined
2608 via a move from its default definition. */
2609 if (def)
2611 TREE_OPERAND (stmt, 1) = def;
2612 update_stmt (stmt);
2614 /* Otherwise make this variable undefined. */
2615 else
2617 bsi_remove (&stmt_bsi, true);
2618 set_default_def (var, name);
2619 SSA_NAME_DEF_STMT (name) = build_empty_stmt ();
2622 else
2623 bsi_remove (&stmt_bsi, true);
2626 if (purge_dead_abnormal_edges)
2627 tree_purge_dead_abnormal_call_edges (return_block);
2629 /* If the value of the new expression is ignored, that's OK. We
2630 don't warn about this for CALL_EXPRs, so we shouldn't warn about
2631 the equivalent inlined version either. */
2632 TREE_USED (*tp) = 1;
2634 /* Output the inlining info for this abstract function, since it has been
2635 inlined. If we don't do this now, we can lose the information about the
2636 variables in the function when the blocks get blown away as soon as we
2637 remove the cgraph node. */
2638 (*debug_hooks->outlining_inline_function) (cg_edge->callee->decl);
2640 /* Update callgraph if needed. */
2641 cgraph_remove_node (cg_edge->callee);
2643 id->block = NULL_TREE;
2644 successfully_inlined = TRUE;
2646 egress:
2647 input_location = saved_location;
2648 return successfully_inlined;
2651 /* Expand call statements reachable from basic block BB.
2652 We can only have CALL_EXPRs as the "toplevel" tree code or nested
2653 in a GIMPLE_MODIFY_STMT. See tree-gimple.c:get_call_expr_in(). We
2654 unfortunately cannot use that function here because we need a pointer
2655 to the CALL_EXPR, not the tree itself. */
2657 static bool
2658 gimple_expand_calls_inline (basic_block bb, copy_body_data *id)
2660 block_stmt_iterator bsi;
2662 /* Register specific tree functions. */
2663 tree_register_cfg_hooks ();
2664 for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
2666 tree *expr_p = bsi_stmt_ptr (bsi);
2667 tree stmt = *expr_p;
2669 if (TREE_CODE (*expr_p) == GIMPLE_MODIFY_STMT)
2670 expr_p = &GIMPLE_STMT_OPERAND (*expr_p, 1);
2671 if (TREE_CODE (*expr_p) == WITH_SIZE_EXPR)
2672 expr_p = &TREE_OPERAND (*expr_p, 0);
2673 if (TREE_CODE (*expr_p) == CALL_EXPR)
2674 if (expand_call_inline (bb, stmt, expr_p, id))
2675 return true;
2677 return false;
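/* For illustration, the GIMPLE statement shapes the loop above handles
   (made-up names):

     foo (a_1);                                CALL_EXPR is the statement
     b_2 = foo (a_1);                          CALL_EXPR under GIMPLE_MODIFY_STMT
     b_2 = WITH_SIZE_EXPR <foo (a_1), n_3>;    additionally wrapped

   which is why GIMPLE_MODIFY_STMT and WITH_SIZE_EXPR are peeled off
   before testing for CALL_EXPR.  */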
2680 /* Walk all basic blocks created after FIRST and try to fold every statement
2681 in the STATEMENTS pointer set. */
2682 static void
2683 fold_marked_statements (int first, struct pointer_set_t *statements)
2685 for (;first < n_basic_blocks;first++)
2686 if (BASIC_BLOCK (first))
2688 block_stmt_iterator bsi;
2689 for (bsi = bsi_start (BASIC_BLOCK (first));
2690 !bsi_end_p (bsi); bsi_next (&bsi))
2691 if (pointer_set_contains (statements, bsi_stmt (bsi)))
2693 tree old_stmt = bsi_stmt (bsi);
2694 if (fold_stmt (bsi_stmt_ptr (bsi)))
2696 update_stmt (bsi_stmt (bsi));
2697 if (maybe_clean_or_replace_eh_stmt (old_stmt, bsi_stmt (bsi)))
2698 tree_purge_dead_eh_edges (BASIC_BLOCK (first));
2704 /* Return true if BB has at least one abnormal outgoing edge. */
2706 static inline bool
2707 has_abnormal_outgoing_edge_p (basic_block bb)
2709 edge e;
2710 edge_iterator ei;
2712 FOR_EACH_EDGE (e, ei, bb->succs)
2713 if (e->flags & EDGE_ABNORMAL)
2714 return true;
2716 return false;
2719 /* When a block from the inlined function containing a call with side effects
2720 in its middle gets inlined into a function with non-local labels, the call
2721 becomes a potential non-local goto, so we need to add the appropriate edges. */
2723 static void
2724 make_nonlocal_label_edges (void)
2726 block_stmt_iterator bsi;
2727 basic_block bb;
2729 FOR_EACH_BB (bb)
2731 for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
2733 tree stmt = bsi_stmt (bsi);
2734 if (tree_can_make_abnormal_goto (stmt))
2736 if (stmt == bsi_stmt (bsi_last (bb)))
2738 if (!has_abnormal_outgoing_edge_p (bb))
2739 make_abnormal_goto_edges (bb, true);
2741 else
2743 edge e = split_block (bb, stmt);
2744 bb = e->src;
2745 make_abnormal_goto_edges (bb, true);
2747 break;
2750 /* Update PHIs on nonlocal goto receivers we (possibly)
2751 just created new edges into. */
2752 if (TREE_CODE (stmt) == LABEL_EXPR
2753 && gimple_in_ssa_p (cfun))
2755 tree target = LABEL_EXPR_LABEL (stmt);
2756 if (DECL_NONLOCAL (target))
2758 tree phi;
2760 for (phi = phi_nodes (bb); phi; phi = PHI_CHAIN (phi))
2762 gcc_assert (SSA_NAME_OCCURS_IN_ABNORMAL_PHI
2763 (PHI_RESULT (phi)));
2764 mark_sym_for_renaming
2765 (SSA_NAME_VAR (PHI_RESULT (phi)));
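/* Illustrative example (hypothetical, using GNU nested functions;
   `indirect' is a made-up name):

     void outer (int i)
     {
       void inner (void) { if (i) goto lab; }
       indirect (inner);    // this call may transfer control to `lab'
      lab: ;
     }

   Once such a call sits in the middle of a block of a function with
   non-local labels, the loop above splits the block and adds the
   abnormal edges to the potential receivers.  */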
2773 /* Expand calls to inline functions in the body of FN. */
2775 unsigned int
2776 optimize_inline_calls (tree fn)
2778 copy_body_data id;
2779 tree prev_fn;
2780 basic_block bb;
2781 int last = n_basic_blocks;
2782 /* There is no point in performing inlining if errors have already
2783 occurred -- and we might crash if we try to inline invalid
2784 code. */
2785 if (errorcount || sorrycount)
2786 return 0;
2788 /* Clear out ID. */
2789 memset (&id, 0, sizeof (id));
2791 id.src_node = id.dst_node = cgraph_node (fn);
2792 id.dst_fn = fn;
2793 /* Or any functions that aren't finished yet. */
2794 prev_fn = NULL_TREE;
2795 if (current_function_decl)
2797 id.dst_fn = current_function_decl;
2798 prev_fn = current_function_decl;
2801 id.copy_decl = copy_decl_maybe_to_var;
2802 id.transform_call_graph_edges = CB_CGE_DUPLICATE;
2803 id.transform_new_cfg = false;
2804 id.transform_return_to_modify = true;
2805 id.transform_lang_insert_block = false;
2806 id.statements_to_fold = pointer_set_create ();
2808 push_gimplify_context ();
2810 /* Reach the trees by walking over the CFG, and note the
2811 enclosing basic-blocks in the call edges. */
2812 /* We walk the blocks going forward, because inlined function bodies
2813 will split id->current_basic_block, and the new blocks will
2814 follow it; we'll trudge through them, processing their CALL_EXPRs
2815 along the way. */
2816 FOR_EACH_BB (bb)
2817 gimple_expand_calls_inline (bb, &id);
2819 pop_gimplify_context (NULL);
2820 /* Renumber the (code) basic_blocks consecutively. */
2821 compact_blocks ();
2822 /* Renumber the lexical scoping (non-code) blocks consecutively. */
2823 number_blocks (fn);
2825 #ifdef ENABLE_CHECKING
2827 struct cgraph_edge *e;
2829 verify_cgraph_node (id.dst_node);
2831 /* Double check that we inlined everything we are supposed to inline. */
2832 for (e = id.dst_node->callees; e; e = e->next_callee)
2833 gcc_assert (e->inline_failed);
2835 #endif
2837 /* We are not going to keep the cgraph edges up to date.
2838 Remove them so they won't confuse us. */
2839 cgraph_node_remove_callees (id.dst_node);
2841 fold_marked_statements (last, id.statements_to_fold);
2842 pointer_set_destroy (id.statements_to_fold);
2843 fold_cond_expr_cond ();
2844 if (current_function_has_nonlocal_label)
2845 make_nonlocal_label_edges ();
2846 /* We make no attempts to keep dominance info up-to-date. */
2847 free_dominance_info (CDI_DOMINATORS);
2848 free_dominance_info (CDI_POST_DOMINATORS);
2849 /* It would be nice to check SSA/CFG/statement consistency here, but it is
2850 not possible yet - the IPA passes might make various functions non-throwing
2851 without proactively updating local EH info. This is done later in the
2852 fixup_cfg pass, which also executes the verification. */
2853 return (TODO_update_ssa | TODO_cleanup_cfg
2854 | (gimple_in_ssa_p (cfun) ? TODO_remove_unused_locals : 0)
2855 | (profile_status != PROFILE_ABSENT ? TODO_rebuild_frequencies : 0));
2858 /* FN is a function that has a complete body, and CLONE is a function whose
2859 body is to be set to a copy of FN, mapping argument declarations according
2860 to the ARG_MAP splay_tree. */
2862 void
2863 clone_body (tree clone, tree fn, void *arg_map)
2865 copy_body_data id;
2867 /* Clone the body, as if we were making an inline call. But, remap the
2868 parameters in the callee to the parameters of caller. */
2869 memset (&id, 0, sizeof (id));
2870 id.src_fn = fn;
2871 id.dst_fn = clone;
2872 id.src_cfun = DECL_STRUCT_FUNCTION (fn);
2873 id.decl_map = (splay_tree)arg_map;
2875 id.copy_decl = copy_decl_no_change;
2876 id.transform_call_graph_edges = CB_CGE_DUPLICATE;
2877 id.transform_new_cfg = true;
2878 id.transform_return_to_modify = false;
2879 id.transform_lang_insert_block = true;
2881 /* We're not inside any EH region. */
2882 id.eh_region = -1;
2884 /* Actually copy the body. */
2885 append_to_statement_list_force (copy_generic_body (&id), &DECL_SAVED_TREE (clone));
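/* Illustrative use (a sketch, not part of the original file; FN, CLONE,
   OLD_PARM and NEW_PARM are made-up names): a front end would map each
   parameter of FN to the corresponding parameter of CLONE before copying.  */
#if 0
  splay_tree arg_map = splay_tree_new (splay_tree_compare_pointers, NULL, NULL);
  splay_tree_insert (arg_map, (splay_tree_key) old_parm,
                     (splay_tree_value) new_parm);
  clone_body (clone, fn, arg_map);
  splay_tree_delete (arg_map);
#endif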
2888 /* Passed to walk_tree. Copies the node pointed to, if appropriate. */
2890 tree
2891 copy_tree_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
2893 enum tree_code code = TREE_CODE (*tp);
2894 enum tree_code_class cl = TREE_CODE_CLASS (code);
2896 /* We make copies of most nodes. */
2897 if (IS_EXPR_CODE_CLASS (cl)
2898 || IS_GIMPLE_STMT_CODE_CLASS (cl)
2899 || code == TREE_LIST
2900 || code == TREE_VEC
2901 || code == TYPE_DECL
2902 || code == OMP_CLAUSE)
2904 /* Because the chain gets clobbered when we make a copy, we save it
2905 here. */
2906 tree chain = NULL_TREE, new;
2908 if (!GIMPLE_TUPLE_P (*tp))
2909 chain = TREE_CHAIN (*tp);
2911 /* Copy the node. */
2912 new = copy_node (*tp);
2914 /* Propagate mudflap marked-ness. */
2915 if (flag_mudflap && mf_marked_p (*tp))
2916 mf_mark (new);
2918 *tp = new;
2920 /* Now, restore the chain, if appropriate. That will cause
2921 walk_tree to walk into the chain as well. */
2922 if (code == PARM_DECL
2923 || code == TREE_LIST
2924 || code == OMP_CLAUSE)
2925 TREE_CHAIN (*tp) = chain;
2927 /* For now, we don't update BLOCKs when we make copies. So, we
2928 have to nullify all BIND_EXPRs. */
2929 if (TREE_CODE (*tp) == BIND_EXPR)
2930 BIND_EXPR_BLOCK (*tp) = NULL_TREE;
2932 else if (code == CONSTRUCTOR)
2934 /* CONSTRUCTOR nodes need special handling because
2935 we need to duplicate the vector of elements. */
2936 tree new;
2938 new = copy_node (*tp);
2940 /* Propagate mudflap marked-ness. */
2941 if (flag_mudflap && mf_marked_p (*tp))
2942 mf_mark (new);
2944 CONSTRUCTOR_ELTS (new) = VEC_copy (constructor_elt, gc,
2945 CONSTRUCTOR_ELTS (*tp));
2946 *tp = new;
2948 else if (TREE_CODE_CLASS (code) == tcc_type)
2949 *walk_subtrees = 0;
2950 else if (TREE_CODE_CLASS (code) == tcc_declaration)
2951 *walk_subtrees = 0;
2952 else if (TREE_CODE_CLASS (code) == tcc_constant)
2953 *walk_subtrees = 0;
2954 else
2955 gcc_assert (code != STATEMENT_LIST);
2956 return NULL_TREE;
2959 /* The SAVE_EXPR pointed to by TP is being copied. If ST contains
2960 information indicating to what new SAVE_EXPR this one should be mapped,
2961 use that one. Otherwise, create a new node and enter it in ST. FN is
2962 the function into which the copy will be placed. */
2964 static void
2965 remap_save_expr (tree *tp, void *st_, int *walk_subtrees)
2967 splay_tree st = (splay_tree) st_;
2968 splay_tree_node n;
2969 tree t;
2971 /* See if we already encountered this SAVE_EXPR. */
2972 n = splay_tree_lookup (st, (splay_tree_key) *tp);
2974 /* If we didn't already remap this SAVE_EXPR, do so now. */
2975 if (!n)
2977 t = copy_node (*tp);
2979 /* Remember this SAVE_EXPR. */
2980 splay_tree_insert (st, (splay_tree_key) *tp, (splay_tree_value) t);
2981 /* Make sure we don't remap an already-remapped SAVE_EXPR. */
2982 splay_tree_insert (st, (splay_tree_key) t, (splay_tree_value) t);
2984 else
2986 /* We've already walked into this SAVE_EXPR; don't do it again. */
2987 *walk_subtrees = 0;
2988 t = (tree) n->value;
2991 /* Replace this SAVE_EXPR with the copy. */
2992 *tp = t;
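/* For illustration (hypothetical tree): in

     MULT_EXPR <SAVE_EXPR <e>, SAVE_EXPR <e>>

   both operands are the same SAVE_EXPR node; recording the first copy in
   ST ensures the second visit reuses it, so the sharing survives in the
   duplicated tree.  */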
2995 /* Called via walk_tree. If *TP points to a LABEL_EXPR for a local label,
2996 copies the label's declaration and enters it in the splay_tree in DATA
2997 (which is really a `copy_body_data *'). */
2999 static tree
3000 mark_local_for_remap_r (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED,
3001 void *data)
3003 copy_body_data *id = (copy_body_data *) data;
3005 /* Don't walk into types. */
3006 if (TYPE_P (*tp))
3007 *walk_subtrees = 0;
3009 else if (TREE_CODE (*tp) == LABEL_EXPR)
3011 tree decl = TREE_OPERAND (*tp, 0);
3013 /* Copy the decl and remember the copy. */
3014 insert_decl_map (id, decl, id->copy_decl (decl, id));
3017 return NULL_TREE;
3020 /* Perform any modifications to EXPR required when it is unsaved. Does
3021 not recurse into EXPR's subtrees. */
3023 static void
3024 unsave_expr_1 (tree expr)
3026 switch (TREE_CODE (expr))
3028 case TARGET_EXPR:
3029 /* Don't mess with a TARGET_EXPR that hasn't been expanded.
3030 It's OK for this to happen if it was part of a subtree that
3031 isn't immediately expanded, such as operand 2 of another
3032 TARGET_EXPR. */
3033 if (TREE_OPERAND (expr, 1))
3034 break;
3036 TREE_OPERAND (expr, 1) = TREE_OPERAND (expr, 3);
3037 TREE_OPERAND (expr, 3) = NULL_TREE;
3038 break;
3040 default:
3041 break;
3045 /* Called via walk_tree when an expression is unsaved. Using the
3046 decl_map splay_tree of the copy_body_data pointed to by DATA,
3047 remaps all local declarations to appropriate replacements. */
3049 static tree
3050 unsave_r (tree *tp, int *walk_subtrees, void *data)
3052 copy_body_data *id = (copy_body_data *) data;
3053 splay_tree st = id->decl_map;
3054 splay_tree_node n;
3056 /* Only a local declaration (variable or label). */
3057 if ((TREE_CODE (*tp) == VAR_DECL && !TREE_STATIC (*tp))
3058 || TREE_CODE (*tp) == LABEL_DECL)
3060 /* Lookup the declaration. */
3061 n = splay_tree_lookup (st, (splay_tree_key) *tp);
3063 /* If it's there, remap it. */
3064 if (n)
3065 *tp = (tree) n->value;
3068 else if (TREE_CODE (*tp) == STATEMENT_LIST)
3069 copy_statement_list (tp);
3070 else if (TREE_CODE (*tp) == BIND_EXPR)
3071 copy_bind_expr (tp, walk_subtrees, id);
3072 else if (TREE_CODE (*tp) == SAVE_EXPR)
3073 remap_save_expr (tp, st, walk_subtrees);
3074 else
3076 copy_tree_r (tp, walk_subtrees, NULL);
3078 /* Do whatever unsaving is required. */
3079 unsave_expr_1 (*tp);
3082 /* Keep iterating. */
3083 return NULL_TREE;
3086 /* Copies everything in EXPR and replaces variables, labels
3087 and SAVE_EXPRs local to EXPR. */
3089 tree
3090 unsave_expr_now (tree expr)
3092 copy_body_data id;
3094 /* There's nothing to do for NULL_TREE. */
3095 if (expr == 0)
3096 return expr;
3098 /* Set up ID. */
3099 memset (&id, 0, sizeof (id));
3100 id.src_fn = current_function_decl;
3101 id.dst_fn = current_function_decl;
3102 id.decl_map = splay_tree_new (splay_tree_compare_pointers, NULL, NULL);
3104 id.copy_decl = copy_decl_no_change;
3105 id.transform_call_graph_edges = CB_CGE_DUPLICATE;
3106 id.transform_new_cfg = false;
3107 id.transform_return_to_modify = false;
3108 id.transform_lang_insert_block = false;
3110 /* Walk the tree once to find local labels. */
3111 walk_tree_without_duplicates (&expr, mark_local_for_remap_r, &id);
3113 /* Walk the tree again, copying, remapping, and unsaving. */
3114 walk_tree (&expr, unsave_r, &id, NULL);
3116 /* Clean up. */
3117 splay_tree_delete (id.decl_map);
3119 return expr;
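/* Illustrative use (a sketch; EXPR is any tree owned by the current
   function): `tree copy = unsave_expr_now (expr);' yields a deep copy
   in which variables, labels and SAVE_EXPRs local to EXPR have been
   replaced by fresh nodes.  */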
3122 /* Allow someone to determine if SEARCH is a child of TOP from gdb. */
3124 static tree
3125 debug_find_tree_1 (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED, void *data)
3127 if (*tp == data)
3128 return (tree) data;
3129 else
3130 return NULL;
3133 bool
3134 debug_find_tree (tree top, tree search)
3136 return walk_tree_without_duplicates (&top, debug_find_tree_1, search) != 0;
3140 /* Declare the variables created by the inliner. Add all the variables in
3141 VARS to BLOCK. */
3143 static void
3144 declare_inline_vars (tree block, tree vars)
3146 tree t;
3147 for (t = vars; t; t = TREE_CHAIN (t))
3149 DECL_SEEN_IN_BIND_EXPR_P (t) = 1;
3150 gcc_assert (!TREE_STATIC (t) && !TREE_ASM_WRITTEN (t));
3151 cfun->unexpanded_var_list =
3152 tree_cons (NULL_TREE, t,
3153 cfun->unexpanded_var_list);
3156 if (block)
3157 BLOCK_VARS (block) = chainon (BLOCK_VARS (block), vars);
3161 /* Finish setting up COPY, a copy of DECL. DECL originally was in the
3162 source function of ID, but COPY will live in the destination function;
3163 adjust its debug, RTL and context information accordingly. */
3165 static tree
3166 copy_decl_for_dup_finish (copy_body_data *id, tree decl, tree copy)
3168 /* Don't generate debug information for the copy if we wouldn't have
3169 generated it for the original either. */
3170 DECL_ARTIFICIAL (copy) = DECL_ARTIFICIAL (decl);
3171 DECL_IGNORED_P (copy) = DECL_IGNORED_P (decl);
3173 /* Set the DECL_ABSTRACT_ORIGIN so the debugging routines know what
3174 declaration inspired this copy. */
3175 DECL_ABSTRACT_ORIGIN (copy) = DECL_ORIGIN (decl);
3177 /* The new variable/label has no RTL, yet. */
3178 if (CODE_CONTAINS_STRUCT (TREE_CODE (copy), TS_DECL_WRTL)
3179 && !TREE_STATIC (copy) && !DECL_EXTERNAL (copy))
3180 SET_DECL_RTL (copy, NULL_RTX);
3182 /* These args would always appear unused, if not for this. */
3183 TREE_USED (copy) = 1;
3185 /* Set the context for the new declaration. */
3186 if (!DECL_CONTEXT (decl))
3187 /* Globals stay global. */
3189 else if (DECL_CONTEXT (decl) != id->src_fn)
3190 /* Things that weren't in the scope of the function we're inlining
3191 from aren't in the scope we're inlining to, either. */
3193 else if (TREE_STATIC (decl))
3194 /* Function-scoped static variables should stay in the original
3195 function. */
3197 else
3198 /* Ordinary automatic local variables are now in the scope of the
3199 new function. */
3200 DECL_CONTEXT (copy) = id->dst_fn;
3202 return copy;
3205 static tree
3206 copy_decl_to_var (tree decl, copy_body_data *id)
3208 tree copy, type;
3210 gcc_assert (TREE_CODE (decl) == PARM_DECL
3211 || TREE_CODE (decl) == RESULT_DECL);
3213 type = TREE_TYPE (decl);
3215 copy = build_decl (VAR_DECL, DECL_NAME (decl), type);
3216 TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (decl);
3217 TREE_READONLY (copy) = TREE_READONLY (decl);
3218 TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (decl);
3219 DECL_GIMPLE_REG_P (copy) = DECL_GIMPLE_REG_P (decl);
3221 return copy_decl_for_dup_finish (id, decl, copy);
3224 /* Like copy_decl_to_var, but create a return slot object instead of a
3225 pointer variable for return by invisible reference. */
3227 static tree
3228 copy_result_decl_to_var (tree decl, copy_body_data *id)
3230 tree copy, type;
3232 gcc_assert (TREE_CODE (decl) == PARM_DECL
3233 || TREE_CODE (decl) == RESULT_DECL);
3235 type = TREE_TYPE (decl);
3236 if (DECL_BY_REFERENCE (decl))
3237 type = TREE_TYPE (type);
3239 copy = build_decl (VAR_DECL, DECL_NAME (decl), type);
3240 TREE_READONLY (copy) = TREE_READONLY (decl);
3241 TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (decl);
3242 if (!DECL_BY_REFERENCE (decl))
3244 TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (decl);
3245 DECL_GIMPLE_REG_P (copy) = DECL_GIMPLE_REG_P (decl);
3248 return copy_decl_for_dup_finish (id, decl, copy);
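/* For illustration (hypothetical example): if `struct big f (void)'
   returns by invisible reference, its RESULT_DECL has pointer type and
   DECL_BY_REFERENCE set; the copy made above is then a plain
   `struct big' variable that serves as the return slot, not a pointer.  */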
3252 static tree
3253 copy_decl_no_change (tree decl, copy_body_data *id)
3255 tree copy;
3257 copy = copy_node (decl);
3259 /* The COPY is not abstract; it will be generated in DST_FN. */
3260 DECL_ABSTRACT (copy) = 0;
3261 lang_hooks.dup_lang_specific_decl (copy);
3263 /* TREE_ADDRESSABLE isn't used to indicate that a label's address has
3264 been taken; it's for internal bookkeeping in expand_goto_internal. */
3265 if (TREE_CODE (copy) == LABEL_DECL)
3267 TREE_ADDRESSABLE (copy) = 0;
3268 LABEL_DECL_UID (copy) = -1;
3271 return copy_decl_for_dup_finish (id, decl, copy);
3274 static tree
3275 copy_decl_maybe_to_var (tree decl, copy_body_data *id)
3277 if (TREE_CODE (decl) == PARM_DECL || TREE_CODE (decl) == RESULT_DECL)
3278 return copy_decl_to_var (decl, id);
3279 else
3280 return copy_decl_no_change (decl, id);
3283 /* Return a copy of the function's argument tree. */
3284 static tree
3285 copy_arguments_for_versioning (tree orig_parm, copy_body_data * id)
3287 tree *arg_copy, *parg;
3289 arg_copy = &orig_parm;
3290 for (parg = arg_copy; *parg; parg = &TREE_CHAIN (*parg))
3292 tree new = remap_decl (*parg, id);
3293 lang_hooks.dup_lang_specific_decl (new);
3294 TREE_CHAIN (new) = TREE_CHAIN (*parg);
3295 *parg = new;
3297 return orig_parm;
3300 /* Return a copy of the function's static chain. */
3301 static tree
3302 copy_static_chain (tree static_chain, copy_body_data * id)
3304 tree *chain_copy, *pvar;
3306 chain_copy = &static_chain;
3307 for (pvar = chain_copy; *pvar; pvar = &TREE_CHAIN (*pvar))
3309 tree new = remap_decl (*pvar, id);
3310 lang_hooks.dup_lang_specific_decl (new);
3311 TREE_CHAIN (new) = TREE_CHAIN (*pvar);
3312 *pvar = new;
3314 return static_chain;
3317 /* Return true if the function is allowed to be versioned.
3318 This is a guard for the versioning functionality. */
3319 bool
3320 tree_versionable_function_p (tree fndecl)
3322 if (fndecl == NULL_TREE)
3323 return false;
3324 /* ??? There are cases where a function is
3325 uninlinable but can be versioned. */
3326 if (!tree_inlinable_function_p (fndecl))
3327 return false;
3329 return true;
3332 /* Create a copy of a function's tree.
3333 OLD_DECL and NEW_DECL are FUNCTION_DECL tree nodes
3334 of the original function and the new copied function
3335 respectively. In case we want to replace a DECL
3336 tree with another tree while duplicating the function's
3337 body, TREE_MAP represents the mapping between these
3338 trees. If UPDATE_CLONES is set, the call_stmt fields
3339 of edges of clones of the function will be updated. */
3340 void
3341 tree_function_versioning (tree old_decl, tree new_decl, varray_type tree_map,
3342 bool update_clones)
3344 struct cgraph_node *old_version_node;
3345 struct cgraph_node *new_version_node;
3346 copy_body_data id;
3347 tree p;
3348 unsigned i;
3349 struct ipa_replace_map *replace_info;
3350 basic_block old_entry_block;
3351 tree t_step;
3352 tree old_current_function_decl = current_function_decl;
3354 gcc_assert (TREE_CODE (old_decl) == FUNCTION_DECL
3355 && TREE_CODE (new_decl) == FUNCTION_DECL);
3356 DECL_POSSIBLY_INLINED (old_decl) = 1;
3358 old_version_node = cgraph_node (old_decl);
3359 new_version_node = cgraph_node (new_decl);
3361 DECL_ARTIFICIAL (new_decl) = 1;
3362 DECL_ABSTRACT_ORIGIN (new_decl) = DECL_ORIGIN (old_decl);
3364 /* Prepare the data structures for the tree copy. */
3365 memset (&id, 0, sizeof (id));
3367 /* Generate a new name for the new version. */
3368 if (!update_clones)
3370 DECL_NAME (new_decl) = create_tmp_var_name (NULL);
3371 SET_DECL_ASSEMBLER_NAME (new_decl, DECL_NAME (new_decl));
3372 SET_DECL_RTL (new_decl, NULL_RTX);
3373 id.statements_to_fold = pointer_set_create ();
3376 id.decl_map = splay_tree_new (splay_tree_compare_pointers, NULL, NULL);
3377 id.src_fn = old_decl;
3378 id.dst_fn = new_decl;
3379 id.src_node = old_version_node;
3380 id.dst_node = new_version_node;
3381 id.src_cfun = DECL_STRUCT_FUNCTION (old_decl);
3383 id.copy_decl = copy_decl_no_change;
3384 id.transform_call_graph_edges
3385 = update_clones ? CB_CGE_MOVE_CLONES : CB_CGE_MOVE;
3386 id.transform_new_cfg = true;
3387 id.transform_return_to_modify = false;
3388 id.transform_lang_insert_block = false;
3390 current_function_decl = new_decl;
3391 old_entry_block = ENTRY_BLOCK_PTR_FOR_FUNCTION
3392 (DECL_STRUCT_FUNCTION (old_decl));
3393 initialize_cfun (new_decl, old_decl,
3394 old_entry_block->count,
3395 old_entry_block->frequency);
3396 push_cfun (DECL_STRUCT_FUNCTION (new_decl));
3398 /* Copy the function's static chain. */
3399 p = DECL_STRUCT_FUNCTION (old_decl)->static_chain_decl;
3400 if (p)
3401 DECL_STRUCT_FUNCTION (new_decl)->static_chain_decl =
3402 copy_static_chain (DECL_STRUCT_FUNCTION (old_decl)->static_chain_decl,
3403 &id);
3404 /* Copy the function's arguments. */
3405 if (DECL_ARGUMENTS (old_decl) != NULL_TREE)
3406 DECL_ARGUMENTS (new_decl) =
3407 copy_arguments_for_versioning (DECL_ARGUMENTS (old_decl), &id);
3409 /* If there's a tree_map, prepare for substitution. */
3410 if (tree_map)
3411 for (i = 0; i < VARRAY_ACTIVE_SIZE (tree_map); i++)
3413 replace_info = VARRAY_GENERIC_PTR (tree_map, i);
3414 if (replace_info->replace_p)
3415 insert_decl_map (&id, replace_info->old_tree,
3416 replace_info->new_tree);
3419 DECL_INITIAL (new_decl) = remap_blocks (DECL_INITIAL (id.src_fn), &id);
3421 /* Renumber the lexical scoping (non-code) blocks consecutively. */
3422 number_blocks (id.dst_fn);
3424 if (DECL_STRUCT_FUNCTION (old_decl)->unexpanded_var_list != NULL_TREE)
3425 /* Add local vars. */
3426 for (t_step = DECL_STRUCT_FUNCTION (old_decl)->unexpanded_var_list;
3427 t_step; t_step = TREE_CHAIN (t_step))
3429 tree var = TREE_VALUE (t_step);
3430 if (TREE_STATIC (var) && !TREE_ASM_WRITTEN (var))
3431 cfun->unexpanded_var_list = tree_cons (NULL_TREE, var,
3432 cfun->unexpanded_var_list);
3433 else
3434 cfun->unexpanded_var_list =
3435 tree_cons (NULL_TREE, remap_decl (var, &id),
3436 cfun->unexpanded_var_list);
3439 /* Copy the function's body. */
3440 copy_body (&id, old_entry_block->count, old_entry_block->frequency, ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR);
3442 if (DECL_RESULT (old_decl) != NULL_TREE)
3444 tree *res_decl = &DECL_RESULT (old_decl);
3445 DECL_RESULT (new_decl) = remap_decl (*res_decl, &id);
3446 lang_hooks.dup_lang_specific_decl (DECL_RESULT (new_decl));
3449 /* Renumber the lexical scoping (non-code) blocks consecutively. */
3450 number_blocks (new_decl);
3452 /* Clean up. */
3453 splay_tree_delete (id.decl_map);
3454 if (!update_clones)
3456 fold_marked_statements (0, id.statements_to_fold);
3457 pointer_set_destroy (id.statements_to_fold);
3458 fold_cond_expr_cond ();
3460 if (gimple_in_ssa_p (cfun))
3462 free_dominance_info (CDI_DOMINATORS);
3463 free_dominance_info (CDI_POST_DOMINATORS);
3464 if (!update_clones)
3465 delete_unreachable_blocks ();
3466 update_ssa (TODO_update_ssa);
3467 if (!update_clones)
3469 fold_cond_expr_cond ();
3470 if (need_ssa_update_p ())
3471 update_ssa (TODO_update_ssa);
3474 free_dominance_info (CDI_DOMINATORS);
3475 free_dominance_info (CDI_POST_DOMINATORS);
3476 pop_cfun ();
3477 current_function_decl = old_current_function_decl;
3478 gcc_assert (!current_function_decl
3479 || DECL_STRUCT_FUNCTION (current_function_decl) == cfun);
3480 return;
3483 /* Duplicate a type, fields and all. */
3485 tree
3486 build_duplicate_type (tree type)
3488 struct copy_body_data id;
3490 memset (&id, 0, sizeof (id));
3491 id.src_fn = current_function_decl;
3492 id.dst_fn = current_function_decl;
3493 id.src_cfun = cfun;
3494 id.decl_map = splay_tree_new (splay_tree_compare_pointers, NULL, NULL);
3496 type = remap_type_1 (type, &id);
3498 splay_tree_delete (id.decl_map);
3500 return type;