/* Tree inlining.
   Copyright 2001, 2002, 2003, 2004, 2005 Free Software Foundation, Inc.
   Contributed by Alexandre Oliva <aoliva@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to
the Free Software Foundation, 51 Franklin Street, Fifth Floor,
Boston, MA 02110-1301, USA.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "toplev.h"
#include "tree.h"
#include "tree-inline.h"
#include "flags.h"
#include "params.h"
#include "input.h"
#include "insn-config.h"
#include "varray.h"
#include "hashtab.h"
#include "splay-tree.h"
#include "langhooks.h"
#include "basic-block.h"
#include "tree-iterator.h"
#include "cgraph.h"
#include "intl.h"
#include "tree-mudflap.h"
#include "tree-flow.h"
#include "function.h"
#include "ggc.h"
#include "diagnostic.h"
#include "except.h"
#include "debug.h"
#include "pointer-set.h"
#include "ipa-prop.h"
#include "tree-pass.h"

/* I'm not real happy about this, but we need to handle gimple and
   non-gimple trees.  */
#include "tree-gimple.h"
/* Inlining, Saving, Cloning

   Inlining: a function body is duplicated, but the PARM_DECLs are
   remapped into VAR_DECLs, and non-void RETURN_EXPRs become
   MODIFY_EXPRs that store to a dedicated returned-value variable.
   The duplicated eh_region info of the copy will later be appended
   to the info for the caller; the eh_region info in copied throwing
   statements and RESX_EXPRs is adjusted accordingly.

   Saving: make a semantically-identical copy of the function body.
   Necessary when we want to generate code for the body (a destructive
   operation), but we expect to need this body in the future (e.g. for
   inlining into another function).

   Cloning: (only in C++) We have one body for a con/de/structor, and
   multiple function decls, each with a unique parameter list.
   Duplicate the body, using the given splay tree; some parameters
   will become constants (like 0 or 1).

   All of these will simultaneously look up any callgraph edges.  If
   we're going to inline the duplicated function body, and the given
   function has some cloned callgraph nodes (one for each place this
   function will be inlined) those callgraph edges will be duplicated.
   If we're saving or cloning the body, those callgraph edges will be
   updated to point into the new body.  (Note that the original
   callgraph node and edge list will not be altered.)

   See the CALL_EXPR handling case in copy_body_r ().  */
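
/* For illustration only (this sketch is not part of GCC): given a
   callee

     static int add1 (int x) { return x + 1; }

   inlining the call "r = add1 (a);" conceptually turns the duplicated
   body into

     { int x.1;             (PARM_DECL remapped to a VAR_DECL)
       int retval.2;        (the dedicated returned-value variable)
       x.1 = a;             (parameter initialized from the argument)
       retval.2 = x.1 + 1;  (RETURN_EXPR becomes a MODIFY_EXPR)
       r = retval.2; }      (the caller reads the result)

   The ".N" names are invented for this sketch; the real copies are
   produced by remap_decl, setup_one_parameter and
   declare_return_variable below.  */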
/* 0 if we should not perform inlining.
   1 if we should expand function calls inline at the tree level.
   2 if we should consider *all* functions to be inline
   candidates.  */

int flag_inline_trees = 0;
/* To Do:

   o In order to make inlining-on-trees work, we pessimized
     function-local static constants.  In particular, they are now
     always output, even when not addressed.  Fix this by treating
     function-local static constants just like global static
     constants; the back-end already knows not to output them if they
     are not needed.

   o Provide heuristics to clamp inlining of recursive template
     calls?  */
/* Data required for function inlining.  */

typedef struct inline_data
{
  /* FUNCTION_DECL for function being inlined.  */
  tree callee;
  /* FUNCTION_DECL for function being inlined into.  */
  tree caller;
  /* struct function for function being inlined.  */
  struct function *callee_cfun;
  /* The VAR_DECL for the return value.  */
  tree retvar;
  /* The map from local declarations in the inlined function to
     equivalents in the function into which it is being inlined.  */
  splay_tree decl_map;
  /* We use the same mechanism to build clones that we do to perform
     inlining.  However, there are a few places where we need to
     distinguish between those two situations.  This flag is true if
     we are cloning, rather than inlining.  */
  bool cloning_p;
  /* Versioning function is slightly different from inlining.  */
  bool versioning_p;
  bool update_clones_p;
  /* Callgraph node of function we are inlining into.  */
  struct cgraph_node *node;
  /* Callgraph node of currently inlined function.  */
  struct cgraph_node *current_node;
  /* Current BLOCK.  */
  tree block;
  varray_type ipa_info;
  /* Exception region the inlined call lies in.  */
  int eh_region;
  /* Take region number in the function being copied, add this value and
     get eh region number of the duplicate in the function we inline into.  */
  int eh_region_offset;
  /* In early inlining the warnings are suppressed.  */
  bool early_inlining_p;
} inline_data;
/* Prototypes.  */

static tree declare_return_variable (inline_data *, tree, tree, tree *);
static tree copy_body_r (tree *, int *, void *);
static tree copy_generic_body (inline_data *);
static bool inlinable_function_p (tree);
static tree remap_decl (tree, inline_data *);
static tree remap_type (tree, inline_data *);
static void remap_block (tree *, inline_data *);
static tree remap_decls (tree, inline_data *);
static void copy_bind_expr (tree *, int *, inline_data *);
static tree mark_local_for_remap_r (tree *, int *, void *);
static void unsave_expr_1 (tree);
static tree unsave_r (tree *, int *, void *);
static void declare_inline_vars (tree, tree);
static void remap_save_expr (tree *, void *, int *);
static bool replace_ref_tree (inline_data *, tree *);
static inline bool inlining_p (inline_data *);
static void add_lexical_block (tree current_block, tree new_block);
/* Insert a tree->tree mapping for ID.  Although the name suggests
   that the trees should be variables, the map is used for more than
   that.  */

static void
insert_decl_map (inline_data *id, tree key, tree value)
{
  splay_tree_insert (id->decl_map, (splay_tree_key) key,
		     (splay_tree_value) value);

  /* Always insert an identity map as well.  If we see this same new
     node again, we won't want to duplicate it a second time.  */
  if (key != value)
    splay_tree_insert (id->decl_map, (splay_tree_key) value,
		       (splay_tree_value) value);
}
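
/* Illustrative sketch (not compiler code): after

     insert_decl_map (id, old_var, new_var);

   both of these lookups succeed, the second through the identity
   entry, so re-walking into NEW_VAR never produces a second copy:

     splay_tree_lookup (id->decl_map, (splay_tree_key) old_var)->value
       == (splay_tree_value) new_var
     splay_tree_lookup (id->decl_map, (splay_tree_key) new_var)->value
       == (splay_tree_value) new_var  */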
/* Construct new SSA name for old one.  */

static tree
remap_ssa_name (tree name, inline_data *id)
{
  tree new;
  splay_tree_node n;

  gcc_assert (TREE_CODE (name) == SSA_NAME);

  n = splay_tree_lookup (id->decl_map, (splay_tree_key) name);
  if (n)
    return (tree) n->value;

  /* Do not set DEF_STMT yet as statement might not get copied.  */
  new = remap_decl (SSA_NAME_VAR (name), id);

  /* We might've substituted constant or another SSA_NAME for
     the variable.  */
  if ((TREE_CODE (new) == VAR_DECL || TREE_CODE (new) == PARM_DECL)
      /* Forcibly coalesce all SSA names for return values so we don't need to
	 construct PHI node for possibly multiple return statements by hand.  */
      && (TREE_CODE (SSA_NAME_VAR (name)) != RESULT_DECL || !inlining_p (id)))
    {
      new = make_ssa_name (new, NULL);
      insert_decl_map (id, name, new);
      if (IS_EMPTY_STMT (SSA_NAME_DEF_STMT (name))
	  /* When inlining, parameters are replaced by initialized vars.  */
	  && (TREE_CODE (new) == PARM_DECL || TREE_CODE (name) != PARM_DECL))
	{
	  SSA_NAME_DEF_STMT (new) = build_empty_stmt ();
	  if (default_def_fn (id->callee_cfun, SSA_NAME_VAR (name)) == name)
	    set_default_def (SSA_NAME_VAR (new), new);
	}
      SSA_NAME_OCCURS_IN_ABNORMAL_PHI (new)
	= SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name);
    }
  else
    {
      insert_decl_map (id, name, new);
      TREE_TYPE (new) = remap_type (TREE_TYPE (name), id);
    }
  return new;
}
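
/* Illustration (hypothetical callee, not GCC code): for

     if (c) return x; else return y;

   every SSA name of the RESULT_DECL is mapped onto the single return
   variable rather than getting a fresh SSA name, so the inliner never
   has to build a PHI node like

     retval_3 = PHI <x_1, y_2>

   by hand at the return block; that is what the RESULT_DECL test
   above achieves.  */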
/* Remap DECL during the copying of the BLOCK tree for the function.  */

static tree
remap_decl (tree decl, inline_data *id)
{
  splay_tree_node n;
  tree fn;

  /* We only remap local variables in the current function.  */
  fn = id->callee;

  /* See if we have remapped this declaration.  */
  n = splay_tree_lookup (id->decl_map, (splay_tree_key) decl);

  /* If we didn't already have an equivalent for this declaration,
     create one now.  */
  if (!n)
    {
      /* Make a copy of the variable or label.  */
      tree t = copy_decl_for_dup (decl, fn, id->caller, id->versioning_p);

      /* Remember it, so that if we encounter this local entity again
	 we can reuse this copy.  Do this early because remap_type may
	 need this decl for TYPE_STUB_DECL.  */
      insert_decl_map (id, decl, t);

      /* Remap types, if necessary.  */
      TREE_TYPE (t) = remap_type (TREE_TYPE (t), id);
      if (TREE_CODE (t) == TYPE_DECL)
	DECL_ORIGINAL_TYPE (t) = remap_type (DECL_ORIGINAL_TYPE (t), id);

      /* Remap sizes as necessary.  */
      walk_tree (&DECL_SIZE (t), copy_body_r, id, NULL);
      walk_tree (&DECL_SIZE_UNIT (t), copy_body_r, id, NULL);

      /* If fields, do likewise for offset and qualifier.  */
      if (TREE_CODE (t) == FIELD_DECL)
	{
	  walk_tree (&DECL_FIELD_OFFSET (t), copy_body_r, id, NULL);
	  if (TREE_CODE (DECL_CONTEXT (t)) == QUAL_UNION_TYPE)
	    walk_tree (&DECL_QUALIFIER (t), copy_body_r, id, NULL);
	}

      /* FIXME handle anon aggrs.  */
      if (! DECL_NAME (t) && TREE_TYPE (t)
	  && lang_hooks.tree_inlining.anon_aggr_type_p (TREE_TYPE (t)))
	{
	  /* For a VAR_DECL of anonymous type, we must also copy the
	     member VAR_DECLS here and rechain the DECL_ANON_UNION_ELEMS.  */
	  tree members = NULL;
	  tree src;

	  for (src = DECL_ANON_UNION_ELEMS (t); src;
	       src = TREE_CHAIN (src))
	    {
	      tree member = remap_decl (TREE_VALUE (src), id);

	      gcc_assert (!TREE_PURPOSE (src));
	      members = tree_cons (NULL, member, members);
	    }
	  DECL_ANON_UNION_ELEMS (t) = nreverse (members);
	}

      /* Remember it, so that if we encounter this local entity
	 again we can reuse this copy.  */
      insert_decl_map (id, decl, t);

      if (in_ssa_p
	  && (TREE_CODE (t) == VAR_DECL
	      || TREE_CODE (t) == RESULT_DECL || TREE_CODE (t) == PARM_DECL))
	{
	  tree def;

	  if (TREE_CODE (decl) != PARM_DECL
	      && (def = default_def_fn (id->callee_cfun, decl)))
	    {
	      tree map = remap_ssa_name (def, id);
	      /* Watch out for RESULT_DECLs whose SSA names map directly
		 to them.  */
	      if (TREE_CODE (map) == SSA_NAME)
		set_default_def (t, map);
	    }
	  add_referenced_tmp_var (t);
	}
      return t;
    }

  return unshare_expr ((tree) n->value);
}
static tree
remap_type_1 (tree type, inline_data *id)
{
  tree new, t;

  /* We do need a copy.  Build and register it now.  If this is a pointer or
     reference type, remap the designated type and make a new pointer or
     reference type.  */
  if (TREE_CODE (type) == POINTER_TYPE)
    {
      new = build_pointer_type_for_mode (remap_type (TREE_TYPE (type), id),
					 TYPE_MODE (type),
					 TYPE_REF_CAN_ALIAS_ALL (type));
      insert_decl_map (id, type, new);
      return new;
    }
  else if (TREE_CODE (type) == REFERENCE_TYPE)
    {
      new = build_reference_type_for_mode (remap_type (TREE_TYPE (type), id),
					   TYPE_MODE (type),
					   TYPE_REF_CAN_ALIAS_ALL (type));
      insert_decl_map (id, type, new);
      return new;
    }
  else
    new = copy_node (type);

  insert_decl_map (id, type, new);

  /* This is a new type, not a copy of an old type.  Need to reassociate
     variants.  We can handle everything except the main variant lazily.  */
  t = TYPE_MAIN_VARIANT (type);
  if (type != t)
    {
      t = remap_type (t, id);
      TYPE_MAIN_VARIANT (new) = t;
      TYPE_NEXT_VARIANT (new) = TYPE_MAIN_VARIANT (t);
      TYPE_NEXT_VARIANT (t) = new;
    }
  else
    {
      TYPE_MAIN_VARIANT (new) = new;
      TYPE_NEXT_VARIANT (new) = NULL;
    }

  if (TYPE_STUB_DECL (type))
    TYPE_STUB_DECL (new) = remap_decl (TYPE_STUB_DECL (type), id);

  /* Lazily create pointer and reference types.  */
  TYPE_POINTER_TO (new) = NULL;
  TYPE_REFERENCE_TO (new) = NULL;

  switch (TREE_CODE (new))
    {
    case INTEGER_TYPE:
    case REAL_TYPE:
    case ENUMERAL_TYPE:
    case BOOLEAN_TYPE:
    case CHAR_TYPE:
      t = TYPE_MIN_VALUE (new);
      if (t && TREE_CODE (t) != INTEGER_CST)
	walk_tree (&TYPE_MIN_VALUE (new), copy_body_r, id, NULL);

      t = TYPE_MAX_VALUE (new);
      if (t && TREE_CODE (t) != INTEGER_CST)
	walk_tree (&TYPE_MAX_VALUE (new), copy_body_r, id, NULL);
      return new;

    case FUNCTION_TYPE:
      TREE_TYPE (new) = remap_type (TREE_TYPE (new), id);
      walk_tree (&TYPE_ARG_TYPES (new), copy_body_r, id, NULL);
      return new;

    case ARRAY_TYPE:
      TREE_TYPE (new) = remap_type (TREE_TYPE (new), id);
      TYPE_DOMAIN (new) = remap_type (TYPE_DOMAIN (new), id);
      break;

    case RECORD_TYPE:
    case UNION_TYPE:
    case QUAL_UNION_TYPE:
      {
	tree f, nf = NULL;

	for (f = TYPE_FIELDS (new); f ; f = TREE_CHAIN (f))
	  {
	    t = remap_decl (f, id);
	    DECL_CONTEXT (t) = new;
	    TREE_CHAIN (t) = nf;
	    nf = t;
	  }
	TYPE_FIELDS (new) = nreverse (nf);
      }
      break;

    case OFFSET_TYPE:
    default:
      /* Shouldn't have been thought variable sized.  */
      gcc_unreachable ();
    }

  walk_tree (&TYPE_SIZE (new), copy_body_r, id, NULL);
  walk_tree (&TYPE_SIZE_UNIT (new), copy_body_r, id, NULL);

  return new;
}
static tree
remap_type (tree type, inline_data *id)
{
  splay_tree_node node;

  if (type == NULL)
    return type;

  /* See if we have remapped this type.  */
  node = splay_tree_lookup (id->decl_map, (splay_tree_key) type);
  if (node)
    return (tree) node->value;

  /* The type only needs remapping if it's variably modified.  */
  if (! variably_modified_type_p (type, id->callee))
    {
      insert_decl_map (id, type, type);
      return type;
    }

  return remap_type_1 (type, id);
}
static tree
remap_decls (tree decls, inline_data *id)
{
  tree old_var;
  tree new_decls = NULL_TREE;

  /* Remap its variables.  */
  for (old_var = decls; old_var; old_var = TREE_CHAIN (old_var))
    {
      tree new_var;

      /* We cannot chain the local static declarations into the copy,
	 since we cannot duplicate them and doing so would break the
	 one-declaration rule.  Link them into the destination
	 function's unexpanded_var_list instead.  */
      if (!lang_hooks.tree_inlining.auto_var_in_fn_p (old_var, id->callee)
	  && !DECL_EXTERNAL (old_var))
	{
	  cfun->unexpanded_var_list = tree_cons (NULL_TREE, old_var,
						 cfun->unexpanded_var_list);
	  continue;
	}

      /* Remap the variable.  */
      new_var = remap_decl (old_var, id);

      /* If we didn't remap this variable, we can't mess with its
	 TREE_CHAIN.  If we remapped this variable to the return slot, it's
	 already declared somewhere else, so don't declare it here.  */
      if (!new_var || new_var == id->retvar)
	;
      else
	{
	  gcc_assert (DECL_P (new_var));
	  TREE_CHAIN (new_var) = new_decls;
	  new_decls = new_var;
	}
    }

  return nreverse (new_decls);
}
/* Copy the BLOCK to contain remapped versions of the variables
   therein.  And hook the new block into the block-tree.  */

static void
remap_block (tree *block, inline_data *id)
{
  tree old_block;
  tree new_block;
  tree fn;

  /* Make the new block.  */
  old_block = *block;
  new_block = make_node (BLOCK);
  TREE_USED (new_block) = TREE_USED (old_block);
  BLOCK_ABSTRACT_ORIGIN (new_block) = old_block;
  BLOCK_SOURCE_LOCATION (new_block) = BLOCK_SOURCE_LOCATION (old_block);
  *block = new_block;

  /* Remap its variables.  */
  BLOCK_VARS (new_block) = remap_decls (BLOCK_VARS (old_block), id);

  fn = id->caller;
  if (id->cloning_p)
    /* We're building a clone; DECL_INITIAL is still
       error_mark_node, and current_binding_level is the parm
       binding level.  */
    lang_hooks.decls.insert_block (new_block);
  /* Remember the remapped block.  */
  insert_decl_map (id, old_block, new_block);
}
/* Copy the whole block tree and root it in id->block.  */

static tree
remap_blocks (tree block, inline_data *id)
{
  tree t;
  tree new = block;

  if (!block)
    return NULL;

  remap_block (&new, id);
  gcc_assert (new != block);
  for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
    add_lexical_block (new, remap_blocks (t, id));
  return new;
}
static void
copy_statement_list (tree *tp)
{
  tree_stmt_iterator oi, ni;
  tree new;

  new = alloc_stmt_list ();
  ni = tsi_start (new);
  oi = tsi_start (*tp);
  *tp = new;

  for (; !tsi_end_p (oi); tsi_next (&oi))
    tsi_link_after (&ni, tsi_stmt (oi), TSI_NEW_STMT);
}
static void
copy_bind_expr (tree *tp, int *walk_subtrees, inline_data *id)
{
  tree block = BIND_EXPR_BLOCK (*tp);
  /* Copy (and replace) the statement.  */
  copy_tree_r (tp, walk_subtrees, NULL);
  if (block)
    {
      remap_block (&block, id);
      BIND_EXPR_BLOCK (*tp) = block;
    }

  if (BIND_EXPR_VARS (*tp))
    /* This will remap a lot of the same decls again, but this should be
       harmless.  */
    BIND_EXPR_VARS (*tp) = remap_decls (BIND_EXPR_VARS (*tp), id);
}
/* Called from copy_body_id via walk_tree.  DATA is really an
   `inline_data *'.  */

static tree
copy_body_r (tree *tp, int *walk_subtrees, void *data)
{
  inline_data *id = (inline_data *) data;
  tree fn = id->callee;

  /* Begin by recognizing trees that we'll completely rewrite for the
     inlining context.  Our output for these trees is completely
     different from our input (e.g. RETURN_EXPR is deleted, and morphs
     into an edge).  Further down, we'll handle trees that get
     duplicated and/or tweaked.  */

  /* If this is a RETURN_EXPR and we are inlining, turn it into a
     MODIFY_EXPR storing to the return variable, or delete it if it
     returns no value.  */
  if (TREE_CODE (*tp) == RETURN_EXPR && inlining_p (id))
    {
      tree assignment = TREE_OPERAND (*tp, 0);

      /* If we're returning something, just turn that into an
	 assignment into the equivalent of the original RESULT_DECL.
	 If the "assignment" is just the result decl, the result
	 decl has already been set (e.g. a recent "foo (&result_decl,
	 ...)"); just toss the entire RETURN_EXPR.  */
      if (assignment && TREE_CODE (assignment) == MODIFY_EXPR)
	{
	  /* Replace the RETURN_EXPR with (a copy of) the
	     MODIFY_EXPR hanging underneath.  */
	  *tp = copy_node (assignment);
	}
      else /* Else the RETURN_EXPR returns no value.  */
	{
	  *tp = NULL;
	  return (void *)1;
	}
    }
  else if (TREE_CODE (*tp) == SSA_NAME)
    {
      *tp = remap_ssa_name (*tp, id);
      *walk_subtrees = 0;
      return NULL;
    }

  /* Local variables and labels need to be replaced by equivalent
     variables.  We don't want to copy static variables; there's only
     one of those, no matter how many times we inline the containing
     function.  Similarly for globals from an outer function.  */
  else if (lang_hooks.tree_inlining.auto_var_in_fn_p (*tp, fn))
    {
      tree new_decl;

      /* Remap the declaration.  */
      new_decl = remap_decl (*tp, id);
      gcc_assert (new_decl);
      /* Replace this variable with the copy.  */
      STRIP_TYPE_NOPS (new_decl);
      *tp = new_decl;
      *walk_subtrees = 0;
    }
  else if (TREE_CODE (*tp) == STATEMENT_LIST)
    copy_statement_list (tp);
  else if (TREE_CODE (*tp) == SAVE_EXPR)
    remap_save_expr (tp, id->decl_map, walk_subtrees);
  else if (TREE_CODE (*tp) == LABEL_DECL
	   && (! DECL_CONTEXT (*tp)
	       || decl_function_context (*tp) == id->callee))
    /* These may need to be remapped for EH handling.  */
    *tp = remap_decl (*tp, id);
  else if (TREE_CODE (*tp) == BIND_EXPR)
    copy_bind_expr (tp, walk_subtrees, id);
  /* Types may need remapping as well.  */
  else if (TYPE_P (*tp))
    *tp = remap_type (*tp, id);

  /* If this is a constant, we have to copy the node iff the type will be
     remapped.  copy_tree_r will not copy a constant.  */
  else if (CONSTANT_CLASS_P (*tp))
    {
      tree new_type = remap_type (TREE_TYPE (*tp), id);

      if (new_type == TREE_TYPE (*tp))
	*walk_subtrees = 0;

      else if (TREE_CODE (*tp) == INTEGER_CST)
	*tp = build_int_cst_wide (new_type, TREE_INT_CST_LOW (*tp),
				  TREE_INT_CST_HIGH (*tp));
      else
	{
	  *tp = copy_node (*tp);
	  TREE_TYPE (*tp) = new_type;
	}
    }

  /* Otherwise, just copy the node.  Note that copy_tree_r already
     knows not to copy VAR_DECLs, etc., so this is safe.  */
  else
    {
      /* Here we handle trees that are not completely rewritten.
	 First we detect some inlining-induced bogosities for
	 discarding.  */
      if (TREE_CODE (*tp) == MODIFY_EXPR
	  && TREE_OPERAND (*tp, 0) == TREE_OPERAND (*tp, 1)
	  && (lang_hooks.tree_inlining.auto_var_in_fn_p
	      (TREE_OPERAND (*tp, 0), fn)))
	{
	  /* Some assignments VAR = VAR; don't generate any rtl code
	     and thus don't count as variable modification.  Avoid
	     keeping bogosities like 0 = 0.  */
	  tree decl = TREE_OPERAND (*tp, 0), value;
	  splay_tree_node n;

	  n = splay_tree_lookup (id->decl_map, (splay_tree_key) decl);
	  if (n)
	    {
	      value = (tree) n->value;
	      STRIP_TYPE_NOPS (value);
	      if (TREE_CONSTANT (value) || TREE_READONLY_DECL_P (value))
		{
		  *tp = build_empty_stmt ();
		  return copy_body_r (tp, walk_subtrees, data);
		}
	    }
	}
      else if (TREE_CODE (*tp) == INDIRECT_REF
	       && !id->versioning_p)
	{
	  /* Get rid of *& from inline substitutions that can happen when a
	     pointer argument is an ADDR_EXPR.  */
	  tree decl = TREE_OPERAND (*tp, 0);
	  splay_tree_node n;

	  n = splay_tree_lookup (id->decl_map, (splay_tree_key) decl);
	  if (n)
	    {
	      tree new;
	      /* If we happen to get an ADDR_EXPR in n->value, strip
		 it manually here as we'll eventually get ADDR_EXPRs
		 which lie about their types pointed to.  In this case
		 build_fold_indirect_ref wouldn't strip the INDIRECT_REF,
		 but we absolutely rely on that.  As fold_indirect_ref
		 does other useful transformations, try that first, though.  */
	      tree type = TREE_TYPE (TREE_TYPE ((tree)n->value));
	      new = unshare_expr ((tree)n->value);
	      *tp = fold_indirect_ref_1 (type, new);
	      if (! *tp)
		{
		  if (TREE_CODE (new) == ADDR_EXPR)
		    *tp = TREE_OPERAND (new, 0);
		  else
		    *tp = build1 (INDIRECT_REF, type, new);
		}
	      *walk_subtrees = 0;
	      return NULL;
	    }
	}

      /* Here is the "usual case".  Copy this tree node, and then
	 tweak some special cases.  */
      copy_tree_r (tp, walk_subtrees, id->versioning_p ? data : NULL);

      /* Global variables we haven't seen yet need to go into
	 referenced vars.  */
      if (in_ssa_p && TREE_CODE (*tp) == VAR_DECL)
	add_referenced_tmp_var (*tp);

      /* If EXPR has block defined, map it to the newly constructed block.
	 When inlining we want EXPRs without a block to appear in the
	 block of the function call.  */
      if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (TREE_CODE (*tp))))
	{
	  tree new_block = id->block;
	  if (TREE_BLOCK (*tp))
	    {
	      splay_tree_node n;
	      n = splay_tree_lookup (id->decl_map,
				     (splay_tree_key) TREE_BLOCK (*tp));
	      gcc_assert (n);
	      new_block = (tree) n->value;
	    }
	  TREE_BLOCK (*tp) = new_block;
	}

      if (TREE_CODE (*tp) == RESX_EXPR && id->eh_region_offset)
	TREE_OPERAND (*tp, 0) =
	  build_int_cst
	    (NULL_TREE,
	     id->eh_region_offset + TREE_INT_CST_LOW (TREE_OPERAND (*tp, 0)));

      TREE_TYPE (*tp) = remap_type (TREE_TYPE (*tp), id);

      /* The copied TARGET_EXPR has never been expanded, even if the
	 original node was expanded already.  */
      if (TREE_CODE (*tp) == TARGET_EXPR && TREE_OPERAND (*tp, 3))
	{
	  TREE_OPERAND (*tp, 1) = TREE_OPERAND (*tp, 3);
	  TREE_OPERAND (*tp, 3) = NULL_TREE;
	}

      /* Variable substitution need not be simple.  In particular, the
	 INDIRECT_REF substitution above.  Make sure that TREE_CONSTANT
	 and friends are up-to-date.  */
      else if (TREE_CODE (*tp) == ADDR_EXPR)
	{
	  walk_tree (&TREE_OPERAND (*tp, 0), copy_body_r, id, NULL);
	  recompute_tree_invarant_for_addr_expr (*tp);
	  *walk_subtrees = 0;
	}
    }

  /* Keep iterating.  */
  return NULL;
}
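
/* Illustration of the INDIRECT_REF case above (made-up example):
   when inlining

     static int deref (int *p) { return *p; }

   at a call site "deref (&v)", the parameter "p" is mapped to "&v",
   so the copied body would otherwise read "*&v"; the INDIRECT_REF
   handling folds that back to plain "v" instead of leaving the *&
   pair in the IL.  */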
/* Copy basic block, scale profile accordingly.  Edges will be taken
   care of later.  */

static basic_block
copy_bb (inline_data *id, basic_block bb, int frequency_scale, int count_scale)
{
  block_stmt_iterator bsi, copy_bsi;
  basic_block copy_basic_block;

  /* create_basic_block() will append every new block to
     basic_block_info automatically.  */
  copy_basic_block = create_basic_block (NULL, (void *) 0, bb->prev_bb->aux);
  copy_basic_block->count = bb->count * count_scale / REG_BR_PROB_BASE;
  copy_basic_block->frequency = (bb->frequency
				 * frequency_scale / REG_BR_PROB_BASE);
  copy_bsi = bsi_start (copy_basic_block);

  for (bsi = bsi_start (bb);
       !bsi_end_p (bsi); bsi_next (&bsi))
    {
      tree stmt = bsi_stmt (bsi);
      tree orig_stmt = stmt;

      walk_tree (&stmt, copy_body_r, id, NULL);

      /* RETURN_EXPR might be removed,
	 this is signalled by making stmt pointer NULL.  */
      if (stmt)
	{
	  tree call, decl;

	  /* fold_stmt (&stmt); */

	  bsi_insert_after (&copy_bsi, stmt, BSI_NEW_STMT);
	  call = get_call_expr_in (stmt);
	  /* We're duplicating a CALL_EXPR.  Find any corresponding
	     callgraph edges and update or duplicate them.  */
	  if (call && (decl = get_callee_fndecl (call)))
	    {
	      if (!id->versioning_p)
		{
		  struct cgraph_edge *edge;

		  /* We're cloning or inlining this body; duplicate the
		     associated callgraph nodes.  */
		  edge = cgraph_edge (id->current_node, orig_stmt);
		  if (edge)
		    cgraph_clone_edge (edge, id->node, stmt,
				       REG_BR_PROB_BASE, 1, true);
		}
	      else
		{
		  /* Update the call_expr on the edges from the new version
		     to its callees.  */
		  struct cgraph_edge *edge;
		  edge = cgraph_edge (id->node, orig_stmt);
		  if (edge)
		    {
		      edge->call_stmt = stmt;
		      if (id->update_clones_p)
			{
			  struct cgraph_node *n;
			  for (n = id->node->next_clone; n; n = n->next_clone)
			    cgraph_edge (n, orig_stmt)->call_stmt = stmt;
			}
		    }
		}
	    }
	  /* If you think we can abort here, you are wrong.
	     There is no region 0 in tree land.  */
	  gcc_assert (lookup_stmt_eh_region_fn (id->callee_cfun, orig_stmt)
		      != 0);

	  if (tree_could_throw_p (stmt))
	    {
	      int region = lookup_stmt_eh_region_fn (id->callee_cfun,
						     orig_stmt);
	      /* Add an entry for the copied tree in the EH hashtable.
		 When saving or cloning or versioning, use the hashtable in
		 cfun, and just copy the EH number.  When inlining, use the
		 hashtable in the caller, and adjust the region number.  */
	      if (region > 0)
		add_stmt_to_eh_region (stmt, region + id->eh_region_offset);

	      /* If this tree doesn't have a region associated with it,
		 and there is a "current region,"
		 then associate this tree with the current region
		 and add edges associated with this region.  */
	      if ((lookup_stmt_eh_region_fn (id->callee_cfun,
					     orig_stmt) <= 0
		   && id->eh_region > 0)
		  && tree_could_throw_p (stmt))
		add_stmt_to_eh_region (stmt, id->eh_region);
	    }

	  if (in_ssa_p)
	    {
	      ssa_op_iter i;
	      tree def;

	      FOR_EACH_SSA_TREE_OPERAND (def, stmt, i, SSA_OP_DEF)
		if (TREE_CODE (def) == SSA_NAME)
		  {
		    /* Not quite right, orig_stmt might be <retval>=blah.
		       verify_ssa would die later if we really had multiple
		       definitions...  */
		    gcc_assert (!SSA_NAME_DEF_STMT (def)
				|| TREE_CODE (orig_stmt) == RETURN_EXPR);
		    SSA_NAME_DEF_STMT (def) = stmt;
		  }
	    }
	}
    }
  return copy_basic_block;
}
/* Copy edges from BB into its copy constructed earlier, scale profile
   accordingly.  Edges will be taken care of later.  Assume aux
   pointers to point to the copies of each BB.  */

static void
copy_edges_for_bb (basic_block bb, int count_scale, basic_block exit_block_map)
{
  basic_block new_bb = bb->aux;
  edge_iterator ei;
  edge old_edge;
  block_stmt_iterator bsi;
  int flags;

  /* Use the indices from the original blocks to create edges for the
     new ones.  */
  FOR_EACH_EDGE (old_edge, ei, bb->succs)
    if (!(old_edge->flags & EDGE_EH))
      {
	edge new;

	flags = old_edge->flags;

	/* Return edges do get a FALLTHRU flag when they get inlined.  */
	if (old_edge->dest->index == EXIT_BLOCK && !old_edge->flags
	    && old_edge->dest->aux != EXIT_BLOCK_PTR)
	  flags |= EDGE_FALLTHRU;
	new = make_edge (new_bb, old_edge->dest->aux, flags);
	new->count = old_edge->count * count_scale / REG_BR_PROB_BASE;
	new->probability = old_edge->probability;
      }

  if (bb->index == ENTRY_BLOCK || bb->index == EXIT_BLOCK)
    return;

  for (bsi = bsi_start (new_bb); !bsi_end_p (bsi);)
    {
      tree copy_stmt;

      copy_stmt = bsi_stmt (bsi);
      update_stmt (copy_stmt);
      if (in_ssa_p)
	mark_new_vars_to_rename (copy_stmt);
      /* Do this before the possible split_block.  */
      bsi_next (&bsi);

      /* If this tree could throw an exception, there are two
	 cases where we need to add abnormal edge(s): the
	 tree wasn't in a region and there is a "current
	 region" in the caller; or the original tree had
	 EH edges.  In both cases split the block after the tree,
	 and add abnormal edge(s) as needed; we need both
	 those from the callee and the caller.
	 We check whether the copy can throw, because the const
	 propagation can change an INDIRECT_REF which throws
	 into a COMPONENT_REF which doesn't.  If the copy
	 can throw, the original could also throw.  */

      if (tree_can_throw_internal (copy_stmt))
	{
	  if (!bsi_end_p (bsi))
	    /* Note that bb's predecessor edges aren't necessarily
	       right at this point; split_block doesn't care.  */
	    {
	      edge e = split_block (new_bb, copy_stmt);

	      new_bb = e->dest;
	      new_bb->aux = e->src->aux;
	      bsi = bsi_start (new_bb);
	    }

	  make_eh_edges (copy_stmt);

	  /* Update PHIs of EH edges destined for landing pads in the
	     function we inline into.
	     This can be done by cloning PHI arguments from the EH edges
	     originally attached to the call statement we are replacing.  */
	  if (in_ssa_p)
	    {
	      edge e;

	      FOR_EACH_EDGE (e, ei, bb_for_stmt (copy_stmt)->succs)
		if (!e->dest->aux
		    || ((basic_block)e->dest->aux)->index == ENTRY_BLOCK)
		  {
		    tree phi;

		    gcc_assert (e->flags & EDGE_EH);
		    for (phi = phi_nodes (e->dest); phi; phi = PHI_CHAIN (phi))
		      {
			/* Lookup original edge from call expression.  */
			edge master = find_edge (exit_block_map, e->dest);
			if (master)
			  {
			    tree new_arg = PHI_ARG_DEF_FROM_EDGE (phi, master);

			    gcc_assert (master->flags & EDGE_EH);
			    /* FIXME: Because of hack below we might not
			       have an argument.
			    gcc_assert (new_arg);  */
			    if (new_arg)
			      add_phi_arg (phi, new_arg, e);
			  }
			else
			  {
			    /* FIXME: It is possible that the edge doesn't
			       exist because we have more information about
			       EH type and the edge routes lower in EH region
			       tree.
			       It seems to me that it is not safe to rely
			       on ssa_update here as the variable might have
			       overlapping liveranges of SSA_NAMEs and updating
			       would screw up, but Diego seems to think it is
			       safe, so lets give it a try now.

			       If it really is safe, we might just remove
			       the code and mark all PHI arguments for
			       renaming.  */
			    mark_sym_for_renaming
			      (SSA_NAME_VAR (PHI_RESULT (phi)));
			  }
		      }
		  }
	    }
	}
    }
}
/* Copy the PHIs.  All blocks and edges have been copied; some blocks
   may have been split and new outgoing EH edges inserted.
   BB points to the block of the original function and AUX pointers
   link the original and newly copied blocks.  */

static void
copy_phis_for_bb (basic_block bb, inline_data *id)
{
  basic_block new_bb = bb->aux;
  edge_iterator ei;
  tree phi;

  for (phi = phi_nodes (bb); phi; phi = PHI_CHAIN (phi))
    {
      tree res = PHI_RESULT (phi);
      tree new_res = res;
      tree new_phi;
      edge new_edge;

      if (is_gimple_reg (res))
	{
	  walk_tree (&new_res, copy_body_r, id, NULL);
	  SSA_NAME_DEF_STMT (new_res)
	    = new_phi = create_phi_node (new_res, new_bb);
	  FOR_EACH_EDGE (new_edge, ei, new_bb->preds)
	    {
	      edge old_edge = find_edge (new_edge->src->aux, bb);
	      tree arg = PHI_ARG_DEF_FROM_EDGE (phi, old_edge);
	      tree new_arg = arg;

	      walk_tree (&new_arg, copy_body_r, id, NULL);
	      gcc_assert (new_arg);
	      add_phi_arg (new_phi, new_arg, new_edge);
	    }
	}
    }
}
/* Wrapper for remap_decl so it can be used as a callback.  */

static tree
remap_decl_1 (tree decl, void *data)
{
  return remap_decl (decl, data);
}
static void
initialize_cfun (tree new_fndecl, tree callee_fndecl, gcov_type count,
		 int frequency)
{
  struct function *new_cfun
    = (struct function *) ggc_alloc_cleared (sizeof (struct function));
  struct function *callee_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
  int count_scale, frequency_scale;

  if (ENTRY_BLOCK_PTR_FOR_FUNCTION (callee_cfun)->count)
    count_scale = (REG_BR_PROB_BASE * count
		   / ENTRY_BLOCK_PTR_FOR_FUNCTION (callee_cfun)->count);
  else
    count_scale = 1;

  if (ENTRY_BLOCK_PTR_FOR_FUNCTION (callee_cfun)->frequency)
    frequency_scale = (REG_BR_PROB_BASE * frequency
		       /
		       ENTRY_BLOCK_PTR_FOR_FUNCTION (callee_cfun)->frequency);
  else
    frequency_scale = count_scale;

  /* Register specific tree functions.  */
  tree_register_cfg_hooks ();
  *new_cfun = *DECL_STRUCT_FUNCTION (callee_fndecl);
  new_cfun->cfg = NULL;
  new_cfun->decl = new_fndecl /*= copy_node (callee_fndecl)*/;
  new_cfun->ib_boundaries_block = (varray_type) 0;
  DECL_STRUCT_FUNCTION (new_fndecl) = new_cfun;
  push_cfun (new_cfun);
  init_empty_tree_cfg ();

  ENTRY_BLOCK_PTR->count =
    (ENTRY_BLOCK_PTR_FOR_FUNCTION (callee_cfun)->count * count_scale /
     REG_BR_PROB_BASE);
  ENTRY_BLOCK_PTR->frequency =
    (ENTRY_BLOCK_PTR_FOR_FUNCTION (callee_cfun)->frequency *
     frequency_scale / REG_BR_PROB_BASE);
  EXIT_BLOCK_PTR->count =
    (EXIT_BLOCK_PTR_FOR_FUNCTION (callee_cfun)->count * count_scale /
     REG_BR_PROB_BASE);
  EXIT_BLOCK_PTR->frequency =
    (EXIT_BLOCK_PTR_FOR_FUNCTION (callee_cfun)->frequency *
     frequency_scale / REG_BR_PROB_BASE);
  if (callee_cfun->eh)
    init_eh_for_function ();

  if (callee_cfun->ssa)
    {
      cfun->ssa->x_in_ssa_p = true;
      init_ssa_operands ();
    }
}
/* Make a copy of the body of FN so that it can be inserted inline in
   another function.  Walks FN via CFG, returns new fndecl.  */

static tree
copy_cfg_body (inline_data * id, gcov_type count, int frequency,
	       basic_block entry_block_map, basic_block exit_block_map)
{
  tree callee_fndecl = id->callee;
  /* Original cfun for the callee, doesn't change.  */
  struct function *callee_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
  /* Place to copy from; when a copy of the function was saved off earlier,
     use that instead of the main copy.  */
  struct function *cfun_to_copy =
    (struct function *) ggc_alloc_cleared (sizeof (struct function));
  basic_block bb;
  tree new_fndecl = NULL;
  int count_scale, frequency_scale;

  if (ENTRY_BLOCK_PTR_FOR_FUNCTION (callee_cfun)->count)
    count_scale = (REG_BR_PROB_BASE * count
		   / ENTRY_BLOCK_PTR_FOR_FUNCTION (callee_cfun)->count);
  else
    count_scale = 1;

  if (ENTRY_BLOCK_PTR_FOR_FUNCTION (callee_cfun)->frequency)
    frequency_scale = (REG_BR_PROB_BASE * frequency
		       /
		       ENTRY_BLOCK_PTR_FOR_FUNCTION (callee_cfun)->frequency);
  else
    frequency_scale = count_scale;

  /* Register specific tree functions.  */
  tree_register_cfg_hooks ();

  /* Must have a CFG here at this point.  */
  gcc_assert (ENTRY_BLOCK_PTR_FOR_FUNCTION
	      (DECL_STRUCT_FUNCTION (callee_fndecl)));

  *cfun_to_copy = *DECL_STRUCT_FUNCTION (callee_fndecl);

  id->callee_cfun = cfun_to_copy;

  ENTRY_BLOCK_PTR_FOR_FUNCTION (cfun_to_copy)->aux = entry_block_map;
  EXIT_BLOCK_PTR_FOR_FUNCTION (cfun_to_copy)->aux = exit_block_map;
  entry_block_map->aux = ENTRY_BLOCK_PTR_FOR_FUNCTION (cfun_to_copy);
  exit_block_map->aux = EXIT_BLOCK_PTR_FOR_FUNCTION (cfun_to_copy);

  /* Duplicate any exception-handling regions.  */
  if (cfun->eh)
    {
      id->eh_region_offset = duplicate_eh_regions (cfun_to_copy,
						   remap_decl_1,
						   id, id->eh_region);
      gcc_assert (inlining_p (id) || !id->eh_region_offset);
    }

  /* Use aux pointers to map the original blocks to copy.  */
  FOR_EACH_BB_FN (bb, cfun_to_copy)
    {
      basic_block new = copy_bb (id, bb, frequency_scale, count_scale);
      bb->aux = new;
      new->aux = bb;
    }

  /* Now that we've duplicated the blocks, duplicate their edges.  */
  FOR_ALL_BB_FN (bb, cfun_to_copy)
    copy_edges_for_bb (bb, count_scale, exit_block_map);

  FOR_ALL_BB_FN (bb, cfun_to_copy)
    copy_phis_for_bb (bb, id);
  FOR_ALL_BB_FN (bb, cfun_to_copy)
    ((basic_block)bb->aux)->aux = bb->aux = NULL;
  entry_block_map->aux = NULL;
  exit_block_map->aux = NULL;

  return new_fndecl;
}
/* Make a copy of the body of FN so that it can be inserted inline in
   another function.  */

static tree
copy_generic_body (inline_data *id)
{
  tree body;
  tree fndecl = id->callee;

  body = DECL_SAVED_TREE (fndecl);
  walk_tree (&body, copy_body_r, id, NULL);

  return body;
}

static tree
copy_body (inline_data *id, gcov_type count, int frequency,
	   basic_block entry_block_map, basic_block exit_block_map)
{
  tree fndecl = id->callee;
  tree body;

  /* If this body has a CFG, walk CFG and copy.  */
  gcc_assert (ENTRY_BLOCK_PTR_FOR_FUNCTION (DECL_STRUCT_FUNCTION (fndecl)));
  body = copy_cfg_body (id, count, frequency, entry_block_map,
			exit_block_map);

  return body;
}
/* Return true if VALUE is an ADDR_EXPR of an automatic variable
   defined in function FN, or of a data member thereof.  */

static bool
self_inlining_addr_expr (tree value, tree fn)
{
  tree var;

  if (TREE_CODE (value) != ADDR_EXPR)
    return false;

  var = get_base_address (TREE_OPERAND (value, 0));

  return var && lang_hooks.tree_inlining.auto_var_in_fn_p (var, fn);
}
static void
setup_one_parameter (inline_data *id, tree p, tree value, tree fn,
		     basic_block bb, tree *vars)
{
  tree init_stmt;
  tree var;
  tree var_sub;
  tree rhs = value ? fold_convert (TREE_TYPE (p), value) : NULL;
  tree def = in_ssa_p ? default_def_fn (id->callee_cfun, p) : NULL;

  /* If the parameter is never assigned to, has no SSA_NAMEs created,
     we may not need to create a new variable here at all.  Instead, we may
     be able to just use the argument value.  */
  if (TREE_READONLY (p)
      && !TREE_ADDRESSABLE (p)
      && value && !TREE_SIDE_EFFECTS (value)
      && !def)
    {
      /* We may produce non-gimple trees by adding NOPs or introduce
	 invalid sharing when operand is not really constant.
	 It is not a big deal to prohibit constant propagation here as
	 we will constant propagate in DOM1 pass anyway.  */
      if (is_gimple_min_invariant (value)
	  && lang_hooks.types_compatible_p (TREE_TYPE (value), TREE_TYPE (p))
	  /* We have to be very careful about ADDR_EXPR.  Make sure
	     the base variable isn't a local variable of the inlined
	     function, e.g., when doing recursive inlining, direct or
	     mutually-recursive or whatever, which is why we don't
	     just test whether fn == current_function_decl.  */
	  && ! self_inlining_addr_expr (value, fn))
	{
	  insert_decl_map (id, p, value);
	  return;
	}
    }

  /* Make an equivalent VAR_DECL.  Note that we must NOT remap the type
     here since the type of this decl must be visible to the calling
     function.  */
  var = copy_decl_for_dup (p, fn, id->caller, /*versioning=*/false);
  if (in_ssa_p && TREE_CODE (var) == VAR_DECL)
    add_referenced_tmp_var (var);

  /* See if the frontend wants to pass this by invisible reference.  If
     so, our new VAR_DECL will have REFERENCE_TYPE, and we need to
     replace uses of the PARM_DECL with dereferences.  */
  if (TREE_TYPE (var) != TREE_TYPE (p)
      && POINTER_TYPE_P (TREE_TYPE (var))
      && TREE_TYPE (TREE_TYPE (var)) == TREE_TYPE (p))
    {
      insert_decl_map (id, var, var);
      var_sub = build_fold_indirect_ref (var);
    }
  else
    var_sub = var;

  /* Declare this new variable.  */
  TREE_CHAIN (var) = *vars;
  *vars = var;

  /* Make gimplifier happy about this variable.  */
  DECL_SEEN_IN_BIND_EXPR_P (var) = 1;

  /* Even if P was TREE_READONLY, the new VAR should not be.
     In the original code, we would have constructed a
     temporary, and then the function body would have never
     changed the value of P.  However, now, we will be
     constructing VAR directly.  The constructor body may
     change its value multiple times as it is being
     constructed.  Therefore, it must not be TREE_READONLY;
     the back-end assumes that TREE_READONLY variable is
     assigned to only once.  */
  if (TYPE_NEEDS_CONSTRUCTING (TREE_TYPE (p)))
    TREE_READONLY (var) = 0;

  /* Register the VAR_DECL as the equivalent for the PARM_DECL;
     that way, when the PARM_DECL is encountered, it will be
     automatically replaced by the VAR_DECL.  */
  insert_decl_map (id, p, var_sub);

  /* If there is no setup required and we are in SSA, take the easy route.
     We need to construct the map for the variable anyway, as it might be
     used in different SSA names when the parameter is set in the function.
     FIXME: This usually kills the last connection in between inlined
     function parameter and the actual value in debug info.  Can we do
     better here?  If we just inserted the statement, copy propagation
     would kill it anyway as it always did in older versions of GCC...  */
  if (in_ssa_p && rhs && def && is_gimple_reg (p)
      && (TREE_CODE (rhs) == SSA_NAME
	  /* Replacing &""[0] has interesting side effects.  Exclude
	     ADDR_EXPRs now.  */
	  || (is_gimple_min_invariant (rhs) && TREE_CODE (rhs) != ADDR_EXPR)))
    {
      insert_decl_map (id, def, rhs);
      return;
    }

  /* Initialize this VAR_DECL from the equivalent argument.  Convert
     the argument to the proper type in case it was promoted.  */
  if (value)
    {
      block_stmt_iterator bsi = bsi_last (bb);

      if (rhs == error_mark_node)
	{
	  insert_decl_map (id, p, var_sub);
	  return;
	}

      /* We want to use MODIFY_EXPR, not INIT_EXPR here so that we
	 keep our trees in gimple form.  */
      if (def && is_gimple_reg (p))
	{
	  def = remap_ssa_name (def, id);
	  init_stmt = build (MODIFY_EXPR, TREE_TYPE (var), def, rhs);
	  SSA_NAME_DEF_STMT (def) = init_stmt;
	  /*gcc_assert (IS_EMPTY_STMT (default_def (var)));*/
	  set_default_def (var, NULL);
	}
      else
	init_stmt = build (MODIFY_EXPR, TREE_TYPE (var), var, rhs);

      /* If we did not create a gimple value and we did not create a gimple
	 cast of a gimple value, then we will need to gimplify INIT_STMTS
	 at the end.  Note that is_gimple_cast only checks the outer
	 tree code, not its operand.  Thus the explicit check that its
	 operand is a gimple value.  */
      if (!is_gimple_val (rhs)
	  && (!is_gimple_cast (rhs)
	      || !is_gimple_val (TREE_OPERAND (rhs, 0))))
	{
	  tree_stmt_iterator i;

	  push_gimplify_context ();
	  gimplify_stmt (&init_stmt);
	  if (in_ssa_p && init_stmt && TREE_CODE (init_stmt) == STATEMENT_LIST)
	    {
	      /* The replacement can expose previously unreferenced
		 variables.  */
	      for (i = tsi_start (init_stmt); !tsi_end_p (i); tsi_next (&i))
		find_new_referenced_vars (tsi_stmt_ptr (i));
	    }
	  pop_gimplify_context (NULL);
	}

      /* If VAR represents a zero-sized variable, it's possible that the
	 assignment statement may result in no gimple statements.  */
      if (init_stmt)
	bsi_insert_after (&bsi, init_stmt, BSI_NEW_STMT);
      for (;!bsi_end_p (bsi); bsi_next (&bsi))
	mark_new_vars_to_rename (bsi_stmt (bsi));
    }
}
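
/* Illustration (hypothetical call, not GCC code): for

     static int twice (int n) { return n * 2; }

   inlining "twice (i + 1)" emits a real initialization into BB,

     n.5 = i + 1;

   whereas for an invariant argument such as "twice (42)" the constant
   is entered straight into the decl map, so every use of "n" becomes
   "42" and no statement is emitted at all.  The name "n.5" is made up
   for this sketch.  */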
/* Generate code to initialize the parameters of the function at the
   top of the stack in ID from the ARGS (presented as a TREE_LIST).  */

static void
initialize_inlined_parameters (inline_data *id, tree args, tree static_chain,
			       tree fn, basic_block bb)
{
  tree parms;
  tree a;
  tree p;
  tree vars = NULL_TREE;
  int argnum = 0;

  /* Figure out what the parameters are.  */
  parms = DECL_ARGUMENTS (fn);

  /* Loop through the parameter declarations, replacing each with an
     equivalent VAR_DECL, appropriately initialized.  */
  for (p = parms, a = args; p;
       a = a ? TREE_CHAIN (a) : a, p = TREE_CHAIN (p))
    {
      tree value;

      ++argnum;

      /* Find the initializer.  */
      value = lang_hooks.tree_inlining.convert_parm_for_inlining
	      (p, a ? TREE_VALUE (a) : NULL_TREE, fn, argnum);

      setup_one_parameter (id, p, value, fn, bb, &vars);
    }

  /* Initialize the static chain.  */
  p = DECL_STRUCT_FUNCTION (fn)->static_chain_decl;
  gcc_assert (fn != current_function_decl);
  if (p)
    {
      /* No static chain?  Seems like a bug in tree-nested.c.  */
      gcc_assert (static_chain);

      setup_one_parameter (id, p, static_chain, fn, bb, &vars);
    }

  declare_inline_vars (id->block, vars);
}
/* Declare a return variable to replace the RESULT_DECL for the
   function we are calling.  An appropriate DECL_STMT is returned.
   The USE_STMT is filled to contain a use of the declaration to
   indicate the return value of the function.

   RETURN_SLOT_ADDR, if non-null, was a fake parameter that
   took the address of the result.  MODIFY_DEST, if non-null, was the LHS of
   the MODIFY_EXPR to which this call is the RHS.

   The return value is a (possibly null) value that is the result of the
   function as seen by the callee.  *USE_P is a (possibly null) value that
   holds the result as seen by the caller.  */

static tree
declare_return_variable (inline_data *id, tree return_slot_addr,
			 tree modify_dest, tree *use_p)
{
  tree callee = id->callee;
  tree caller = id->caller;
  tree result = DECL_RESULT (callee);
  tree callee_type = TREE_TYPE (result);
  tree caller_type = TREE_TYPE (TREE_TYPE (callee));
  tree var, use;

  /* We don't need to do anything for functions that don't return
     anything.  */
  if (!result || VOID_TYPE_P (callee_type))
    {
      *use_p = NULL_TREE;
      return NULL_TREE;
    }

  /* If there was a return slot, then the return value is the
     dereferenced address of that object.  */
  if (return_slot_addr)
    {
      /* The front end shouldn't have used both return_slot_addr and
	 a modify expression.  */
      gcc_assert (!modify_dest);
      if (DECL_BY_REFERENCE (result))
	{
	  tree base_var = TREE_OPERAND (return_slot_addr, 0);

	  /* FIXME: rewriting random variables in SSA form is going
	     to cause misoptimizations once we start optimizing.  */
	  if (TREE_CODE (base_var) == SSA_NAME)
	    base_var = SSA_NAME_VAR (base_var);
	  mark_sym_for_renaming (base_var);
	  var = return_slot_addr;
	}
      else
	{
	  mark_sym_for_renaming (TREE_OPERAND (return_slot_addr, 0));
	  var = build_fold_indirect_ref (return_slot_addr);
	}
      use = NULL;
      goto done;
    }

  /* All types requiring non-trivial constructors should have been handled.  */
  gcc_assert (!TREE_ADDRESSABLE (callee_type));

  /* Attempt to avoid creating a new temporary variable.  */
  if (modify_dest
      /* FIXME: the code to handle undefined return values in
	 expand_inline_call would overwrite the computed return
	 value otherwise.  */
      && TREE_CODE (modify_dest) != SSA_NAME)
    {
      bool use_it = false;

      /* We can't use MODIFY_DEST if there's type promotion involved.  */
      if (!lang_hooks.types_compatible_p (caller_type, callee_type))
	use_it = false;

      /* ??? If we're assigning to a variable sized type, then we must
	 reuse the destination variable, because we've no good way to
	 create variable sized temporaries at this point.  */
      else if (TREE_CODE (TYPE_SIZE_UNIT (caller_type)) != INTEGER_CST)
	use_it = true;

      /* If the callee cannot possibly modify MODIFY_DEST, then we can
	 reuse it as the result of the call directly.  Don't do this if
	 it would promote MODIFY_DEST to addressable.  */
      else if (TREE_ADDRESSABLE (result))
	use_it = false;
      else
	{
	  tree base_m = get_base_address (modify_dest);

	  /* If the base isn't a decl, then it's a pointer, and we don't
	     know where that's going to go.  */
	  if (!DECL_P (base_m))
	    use_it = false;
	  else if (is_global_var (base_m))
	    use_it = false;
	  else if (!TREE_ADDRESSABLE (base_m))
	    use_it = true;
	}

      if (use_it)
	{
	  var = modify_dest;
	  use = NULL;
	  goto done;
	}
    }

  gcc_assert (TREE_CODE (TYPE_SIZE_UNIT (callee_type)) == INTEGER_CST);

  var = copy_decl_for_dup (result, callee, caller, /*versioning=*/false);
  if (in_ssa_p)
    {
      /* TODO: We probably can go directly into SSA name here without much
	 trouble.  */
      add_referenced_tmp_var (var);
    }

  DECL_SEEN_IN_BIND_EXPR_P (var) = 1;
  DECL_STRUCT_FUNCTION (caller)->unexpanded_var_list
    = tree_cons (NULL_TREE, var,
		 DECL_STRUCT_FUNCTION (caller)->unexpanded_var_list);

  /* Do not have the rest of GCC warn about this variable as it should
     not be visible to the user.  */
  TREE_NO_WARNING (var) = 1;

  /* Build the use expr.  If the return type of the function was
     promoted, convert it back to the expected type.  */
  use = var;
  if (!lang_hooks.types_compatible_p (TREE_TYPE (var), caller_type))
    use = fold_convert (caller_type, var);

  /* Register the VAR_DECL as the equivalent for the RESULT_DECL; that
     way, when the RESULT_DECL is encountered, it will be
     automatically replaced by the VAR_DECL.  */
  insert_decl_map (id, result, var);

  /* Remember this so we can ignore it in remap_decls.  */
  id->retvar = var;

 done:
  *use_p = use;
  return var;
}
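
/* Illustration (made-up example): for "s = g ();" where "s" is an
   ordinary local the callee cannot otherwise see, MODIFY_DEST "s" is
   reused as the return variable and the copied body stores into "s"
   directly; when the front end passed a return slot instead, the
   RESULT_DECL maps to the dereferenced slot address, so no temporary
   is created in either case.  */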
/* Returns nonzero if a function can be inlined as a tree.  */

bool
tree_inlinable_function_p (tree fn)
{
  return inlinable_function_p (fn);
}

static const char *inline_forbidden_reason;
static tree
inline_forbidden_p_1 (tree *nodep, int *walk_subtrees ATTRIBUTE_UNUSED,
		      void *fnp)
{
  tree node = *nodep;
  tree fn = (tree) fnp;
  tree t;

  switch (TREE_CODE (node))
    {
    case CALL_EXPR:
      /* Refuse to inline alloca call unless user explicitly forced so, as
	 this may change program's memory overhead drastically when the
	 function using alloca is called in loop.  In GCC present in
	 SPEC2000, inlining into schedule_block caused it to require 2GB of
	 RAM instead of 256MB.  */
      if (alloca_call_p (node)
	  && !lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn)))
	{
	  inline_forbidden_reason
	    = G_("function %q+F can never be inlined because it uses "
		 "alloca (override using the always_inline attribute)");
	  return node;
	}
      t = get_callee_fndecl (node);
      if (! t)
	break;

      /* We cannot inline functions that call setjmp.  */
      if (setjmp_call_p (t))
	{
	  inline_forbidden_reason
	    = G_("function %q+F can never be inlined because it uses setjmp");
	  return node;
	}

      if (DECL_BUILT_IN_CLASS (t) == BUILT_IN_NORMAL)
	switch (DECL_FUNCTION_CODE (t))
	  {
	    /* We cannot inline functions that take a variable number of
	       arguments.  */
	  case BUILT_IN_VA_START:
	  case BUILT_IN_STDARG_START:
	  case BUILT_IN_NEXT_ARG:
	  case BUILT_IN_VA_END:
	    inline_forbidden_reason
	      = G_("function %q+F can never be inlined because it "
		   "uses variable argument lists");
	    return node;

	  case BUILT_IN_LONGJMP:
	    /* We can't inline functions that call __builtin_longjmp at
	       all.  The non-local goto machinery really requires the
	       destination be in a different function.  If we allow the
	       function calling __builtin_longjmp to be inlined into the
	       function calling __builtin_setjmp, Things will Go Awry.  */
	    inline_forbidden_reason
	      = G_("function %q+F can never be inlined because "
		   "it uses setjmp-longjmp exception handling");
	    return node;

	  case BUILT_IN_NONLOCAL_GOTO:
	    /* Similarly.  */
	    inline_forbidden_reason
	      = G_("function %q+F can never be inlined because "
		   "it uses non-local goto");
	    return node;

	  case BUILT_IN_RETURN:
	  case BUILT_IN_APPLY_ARGS:
	    /* If a __builtin_apply_args caller would be inlined,
	       it would be saving arguments of the function it has
	       been inlined into.  Similarly __builtin_return would
	       return from the function the inline has been inlined into.  */
	    inline_forbidden_reason
	      = G_("function %q+F can never be inlined because "
		   "it uses __builtin_return or __builtin_apply_args");
	    return node;

	  default:
	    break;
	  }
      break;

    case GOTO_EXPR:
      t = TREE_OPERAND (node, 0);

      /* We will not inline a function which uses computed goto.  The
	 addresses of its local labels, which may be tucked into
	 global storage, are of course not constant across
	 instantiations, which causes unexpected behavior.  */
      if (TREE_CODE (t) != LABEL_DECL)
	{
	  inline_forbidden_reason
	    = G_("function %q+F can never be inlined "
		 "because it contains a computed goto");
	  return node;
	}
      break;

    case LABEL_EXPR:
      t = TREE_OPERAND (node, 0);
      if (DECL_NONLOCAL (t))
	{
	  /* We cannot inline a function that receives a non-local goto
	     because we cannot remap the destination label used in the
	     function that is performing the non-local goto.  */
	  inline_forbidden_reason
	    = G_("function %q+F can never be inlined "
		 "because it receives a non-local goto");
	  return node;
	}
      break;

    case RECORD_TYPE:
    case UNION_TYPE:
      /* We cannot inline a function of the form

	   void F (int i) { struct S { int ar[i]; } s; }

	 Attempting to do so produces a catch-22.
	 If walk_tree examines the TYPE_FIELDS chain of RECORD_TYPE/
	 UNION_TYPE nodes, then it goes into infinite recursion on a
	 structure containing a pointer to its own type.  If it doesn't,
	 then the type node for S doesn't get adjusted properly when
	 F is inlined.

	 ??? This is likely no longer true, but it's too late in the 4.0
	 cycle to try to find out.  This should be checked for 4.1.  */
      for (t = TYPE_FIELDS (node); t; t = TREE_CHAIN (t))
	if (variably_modified_type_p (TREE_TYPE (t), NULL))
	  {
	    inline_forbidden_reason
	      = G_("function %q+F can never be inlined "
		   "because it uses variable sized variables");
	    return node;
	  }

    default:
      break;
    }

  return NULL_TREE;
}
/* Return subexpression representing possible alloca call, if any.  */

static tree
inline_forbidden_p (tree fndecl)
{
  location_t saved_loc = input_location;
  block_stmt_iterator bsi;
  basic_block bb;
  tree ret = NULL_TREE;

  FOR_EACH_BB_FN (bb, DECL_STRUCT_FUNCTION (fndecl))
    for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
      {
	ret = walk_tree_without_duplicates (bsi_stmt_ptr (bsi),
					    inline_forbidden_p_1, fndecl);
	if (ret)
	  goto egress;
      }

 egress:
  input_location = saved_loc;
  return ret;
}
/* Returns nonzero if FN is a function that does not have any
   fundamental inline blocking properties.  */

static bool
inlinable_function_p (tree fn)
{
  bool inlinable = true;

  /* If we've already decided this function shouldn't be inlined,
     there's no need to check again.  */
  if (DECL_UNINLINABLE (fn))
    return false;

  /* See if there is any language-specific reason it cannot be
     inlined.  (It is important that this hook be called early because
     in C++ it may result in template instantiation.)
     If the function is not inlinable for language-specific reasons,
     it is left up to the langhook to explain why.  */
  inlinable = !lang_hooks.tree_inlining.cannot_inline_tree_fn (&fn);

  /* If we don't have the function body available, we can't inline it.
     However, this should not be recorded since we also get here for
     forward declared inline functions.  Therefore, return at once.  */
  if (!DECL_SAVED_TREE (fn))
    return false;

  /* If we're not inlining at all, then we cannot inline this function.  */
  else if (!flag_inline_trees)
    inlinable = false;

  /* Only try to inline functions if DECL_INLINE is set.  This should be
     true for all functions declared `inline', and for all other functions
     as well with -finline-functions.

     Don't think of disregarding DECL_INLINE when flag_inline_trees == 2;
     it's the front-end that must set DECL_INLINE in this case, because
     dwarf2out loses if a function that does not have DECL_INLINE set is
     inlined anyway.  That is why we have both DECL_INLINE and
     DECL_DECLARED_INLINE_P.  */
  /* FIXME: When flag_inline_trees dies, the check for flag_unit_at_a_time
     here should be redundant.  */
  else if (!DECL_INLINE (fn) && !flag_unit_at_a_time)
    inlinable = false;

  else if (inline_forbidden_p (fn))
    {
      /* See if we should warn about uninlinable functions.  Previously,
	 some of these warnings would be issued while trying to expand
	 the function inline, but that would cause multiple warnings
	 about functions that would for example call alloca.  But since
	 this is a property of the function, just one warning is enough.
	 As a bonus we can now give more details about the reason why a
	 function is not inlinable.
	 We only warn for functions declared `inline' by the user.  */
      bool do_warning = (warn_inline
			 && DECL_INLINE (fn)
			 && DECL_DECLARED_INLINE_P (fn)
			 && !DECL_IN_SYSTEM_HEADER (fn));

      if (lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn)))
	sorry (inline_forbidden_reason, fn);
      else if (do_warning)
	warning (OPT_Winline, inline_forbidden_reason, fn);

      inlinable = false;
    }

  /* Squirrel away the result so that we don't have to check again.  */
  DECL_UNINLINABLE (fn) = !inlinable;

  return inlinable;
}
/* Estimate the cost of a memory move.  Use machine dependent
   word size and take possible memcpy call into account.  */

static int
estimate_move_cost (tree type)
{
  HOST_WIDE_INT size;

  size = int_size_in_bytes (type);

  if (size < 0 || size > MOVE_MAX_PIECES * MOVE_RATIO)
    /* Cost of a memcpy call, 3 arguments and the call.  */
    return 4;
  else
    return ((size + MOVE_MAX_PIECES - 1) / MOVE_MAX_PIECES);
}
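
/* Worked example (illustrative; the numbers depend on the target's
   MOVE_MAX_PIECES and MOVE_RATIO): with MOVE_MAX_PIECES == 8 and
   MOVE_RATIO == 4, a 24-byte struct costs (24 + 8 - 1) / 8 == 3 move
   instructions, while anything larger than 32 bytes, or of unknown
   size (size < 0), is charged the flat memcpy cost of 4.  */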
1871 /* Used by estimate_num_insns. Estimate number of instructions seen
1872 by given statement. */
1875 estimate_num_insns_1 (tree
*tp
, int *walk_subtrees
, void *data
)
1880 if (IS_TYPE_OR_DECL_P (x
))
1885 /* Assume that constants and references counts nothing. These should
1886 be majorized by amount of operations among them we count later
1887 and are common target of CSE and similar optimizations. */
1888 else if (CONSTANT_CLASS_P (x
) || REFERENCE_CLASS_P (x
))
1891 switch (TREE_CODE (x
))
1893 /* Containers have no cost. */
1900 case ALIGN_INDIRECT_REF
:
1901 case MISALIGNED_INDIRECT_REF
:
1903 case ARRAY_RANGE_REF
:
1905 case EXC_PTR_EXPR
: /* ??? */
1906 case FILTER_EXPR
: /* ??? */
1909 case WITH_CLEANUP_EXPR
:
1911 case VIEW_CONVERT_EXPR
:
1916 case CASE_LABEL_EXPR
:
1919 case EH_FILTER_EXPR
:
1920 case STATEMENT_LIST
:
1922 case NON_LVALUE_EXPR
:
1925 case TRY_CATCH_EXPR
:
1926 case TRY_FINALLY_EXPR
:
1933 case WITH_SIZE_EXPR
:
1936 /* We don't account constants for now. Assume that the cost is amortized
1937 by operations that do use them. We may re-consider this decision once
1938 we are able to optimize the tree before estimating its size and break
1939 out static initializers. */
1940 case IDENTIFIER_NODE
:
1949 /* Try to estimate the cost of assignments. We have three cases to
1951 1) Simple assignments to registers;
1952 2) Stores to things that must live in memory. This includes
1953 "normal" stores to scalars, but also assignments of large
1954 structures, or constructors of big arrays;
1957 Let us look at the first two cases, assuming we have "a = b + C":
1958 <modify_expr <var_decl "a"> <plus_expr <var_decl "b"> <constant C>>
1959 If "a" is a GIMPLE register, the assignment to it is free on almost
1960 any target, because "a" usually ends up in a real register. Hence
1961 the only cost of this expression comes from the PLUS_EXPR, and we
1962 can ignore the MODIFY_EXPR.
1963 If "a" is not a GIMPLE register, the assignment to "a" will most
1964 likely be a real store, so the cost of the MODIFY_EXPR is the cost
1965 of moving something into "a", which we compute using the function
1968 The third case deals with TARGET_EXPRs, for which the semantics are
1969 that a temporary is assigned, unless the TARGET_EXPR itself is being
1970 assigned to something else. In the latter case we do not need the
1971 temporary. E.g. in <modify_expr <var_decl "a"> <target_expr>>, the
1972 MODIFY_EXPR is free. */
1975 /* Is the right and side a TARGET_EXPR? */
1976 if (TREE_CODE (TREE_OPERAND (x
, 1)) == TARGET_EXPR
)
1978 /* ... fall through ... */
1981 x
= TREE_OPERAND (x
, 0);
1982 /* Is this an assignments to a register? */
1983 if (is_gimple_reg (x
))
1985 /* Otherwise it's a store, so fall through to compute the move cost. */
1988 *count
+= estimate_move_cost (TREE_TYPE (x
));
    /* Assign cost of 1 to usual operations.
       ??? We may consider mapping RTL costs to this.  */
    case FIX_TRUNC_EXPR:
    case FIX_FLOOR_EXPR:
    case FIX_ROUND_EXPR:

    case VEC_LSHIFT_EXPR:
    case VEC_RSHIFT_EXPR:

    case TRUTH_ANDIF_EXPR:
    case TRUTH_ORIF_EXPR:
    case TRUTH_AND_EXPR:
    case TRUTH_XOR_EXPR:
    case TRUTH_NOT_EXPR:

    case UNORDERED_EXPR:

    case PREDECREMENT_EXPR:
    case PREINCREMENT_EXPR:
    case POSTDECREMENT_EXPR:
    case POSTINCREMENT_EXPR:

    case REALIGN_LOAD_EXPR:

    case REDUC_MAX_EXPR:
    case REDUC_MIN_EXPR:
    case REDUC_PLUS_EXPR:
      *count += 1;
      break;
    /* A few special cases of expensive operations.  This is useful
       to avoid inlining on functions having too many of these.  */
    case TRUNC_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case EXACT_DIV_EXPR:
    case TRUNC_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case ROUND_MOD_EXPR:
      *count += 10;
      break;
    case CALL_EXPR:
      {
        tree decl = get_callee_fndecl (x);
        tree arg;

        if (decl && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
          switch (DECL_FUNCTION_CODE (decl))
            {
            case BUILT_IN_CONSTANT_P:
              *walk_subtrees = 0;
              return NULL_TREE;
            case BUILT_IN_EXPECT:
              return NULL_TREE;
            default:
              break;
            }

        /* Our cost must be kept in sync with
           cgraph_estimate_size_after_inlining, which uses the function
           declaration to figure out the arguments.  */
        if (!decl)
          for (arg = TREE_OPERAND (x, 1); arg; arg = TREE_CHAIN (arg))
            *count += estimate_move_cost (TREE_TYPE (TREE_VALUE (arg)));
        else
          for (arg = DECL_ARGUMENTS (decl); arg; arg = TREE_CHAIN (arg))
            *count += estimate_move_cost (TREE_TYPE (arg));

        *count += PARAM_VALUE (PARAM_INLINE_CALL_COST);
        break;
      }

    default:
      break;
    }
  return NULL;
}
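/* Illustrative note (not part of the original source): a call such as
   "foo (i, s)", where "i" is an int and "s" a 32-byte struct, is charged
   estimate_move_cost for each argument type plus the fixed
   PARAM_INLINE_CALL_COST charge for the call itself.  */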
/* Estimate number of instructions that will be created by expanding EXPR.  */

int
estimate_num_insns (tree expr)
{
  int num = 0;
  struct pointer_set_t *visited_nodes;
  basic_block bb;
  block_stmt_iterator bsi;
  struct function *my_function;

  /* If we're given an entire function, walk the CFG.  */
  if (TREE_CODE (expr) == FUNCTION_DECL)
    {
      my_function = DECL_STRUCT_FUNCTION (expr);
      gcc_assert (my_function && my_function->cfg);
      visited_nodes = pointer_set_create ();
      FOR_EACH_BB_FN (bb, my_function)
        for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
          walk_tree (bsi_stmt_ptr (bsi), estimate_num_insns_1,
                     &num, visited_nodes);
      pointer_set_destroy (visited_nodes);
    }
  else
    walk_tree_without_duplicates (&expr, estimate_num_insns_1, &num);

  return num;
}
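/* Usage sketch (illustrative): the inlining heuristics size up a whole
   function by passing its FUNCTION_DECL, e.g.

     int size = estimate_num_insns (fndecl);

   whereas passing any other tree estimates just that expression.  */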
typedef struct function *function_p;

DEF_VEC_P(function_p);
DEF_VEC_ALLOC_P(function_p,heap);

/* Initialized with NOGC, making this poisonous to the garbage collector.  */
static VEC(function_p,heap) *cfun_stack;
void
push_cfun (struct function *new_cfun)
{
  VEC_safe_push (function_p, heap, cfun_stack, cfun);
  cfun = new_cfun;
}

void
pop_cfun (void)
{
  cfun = VEC_pop (function_p, cfun_stack);
}
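/* Usage sketch (illustrative): callers bracket work on another function's
   "struct function" with a matched pair:

     push_cfun (DECL_STRUCT_FUNCTION (other_fndecl));
     ... operate on cfun ...
     pop_cfun ();  */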
/* Install new lexical TREE_BLOCK underneath 'current_block'.  */

static void
add_lexical_block (tree current_block, tree new_block)
{
  tree *blk_p;

  /* Walk to the last sub-block.  */
  for (blk_p = &BLOCK_SUBBLOCKS (current_block);
       *blk_p;
       blk_p = &TREE_CHAIN (*blk_p))
    ;
  *blk_p = new_block;
  BLOCK_SUPERCONTEXT (new_block) = current_block;
}
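/* Illustrative note (not part of the original source): after
   add_lexical_block (outer, inner), INNER is the last entry on the
   BLOCK_SUBBLOCKS (outer) chain and BLOCK_SUPERCONTEXT (inner) == outer,
   i.e. the new scope hangs off the end of the existing chain.  */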
/* If *TP is a CALL_EXPR, replace it with its inline expansion.  */

static bool
expand_call_inline (basic_block bb, tree stmt, tree *tp, void *data)
{
  inline_data *id;
  tree t;
  tree use_retvar;
  tree fn;
  splay_tree st;
  tree args;
  tree return_slot_addr;
  tree modify_dest;
  location_t saved_location;
  struct cgraph_edge *cg_edge;
  const char *reason;
  basic_block return_block;
  edge e;
  block_stmt_iterator bsi, stmt_bsi;
  bool successfully_inlined = FALSE;
  tree t_step;
  tree var;
  struct cgraph_node *old_node;
  tree decl;

  /* See what we've got.  */
  id = (inline_data *) data;
  t = *tp;
  /* Set input_location here so we get the right instantiation context
     if we call instantiate_decl from inlinable_function_p.  */
  saved_location = input_location;
  if (EXPR_HAS_LOCATION (t))
    input_location = EXPR_LOCATION (t);

  /* From here on, we're only interested in CALL_EXPRs.  */
  if (TREE_CODE (t) != CALL_EXPR)
    goto egress;
  /* First, see if we can figure out what function is being called.
     If we cannot, then there is no hope of inlining the function.  */
  fn = get_callee_fndecl (t);
  if (!fn)
    goto egress;

  /* Turn forward declarations into real ones.  */
  fn = cgraph_node (fn)->decl;

  /* If fn is a declaration of a function in a nested scope that was
     globally declared inline, we don't set its DECL_INITIAL.
     However, we can't blindly follow DECL_ABSTRACT_ORIGIN because the
     C++ front-end uses it for cdtors to refer to their internal
     declarations, which are not real functions.  Fortunately those
     don't have trees to be saved, so we can tell by checking their
     DECL_SAVED_TREE.  */
  if (! DECL_INITIAL (fn)
      && DECL_ABSTRACT_ORIGIN (fn)
      && DECL_SAVED_TREE (DECL_ABSTRACT_ORIGIN (fn)))
    fn = DECL_ABSTRACT_ORIGIN (fn);
  /* Objective C and Fortran still call tree_rest_of_compilation directly.
     Kill this check once this is fixed.  */
  if (!id->current_node->analyzed)
    goto egress;

  cg_edge = cgraph_edge (id->current_node, stmt);

  /* Constant propagation on arguments done during previous inlining
     may create new direct calls.  Produce an edge for any such call.  */
  if (!cg_edge)
    {
      struct cgraph_node *dest = cgraph_node (fn);

      /* We have a missing edge in the callgraph.  This can happen
         when previous inlining turned an indirect call into a direct
         call by constant propagating arguments.  In all other cases
         we hit a bug (incorrect node sharing is the most common
         reason for missing edges).  */
      gcc_assert (dest->needed || !flag_unit_at_a_time);
      cgraph_create_edge (id->node, dest, stmt,
                          bb->count, bb->loop_depth)->inline_failed
        = N_("originally indirect function call not considered for inlining");
      goto egress;
    }
  /* Don't try to inline functions that are not well-suited to
     inlining.  */
  if (!cgraph_inline_p (cg_edge, &reason))
    {
      if (lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn))
          /* Avoid warnings during early inline pass.  */
          && !id->early_inlining_p)
        {
          sorry ("inlining failed in call to %q+F: %s", fn, reason);
          sorry ("called from here");
        }
      else if (warn_inline && DECL_DECLARED_INLINE_P (fn)
               && !DECL_IN_SYSTEM_HEADER (fn)
               && !lookup_attribute ("noinline", DECL_ATTRIBUTES (fn))
               /* Avoid warnings during early inline pass.  */
               && !id->early_inlining_p)
        {
          warning (OPT_Winline, "inlining failed in call to %q+F: %s",
                   fn, reason);
          warning (OPT_Winline, "called from here");
        }
      goto egress;
    }
  fn = cg_edge->callee->decl;
#ifdef ENABLE_CHECKING
  if (cg_edge->callee->decl != id->node->decl)
    verify_cgraph_node (cg_edge->callee);
#endif

  /* We will be inlining this callee.  */
  id->eh_region = lookup_stmt_eh_region (stmt);
  /* Split the block holding the CALL_EXPR.  */
  e = split_block (bb, stmt);
  bb = e->src;
  return_block = e->dest;
  remove_edge (e);

  /* split_block splits before the statement; work around this by moving
     the call into the first half_bb.  Not pretty, but seems easier than
     doing the CFG manipulation by hand when the CALL_EXPR is in the last
     statement of BB.  */
  stmt_bsi = bsi_last (bb);
  bsi = bsi_start (return_block);
  if (!bsi_end_p (bsi))
    bsi_move_before (&stmt_bsi, &bsi);
  else
    {
      tree stmt = bsi_stmt (stmt_bsi);
      bsi_remove (&stmt_bsi);
      bsi_insert_after (&bsi, stmt, BSI_NEW_STMT);
    }
  stmt_bsi = bsi_start (return_block);
  /* Build a block containing code to initialize the arguments, the
     actual inline expansion of the body, and a label for the return
     statements within the function to jump to.  The type of the
     statement expression is the return type of the function call.  */
  id->block = make_node (BLOCK);
  BLOCK_ABSTRACT_ORIGIN (id->block) = fn;
  BLOCK_SOURCE_LOCATION (id->block) = input_location;
  add_lexical_block (TREE_BLOCK (stmt), id->block);
  /* Local declarations will be replaced by their equivalents in this
     map.  */
  st = id->decl_map;
  id->decl_map = splay_tree_new (splay_tree_compare_pointers,
                                 NULL, NULL);

  /* Initialize the parameters.  */
  args = TREE_OPERAND (t, 1);

  /* Record the function we are about to inline.  */
  id->callee = fn;
  id->callee_cfun = DECL_STRUCT_FUNCTION (fn);

  initialize_inlined_parameters (id, args, TREE_OPERAND (t, 2), fn, bb);

  if (DECL_INITIAL (fn))
    add_lexical_block (id->block, remap_blocks (DECL_INITIAL (fn), id));

  /* Return statements in the function body will be replaced by jumps
     to the RET_LABEL.  */
  gcc_assert (DECL_INITIAL (fn));
  gcc_assert (TREE_CODE (DECL_INITIAL (fn)) == BLOCK);
  /* Find the lhs to which the result of this call is assigned.  */
  return_slot_addr = NULL;
  if (TREE_CODE (stmt) == MODIFY_EXPR)
    {
      modify_dest = TREE_OPERAND (stmt, 0);

      /* The function which we are inlining might not return a value,
         in which case we should issue a warning that the function
         does not return a value.  In that case the optimizers will
         see that the variable to which the value is assigned was not
         initialized.  We do not want to issue a warning about that
         uninitialized variable.  */
      if (DECL_P (modify_dest))
        TREE_NO_WARNING (modify_dest) = 1;
      if (CALL_EXPR_RETURN_SLOT_OPT (t))
        {
          return_slot_addr = build_fold_addr_expr (modify_dest);
          modify_dest = NULL;
        }
    }
  else
    modify_dest = NULL;
  /* Declare the return variable for the function.  */
  decl = declare_return_variable (id, return_slot_addr,
                                  modify_dest, &use_retvar);
  /* Do this only if declare_return_variable created a new one.  */
  if (decl && !return_slot_addr && decl != modify_dest)
    declare_inline_vars (id->block, decl);
  /* After we've initialized the parameters, we insert the body of the
     function itself.  */
  old_node = id->current_node;

  /* Anoint the callee-to-be-duplicated as the "current_node."  When
     CALL_EXPRs within callee are duplicated, the edges from callee to
     callee's callees (caller's grandchildren) will be cloned.  */
  id->current_node = cg_edge->callee;

  /* This is it.  Duplicate the callee body.  Assume callee is
     pre-gimplified.  Note that we must not alter the caller
     function in any way before this point, as this CALL_EXPR may be
     a self-referential call; if we're calling ourselves, we need to
     duplicate our body before altering anything.  */
  copy_body (id, bb->count, bb->frequency, bb, return_block);
  id->current_node = old_node;
  /* Add local vars in this inlined callee to caller.  */
  t_step = id->callee_cfun->unexpanded_var_list;
  for (; t_step; t_step = TREE_CHAIN (t_step))
    {
      var = TREE_VALUE (t_step);
      if (TREE_STATIC (var) && !TREE_ASM_WRITTEN (var))
        cfun->unexpanded_var_list = tree_cons (NULL_TREE, var,
                                               cfun->unexpanded_var_list);
      else
        cfun->unexpanded_var_list = tree_cons (NULL_TREE, remap_decl (var, id),
                                               cfun->unexpanded_var_list);
    }

  /* Clean up.  */
  splay_tree_delete (id->decl_map);
  id->decl_map = st;
  /* If the inlined function returns a result that we care about,
     clobber the CALL_EXPR with a reference to the return variable.  */
  if (use_retvar && (TREE_CODE (bsi_stmt (stmt_bsi)) != CALL_EXPR))
    {
      *tp = use_retvar;
      mark_new_vars_to_rename (stmt);
      maybe_clean_or_replace_eh_stmt (stmt, stmt);
    }
  else
    /* We're modifying a TSI owned by gimple_expand_calls_inline();
       tsi_delink() will leave the iterator in a sane state.  */
    {
      /* Handle the case of inlining a function that is missing a return
         statement, so the return value becomes undefined.  */
      if (TREE_CODE (stmt) == MODIFY_EXPR
          && TREE_CODE (TREE_OPERAND (stmt, 0)) == SSA_NAME)
        {
          tree name = TREE_OPERAND (stmt, 0);
          tree var = SSA_NAME_VAR (TREE_OPERAND (stmt, 0));
          tree def = default_def (var);

          /* If the variable is used undefined, make this name undefined
             via a move.  */
          if (def)
            {
              TREE_OPERAND (stmt, 1) = def;
              update_stmt (stmt);
            }
          /* Otherwise make this variable undefined.  */
          else
            {
              bsi_remove (&stmt_bsi);
              set_default_def (var, name);
              SSA_NAME_DEF_STMT (name) = build_empty_stmt ();
            }
        }
      else
        bsi_remove (&stmt_bsi);
    }
  if (bsi_end_p (bsi))
    tree_purge_dead_eh_edges (return_block);

  /* If the value of the new expression is ignored, that's OK.  We
     don't warn about this for CALL_EXPRs, so we shouldn't warn about
     the equivalent inlined version either.  */
  TREE_USED (*tp) = 1;
  /* Output the inlining info for this abstract function, since it has been
     inlined.  If we don't do this now, we can lose the information about the
     variables in the function when the blocks get blown away as soon as we
     remove the cgraph node.  */
  (*debug_hooks->outlining_inline_function) (cg_edge->callee->decl);

  /* Update callgraph if needed.  */
  cgraph_remove_node (cg_edge->callee);

  /* Declare the 'auto' variables added with this inlined body.  */
  record_vars (BLOCK_VARS (id->block));
  id->block = NULL_TREE;
  successfully_inlined = TRUE;
 egress:
  input_location = saved_location;
  return successfully_inlined;
}
/* Expand call statements reachable from STMT_P.
   We can only have CALL_EXPRs as the "toplevel" tree code or nested
   in a MODIFY_EXPR.  See tree-gimple.c:get_call_expr_in().  We can
   unfortunately not use that function here because we need a pointer
   to the CALL_EXPR, not the tree itself.  */

static bool
gimple_expand_calls_inline (basic_block bb, inline_data *id)
{
  block_stmt_iterator bsi;

  /* Register specific tree functions.  */
  tree_register_cfg_hooks ();
  for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
    {
      tree *expr_p = bsi_stmt_ptr (bsi);
      tree stmt = *expr_p;

      if (TREE_CODE (*expr_p) == MODIFY_EXPR)
        expr_p = &TREE_OPERAND (*expr_p, 1);
      if (TREE_CODE (*expr_p) == WITH_SIZE_EXPR)
        expr_p = &TREE_OPERAND (*expr_p, 0);
      if (TREE_CODE (*expr_p) == CALL_EXPR)
        if (expand_call_inline (bb, stmt, expr_p, id))
          return true;
    }
  return false;
}
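/* Illustrative note (not part of the original source): the unwrapping
   above handles the only GIMPLE shapes a call can take, e.g.

     foo (a);          -- bare CALL_EXPR
     x = foo (a);      -- CALL_EXPR nested in a MODIFY_EXPR

   possibly wrapped in a WITH_SIZE_EXPR for variable-sized results.  */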
/* Expand calls to inline functions in the body of FN.  */

void
optimize_inline_calls (tree fn, bool early)
{
  inline_data id;
  tree prev_fn;
  basic_block bb;

  /* There is no point in performing inlining if errors have already
     occurred -- and we might crash if we try to inline invalid
     code.  */
  if (errorcount || sorrycount)
    return;

  /* Clear out ID.  */
  memset (&id, 0, sizeof (id));

  id.current_node = id.node = cgraph_node (fn);
  id.caller = fn;
  id.early_inlining_p = early;
  /* Or any functions that aren't finished yet.  */
  prev_fn = NULL_TREE;
  if (current_function_decl)
    {
      id.caller = current_function_decl;
      prev_fn = current_function_decl;
    }

  /* Reach the trees by walking over the CFG, and note the
     enclosing basic-blocks in the call edges.  */
  /* We walk the blocks going forward, because inlined function bodies
     will split id->current_basic_block, and the new blocks will
     follow it; we'll trudge through them, processing their CALL_EXPRs
     along the way.  */
  FOR_EACH_BB (bb)
    gimple_expand_calls_inline (bb, &id);

  /* Renumber the (code) basic_blocks consecutively.  */
  compact_blocks ();
  /* Renumber the lexical scoping (non-code) blocks consecutively.  */
  number_blocks (fn);

#ifdef ENABLE_CHECKING
  {
    struct cgraph_edge *e;

    verify_cgraph_node (id.node);

    /* Double check that we inlined everything we are supposed to inline.  */
    for (e = id.node->callees; e; e = e->next_callee)
      gcc_assert (e->inline_failed);
  }
#endif

  /* We need to rescale frequencies again to peak at REG_BR_PROB_BASE
     as inlining loops might increase the maximum.  */
  if (ENTRY_BLOCK_PTR->count)
    counts_to_freqs ();

  fold_cond_expr_cond ();
  delete_unreachable_blocks ();
#ifdef ENABLE_CHECKING
  verify_flow_info ();
#endif

  update_ssa (TODO_update_ssa);
#ifdef ENABLE_CHECKING
  verify_flow_info ();
#endif
  fold_cond_expr_cond ();
  cleanup_tree_cfg ();
  rebuild_cgraph_edges ();
  if (need_ssa_update_p ())
    update_ssa (TODO_update_ssa);

  free_dominance_info (CDI_DOMINATORS);
  free_dominance_info (CDI_POST_DOMINATORS);
}
/* FN is a function that has a complete body, and CLONE is a function whose
   body is to be set to a copy of FN, mapping argument declarations according
   to the ARG_MAP splay_tree.  */

void
clone_body (tree clone, tree fn, void *arg_map)
{
  inline_data id;

  /* Clone the body, as if we were making an inline call.  But, remap the
     parameters in the callee to the parameters of caller.  */
  memset (&id, 0, sizeof (id));
  id.caller = clone;
  id.callee = fn;
  id.callee_cfun = DECL_STRUCT_FUNCTION (fn);
  id.decl_map = (splay_tree)arg_map;

  /* Cloning is treated slightly differently from inlining.  Set
     CLONING_P so that it's clear which operation we're performing.  */
  id.cloning_p = true;

  /* We're not inside any EH region.  */
  id.eh_region = -1;

  /* Actually copy the body.  */
  append_to_statement_list_force (copy_generic_body (&id),
                                  &DECL_SAVED_TREE (clone));
}
/* Passed to walk_tree.  Copies the node pointed to, if appropriate.  */

tree
copy_tree_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
{
  enum tree_code code = TREE_CODE (*tp);
  inline_data *id = (inline_data *) data;

  /* We make copies of most nodes.  */
  if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code))
      || code == TREE_LIST
      || code == TREE_VEC
      || code == TYPE_DECL)
    {
      /* Because the chain gets clobbered when we make a copy, we save it
         here.  */
      tree chain = TREE_CHAIN (*tp);
      tree new;

      if (id && id->versioning_p && replace_ref_tree (id, tp))
        {
          *walk_subtrees = 0;
          return NULL_TREE;
        }

      /* Copy the node.  */
      new = copy_node (*tp);

      /* Propagate mudflap marked-ness.  */
      if (flag_mudflap && mf_marked_p (*tp))
        mf_mark (new);

      *tp = new;

      /* Now, restore the chain, if appropriate.  That will cause
         walk_tree to walk into the chain as well.  */
      if (code == PARM_DECL || code == TREE_LIST)
        TREE_CHAIN (*tp) = chain;

      /* For now, we don't update BLOCKs when we make copies.  So, we
         have to nullify all BIND_EXPRs.  */
      if (TREE_CODE (*tp) == BIND_EXPR)
        BIND_EXPR_BLOCK (*tp) = NULL_TREE;
    }
  else if (code == CONSTRUCTOR)
    {
      /* CONSTRUCTOR nodes need special handling because
         we need to duplicate the vector of elements.  */
      tree new;

      new = copy_node (*tp);

      /* Propagate mudflap marked-ness.  */
      if (flag_mudflap && mf_marked_p (*tp))
        mf_mark (new);

      CONSTRUCTOR_ELTS (new) = VEC_copy (constructor_elt, gc,
                                         CONSTRUCTOR_ELTS (*tp));
      *tp = new;
    }
  else if (TREE_CODE_CLASS (code) == tcc_type)
    *walk_subtrees = 0;
  else if (TREE_CODE_CLASS (code) == tcc_declaration)
    *walk_subtrees = 0;
  else if (TREE_CODE_CLASS (code) == tcc_constant)
    *walk_subtrees = 0;
  else
    gcc_assert (code != STATEMENT_LIST);

  return NULL_TREE;
}
/* The SAVE_EXPR pointed to by TP is being copied.  If ST contains
   information indicating to what new SAVE_EXPR this one should be mapped,
   use that one.  Otherwise, create a new node and enter it in ST.  FN is
   the function into which the copy will be placed.  */

static void
remap_save_expr (tree *tp, void *st_, int *walk_subtrees)
{
  splay_tree st = (splay_tree) st_;
  splay_tree_node n;
  tree t;

  /* See if we already encountered this SAVE_EXPR.  */
  n = splay_tree_lookup (st, (splay_tree_key) *tp);

  /* If we didn't already remap this SAVE_EXPR, do so now.  */
  if (!n)
    {
      t = copy_node (*tp);

      /* Remember this SAVE_EXPR.  */
      splay_tree_insert (st, (splay_tree_key) *tp, (splay_tree_value) t);
      /* Make sure we don't remap an already-remapped SAVE_EXPR.  */
      splay_tree_insert (st, (splay_tree_key) t, (splay_tree_value) t);
    }
  else
    {
      /* We've already walked into this SAVE_EXPR; don't do it again.  */
      *walk_subtrees = 0;
      t = (tree) n->value;
    }

  /* Replace this SAVE_EXPR with the copy.  */
  *tp = t;
}
/* Called via walk_tree.  If *TP points to a DECL_STMT for a local label,
   copies the declaration and enters it in the splay_tree in DATA (which is
   really an `inline_data *').  */

static tree
mark_local_for_remap_r (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED,
                        void *data)
{
  inline_data *id = (inline_data *) data;

  /* Don't walk into types.  */
  if (TYPE_P (*tp))
    *walk_subtrees = 0;

  else if (TREE_CODE (*tp) == LABEL_EXPR)
    {
      tree decl = TREE_OPERAND (*tp, 0);

      /* Copy the decl and remember the copy.  */
      insert_decl_map (id, decl,
                       copy_decl_for_dup (decl, DECL_CONTEXT (decl),
                                          DECL_CONTEXT (decl),
                                          /*versioning=*/false));
    }

  return NULL_TREE;
}
/* Perform any modifications to EXPR required when it is unsaved.  Does
   not recurse into EXPR's subtrees.  */

static void
unsave_expr_1 (tree expr)
{
  switch (TREE_CODE (expr))
    {
    case TARGET_EXPR:
      /* Don't mess with a TARGET_EXPR that hasn't been expanded.
         It's OK for this to happen if it was part of a subtree that
         isn't immediately expanded, such as operand 2 of another
         TARGET_EXPR.  */
      if (TREE_OPERAND (expr, 1))
        break;

      TREE_OPERAND (expr, 1) = TREE_OPERAND (expr, 3);
      TREE_OPERAND (expr, 3) = NULL_TREE;
      break;

    default:
      break;
    }
}
/* Called via walk_tree when an expression is unsaved.  Using the
   splay_tree pointed to by ST (which is really a `splay_tree'),
   remaps all local declarations to appropriate replacements.  */

static tree
unsave_r (tree *tp, int *walk_subtrees, void *data)
{
  inline_data *id = (inline_data *) data;
  splay_tree st = id->decl_map;
  splay_tree_node n;

  /* Only a local declaration (variable or label).  */
  if ((TREE_CODE (*tp) == VAR_DECL && !TREE_STATIC (*tp))
      || TREE_CODE (*tp) == LABEL_DECL)
    {
      /* Lookup the declaration.  */
      n = splay_tree_lookup (st, (splay_tree_key) *tp);

      /* If it's there, remap it.  */
      if (n)
        *tp = (tree) n->value;
    }

  else if (TREE_CODE (*tp) == STATEMENT_LIST)
    copy_statement_list (tp);
  else if (TREE_CODE (*tp) == BIND_EXPR)
    copy_bind_expr (tp, walk_subtrees, id);
  else if (TREE_CODE (*tp) == SAVE_EXPR)
    remap_save_expr (tp, st, walk_subtrees);
  else
    {
      copy_tree_r (tp, walk_subtrees, NULL);

      /* Do whatever unsaving is required.  */
      unsave_expr_1 (*tp);
    }

  /* Keep iterating.  */
  return NULL_TREE;
}
/* Copies everything in EXPR and replaces variables, labels
   and SAVE_EXPRs local to EXPR.  */

tree
unsave_expr_now (tree expr)
{
  inline_data id;

  /* There's nothing to do for NULL_TREE.  */
  if (expr == 0)
    return expr;

  /* Set up ID.  */
  memset (&id, 0, sizeof (id));
  id.callee = current_function_decl;
  id.caller = current_function_decl;
  id.decl_map = splay_tree_new (splay_tree_compare_pointers, NULL, NULL);

  /* Walk the tree once to find local labels.  */
  walk_tree_without_duplicates (&expr, mark_local_for_remap_r, &id);

  /* Walk the tree again, copying, remapping, and unsaving.  */
  walk_tree (&expr, unsave_r, &id, NULL);

  /* Clean up.  */
  splay_tree_delete (id.decl_map);

  return expr;
}
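/* Usage sketch (illustrative): a caller needing a private copy of a
   saved expression simply writes

     expr = unsave_expr_now (expr);

   the tree is copied, and local decls, labels, and SAVE_EXPRs within
   it are remapped to fresh copies.  */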
/* Allow someone to determine if SEARCH is a child of TOP from gdb.  */

static tree
debug_find_tree_1 (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED, void *data)
{
  if (*tp == data)
    return (tree) data;
  else
    return NULL;
}

bool
debug_find_tree (tree top, tree search)
{
  return walk_tree_without_duplicates (&top, debug_find_tree_1, search) != 0;
}
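/* Usage sketch (illustrative): from a debugger session, e.g.

     (gdb) call debug_find_tree (some_root_tree, suspect_tree)

   returns true if SUSPECT_TREE occurs anywhere below SOME_ROOT_TREE.  */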
/* Declare the variables created by the inliner.  Add all the variables in
   VARS to BIND_EXPR.  */

static void
declare_inline_vars (tree block, tree vars)
{
  tree t;
  for (t = vars; t; t = TREE_CHAIN (t))
    DECL_SEEN_IN_BIND_EXPR_P (t) = 1;

  if (block)
    BLOCK_VARS (block) = chainon (BLOCK_VARS (block), vars);
}
/* Copy NODE (which must be a DECL).  The DECL originally was in the FROM_FN,
   but now it will be in the TO_FN.  VERSIONING means that this function
   is used by the versioning utility (not inlining or cloning).  */

tree
copy_decl_for_dup (tree decl, tree from_fn, tree to_fn, bool versioning)
{
  tree copy;

  gcc_assert (DECL_P (decl));
  /* Copy the declaration.  */
  if (!versioning
      && (TREE_CODE (decl) == PARM_DECL
          || TREE_CODE (decl) == RESULT_DECL))
    {
      tree type = TREE_TYPE (decl);

      /* For a parameter or result, we must make an equivalent VAR_DECL,
         not a new PARM_DECL.  */
      copy = build_decl (VAR_DECL, DECL_NAME (decl), type);
      TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (decl);
      TREE_READONLY (copy) = TREE_READONLY (decl);
      TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (decl);
      DECL_COMPLEX_GIMPLE_REG_P (copy) = DECL_COMPLEX_GIMPLE_REG_P (decl);
    }
  else
    {
      copy = copy_node (decl);
      /* The COPY is not abstract; it will be generated in TO_FN.  */
      DECL_ABSTRACT (copy) = 0;
      lang_hooks.dup_lang_specific_decl (copy);

      /* TREE_ADDRESSABLE isn't used to indicate that a label's
         address has been taken; it's for internal bookkeeping in
         expand_goto_internal.  */
      if (TREE_CODE (copy) == LABEL_DECL)
        {
          TREE_ADDRESSABLE (copy) = 0;
          LABEL_DECL_UID (copy) = -1;
        }
    }

  /* Don't generate debug information for the copy if we wouldn't have
     generated it for the original.  */
  DECL_ARTIFICIAL (copy) = DECL_ARTIFICIAL (decl);
  DECL_IGNORED_P (copy) = DECL_IGNORED_P (decl);

  /* Set the DECL_ABSTRACT_ORIGIN so the debugging routines know what
     declaration inspired this copy.  */
  DECL_ABSTRACT_ORIGIN (copy) = DECL_ORIGIN (decl);

  /* The new variable/label has no RTL, yet.  */
  if (CODE_CONTAINS_STRUCT (TREE_CODE (copy), TS_DECL_WRTL)
      && !TREE_STATIC (copy) && !DECL_EXTERNAL (copy))
    SET_DECL_RTL (copy, NULL_RTX);

  /* These args would always appear unused, if not for this.  */
  TREE_USED (copy) = 1;

  /* Set the context for the new declaration.  */
  if (!DECL_CONTEXT (decl))
    /* Globals stay global.  */
    ;
  else if (DECL_CONTEXT (decl) != from_fn)
    /* Things that weren't in the scope of the function we're inlining
       from aren't in the scope we're inlining to, either.  */
    ;
  else if (TREE_STATIC (decl))
    /* Function-scoped static variables should stay in the original
       function.  */
    ;
  else
    /* Ordinary automatic local variables are now in the scope of the
       new function.  */
    DECL_CONTEXT (copy) = to_fn;

  return copy;
}
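/* Illustrative note (not part of the original source): given
   "void from (void) { int i; static int n; }", duplicating "i" into
   TO_FN rewrites its DECL_CONTEXT to TO_FN, while the copy of "n"
   keeps FROM_FN as its context, and file-scope globals keep a NULL
   context.  */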
/* Return a copy of the function's argument tree.  */
static tree
copy_arguments_for_versioning (tree orig_parm, inline_data * id)
{
  tree *arg_copy, *parg;

  arg_copy = &orig_parm;
  for (parg = arg_copy; *parg; parg = &TREE_CHAIN (*parg))
    {
      tree new = remap_decl (*parg, id);
      lang_hooks.dup_lang_specific_decl (new);
      TREE_CHAIN (new) = TREE_CHAIN (*parg);
      *parg = new;
    }
  return orig_parm;
}
/* Return a copy of the function's static chain.  */
static tree
copy_static_chain (tree static_chain, inline_data * id)
{
  tree *chain_copy, *pvar;

  chain_copy = &static_chain;
  for (pvar = chain_copy; *pvar; pvar = &TREE_CHAIN (*pvar))
    {
      tree new = remap_decl (*pvar, id);
      lang_hooks.dup_lang_specific_decl (new);
      TREE_CHAIN (new) = TREE_CHAIN (*pvar);
      *pvar = new;
    }
  return static_chain;
}
/* Return true if the function is allowed to be versioned.
   This is a guard for the versioning functionality.  */

bool
tree_versionable_function_p (tree fndecl)
{
  if (fndecl == NULL_TREE)
    return false;
  /* ??? There are cases where a function is
     uninlinable but can be versioned.  */
  if (!tree_inlinable_function_p (fndecl))
    return false;
  return true;
}
/* Create a copy of a function's tree.
   OLD_DECL and NEW_DECL are FUNCTION_DECL tree nodes
   of the original function and the new copied function
   respectively.  In case we want to replace a DECL
   tree with another tree while duplicating the function's
   body, TREE_MAP represents the mapping between these
   trees.  */

void
tree_function_versioning (tree old_decl, tree new_decl, varray_type tree_map,
                          bool update_clones)
{
  struct cgraph_node *old_version_node;
  struct cgraph_node *new_version_node;
  inline_data id;
  tree p, new_fndecl;
  unsigned i;
  struct ipa_replace_map *replace_info;
  basic_block old_entry_block;
  tree t_step;

  gcc_assert (TREE_CODE (old_decl) == FUNCTION_DECL
              && TREE_CODE (new_decl) == FUNCTION_DECL);
  DECL_POSSIBLY_INLINED (old_decl) = 1;

  old_version_node = cgraph_node (old_decl);
  new_version_node = cgraph_node (new_decl);

  allocate_struct_function (new_decl);
  /* Cfun points to the newly allocated function struct at this point.  */
  cfun->function_end_locus = DECL_SOURCE_LOCATION (new_decl);

  DECL_ARTIFICIAL (new_decl) = 1;
  DECL_ABSTRACT_ORIGIN (new_decl) = DECL_ORIGIN (old_decl);

  /* Generate a new name for the new version.  */
  DECL_NAME (new_decl) = create_tmp_var_name (NULL);
  /* Create a new SYMBOL_REF rtx for the new name.  */
  if (DECL_RTL (old_decl) != NULL)
    {
      SET_DECL_RTL (new_decl, copy_rtx (DECL_RTL (old_decl)));
      XEXP (DECL_RTL (new_decl), 0) =
        gen_rtx_SYMBOL_REF (GET_MODE (XEXP (DECL_RTL (old_decl), 0)),
                            IDENTIFIER_POINTER (DECL_NAME (new_decl)));
    }

  /* Prepare the data structures for the tree copy.  */
  memset (&id, 0, sizeof (id));

  /* The new version.  */
  id.node = new_version_node;

  /* The old version.  */
  id.current_node = cgraph_node (old_decl);

  id.versioning_p = true;
  id.update_clones_p = update_clones;
  id.decl_map = splay_tree_new (splay_tree_compare_pointers, NULL, NULL);
  id.caller = new_decl;
  id.callee = old_decl;
  id.callee_cfun = DECL_STRUCT_FUNCTION (old_decl);

  current_function_decl = new_decl;
  old_entry_block = ENTRY_BLOCK_PTR_FOR_FUNCTION
    (DECL_STRUCT_FUNCTION (old_decl));
  initialize_cfun (new_decl, old_decl,
                   old_entry_block->count,
                   old_entry_block->frequency);
  push_cfun (DECL_STRUCT_FUNCTION (new_decl));

  /* Copy the function's static chain.  */
  p = DECL_STRUCT_FUNCTION (old_decl)->static_chain_decl;
  if (p)
    DECL_STRUCT_FUNCTION (new_decl)->static_chain_decl =
      copy_static_chain (DECL_STRUCT_FUNCTION (old_decl)->static_chain_decl,
                         &id);
  /* Copy the function's arguments.  */
  if (DECL_ARGUMENTS (old_decl) != NULL_TREE)
    DECL_ARGUMENTS (new_decl) =
      copy_arguments_for_versioning (DECL_ARGUMENTS (old_decl), &id);

  /* If there's a tree_map, prepare for substitution.  */
  if (tree_map)
    for (i = 0; i < VARRAY_ACTIVE_SIZE (tree_map); i++)
      {
        replace_info = VARRAY_GENERIC_PTR (tree_map, i);
        if (replace_info->replace_p && !replace_info->ref_p)
          insert_decl_map (&id, replace_info->old_tree,
                           replace_info->new_tree);
        else if (replace_info->replace_p && replace_info->ref_p)
          id.ipa_info = tree_map;
      }

  DECL_INITIAL (new_decl) = remap_blocks (DECL_INITIAL (id.callee), &id);

  /* Renumber the lexical scoping (non-code) blocks consecutively.  */
  number_blocks (id.caller);

  if (DECL_STRUCT_FUNCTION (old_decl)->unexpanded_var_list != NULL_TREE)
    /* Add local vars.  */
    for (t_step = DECL_STRUCT_FUNCTION (old_decl)->unexpanded_var_list;
         t_step; t_step = TREE_CHAIN (t_step))
      {
        tree var = TREE_VALUE (t_step);
        if (TREE_STATIC (var) && !TREE_ASM_WRITTEN (var))
          cfun->unexpanded_var_list = tree_cons (NULL_TREE, var,
                                                 cfun->unexpanded_var_list);
        else
          cfun->unexpanded_var_list =
            tree_cons (NULL_TREE, remap_decl (var, &id),
                       cfun->unexpanded_var_list);
      }

  /* Copy the Function's body.  */
  new_fndecl = copy_body (&id,
                          old_entry_block->count, old_entry_block->frequency,
                          ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR);

  DECL_SAVED_TREE (new_decl) = DECL_SAVED_TREE (new_fndecl);

  DECL_STRUCT_FUNCTION (new_decl)->cfg =
    DECL_STRUCT_FUNCTION (new_fndecl)->cfg;
  DECL_STRUCT_FUNCTION (new_decl)->eh = DECL_STRUCT_FUNCTION (new_fndecl)->eh;
  DECL_STRUCT_FUNCTION (new_decl)->ib_boundaries_block =
    DECL_STRUCT_FUNCTION (new_fndecl)->ib_boundaries_block;
  DECL_STRUCT_FUNCTION (new_decl)->last_label_uid =
    DECL_STRUCT_FUNCTION (new_fndecl)->last_label_uid;

  if (DECL_RESULT (old_decl) != NULL_TREE)
    {
      tree *res_decl = &DECL_RESULT (old_decl);
      DECL_RESULT (new_decl) = remap_decl (*res_decl, &id);
      lang_hooks.dup_lang_specific_decl (DECL_RESULT (new_decl));
    }

  /* Renumber the lexical scoping (non-code) blocks consecutively.  */
  number_blocks (new_decl);

  /* Clean up.  */
  splay_tree_delete (id.decl_map);
  fold_cond_expr_cond ();

  update_ssa (TODO_update_ssa);
#ifdef ENABLE_CHECKING
  verify_flow_info ();
#endif
  free_dominance_info (CDI_DOMINATORS);
  free_dominance_info (CDI_POST_DOMINATORS);
  pop_cfun ();
  current_function_decl = NULL;
  return;
}
/* Replace an INDIRECT_REF tree of a given DECL tree with a new
   given tree.
   ID->ipa_info keeps the old tree and the new tree.
   TP points to the INDIRECT_REF tree.  Return true if
   the trees were replaced.  */

static bool
replace_ref_tree (inline_data * id, tree * tp)
{
  bool replaced = false;
  tree new;

  if (id->ipa_info && VARRAY_ACTIVE_SIZE (id->ipa_info) > 0)
    {
      unsigned i;

      for (i = 0; i < VARRAY_ACTIVE_SIZE (id->ipa_info); i++)
        {
          struct ipa_replace_map *replace_info;
          replace_info = VARRAY_GENERIC_PTR (id->ipa_info, i);

          if (replace_info->replace_p && replace_info->ref_p)
            {
              tree old_tree = replace_info->old_tree;
              tree new_tree = replace_info->new_tree;

              if (TREE_CODE (*tp) == INDIRECT_REF
                  && TREE_OPERAND (*tp, 0) == old_tree)
                {
                  new = copy_node (new_tree);
                  *tp = new;
                  replaced = true;
                }
            }
        }
    }
  return replaced;
}
/* Return true if we are inlining.  */
static inline bool
inlining_p (inline_data * id)
{
  return (!id->cloning_p && !id->versioning_p);
}
/* Duplicate a type, fields and all.  */

tree
build_duplicate_type (tree type)
{
  inline_data id;

  memset (&id, 0, sizeof (id));
  id.callee = current_function_decl;
  id.caller = current_function_decl;
  id.callee_cfun = cfun;
  id.decl_map = splay_tree_new (splay_tree_compare_pointers, NULL, NULL);

  type = remap_type_1 (type, &id);

  splay_tree_delete (id.decl_map);

  return type;
}