/* Tree inlining.
   Copyright 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, 2011
   Free Software Foundation, Inc.
   Contributed by Alexandre Oliva <aoliva@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "diagnostic-core.h"
#include "tree.h"
#include "tree-inline.h"
#include "flags.h"
#include "params.h"
#include "input.h"
#include "insn-config.h"
#include "hashtab.h"
#include "langhooks.h"
#include "basic-block.h"
#include "tree-iterator.h"
#include "cgraph.h"
#include "intl.h"
#include "tree-mudflap.h"
#include "function.h"
#include "tree-flow.h"
#include "tree-pretty-print.h"
#include "except.h"
#include "debug.h"
#include "pointer-set.h"
#include "ipa-prop.h"
#include "value-prof.h"
#include "tree-pass.h"
#include "target.h"
#include "integrate.h"

#include "rtl.h"   /* FIXME: For asm_str_count.  */

/* I'm not real happy about this, but we need to handle gimple and
   non-gimple trees.  */
#include "gimple.h"
/* Inlining, Cloning, Versioning, Parallelization

   Inlining: a function body is duplicated, but the PARM_DECLs are
   remapped into VAR_DECLs, and non-void RETURN_EXPRs become
   MODIFY_EXPRs that store to a dedicated returned-value variable.
   The duplicated eh_region info of the copy will later be appended
   to the info for the caller; the eh_region info in copied throwing
   statements and RESX statements is adjusted accordingly.

   Cloning: (only in C++) We have one body for a con/de/structor, and
   multiple function decls, each with a unique parameter list.
   Duplicate the body, using the given splay tree; some parameters
   will become constants (like 0 or 1).

   Versioning: a function body is duplicated and placed in a new
   function, rather than into blocks of an existing function as with
   inlining.  Some parameters will become constants.

   Parallelization: a region of a function is duplicated resulting in
   a new function.  Variables may be replaced with complex expressions
   to enable shared variable semantics.

   All of these will simultaneously look up any callgraph edges.  If
   we're going to inline the duplicated function body, and the given
   function has some cloned callgraph nodes (one for each place this
   function will be inlined) those callgraph edges will be duplicated.
   If we're cloning the body, those callgraph edges will be
   updated to point into the new body.  (Note that the original
   callgraph node and edge list will not be altered.)

   See the CALL_EXPR handling case in copy_tree_body_r ().  */

/* To do:

   o In order to make inlining-on-trees work, we pessimized
     function-local static constants.  In particular, they are now
     always output, even when not addressed.  Fix this by treating
     function-local static constants just like global static
     constants; the back-end already knows not to output them if they
     are not needed.

   o Provide heuristics to clamp inlining of recursive template
     calls?  */
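
/* An illustrative sketch (not part of GCC) of the inlining transform
   described above, written as valid C under hypothetical names: the
   PARM_DECL `x' becomes a local variable initialized from the argument,
   and the RETURN_EXPR becomes an assignment to a dedicated
   returned-value variable.  */
#if 0
static int callee (int x) { return x + 1; }
int caller (void) { return callee (41); }

/* After inlining, caller behaves as if it were written:  */
int
caller_inlined (void)
{
  int x_1 = 41;          /* PARM_DECL remapped to a VAR_DECL.  */
  int retval_2;          /* Dedicated returned-value variable.  */
  retval_2 = x_1 + 1;    /* RETURN_EXPR became a MODIFY_EXPR.  */
  return retval_2;
}
#endif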
/* Weights that estimate_num_insns uses to estimate the size of the
   produced code.  */

eni_weights eni_size_weights;

/* Weights that estimate_num_insns uses to estimate the time necessary
   to execute the produced code.  */

eni_weights eni_time_weights;
/* Prototypes.  */

static tree declare_return_variable (copy_body_data *, tree, tree, basic_block);
static void remap_block (tree *, copy_body_data *);
static void copy_bind_expr (tree *, int *, copy_body_data *);
static tree mark_local_for_remap_r (tree *, int *, void *);
static void unsave_expr_1 (tree);
static tree unsave_r (tree *, int *, void *);
static void declare_inline_vars (tree, tree);
static void remap_save_expr (tree *, void *, int *);
static void prepend_lexical_block (tree current_block, tree new_block);
static tree copy_decl_to_var (tree, copy_body_data *);
static tree copy_result_decl_to_var (tree, copy_body_data *);
static tree copy_decl_maybe_to_var (tree, copy_body_data *);
static gimple remap_gimple_stmt (gimple, copy_body_data *);
static bool delete_unreachable_blocks_update_callgraph (copy_body_data *id);
/* Insert a tree->tree mapping for ID.  Although the name suggests
   that the trees should be variables, it is used for more than that.  */

void
insert_decl_map (copy_body_data *id, tree key, tree value)
{
  *pointer_map_insert (id->decl_map, key) = value;

  /* Always insert an identity map as well.  If we see this same new
     node again, we won't want to duplicate it a second time.  */
  if (key != value)
    *pointer_map_insert (id->decl_map, value) = value;
}
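
/* A minimal usage sketch (hypothetical, not part of this file): how a
   remapper typically records a mapping and later retrieves it.  The
   pointer-map calls below are the real API used throughout this file;
   `some_copy_of' is a placeholder standing in for id->copy_decl.  */
#if 0
static tree
example_remap (copy_body_data *id, tree old_decl)
{
  tree *slot = (tree *) pointer_map_contains (id->decl_map, old_decl);
  if (slot)
    return *slot;                /* Seen before: reuse the copy.  */
  insert_decl_map (id, old_decl, some_copy_of (old_decl));
  return *(tree *) pointer_map_contains (id->decl_map, old_decl);
}
#endif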
/* Insert a tree->tree mapping for ID.  This is only used for
   variables.  */

static void
insert_debug_decl_map (copy_body_data *id, tree key, tree value)
{
  if (!gimple_in_ssa_p (id->src_cfun))
    return;

  if (!MAY_HAVE_DEBUG_STMTS)
    return;

  if (!target_for_debug_bind (key))
    return;

  gcc_assert (TREE_CODE (key) == PARM_DECL);
  gcc_assert (TREE_CODE (value) == VAR_DECL);

  if (!id->debug_map)
    id->debug_map = pointer_map_create ();

  *pointer_map_insert (id->debug_map, key) = value;
}
/* If nonzero, we're remapping the contents of inlined debug
   statements.  If negative, an error has occurred, such as a
   reference to a variable that isn't available in the inlined
   context.  */
static int processing_debug_stmt = 0;
/* Construct a new SSA name for old NAME.  ID is the inline context.  */

static tree
remap_ssa_name (tree name, copy_body_data *id)
{
  tree new_tree;
  tree *n;

  gcc_assert (TREE_CODE (name) == SSA_NAME);

  n = (tree *) pointer_map_contains (id->decl_map, name);
  if (n)
    return unshare_expr (*n);

  if (processing_debug_stmt)
    {
      if (TREE_CODE (SSA_NAME_VAR (name)) == PARM_DECL
          && SSA_NAME_IS_DEFAULT_DEF (name)
          && id->entry_bb == NULL
          && single_succ_p (ENTRY_BLOCK_PTR))
        {
          tree vexpr = make_node (DEBUG_EXPR_DECL);
          gimple def_temp;
          gimple_stmt_iterator gsi;
          tree val = SSA_NAME_VAR (name);

          n = (tree *) pointer_map_contains (id->decl_map, val);
          if (n != NULL)
            val = *n;
          if (TREE_CODE (val) != PARM_DECL)
            {
              processing_debug_stmt = -1;
              return name;
            }
          def_temp = gimple_build_debug_source_bind (vexpr, val, NULL);
          DECL_ARTIFICIAL (vexpr) = 1;
          TREE_TYPE (vexpr) = TREE_TYPE (name);
          DECL_MODE (vexpr) = DECL_MODE (SSA_NAME_VAR (name));
          gsi = gsi_after_labels (single_succ (ENTRY_BLOCK_PTR));
          gsi_insert_before (&gsi, def_temp, GSI_SAME_STMT);
          return vexpr;
        }

      processing_debug_stmt = -1;
      return name;
    }

  /* Do not set DEF_STMT yet as the statement is not copied yet.  We do that
     in copy_bb.  */
  new_tree = remap_decl (SSA_NAME_VAR (name), id);

  /* We might've substituted a constant or another SSA_NAME for
     the variable.

     Replace the SSA name representing RESULT_DECL by a variable during
     inlining:  this saves us from needing to introduce a PHI node when
     the return value is only partly initialized.  */
  if ((TREE_CODE (new_tree) == VAR_DECL || TREE_CODE (new_tree) == PARM_DECL)
      && (TREE_CODE (SSA_NAME_VAR (name)) != RESULT_DECL
          || !id->transform_return_to_modify))
    {
      struct ptr_info_def *pi;
      new_tree = make_ssa_name (new_tree, NULL);
      insert_decl_map (id, name, new_tree);
      SSA_NAME_OCCURS_IN_ABNORMAL_PHI (new_tree)
        = SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name);
      TREE_TYPE (new_tree) = TREE_TYPE (SSA_NAME_VAR (new_tree));
      /* At least IPA points-to info can be directly transferred.  */
      if (id->src_cfun->gimple_df
          && id->src_cfun->gimple_df->ipa_pta
          && (pi = SSA_NAME_PTR_INFO (name))
          && !pi->pt.anything)
        {
          struct ptr_info_def *new_pi = get_ptr_info (new_tree);
          new_pi->pt = pi->pt;
        }
      if (gimple_nop_p (SSA_NAME_DEF_STMT (name)))
        {
          /* Inlining a function that has an uninitialized variable might
             extend the variable's lifetime (the variable might get
             reused).  This causes an ICE when we end up extending the
             lifetime of an SSA name across an abnormal edge, and it also
             increases register pressure.

             We simply initialize all uninitialized vars by 0, except
             when we are inlining to the very first BB.  We could avoid
             this for all BBs that are not inside strongly connected
             regions of the CFG, but this is expensive to test.  */
          if (id->entry_bb
              && is_gimple_reg (SSA_NAME_VAR (name))
              && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name)
              && TREE_CODE (SSA_NAME_VAR (name)) != PARM_DECL
              && (id->entry_bb != EDGE_SUCC (ENTRY_BLOCK_PTR, 0)->dest
                  || EDGE_COUNT (id->entry_bb->preds) != 1))
            {
              gimple_stmt_iterator gsi = gsi_last_bb (id->entry_bb);
              gimple init_stmt;
              tree zero = build_zero_cst (TREE_TYPE (new_tree));

              init_stmt = gimple_build_assign (new_tree, zero);
              gsi_insert_after (&gsi, init_stmt, GSI_NEW_STMT);
              SSA_NAME_IS_DEFAULT_DEF (new_tree) = 0;
            }
          else
            {
              SSA_NAME_DEF_STMT (new_tree) = gimple_build_nop ();
              if (gimple_default_def (id->src_cfun, SSA_NAME_VAR (name))
                  == name)
                set_default_def (SSA_NAME_VAR (new_tree), new_tree);
            }
        }
    }
  else
    insert_decl_map (id, name, new_tree);
  return new_tree;
}
/* Remap DECL during the copying of the BLOCK tree for the function.  */

tree
remap_decl (tree decl, copy_body_data *id)
{
  tree *n;

  /* We only remap local variables in the current function.  */

  /* See if we have remapped this declaration.  */

  n = (tree *) pointer_map_contains (id->decl_map, decl);

  if (!n && processing_debug_stmt)
    {
      processing_debug_stmt = -1;
      return decl;
    }

  /* If we didn't already have an equivalent for this declaration,
     create one now.  */
  if (!n)
    {
      /* Make a copy of the variable or label.  */
      tree t = id->copy_decl (decl, id);

      /* Remember it, so that if we encounter this local entity again
         we can reuse this copy.  Do this early because remap_type may
         need this decl for TYPE_STUB_DECL.  */
      insert_decl_map (id, decl, t);

      if (!DECL_P (t))
        return t;

      /* Remap types, if necessary.  */
      TREE_TYPE (t) = remap_type (TREE_TYPE (t), id);
      if (TREE_CODE (t) == TYPE_DECL)
        DECL_ORIGINAL_TYPE (t) = remap_type (DECL_ORIGINAL_TYPE (t), id);

      /* Remap sizes as necessary.  */
      walk_tree (&DECL_SIZE (t), copy_tree_body_r, id, NULL);
      walk_tree (&DECL_SIZE_UNIT (t), copy_tree_body_r, id, NULL);

      /* If fields, do likewise for offset and qualifier.  */
      if (TREE_CODE (t) == FIELD_DECL)
        {
          walk_tree (&DECL_FIELD_OFFSET (t), copy_tree_body_r, id, NULL);
          if (TREE_CODE (DECL_CONTEXT (t)) == QUAL_UNION_TYPE)
            walk_tree (&DECL_QUALIFIER (t), copy_tree_body_r, id, NULL);
        }

      if ((TREE_CODE (t) == VAR_DECL
           || TREE_CODE (t) == RESULT_DECL
           || TREE_CODE (t) == PARM_DECL)
          && id->src_fn && DECL_STRUCT_FUNCTION (id->src_fn)
          && gimple_referenced_vars (DECL_STRUCT_FUNCTION (id->src_fn))
          /* We don't want to mark as referenced VAR_DECLs that were
             not marked as such in the src function.  */
          && (TREE_CODE (decl) != VAR_DECL
              || referenced_var_lookup (DECL_STRUCT_FUNCTION (id->src_fn),
                                        DECL_UID (decl))))
        add_referenced_var (t);
      return t;
    }

  if (id->do_not_unshare)
    return *n;
  else
    return unshare_expr (*n);
}
static tree
remap_type_1 (tree type, copy_body_data *id)
{
  tree new_tree, t;

  /* We do need a copy.  Build and register it now.  If this is a pointer or
     reference type, remap the designated type and make a new pointer or
     reference type.  */
  if (TREE_CODE (type) == POINTER_TYPE)
    {
      new_tree = build_pointer_type_for_mode (remap_type (TREE_TYPE (type), id),
                                              TYPE_MODE (type),
                                              TYPE_REF_CAN_ALIAS_ALL (type));
      if (TYPE_ATTRIBUTES (type) || TYPE_QUALS (type))
        new_tree = build_type_attribute_qual_variant (new_tree,
                                                      TYPE_ATTRIBUTES (type),
                                                      TYPE_QUALS (type));
      insert_decl_map (id, type, new_tree);
      return new_tree;
    }
  else if (TREE_CODE (type) == REFERENCE_TYPE)
    {
      new_tree = build_reference_type_for_mode (remap_type (TREE_TYPE (type), id),
                                                TYPE_MODE (type),
                                                TYPE_REF_CAN_ALIAS_ALL (type));
      if (TYPE_ATTRIBUTES (type) || TYPE_QUALS (type))
        new_tree = build_type_attribute_qual_variant (new_tree,
                                                      TYPE_ATTRIBUTES (type),
                                                      TYPE_QUALS (type));
      insert_decl_map (id, type, new_tree);
      return new_tree;
    }
  else
    new_tree = copy_node (type);

  insert_decl_map (id, type, new_tree);

  /* This is a new type, not a copy of an old type.  Need to reassociate
     variants.  We can handle everything except the main variant lazily.  */
  t = TYPE_MAIN_VARIANT (type);
  if (type != t)
    {
      t = remap_type (t, id);
      TYPE_MAIN_VARIANT (new_tree) = t;
      TYPE_NEXT_VARIANT (new_tree) = TYPE_NEXT_VARIANT (t);
      TYPE_NEXT_VARIANT (t) = new_tree;
    }
  else
    {
      TYPE_MAIN_VARIANT (new_tree) = new_tree;
      TYPE_NEXT_VARIANT (new_tree) = NULL;
    }

  if (TYPE_STUB_DECL (type))
    TYPE_STUB_DECL (new_tree) = remap_decl (TYPE_STUB_DECL (type), id);

  /* Lazily create pointer and reference types.  */
  TYPE_POINTER_TO (new_tree) = NULL;
  TYPE_REFERENCE_TO (new_tree) = NULL;

  switch (TREE_CODE (new_tree))
    {
    case INTEGER_TYPE:
    case REAL_TYPE:
    case FIXED_POINT_TYPE:
    case ENUMERAL_TYPE:
    case BOOLEAN_TYPE:
      t = TYPE_MIN_VALUE (new_tree);
      if (t && TREE_CODE (t) != INTEGER_CST)
        walk_tree (&TYPE_MIN_VALUE (new_tree), copy_tree_body_r, id, NULL);

      t = TYPE_MAX_VALUE (new_tree);
      if (t && TREE_CODE (t) != INTEGER_CST)
        walk_tree (&TYPE_MAX_VALUE (new_tree), copy_tree_body_r, id, NULL);
      return new_tree;

    case FUNCTION_TYPE:
      TREE_TYPE (new_tree) = remap_type (TREE_TYPE (new_tree), id);
      walk_tree (&TYPE_ARG_TYPES (new_tree), copy_tree_body_r, id, NULL);
      return new_tree;

    case ARRAY_TYPE:
      TREE_TYPE (new_tree) = remap_type (TREE_TYPE (new_tree), id);
      TYPE_DOMAIN (new_tree) = remap_type (TYPE_DOMAIN (new_tree), id);
      break;

    case RECORD_TYPE:
    case UNION_TYPE:
    case QUAL_UNION_TYPE:
      {
        tree f, nf = NULL;

        for (f = TYPE_FIELDS (new_tree); f; f = DECL_CHAIN (f))
          {
            t = remap_decl (f, id);
            DECL_CONTEXT (t) = new_tree;
            DECL_CHAIN (t) = nf;
            nf = t;
          }
        TYPE_FIELDS (new_tree) = nreverse (nf);
      }
      break;

    case OFFSET_TYPE:
    default:
      /* Shouldn't have been thought variable sized.  */
      gcc_unreachable ();
    }

  walk_tree (&TYPE_SIZE (new_tree), copy_tree_body_r, id, NULL);
  walk_tree (&TYPE_SIZE_UNIT (new_tree), copy_tree_body_r, id, NULL);

  return new_tree;
}
tree
remap_type (tree type, copy_body_data *id)
{
  tree *node;
  tree tmp;

  if (type == NULL)
    return type;

  /* See if we have remapped this type.  */
  node = (tree *) pointer_map_contains (id->decl_map, type);
  if (node)
    return *node;

  /* The type only needs remapping if it's variably modified.  */
  if (! variably_modified_type_p (type, id->src_fn))
    {
      insert_decl_map (id, type, type);
      return type;
    }

  id->remapping_type_depth++;
  tmp = remap_type_1 (type, id);
  id->remapping_type_depth--;

  return tmp;
}
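
/* For intuition (an illustrative example, not part of GCC): a type is
   "variably modified" when its size depends on a runtime value of the
   source function, so copying the body must also copy the type.  */
#if 0
void
source_fn (int n)
{
  int vla[n];        /* int[n] is variably modified; remap_type must copy
                        it so the copy's size tree refers to the remapped
                        `n'.  */
  int fixed[16];     /* int[16] is not; remap_type inserts an identity
                        mapping and returns the type unchanged.  */
}
#endif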
/* Return previously remapped type of TYPE in ID.  Return NULL if TYPE
   is NULL or TYPE has not been remapped before.  */

static tree
remapped_type (tree type, copy_body_data *id)
{
  tree *node;

  if (type == NULL)
    return type;

  /* See if we have remapped this type.  */
  node = (tree *) pointer_map_contains (id->decl_map, type);
  if (node)
    return *node;
  else
    return NULL;
}
/* Decide if DECL can be put into BLOCK_NONLOCAL_VARs.  */

static bool
can_be_nonlocal (tree decl, copy_body_data *id)
{
  /* We cannot duplicate function decls.  */
  if (TREE_CODE (decl) == FUNCTION_DECL)
    return true;

  /* Local static vars must be non-local or we get multiple declaration
     problems.  */
  if (TREE_CODE (decl) == VAR_DECL
      && !auto_var_in_fn_p (decl, id->src_fn))
    return true;

  /* At the moment dwarf2out can handle only these types of nodes.  We
     can support more later.  */
  if (TREE_CODE (decl) != VAR_DECL && TREE_CODE (decl) != PARM_DECL)
    return false;

  /* We must use the global type.  We call remapped_type instead of
     remap_type since we don't want to remap this type here if it
     hasn't been remapped before.  */
  if (TREE_TYPE (decl) != remapped_type (TREE_TYPE (decl), id))
    return false;

  /* Without SSA we can't tell whether a variable is used.  */
  if (!gimple_in_ssa_p (cfun))
    return false;

  /* Live variables must be copied so we can attach DECL_RTL.  */
  if (var_ann (decl))
    return false;

  return true;
}
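
/* An illustrative example (not part of GCC): why local statics must stay
   non-local.  Every inlined copy of f below must keep referring to the
   single `counter' object; duplicating the VAR_DECL per copy would
   create multiple declarations of what the program sees as one object.  */
#if 0
static inline int
f (void)
{
  static int counter;   /* !auto_var_in_fn_p => can_be_nonlocal.  */
  return ++counter;
}
#endif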
static tree
remap_decls (tree decls, VEC(tree,gc) **nonlocalized_list, copy_body_data *id)
{
  tree old_var;
  tree new_decls = NULL_TREE;

  /* Remap its variables.  */
  for (old_var = decls; old_var; old_var = DECL_CHAIN (old_var))
    {
      tree new_var;

      if (can_be_nonlocal (old_var, id))
        {
          if (TREE_CODE (old_var) == VAR_DECL
              && ! DECL_EXTERNAL (old_var)
              && (var_ann (old_var) || !gimple_in_ssa_p (cfun)))
            add_local_decl (cfun, old_var);
          if ((!optimize || debug_info_level > DINFO_LEVEL_TERSE)
              && !DECL_IGNORED_P (old_var)
              && nonlocalized_list)
            VEC_safe_push (tree, gc, *nonlocalized_list, old_var);
          continue;
        }

      /* Remap the variable.  */
      new_var = remap_decl (old_var, id);

      /* If we didn't remap this variable, we can't mess with its
         TREE_CHAIN.  If we remapped this variable to the return slot, it's
         already declared somewhere else, so don't declare it here.  */

      if (new_var == id->retvar)
        ;
      else if (!new_var)
        {
          if ((!optimize || debug_info_level > DINFO_LEVEL_TERSE)
              && !DECL_IGNORED_P (old_var)
              && nonlocalized_list)
            VEC_safe_push (tree, gc, *nonlocalized_list, old_var);
        }
      else
        {
          gcc_assert (DECL_P (new_var));
          DECL_CHAIN (new_var) = new_decls;
          new_decls = new_var;

          /* Also copy value-expressions.  */
          if (TREE_CODE (new_var) == VAR_DECL
              && DECL_HAS_VALUE_EXPR_P (new_var))
            {
              tree tem = DECL_VALUE_EXPR (new_var);
              bool old_regimplify = id->regimplify;
              id->remapping_type_depth++;
              walk_tree (&tem, copy_tree_body_r, id, NULL);
              id->remapping_type_depth--;
              id->regimplify = old_regimplify;
              SET_DECL_VALUE_EXPR (new_var, tem);
            }
        }
    }

  return nreverse (new_decls);
}
/* Copy the BLOCK to contain remapped versions of the variables
   therein.  And hook the new block into the block-tree.  */

static void
remap_block (tree *block, copy_body_data *id)
{
  tree old_block;
  tree new_block;

  /* Make the new block.  */
  old_block = *block;
  new_block = make_node (BLOCK);
  TREE_USED (new_block) = TREE_USED (old_block);
  BLOCK_ABSTRACT_ORIGIN (new_block) = old_block;
  BLOCK_SOURCE_LOCATION (new_block) = BLOCK_SOURCE_LOCATION (old_block);
  BLOCK_NONLOCALIZED_VARS (new_block)
    = VEC_copy (tree, gc, BLOCK_NONLOCALIZED_VARS (old_block));
  *block = new_block;

  /* Remap its variables.  */
  BLOCK_VARS (new_block) = remap_decls (BLOCK_VARS (old_block),
                                        &BLOCK_NONLOCALIZED_VARS (new_block),
                                        id);

  if (id->transform_lang_insert_block)
    id->transform_lang_insert_block (new_block);

  /* Remember the remapped block.  */
  insert_decl_map (id, old_block, new_block);
}
/* Copy the whole block tree and root it in id->block.  */

static tree
remap_blocks (tree block, copy_body_data *id)
{
  tree t;
  tree new_tree = block;

  if (!block)
    return NULL;

  remap_block (&new_tree, id);
  gcc_assert (new_tree != block);
  for (t = BLOCK_SUBBLOCKS (block); t; t = BLOCK_CHAIN (t))
    prepend_lexical_block (new_tree, remap_blocks (t, id));
  /* Blocks are in arbitrary order, but make things slightly prettier and do
     not swap order when producing a copy.  */
  BLOCK_SUBBLOCKS (new_tree) = blocks_nreverse (BLOCK_SUBBLOCKS (new_tree));
  return new_tree;
}
static void
copy_statement_list (tree *tp)
{
  tree_stmt_iterator oi, ni;
  tree new_tree;

  new_tree = alloc_stmt_list ();
  ni = tsi_start (new_tree);
  oi = tsi_start (*tp);
  TREE_TYPE (new_tree) = TREE_TYPE (*tp);
  *tp = new_tree;

  for (; !tsi_end_p (oi); tsi_next (&oi))
    {
      tree stmt = tsi_stmt (oi);
      if (TREE_CODE (stmt) == STATEMENT_LIST)
        /* This copy is not redundant; tsi_link_after will smash this
           STATEMENT_LIST into the end of the one we're building, and we
           don't want to do that with the original.  */
        copy_statement_list (&stmt);
      tsi_link_after (&ni, stmt, TSI_CONTINUE_LINKING);
    }
}
static void
copy_bind_expr (tree *tp, int *walk_subtrees, copy_body_data *id)
{
  tree block = BIND_EXPR_BLOCK (*tp);
  /* Copy (and replace) the statement.  */
  copy_tree_r (tp, walk_subtrees, NULL);
  if (block)
    {
      remap_block (&block, id);
      BIND_EXPR_BLOCK (*tp) = block;
    }

  if (BIND_EXPR_VARS (*tp))
    /* This will remap a lot of the same decls again, but this should be
       harmless.  */
    BIND_EXPR_VARS (*tp) = remap_decls (BIND_EXPR_VARS (*tp), NULL, id);
}
/* Create a new gimple_seq by remapping all the statements in BODY
   using the inlining information in ID.  */

static gimple_seq
remap_gimple_seq (gimple_seq body, copy_body_data *id)
{
  gimple_stmt_iterator si;
  gimple_seq new_body = NULL;

  for (si = gsi_start (body); !gsi_end_p (si); gsi_next (&si))
    {
      gimple new_stmt = remap_gimple_stmt (gsi_stmt (si), id);
      gimple_seq_add_stmt (&new_body, new_stmt);
    }

  return new_body;
}
/* Copy a GIMPLE_BIND statement STMT, remapping all the symbols in its
   block using the mapping information in ID.  */

static gimple
copy_gimple_bind (gimple stmt, copy_body_data *id)
{
  gimple new_bind;
  tree new_block, new_vars;
  gimple_seq body, new_body;

  /* Copy the statement.  Note that we purposely don't use copy_stmt
     here because we need to remap statements as we copy.  */
  body = gimple_bind_body (stmt);
  new_body = remap_gimple_seq (body, id);

  new_block = gimple_bind_block (stmt);
  if (new_block)
    remap_block (&new_block, id);

  /* This will remap a lot of the same decls again, but this should be
     harmless.  */
  new_vars = gimple_bind_vars (stmt);
  if (new_vars)
    new_vars = remap_decls (new_vars, NULL, id);

  new_bind = gimple_build_bind (new_vars, new_body, new_block);

  return new_bind;
}
/* Remap the GIMPLE operand pointed to by *TP.  DATA is really a
   'struct walk_stmt_info *'.  DATA->INFO is a 'copy_body_data *'.
   WALK_SUBTREES is used to tell walk_gimple_op whether to keep
   recursing into the children nodes of *TP.  */

static tree
remap_gimple_op_r (tree *tp, int *walk_subtrees, void *data)
{
  struct walk_stmt_info *wi_p = (struct walk_stmt_info *) data;
  copy_body_data *id = (copy_body_data *) wi_p->info;
  tree fn = id->src_fn;

  if (TREE_CODE (*tp) == SSA_NAME)
    {
      *tp = remap_ssa_name (*tp, id);
      *walk_subtrees = 0;
      return NULL;
    }
  else if (auto_var_in_fn_p (*tp, fn))
    {
      /* Local variables and labels need to be replaced by equivalent
         variables.  We don't want to copy static variables; there's
         only one of those, no matter how many times we inline the
         containing function.  Similarly for globals from an outer
         function.  */
      tree new_decl;

      /* Remap the declaration.  */
      new_decl = remap_decl (*tp, id);
      gcc_assert (new_decl);
      /* Replace this variable with the copy.  */
      STRIP_TYPE_NOPS (new_decl);
      /* ???  The C++ frontend uses void * pointer zero to initialize
         any other type.  This confuses the middle-end type verification.
         As cloned bodies do not go through gimplification again the fixup
         there doesn't trigger.  */
      if (TREE_CODE (new_decl) == INTEGER_CST
          && !useless_type_conversion_p (TREE_TYPE (*tp), TREE_TYPE (new_decl)))
        new_decl = fold_convert (TREE_TYPE (*tp), new_decl);
      *tp = new_decl;
      *walk_subtrees = 0;
    }
  else if (TREE_CODE (*tp) == STATEMENT_LIST)
    gcc_unreachable ();
  else if (TREE_CODE (*tp) == SAVE_EXPR)
    gcc_unreachable ();
  else if (TREE_CODE (*tp) == LABEL_DECL
           && (!DECL_CONTEXT (*tp)
               || decl_function_context (*tp) == id->src_fn))
    /* These may need to be remapped for EH handling.  */
    *tp = remap_decl (*tp, id);
  else if (TYPE_P (*tp))
    /* Types may need remapping as well.  */
    *tp = remap_type (*tp, id);
  else if (CONSTANT_CLASS_P (*tp))
    {
      /* If this is a constant, we have to copy the node iff the type
         will be remapped.  copy_tree_r will not copy a constant.  */
      tree new_type = remap_type (TREE_TYPE (*tp), id);

      if (new_type == TREE_TYPE (*tp))
        *walk_subtrees = 0;

      else if (TREE_CODE (*tp) == INTEGER_CST)
        *tp = build_int_cst_wide (new_type, TREE_INT_CST_LOW (*tp),
                                  TREE_INT_CST_HIGH (*tp));
      else
        {
          *tp = copy_node (*tp);
          TREE_TYPE (*tp) = new_type;
        }
    }
  else
    {
      /* Otherwise, just copy the node.  Note that copy_tree_r already
         knows not to copy VAR_DECLs, etc., so this is safe.  */

      /* We should never have TREE_BLOCK set on non-statements.  */
      if (EXPR_P (*tp))
        gcc_assert (!TREE_BLOCK (*tp));

      if (TREE_CODE (*tp) == MEM_REF)
        {
          tree ptr = TREE_OPERAND (*tp, 0);
          tree type = remap_type (TREE_TYPE (*tp), id);
          tree old = *tp;
          tree tem;

          /* We need to re-canonicalize MEM_REFs from inline substitutions
             that can happen when a pointer argument is an ADDR_EXPR.
             Recurse here manually to allow that.  */
          walk_tree (&ptr, remap_gimple_op_r, data, NULL);
          if ((tem = maybe_fold_offset_to_reference (EXPR_LOCATION (*tp),
                                                     ptr,
                                                     TREE_OPERAND (*tp, 1),
                                                     type))
              && TREE_THIS_VOLATILE (tem) == TREE_THIS_VOLATILE (old))
            {
              tree *tem_basep = &tem;
              while (handled_component_p (*tem_basep))
                tem_basep = &TREE_OPERAND (*tem_basep, 0);
              if (TREE_CODE (*tem_basep) == MEM_REF)
                *tem_basep
                  = build2 (MEM_REF, TREE_TYPE (*tem_basep),
                            TREE_OPERAND (*tem_basep, 0),
                            fold_convert (TREE_TYPE (TREE_OPERAND (*tp, 1)),
                                          TREE_OPERAND (*tem_basep, 1)));
              else
                *tem_basep
                  = build2 (MEM_REF, TREE_TYPE (*tem_basep),
                            build_fold_addr_expr (*tem_basep),
                            build_int_cst
                              (TREE_TYPE (TREE_OPERAND (*tp, 1)), 0));
              *tp = tem;
            }
          else
            {
              *tp = fold_build2 (MEM_REF, type,
                                 ptr, TREE_OPERAND (*tp, 1));
              TREE_THIS_VOLATILE (*tp) = TREE_THIS_VOLATILE (old);
              TREE_THIS_NOTRAP (*tp) = TREE_THIS_NOTRAP (old);
            }
          TREE_NO_WARNING (*tp) = TREE_NO_WARNING (old);
          *walk_subtrees = 0;
          return NULL;
        }

      /* Here is the "usual case".  Copy this tree node, and then
         tweak some special cases.  */
      copy_tree_r (tp, walk_subtrees, NULL);

      if (TREE_CODE (*tp) != OMP_CLAUSE)
        TREE_TYPE (*tp) = remap_type (TREE_TYPE (*tp), id);

      /* Global variables we haven't seen yet need to go into referenced
         vars, unless they are referenced only from types.  */
      if (gimple_in_ssa_p (cfun)
          && TREE_CODE (*tp) == VAR_DECL
          && id->remapping_type_depth == 0
          && !processing_debug_stmt)
        add_referenced_var (*tp);

      if (TREE_CODE (*tp) == TARGET_EXPR && TREE_OPERAND (*tp, 3))
        {
          /* The copied TARGET_EXPR has never been expanded, even if the
             original node was expanded already.  */
          TREE_OPERAND (*tp, 1) = TREE_OPERAND (*tp, 3);
          TREE_OPERAND (*tp, 3) = NULL_TREE;
        }
      else if (TREE_CODE (*tp) == ADDR_EXPR)
        {
          /* Variable substitution need not be simple.  In particular,
             the MEM_REF substitution above.  Make sure that
             TREE_CONSTANT and friends are up-to-date.  But make sure
             to not improperly set TREE_BLOCK on some sub-expressions.  */
          int invariant = is_gimple_min_invariant (*tp);
          tree block = id->block;
          id->block = NULL_TREE;
          walk_tree (&TREE_OPERAND (*tp, 0), remap_gimple_op_r, data, NULL);
          id->block = block;
          recompute_tree_invariant_for_addr_expr (*tp);

          /* If this used to be invariant, but is not any longer,
             then regimplification is probably needed.  */
          if (invariant && !is_gimple_min_invariant (*tp))
            id->regimplify = true;

          *walk_subtrees = 0;
        }
    }

  /* Keep iterating.  */
  return NULL;
}
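
/* A source-level illustration (hypothetical, not part of GCC) of the
   MEM_REF re-canonicalization above: when an ADDR_EXPR argument is
   substituted for a pointer parameter, a dereference of the parameter
   becomes *&x, which folds back to a direct access.  */
#if 0
static inline void set1 (int *p) { *p = 1; }

void
caller (void)
{
  int x;
  set1 (&x);   /* After inlining: *(&x) = 1, re-canonicalized to x = 1.  */
}
#endif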
/* Called from copy_body_id via walk_tree.  DATA is really a
   `copy_body_data *'.  */

tree
copy_tree_body_r (tree *tp, int *walk_subtrees, void *data)
{
  copy_body_data *id = (copy_body_data *) data;
  tree fn = id->src_fn;
  tree new_block;

  /* Begin by recognizing trees that we'll completely rewrite for the
     inlining context.  Our output for these trees is completely
     different from our input (e.g. RETURN_EXPR is deleted, and morphs
     into an edge).  Further down, we'll handle trees that get
     duplicated and/or tweaked.  */

  /* When requested, RETURN_EXPRs should be transformed to just the
     contained MODIFY_EXPR.  The branch semantics of the return will
     be handled elsewhere by manipulating the CFG rather than a statement.  */
  if (TREE_CODE (*tp) == RETURN_EXPR && id->transform_return_to_modify)
    {
      tree assignment = TREE_OPERAND (*tp, 0);

      /* If we're returning something, just turn that into an
         assignment into the equivalent of the original RESULT_DECL.
         If the "assignment" is just the result decl, the result
         decl has already been set (e.g. a recent "foo (&result_decl,
         ...)"); just toss the entire RETURN_EXPR.  */
      if (assignment && TREE_CODE (assignment) == MODIFY_EXPR)
        {
          /* Replace the RETURN_EXPR with (a copy of) the
             MODIFY_EXPR hanging underneath.  */
          *tp = copy_node (assignment);
        }
      else /* Else the RETURN_EXPR returns no value.  */
        {
          *tp = NULL;
          return (tree) (void *)1;
        }
    }
  else if (TREE_CODE (*tp) == SSA_NAME)
    {
      *tp = remap_ssa_name (*tp, id);
      *walk_subtrees = 0;
      return NULL;
    }

  /* Local variables and labels need to be replaced by equivalent
     variables.  We don't want to copy static variables; there's only
     one of those, no matter how many times we inline the containing
     function.  Similarly for globals from an outer function.  */
  else if (auto_var_in_fn_p (*tp, fn))
    {
      tree new_decl;

      /* Remap the declaration.  */
      new_decl = remap_decl (*tp, id);
      gcc_assert (new_decl);
      /* Replace this variable with the copy.  */
      STRIP_TYPE_NOPS (new_decl);
      *tp = new_decl;
      *walk_subtrees = 0;
    }
  else if (TREE_CODE (*tp) == STATEMENT_LIST)
    copy_statement_list (tp);
  else if (TREE_CODE (*tp) == SAVE_EXPR
           || TREE_CODE (*tp) == TARGET_EXPR)
    remap_save_expr (tp, id->decl_map, walk_subtrees);
  else if (TREE_CODE (*tp) == LABEL_DECL
           && (! DECL_CONTEXT (*tp)
               || decl_function_context (*tp) == id->src_fn))
    /* These may need to be remapped for EH handling.  */
    *tp = remap_decl (*tp, id);
  else if (TREE_CODE (*tp) == BIND_EXPR)
    copy_bind_expr (tp, walk_subtrees, id);
  /* Types may need remapping as well.  */
  else if (TYPE_P (*tp))
    *tp = remap_type (*tp, id);

  /* If this is a constant, we have to copy the node iff the type will be
     remapped.  copy_tree_r will not copy a constant.  */
  else if (CONSTANT_CLASS_P (*tp))
    {
      tree new_type = remap_type (TREE_TYPE (*tp), id);

      if (new_type == TREE_TYPE (*tp))
        *walk_subtrees = 0;

      else if (TREE_CODE (*tp) == INTEGER_CST)
        *tp = build_int_cst_wide (new_type, TREE_INT_CST_LOW (*tp),
                                  TREE_INT_CST_HIGH (*tp));
      else
        {
          *tp = copy_node (*tp);
          TREE_TYPE (*tp) = new_type;
        }
    }

  /* Otherwise, just copy the node.  Note that copy_tree_r already
     knows not to copy VAR_DECLs, etc., so this is safe.  */
  else
    {
      /* Here we handle trees that are not completely rewritten.
         First we detect some inlining-induced bogosities for
         discarding.  */
      if (TREE_CODE (*tp) == MODIFY_EXPR
          && TREE_OPERAND (*tp, 0) == TREE_OPERAND (*tp, 1)
          && (auto_var_in_fn_p (TREE_OPERAND (*tp, 0), fn)))
        {
          /* Some assignments VAR = VAR; don't generate any rtl code
             and thus don't count as variable modification.  Avoid
             keeping bogosities like 0 = 0.  */
          tree decl = TREE_OPERAND (*tp, 0), value;
          tree *n;

          n = (tree *) pointer_map_contains (id->decl_map, decl);
          if (n)
            {
              value = *n;
              STRIP_TYPE_NOPS (value);
              if (TREE_CONSTANT (value) || TREE_READONLY (value))
                {
                  *tp = build_empty_stmt (EXPR_LOCATION (*tp));
                  return copy_tree_body_r (tp, walk_subtrees, data);
                }
            }
        }
      else if (TREE_CODE (*tp) == INDIRECT_REF)
        {
          /* Get rid of *& from inline substitutions that can happen when a
             pointer argument is an ADDR_EXPR.  */
          tree decl = TREE_OPERAND (*tp, 0);
          tree *n;

          n = (tree *) pointer_map_contains (id->decl_map, decl);
          if (n)
            {
              tree new_tree;
              tree old;
              /* If we happen to get an ADDR_EXPR in n->value, strip
                 it manually here as we'll eventually get ADDR_EXPRs
                 which lie about their types pointed to.  In this case
                 build_fold_indirect_ref wouldn't strip the INDIRECT_REF,
                 but we absolutely rely on that.  As fold_indirect_ref
                 does other useful transformations, try that first, though.  */
              tree type = TREE_TYPE (TREE_TYPE (*n));
              if (id->do_not_unshare)
                new_tree = *n;
              else
                new_tree = unshare_expr (*n);
              old = *tp;
              *tp = gimple_fold_indirect_ref (new_tree);
              if (! *tp)
                {
                  if (TREE_CODE (new_tree) == ADDR_EXPR)
                    {
                      *tp = fold_indirect_ref_1 (EXPR_LOCATION (new_tree),
                                                 type, new_tree);
                      /* ???  We should either assert here or build
                         a VIEW_CONVERT_EXPR instead of blindly leaking
                         incompatible types to our IL.  */
                      if (! *tp)
                        *tp = TREE_OPERAND (new_tree, 0);
                    }
                  else
                    {
                      *tp = build1 (INDIRECT_REF, type, new_tree);
                      TREE_THIS_VOLATILE (*tp) = TREE_THIS_VOLATILE (old);
                      TREE_SIDE_EFFECTS (*tp) = TREE_SIDE_EFFECTS (old);
                      TREE_READONLY (*tp) = TREE_READONLY (old);
                      TREE_THIS_NOTRAP (*tp) = TREE_THIS_NOTRAP (old);
                    }
                }
              *walk_subtrees = 0;
              return NULL;
            }
        }
      else if (TREE_CODE (*tp) == MEM_REF)
        {
          /* We need to re-canonicalize MEM_REFs from inline substitutions
             that can happen when a pointer argument is an ADDR_EXPR.  */
          tree decl = TREE_OPERAND (*tp, 0);
          tree *n;

          n = (tree *) pointer_map_contains (id->decl_map, decl);
          if (n)
            {
              tree old = *tp;
              *tp = fold_build2 (MEM_REF, TREE_TYPE (*tp),
                                 unshare_expr (*n), TREE_OPERAND (*tp, 1));
              TREE_THIS_VOLATILE (*tp) = TREE_THIS_VOLATILE (old);
              TREE_NO_WARNING (*tp) = TREE_NO_WARNING (old);
              *walk_subtrees = 0;
              return NULL;
            }
        }

      /* Here is the "usual case".  Copy this tree node, and then
         tweak some special cases.  */
      copy_tree_r (tp, walk_subtrees, NULL);

      /* Global variables we haven't seen yet need to go into referenced
         vars, unless they are referenced only from types or debug stmts.  */
      if (gimple_in_ssa_p (cfun)
          && TREE_CODE (*tp) == VAR_DECL
          && id->remapping_type_depth == 0
          && !processing_debug_stmt)
        add_referenced_var (*tp);

      /* If EXPR has a block defined, map it to the newly constructed block.
         When inlining, we want EXPRs without a block to appear in the block
         of the function call if we are not remapping a type.  */
      if (EXPR_P (*tp))
        {
          new_block = id->remapping_type_depth == 0 ? id->block : NULL;
          if (TREE_BLOCK (*tp))
            {
              tree *n;
              n = (tree *) pointer_map_contains (id->decl_map,
                                                 TREE_BLOCK (*tp));
              gcc_assert (n || id->remapping_type_depth != 0);
              if (n)
                new_block = *n;
            }
          TREE_BLOCK (*tp) = new_block;
        }

      if (TREE_CODE (*tp) != OMP_CLAUSE)
        TREE_TYPE (*tp) = remap_type (TREE_TYPE (*tp), id);

      /* The copied TARGET_EXPR has never been expanded, even if the
         original node was expanded already.  */
      if (TREE_CODE (*tp) == TARGET_EXPR && TREE_OPERAND (*tp, 3))
        {
          TREE_OPERAND (*tp, 1) = TREE_OPERAND (*tp, 3);
          TREE_OPERAND (*tp, 3) = NULL_TREE;
        }

      /* Variable substitution need not be simple.  In particular, the
         INDIRECT_REF substitution above.  Make sure that TREE_CONSTANT
         and friends are up-to-date.  */
      else if (TREE_CODE (*tp) == ADDR_EXPR)
        {
          int invariant = is_gimple_min_invariant (*tp);
          walk_tree (&TREE_OPERAND (*tp, 0), copy_tree_body_r, id, NULL);

          /* Handle the case where we substituted an INDIRECT_REF
             into the operand of the ADDR_EXPR.  */
          if (TREE_CODE (TREE_OPERAND (*tp, 0)) == INDIRECT_REF)
            *tp = TREE_OPERAND (TREE_OPERAND (*tp, 0), 0);
          else
            recompute_tree_invariant_for_addr_expr (*tp);

          /* If this used to be invariant, but is not any longer,
             then regimplification is probably needed.  */
          if (invariant && !is_gimple_min_invariant (*tp))
            id->regimplify = true;

          *walk_subtrees = 0;
        }
    }

  /* Keep iterating.  */
  return NULL;
}
/* Helper for remap_gimple_stmt.  Given an EH region number for the
   source function, map that to the duplicate EH region number in
   the destination function.  */

static int
remap_eh_region_nr (int old_nr, copy_body_data *id)
{
  eh_region old_r, new_r;
  void **slot;

  old_r = get_eh_region_from_number_fn (id->src_cfun, old_nr);
  slot = pointer_map_contains (id->eh_map, old_r);
  new_r = (eh_region) *slot;

  return new_r->index;
}
/* Similar, but operate on INTEGER_CSTs.  */

static tree
remap_eh_region_tree_nr (tree old_t_nr, copy_body_data *id)
{
  int old_nr, new_nr;

  old_nr = tree_low_cst (old_t_nr, 0);
  new_nr = remap_eh_region_nr (old_nr, id);

  return build_int_cst (integer_type_node, new_nr);
}
/* Helper for copy_bb.  Remap statement STMT using the inlining
   information in ID.  Return the new statement copy.  */

static gimple
remap_gimple_stmt (gimple stmt, copy_body_data *id)
{
  gimple copy = NULL;
  struct walk_stmt_info wi;
  tree new_block;
  bool skip_first = false;

  /* Begin by recognizing trees that we'll completely rewrite for the
     inlining context.  Our output for these trees is completely
     different from our input (e.g. RETURN_EXPR is deleted, and morphs
     into an edge).  Further down, we'll handle trees that get
     duplicated and/or tweaked.  */

  /* When requested, GIMPLE_RETURNs should be transformed to just the
     contained GIMPLE_ASSIGN.  The branch semantics of the return will
     be handled elsewhere by manipulating the CFG rather than the
     statement.  */
  if (gimple_code (stmt) == GIMPLE_RETURN && id->transform_return_to_modify)
    {
      tree retval = gimple_return_retval (stmt);

      /* If we're returning something, just turn that into an
         assignment into the equivalent of the original RESULT_DECL.
         If RETVAL is just the result decl, the result decl has
         already been set (e.g. a recent "foo (&result_decl, ...)");
         just toss the entire GIMPLE_RETURN.  */
      if (retval
          && (TREE_CODE (retval) != RESULT_DECL
              && (TREE_CODE (retval) != SSA_NAME
                  || TREE_CODE (SSA_NAME_VAR (retval)) != RESULT_DECL)))
        {
          copy = gimple_build_assign (id->retvar, retval);
          /* id->retvar is already substituted.  Skip it on later remapping.  */
          skip_first = true;
        }
      else
        return gimple_build_nop ();
    }
  else if (gimple_has_substatements (stmt))
    {
      gimple_seq s1, s2;

      /* When cloning bodies from the C++ front end, we will be handed bodies
         in High GIMPLE form.  Handle here all the High GIMPLE statements that
         have embedded statements.  */
      switch (gimple_code (stmt))
        {
        case GIMPLE_BIND:
          copy = copy_gimple_bind (stmt, id);
          break;

        case GIMPLE_CATCH:
          s1 = remap_gimple_seq (gimple_catch_handler (stmt), id);
          copy = gimple_build_catch (gimple_catch_types (stmt), s1);
          break;

        case GIMPLE_EH_FILTER:
          s1 = remap_gimple_seq (gimple_eh_filter_failure (stmt), id);
          copy = gimple_build_eh_filter (gimple_eh_filter_types (stmt), s1);
          break;

        case GIMPLE_TRY:
          s1 = remap_gimple_seq (gimple_try_eval (stmt), id);
          s2 = remap_gimple_seq (gimple_try_cleanup (stmt), id);
          copy = gimple_build_try (s1, s2, gimple_try_kind (stmt));
          break;

        case GIMPLE_WITH_CLEANUP_EXPR:
          s1 = remap_gimple_seq (gimple_wce_cleanup (stmt), id);
          copy = gimple_build_wce (s1);
          break;

        case GIMPLE_OMP_PARALLEL:
          s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
          copy = gimple_build_omp_parallel
                   (s1,
                    gimple_omp_parallel_clauses (stmt),
                    gimple_omp_parallel_child_fn (stmt),
                    gimple_omp_parallel_data_arg (stmt));
          break;

        case GIMPLE_OMP_TASK:
          s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
          copy = gimple_build_omp_task
                   (s1,
                    gimple_omp_task_clauses (stmt),
                    gimple_omp_task_child_fn (stmt),
                    gimple_omp_task_data_arg (stmt),
                    gimple_omp_task_copy_fn (stmt),
                    gimple_omp_task_arg_size (stmt),
                    gimple_omp_task_arg_align (stmt));
          break;

        case GIMPLE_OMP_FOR:
          s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
          s2 = remap_gimple_seq (gimple_omp_for_pre_body (stmt), id);
          copy = gimple_build_omp_for (s1, gimple_omp_for_clauses (stmt),
                                       gimple_omp_for_collapse (stmt), s2);
          {
            size_t i;
            for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
              {
                gimple_omp_for_set_index (copy, i,
                                          gimple_omp_for_index (stmt, i));
                gimple_omp_for_set_initial (copy, i,
                                            gimple_omp_for_initial (stmt, i));
                gimple_omp_for_set_final (copy, i,
                                          gimple_omp_for_final (stmt, i));
                gimple_omp_for_set_incr (copy, i,
                                         gimple_omp_for_incr (stmt, i));
                gimple_omp_for_set_cond (copy, i,
                                         gimple_omp_for_cond (stmt, i));
              }
          }
          break;

        case GIMPLE_OMP_MASTER:
          s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
          copy = gimple_build_omp_master (s1);
          break;

        case GIMPLE_OMP_ORDERED:
          s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
          copy = gimple_build_omp_ordered (s1);
          break;

        case GIMPLE_OMP_SECTION:
          s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
          copy = gimple_build_omp_section (s1);
          break;

        case GIMPLE_OMP_SECTIONS:
          s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
          copy = gimple_build_omp_sections
                   (s1, gimple_omp_sections_clauses (stmt));
          break;

        case GIMPLE_OMP_SINGLE:
          s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
          copy = gimple_build_omp_single
                   (s1, gimple_omp_single_clauses (stmt));
          break;

        case GIMPLE_OMP_CRITICAL:
          s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
          copy
            = gimple_build_omp_critical (s1, gimple_omp_critical_name (stmt));
          break;

        default:
          gcc_unreachable ();
        }
    }
  else
    {
      if (gimple_assign_copy_p (stmt)
          && gimple_assign_lhs (stmt) == gimple_assign_rhs1 (stmt)
          && auto_var_in_fn_p (gimple_assign_lhs (stmt), id->src_fn))
        {
          /* Here we handle statements that are not completely rewritten.
             First we detect some inlining-induced bogosities for
             discarding.  */

          /* Some assignments VAR = VAR; don't generate any rtl code
             and thus don't count as variable modification.  Avoid
             keeping bogosities like 0 = 0.  */
          tree decl = gimple_assign_lhs (stmt), value;
          tree *n;

          n = (tree *) pointer_map_contains (id->decl_map, decl);
          if (n)
            {
              value = *n;
              STRIP_TYPE_NOPS (value);
              if (TREE_CONSTANT (value) || TREE_READONLY (value))
                return gimple_build_nop ();
            }
        }

      if (gimple_debug_bind_p (stmt))
        {
          copy = gimple_build_debug_bind (gimple_debug_bind_get_var (stmt),
                                          gimple_debug_bind_get_value (stmt),
                                          stmt);
          VEC_safe_push (gimple, heap, id->debug_stmts, copy);
          return copy;
        }
      if (gimple_debug_source_bind_p (stmt))
        {
          copy = gimple_build_debug_source_bind
                   (gimple_debug_source_bind_get_var (stmt),
                    gimple_debug_source_bind_get_value (stmt), stmt);
          VEC_safe_push (gimple, heap, id->debug_stmts, copy);
          return copy;
        }

      /* Create a new deep copy of the statement.  */
      copy = gimple_copy (stmt);

      /* Remap the region numbers for __builtin_eh_{pointer,filter},
         RESX and EH_DISPATCH.  */
      if (id->eh_map)
        switch (gimple_code (copy))
          {
          case GIMPLE_CALL:
            {
              tree r, fndecl = gimple_call_fndecl (copy);
              if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
                switch (DECL_FUNCTION_CODE (fndecl))
                  {
                  case BUILT_IN_EH_COPY_VALUES:
                    r = gimple_call_arg (copy, 1);
                    r = remap_eh_region_tree_nr (r, id);
                    gimple_call_set_arg (copy, 1, r);
                    /* FALLTHRU */

                  case BUILT_IN_EH_POINTER:
                  case BUILT_IN_EH_FILTER:
                    r = gimple_call_arg (copy, 0);
                    r = remap_eh_region_tree_nr (r, id);
                    gimple_call_set_arg (copy, 0, r);
                    break;

                  default:
                    break;
                  }

              /* Reset alias info if we didn't apply measures to
                 keep it valid over inlining by setting DECL_PT_UID.  */
              if (!id->src_cfun->gimple_df
                  || !id->src_cfun->gimple_df->ipa_pta)
                gimple_call_reset_alias_info (copy);
            }
            break;

          case GIMPLE_RESX:
            {
              int r = gimple_resx_region (copy);
              r = remap_eh_region_nr (r, id);
              gimple_resx_set_region (copy, r);
            }
            break;

          case GIMPLE_EH_DISPATCH:
            {
              int r = gimple_eh_dispatch_region (copy);
              r = remap_eh_region_nr (r, id);
              gimple_eh_dispatch_set_region (copy, r);
            }
            break;

          default:
            break;
          }
    }

  /* If STMT has a block defined, map it to the newly constructed
     block.  When inlining we want statements without a block to
     appear in the block of the function call.  */
  new_block = id->block;
  if (gimple_block (copy))
    {
      tree *n;
      n = (tree *) pointer_map_contains (id->decl_map, gimple_block (copy));
      gcc_assert (n);
      new_block = *n;
    }

  gimple_set_block (copy, new_block);

  if (gimple_debug_bind_p (copy) || gimple_debug_source_bind_p (copy))
    return copy;

  /* Remap all the operands in COPY.  */
  memset (&wi, 0, sizeof (wi));
  wi.info = id;
  if (skip_first)
    walk_tree (gimple_op_ptr (copy, 1), remap_gimple_op_r, &wi, NULL);
  else
    walk_gimple_op (copy, remap_gimple_op_r, &wi);

  /* Clear the copied virtual operands.  We are not remapping them here
     but are going to recreate them from scratch.  */
  if (gimple_has_mem_ops (copy))
    {
      gimple_set_vdef (copy, NULL_TREE);
      gimple_set_vuse (copy, NULL_TREE);
    }

  return copy;
}
/* Copy basic block, scale profile accordingly.  Edges will be taken care of
   later.  */

static basic_block
copy_bb (copy_body_data *id, basic_block bb, int frequency_scale,
         gcov_type count_scale)
{
  gimple_stmt_iterator gsi, copy_gsi, seq_gsi;
  basic_block copy_basic_block;
  tree decl;
  gcov_type freq;
  basic_block prev;

  /* Search for previous copied basic block.  */
  prev = bb->prev_bb;
  while (!prev->aux)
    prev = prev->prev_bb;

  /* create_basic_block() will append every new block to
     basic_block_info automatically.  */
  copy_basic_block = create_basic_block (NULL, (void *) 0,
                                         (basic_block) prev->aux);
  copy_basic_block->count = bb->count * count_scale / REG_BR_PROB_BASE;

  /* We are going to rebuild frequencies from scratch.  These values
     have just small importance to drive canonicalize_loop_headers.  */
  freq = ((gcov_type)bb->frequency * frequency_scale / REG_BR_PROB_BASE);

  /* We recompute frequencies after inlining, so this is quite safe.  */
  if (freq > BB_FREQ_MAX)
    freq = BB_FREQ_MAX;
  copy_basic_block->frequency = freq;

  copy_gsi = gsi_start_bb (copy_basic_block);

  for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      gimple stmt = gsi_stmt (gsi);
      gimple orig_stmt = stmt;

      id->regimplify = false;
      stmt = remap_gimple_stmt (stmt, id);
      if (gimple_nop_p (stmt))
        continue;

      gimple_duplicate_stmt_histograms (cfun, stmt, id->src_cfun, orig_stmt);
      seq_gsi = copy_gsi;

      /* With return slot optimization we can end up with
         non-gimple (foo *)&this->m, fix that here.  */
      if (is_gimple_assign (stmt)
          && gimple_assign_rhs_code (stmt) == NOP_EXPR
          && !is_gimple_val (gimple_assign_rhs1 (stmt)))
        {
          tree new_rhs;
          new_rhs = force_gimple_operand_gsi (&seq_gsi,
                                              gimple_assign_rhs1 (stmt),
                                              true, NULL, false,
                                              GSI_CONTINUE_LINKING);
          gimple_assign_set_rhs1 (stmt, new_rhs);
          id->regimplify = false;
        }

      gsi_insert_after (&seq_gsi, stmt, GSI_NEW_STMT);

      if (id->regimplify)
        gimple_regimplify_operands (stmt, &seq_gsi);

      /* If copy_basic_block has been empty at the start of this iteration,
         call gsi_start_bb again to get at the newly added statements.  */
      if (gsi_end_p (copy_gsi))
        copy_gsi = gsi_start_bb (copy_basic_block);
      else
        gsi_next (&copy_gsi);

      /* Process the new statement.  The call to gimple_regimplify_operands
         possibly turned the statement into multiple statements, we
         need to process all of them.  */
      do
        {
          tree fn;

          stmt = gsi_stmt (copy_gsi);
          if (is_gimple_call (stmt)
              && gimple_call_va_arg_pack_p (stmt)
              && id->gimple_call)
            {
              /* __builtin_va_arg_pack () should be replaced by
                 all arguments corresponding to ... in the caller.  */
              tree p;
              gimple new_call;
              VEC(tree, heap) *argarray;
              size_t nargs = gimple_call_num_args (id->gimple_call);
              size_t n;

              for (p = DECL_ARGUMENTS (id->src_fn); p; p = DECL_CHAIN (p))
                nargs--;

              /* Create the new array of arguments.  */
              n = nargs + gimple_call_num_args (stmt);
              argarray = VEC_alloc (tree, heap, n);
              VEC_safe_grow (tree, heap, argarray, n);

              /* Copy all the arguments before '...'  */
              memcpy (VEC_address (tree, argarray),
                      gimple_call_arg_ptr (stmt, 0),
                      gimple_call_num_args (stmt) * sizeof (tree));

              /* Append the arguments passed in '...'  */
              memcpy (VEC_address (tree, argarray) + gimple_call_num_args (stmt),
                      gimple_call_arg_ptr (id->gimple_call, 0)
                        + (gimple_call_num_args (id->gimple_call) - nargs),
                      nargs * sizeof (tree));

              new_call = gimple_build_call_vec (gimple_call_fn (stmt),
                                                argarray);

              VEC_free (tree, heap, argarray);

              /* Copy all GIMPLE_CALL flags, location and block, except
                 GF_CALL_VA_ARG_PACK.  */
              gimple_call_copy_flags (new_call, stmt);
              gimple_call_set_va_arg_pack (new_call, false);
              gimple_set_location (new_call, gimple_location (stmt));
              gimple_set_block (new_call, gimple_block (stmt));
              gimple_call_set_lhs (new_call, gimple_call_lhs (stmt));

              gsi_replace (&copy_gsi, new_call, false);
              stmt = new_call;
            }
          else if (is_gimple_call (stmt)
                   && id->gimple_call
                   && (decl = gimple_call_fndecl (stmt))
                   && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
                   && DECL_FUNCTION_CODE (decl) == BUILT_IN_VA_ARG_PACK_LEN)
            {
              /* __builtin_va_arg_pack_len () should be replaced by
                 the number of anonymous arguments.  */
              size_t nargs = gimple_call_num_args (id->gimple_call);
              tree count, p;
              gimple new_stmt;

              for (p = DECL_ARGUMENTS (id->src_fn); p; p = DECL_CHAIN (p))
                nargs--;

              count = build_int_cst (integer_type_node, nargs);
              new_stmt = gimple_build_assign (gimple_call_lhs (stmt), count);
              gsi_replace (&copy_gsi, new_stmt, false);
              stmt = new_stmt;
            }

          /* Statements produced by inlining can be unfolded, especially
             when we constant propagated some operands.  We can't fold
             them right now for two reasons:
             1) folding requires SSA_NAME_DEF_STMTs to be correct
             2) we can't change function calls to builtins.
             So we just mark the statement for later folding.  We mark
             all new statements, instead of just statements that have
             changed by some nontrivial substitution, so that even
             statements made foldable indirectly are updated.  If this
             turns out to be expensive, copy_body can be told to watch
             for nontrivial changes.  */
          if (id->statements_to_fold)
            pointer_set_insert (id->statements_to_fold, stmt);

          /* We're duplicating a CALL_EXPR.  Find any corresponding
             callgraph edges and update or duplicate them.  */
          if (is_gimple_call (stmt))
            {
              struct cgraph_edge *edge;
              int flags;

              switch (id->transform_call_graph_edges)
                {
                case CB_CGE_DUPLICATE:
                  edge = cgraph_edge (id->src_node, orig_stmt);
                  if (edge)
                    {
                      int edge_freq = edge->frequency;
                      edge = cgraph_clone_edge (edge, id->dst_node, stmt,
                                                gimple_uid (stmt),
                                                REG_BR_PROB_BASE, CGRAPH_FREQ_BASE,
                                                true);
                      /* We could also just rescale the frequency, but
                         doing so would introduce roundoff errors and make
                         verifier unhappy.  */
                      edge->frequency
                        = compute_call_stmt_bb_frequency (id->dst_node->decl,
                                                          copy_basic_block);
                      if (dump_file
                          && profile_status_for_function (cfun) != PROFILE_ABSENT
                          && (edge_freq > edge->frequency + 10
                              || edge_freq < edge->frequency - 10))
                        {
                          fprintf (dump_file, "Edge frequency estimated by "
                                   "cgraph %i diverge from inliner's estimate %i\n",
                                   edge_freq,
                                   edge->frequency);
                          fprintf (dump_file,
                                   "Orig bb: %i, orig bb freq %i, new bb freq %i\n",
                                   bb->index,
                                   bb->frequency,
                                   copy_basic_block->frequency);
                        }
                      stmt = cgraph_redirect_edge_call_stmt_to_callee (edge);
                    }
                  break;

                case CB_CGE_MOVE_CLONES:
                  cgraph_set_call_stmt_including_clones (id->dst_node,
                                                         orig_stmt, stmt);
                  edge = cgraph_edge (id->dst_node, stmt);
                  break;

                case CB_CGE_MOVE:
                  edge = cgraph_edge (id->dst_node, orig_stmt);
                  if (edge)
                    cgraph_set_call_stmt (edge, stmt);
                  break;

                default:
                  gcc_unreachable ();
                }

              /* Constant propagation on arguments done during inlining
                 may create a new direct call.  Produce an edge for it.  */
              if ((!edge
                   || (edge->indirect_inlining_edge
                       && id->transform_call_graph_edges == CB_CGE_MOVE_CLONES))
                  && id->dst_node->analyzed
                  && (fn = gimple_call_fndecl (stmt)) != NULL)
                {
                  struct cgraph_node *dest = cgraph_get_node (fn);

                  /* We have a missing edge in the callgraph.  This can happen
                     when previous inlining turned an indirect call into a
                     direct call by constant propagating arguments, or we are
                     producing a dead clone (for further cloning).  In all
                     other cases we hit a bug (incorrect node sharing is the
                     most common reason for missing edges).  */
                  gcc_assert (dest->needed || !dest->analyzed
                              || dest->address_taken
                              || !id->src_node->analyzed
                              || !id->dst_node->analyzed);
                  if (id->transform_call_graph_edges == CB_CGE_MOVE_CLONES)
                    cgraph_create_edge_including_clones
                      (id->dst_node, dest, orig_stmt, stmt, bb->count,
                       compute_call_stmt_bb_frequency (id->dst_node->decl,
                                                       copy_basic_block),
                       CIF_ORIGINALLY_INDIRECT_CALL);
                  else
                    cgraph_create_edge (id->dst_node, dest, stmt,
                                        bb->count,
                                        compute_call_stmt_bb_frequency
                                          (id->dst_node->decl, copy_basic_block))->inline_failed
                      = CIF_ORIGINALLY_INDIRECT_CALL;
                  if (dump_file)
                    {
                      fprintf (dump_file, "Created new direct edge to %s\n",
                               cgraph_node_name (dest));
                    }
                }

              flags = gimple_call_flags (stmt);
              if (flags & ECF_MAY_BE_ALLOCA)
                cfun->calls_alloca = true;
              if (flags & ECF_RETURNS_TWICE)
                cfun->calls_setjmp = true;
            }

          maybe_duplicate_eh_stmt_fn (cfun, stmt, id->src_cfun, orig_stmt,
                                      id->eh_map, id->eh_lp_nr);

          if (gimple_in_ssa_p (cfun) && !is_gimple_debug (stmt))
            {
              ssa_op_iter i;
              tree def;

              find_new_referenced_vars (gsi_stmt (copy_gsi));
              FOR_EACH_SSA_TREE_OPERAND (def, stmt, i, SSA_OP_DEF)
                if (TREE_CODE (def) == SSA_NAME)
                  SSA_NAME_DEF_STMT (def) = stmt;
            }

          gsi_next (&copy_gsi);
        }
      while (!gsi_end_p (copy_gsi));

      copy_gsi = gsi_last_bb (copy_basic_block);
    }

  return copy_basic_block;
}
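
/* For reference, the user-level GCC extension handled above (see the GCC
   manual for __builtin_va_arg_pack): an always_inline varargs wrapper
   forwards its anonymous arguments, and copy_bb expands the builtin into
   the caller's actual '...' arguments.  The wrapper below is a sketch;
   `my_logf' is a hypothetical callee.  */
#if 0
extern int my_logf (int level, const char *fmt, ...);

static inline __attribute__ ((always_inline)) int
log_info (const char *fmt, ...)
{
  /* Replaced during inlining by the caller's anonymous arguments;
     __builtin_va_arg_pack_len () would yield their count.  */
  return my_logf (1, fmt, __builtin_va_arg_pack ());
}
#endif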
/* Inserting a Single Entry Multiple Exit region in SSA form into code in
   SSA form is quite easy, since the dominator relationship for the old
   basic blocks does not change.

   There is however an exception: inlining might change the dominator
   relation across EH edges from basic blocks within the inlined function
   to landing pads in the function we inline into.

   The function fills in PHI_RESULTs of such PHI nodes if they refer
   to gimple regs.  Otherwise, the function marks the PHI_RESULT of such
   PHI nodes for renaming.  For non-gimple regs, renaming is safe: the
   EH edges are abnormal and SSA_NAME_OCCURS_IN_ABNORMAL_PHI must be
   set, and this means that there will be no overlapping live ranges
   for the underlying symbol.

   This might change in the future if we allow redirecting of EH edges;
   we might then want to change the way the CFG is built pre-inlining to
   include all the possible edges.  */
static void
update_ssa_across_abnormal_edges (basic_block bb, basic_block ret_bb,
                                  bool can_throw, bool nonlocal_goto)
{
  edge e;
  edge_iterator ei;

  FOR_EACH_EDGE (e, ei, bb->succs)
    if (!e->dest->aux
        || ((basic_block)e->dest->aux)->index == ENTRY_BLOCK)
      {
        gimple phi;
        gimple_stmt_iterator si;

        if (!nonlocal_goto)
          gcc_assert (e->flags & EDGE_EH);

        if (!can_throw)
          gcc_assert (!(e->flags & EDGE_EH));

        for (si = gsi_start_phis (e->dest); !gsi_end_p (si); gsi_next (&si))
          {
            edge re;

            phi = gsi_stmt (si);

            /* There shouldn't be any PHI nodes in the ENTRY_BLOCK.  */
            gcc_assert (!e->dest->aux);

            gcc_assert ((e->flags & EDGE_EH)
                        || SSA_NAME_OCCURS_IN_ABNORMAL_PHI (PHI_RESULT (phi)));

            if (!is_gimple_reg (PHI_RESULT (phi)))
              {
                mark_sym_for_renaming (SSA_NAME_VAR (PHI_RESULT (phi)));
                continue;
              }

            re = find_edge (ret_bb, e->dest);
            gcc_assert (re);
            gcc_assert ((re->flags & (EDGE_EH | EDGE_ABNORMAL))
                        == (e->flags & (EDGE_EH | EDGE_ABNORMAL)));

            SET_USE (PHI_ARG_DEF_PTR_FROM_EDGE (phi, e),
                     USE_FROM_PTR (PHI_ARG_DEF_PTR_FROM_EDGE (phi, re)));
          }
      }
}
/* Copy edges from BB into its copy constructed earlier, scale profile
   accordingly.  Edges will be taken care of later.  Assume the aux
   pointers point to the copies of each BB.  Return true if any
   debug stmts are left after a statement that must end the basic block.  */

static bool
copy_edges_for_bb (basic_block bb, gcov_type count_scale, basic_block ret_bb)
{
  basic_block new_bb = (basic_block) bb->aux;
  edge_iterator ei;
  edge old_edge;
  gimple_stmt_iterator si;
  int flags;
  bool need_debug_cleanup = false;

  /* Use the indices from the original blocks to create edges for the
     new ones.  */
  FOR_EACH_EDGE (old_edge, ei, bb->succs)
    if (!(old_edge->flags & EDGE_EH))
      {
        edge new_edge;

        flags = old_edge->flags;

        /* Return edges do get a FALLTHRU flag when they get inlined.  */
        if (old_edge->dest->index == EXIT_BLOCK && !old_edge->flags
            && old_edge->dest->aux != EXIT_BLOCK_PTR)
          flags |= EDGE_FALLTHRU;
        new_edge = make_edge (new_bb, (basic_block) old_edge->dest->aux, flags);
        new_edge->count = old_edge->count * count_scale / REG_BR_PROB_BASE;
        new_edge->probability = old_edge->probability;
      }

  if (bb->index == ENTRY_BLOCK || bb->index == EXIT_BLOCK)
    return false;

  for (si = gsi_start_bb (new_bb); !gsi_end_p (si);)
    {
      gimple copy_stmt;
      bool can_throw, nonlocal_goto;

      copy_stmt = gsi_stmt (si);
      if (!is_gimple_debug (copy_stmt))
        {
          update_stmt (copy_stmt);
          if (gimple_in_ssa_p (cfun))
            mark_symbols_for_renaming (copy_stmt);
        }

      /* Do this before the possible split_block.  */
      gsi_next (&si);

      /* If this tree could throw an exception, there are two
         cases where we need to add abnormal edge(s): the
         tree wasn't in a region and there is a "current
         region" in the caller; or the original tree had
         EH edges.  In both cases split the block after the tree,
         and add abnormal edge(s) as needed; we need both
         those from the callee and the caller.
         We check whether the copy can throw, because the const
         propagation can change an INDIRECT_REF which throws
         into a COMPONENT_REF which doesn't.  If the copy
         can throw, the original could also throw.  */
      can_throw = stmt_can_throw_internal (copy_stmt);
      nonlocal_goto = stmt_can_make_abnormal_goto (copy_stmt);

      if (can_throw || nonlocal_goto)
        {
          if (!gsi_end_p (si))
            {
              while (!gsi_end_p (si) && is_gimple_debug (gsi_stmt (si)))
                gsi_next (&si);
              if (gsi_end_p (si))
                need_debug_cleanup = true;
            }
          if (!gsi_end_p (si))
            /* Note that bb's predecessor edges aren't necessarily
               right at this point; split_block doesn't care.  */
            {
              edge e = split_block (new_bb, copy_stmt);

              new_bb = e->dest;
              new_bb->aux = e->src->aux;
              si = gsi_start_bb (new_bb);
            }
        }

      if (gimple_code (copy_stmt) == GIMPLE_EH_DISPATCH)
        make_eh_dispatch_edges (copy_stmt);
      else if (can_throw)
        make_eh_edges (copy_stmt);

      if (nonlocal_goto)
        make_abnormal_goto_edges (gimple_bb (copy_stmt), true);

      if ((can_throw || nonlocal_goto)
          && gimple_in_ssa_p (cfun))
        update_ssa_across_abnormal_edges (gimple_bb (copy_stmt), ret_bb,
                                          can_throw, nonlocal_goto);
    }
  return need_debug_cleanup;
}
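
/* An illustrative note (hypothetical example, not part of GCC) for the
   can_throw check above: constant propagation may turn a trapping
   dereference into a non-trapping access to a known object.  */
#if 0
struct S { int f; } s;

static inline int get (struct S *p) { return p->f; }  /* may trap */

int
caller (void)
{
  return get (&s);   /* After inlining: s.f -- cannot trap.  */
}
#endif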
/* Copy the PHIs.  All blocks and edges are copied, some blocks
   were possibly split and new outgoing EH edges inserted.
   BB points to the block of the original function and the AUX pointers
   link the original and newly copied blocks.  */

static void
copy_phis_for_bb (basic_block bb, copy_body_data *id)
{
  basic_block const new_bb = (basic_block) bb->aux;
  edge_iterator ei;
  gimple phi;
  gimple_stmt_iterator si;
  edge new_edge;
  bool inserted = false;

  for (si = gsi_start (phi_nodes (bb)); !gsi_end_p (si); gsi_next (&si))
    {
      tree res, new_res;
      gimple new_phi;

      phi = gsi_stmt (si);
      res = PHI_RESULT (phi);
      new_res = res;
      if (is_gimple_reg (res))
        {
          walk_tree (&new_res, copy_tree_body_r, id, NULL);
          SSA_NAME_DEF_STMT (new_res)
            = new_phi = create_phi_node (new_res, new_bb);
          FOR_EACH_EDGE (new_edge, ei, new_bb->preds)
            {
              edge old_edge = find_edge ((basic_block) new_edge->src->aux, bb);
              tree arg;
              tree new_arg;
              tree block = id->block;
              edge_iterator ei2;

              /* When doing partial cloning, we allow PHIs on the entry block
                 as long as all the arguments are the same.  Find any input
                 edge to see which argument to copy.  */
              if (!old_edge)
                FOR_EACH_EDGE (old_edge, ei2, bb->preds)
                  if (!old_edge->src->aux)
                    break;

              arg = PHI_ARG_DEF_FROM_EDGE (phi, old_edge);
              new_arg = arg;
              id->block = NULL_TREE;
              walk_tree (&new_arg, copy_tree_body_r, id, NULL);
              id->block = block;
              gcc_assert (new_arg);
              /* With return slot optimization we can end up with
                 non-gimple (foo *)&this->m, fix that here.  */
              if (TREE_CODE (new_arg) != SSA_NAME
                  && TREE_CODE (new_arg) != FUNCTION_DECL
                  && !is_gimple_val (new_arg))
                {
                  gimple_seq stmts = NULL;
                  new_arg = force_gimple_operand (new_arg, &stmts, true, NULL);
                  gsi_insert_seq_on_edge (new_edge, stmts);
                  inserted = true;
                }
              add_phi_arg (new_phi, new_arg, new_edge,
                           gimple_phi_arg_location_from_edge (phi, old_edge));
            }
        }
    }

  /* Commit the delayed edge insertions.  */
  if (inserted)
    FOR_EACH_EDGE (new_edge, ei, new_bb->preds)
      gsi_commit_one_edge_insert (new_edge, NULL);
}
/* Wrapper for remap_decl so it can be used as a callback.  */

static tree
remap_decl_1 (tree decl, void *data)
{
  return remap_decl (decl, (copy_body_data *) data);
}
/* Build struct function and associated datastructures for the new clone
   NEW_FNDECL to be built.  CALLEE_FNDECL is the original.  */

static void
initialize_cfun (tree new_fndecl, tree callee_fndecl, gcov_type count)
{
  struct function *src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
  gcov_type count_scale;

  if (ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count)
    count_scale = (REG_BR_PROB_BASE * count
		   / ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count);
  else
    count_scale = REG_BR_PROB_BASE;

  /* Register specific tree functions.  */
  gimple_register_cfg_hooks ();

  /* Get clean struct function.  */
  push_struct_function (new_fndecl);

  /* We will rebuild these, so just sanity check that they are empty.  */
  gcc_assert (VALUE_HISTOGRAMS (cfun) == NULL);
  gcc_assert (cfun->local_decls == NULL);
  gcc_assert (cfun->cfg == NULL);
  gcc_assert (cfun->decl == new_fndecl);

  /* Copy items we preserve during cloning.  */
  cfun->static_chain_decl = src_cfun->static_chain_decl;
  cfun->nonlocal_goto_save_area = src_cfun->nonlocal_goto_save_area;
  cfun->function_end_locus = src_cfun->function_end_locus;
  cfun->curr_properties = src_cfun->curr_properties;
  cfun->last_verified = src_cfun->last_verified;
  cfun->va_list_gpr_size = src_cfun->va_list_gpr_size;
  cfun->va_list_fpr_size = src_cfun->va_list_fpr_size;
  cfun->has_nonlocal_label = src_cfun->has_nonlocal_label;
  cfun->stdarg = src_cfun->stdarg;
  cfun->after_inlining = src_cfun->after_inlining;
  cfun->can_throw_non_call_exceptions
    = src_cfun->can_throw_non_call_exceptions;
  cfun->returns_struct = src_cfun->returns_struct;
  cfun->returns_pcc_struct = src_cfun->returns_pcc_struct;
  cfun->after_tree_profile = src_cfun->after_tree_profile;

  init_empty_tree_cfg ();

  profile_status_for_function (cfun) = profile_status_for_function (src_cfun);
  ENTRY_BLOCK_PTR->count =
    (ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count * count_scale /
     REG_BR_PROB_BASE);
  ENTRY_BLOCK_PTR->frequency
    = ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->frequency;
  EXIT_BLOCK_PTR->count =
    (EXIT_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count * count_scale /
     REG_BR_PROB_BASE);
  EXIT_BLOCK_PTR->frequency =
    EXIT_BLOCK_PTR_FOR_FUNCTION (src_cfun)->frequency;

  init_eh_for_function ();

  if (src_cfun->gimple_df)
    {
      init_tree_ssa (cfun);
      cfun->gimple_df->in_ssa_p = true;
      init_ssa_operands ();
    }
  pop_cfun ();
}
/* Helper function for copy_cfg_body.  Move debug stmts from the end
   of NEW_BB to the beginning of successor basic blocks when needed.  If the
   successor has multiple predecessors, reset them, otherwise keep
   their value.  */

static void
maybe_move_debug_stmts_to_successors (copy_body_data *id, basic_block new_bb)
{
  edge_iterator ei;
  edge e;
  gimple_stmt_iterator si = gsi_last_nondebug_bb (new_bb);

  if (gsi_end_p (si)
      || gsi_one_before_end_p (si)
      || !(stmt_can_throw_internal (gsi_stmt (si))
	   || stmt_can_make_abnormal_goto (gsi_stmt (si))))
    return;

  FOR_EACH_EDGE (e, ei, new_bb->succs)
    {
      gimple_stmt_iterator ssi = gsi_last_bb (new_bb);
      gimple_stmt_iterator dsi = gsi_after_labels (e->dest);
      while (is_gimple_debug (gsi_stmt (ssi)))
	{
	  gimple stmt = gsi_stmt (ssi), new_stmt;
	  tree var;
	  tree value;

	  /* For the last edge move the debug stmts instead of copying
	     them.  */
	  if (ei_one_before_end_p (ei))
	    {
	      si = ssi;
	      gsi_prev (&ssi);
	      if (!single_pred_p (e->dest) && gimple_debug_bind_p (stmt))
		gimple_debug_bind_reset_value (stmt);
	      gsi_remove (&si, false);
	      gsi_insert_before (&dsi, stmt, GSI_SAME_STMT);
	      continue;
	    }

	  if (gimple_debug_bind_p (stmt))
	    {
	      var = gimple_debug_bind_get_var (stmt);
	      if (single_pred_p (e->dest))
		{
		  value = gimple_debug_bind_get_value (stmt);
		  value = unshare_expr (value);
		}
	      else
		value = NULL_TREE;
	      new_stmt = gimple_build_debug_bind (var, value, stmt);
	    }
	  else if (gimple_debug_source_bind_p (stmt))
	    {
	      var = gimple_debug_source_bind_get_var (stmt);
	      value = gimple_debug_source_bind_get_value (stmt);
	      new_stmt = gimple_build_debug_source_bind (var, value, stmt);
	    }
	  else
	    gcc_unreachable ();
	  gsi_insert_before (&dsi, new_stmt, GSI_SAME_STMT);
	  VEC_safe_push (gimple, heap, id->debug_stmts, new_stmt);
	  gsi_prev (&ssi);
	}
    }
}
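
/* A hypothetical illustration of the case handled above: when NEW_BB ends
   with

       x_1 = foo ();        <-- may throw; must end the block
       # DEBUG x => x_1     <-- debug bind left after the block-ending stmt

   the trailing debug binds are moved (for the last successor edge) or
   copied (for the other edges) to the start of each successor block.
   When a successor has several predecessors the bound value is reset,
   since the binding would be valid on only one of the incoming paths.  */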
/* Make a copy of the body of FN so that it can be inserted inline in
   another function.  Walks FN via CFG, returns new fndecl.  */

static tree
copy_cfg_body (copy_body_data * id, gcov_type count, int frequency_scale,
	       basic_block entry_block_map, basic_block exit_block_map,
	       bitmap blocks_to_copy, basic_block new_entry)
{
  tree callee_fndecl = id->src_fn;
  /* Original cfun for the callee, doesn't change.  */
  struct function *src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
  struct function *cfun_to_copy;
  basic_block bb;
  tree new_fndecl = NULL;
  bool need_debug_cleanup = false;
  gcov_type count_scale;
  int last;
  int incoming_frequency = 0;
  gcov_type incoming_count = 0;

  if (ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count)
    count_scale = (REG_BR_PROB_BASE * count
		   / ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count);
  else
    count_scale = REG_BR_PROB_BASE;

  /* Register specific tree functions.  */
  gimple_register_cfg_hooks ();

  /* If we are inlining just a region of the function, make sure to connect
     the new entry to ENTRY_BLOCK_PTR.  Since the new entry can be part of a
     loop, we must compute the frequency and probability of ENTRY_BLOCK_PTR
     based on the frequencies and probabilities of edges incoming from the
     nonduplicated region.  */
  if (new_entry)
    {
      edge e;
      edge_iterator ei;

      FOR_EACH_EDGE (e, ei, new_entry->preds)
	if (!e->src->aux)
	  {
	    incoming_frequency += EDGE_FREQUENCY (e);
	    incoming_count += e->count;
	  }
      incoming_count = incoming_count * count_scale / REG_BR_PROB_BASE;
      incoming_frequency
	= incoming_frequency * frequency_scale / REG_BR_PROB_BASE;
      ENTRY_BLOCK_PTR->count = incoming_count;
      ENTRY_BLOCK_PTR->frequency = incoming_frequency;
    }

  /* Must have a CFG here at this point.  */
  gcc_assert (ENTRY_BLOCK_PTR_FOR_FUNCTION
	      (DECL_STRUCT_FUNCTION (callee_fndecl)));

  cfun_to_copy = id->src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);

  ENTRY_BLOCK_PTR_FOR_FUNCTION (cfun_to_copy)->aux = entry_block_map;
  EXIT_BLOCK_PTR_FOR_FUNCTION (cfun_to_copy)->aux = exit_block_map;
  entry_block_map->aux = ENTRY_BLOCK_PTR_FOR_FUNCTION (cfun_to_copy);
  exit_block_map->aux = EXIT_BLOCK_PTR_FOR_FUNCTION (cfun_to_copy);

  /* Duplicate any exception-handling regions.  */
  if (cfun->eh)
    id->eh_map = duplicate_eh_regions (cfun_to_copy, NULL, id->eh_lp_nr,
				       remap_decl_1, id);

  /* Use aux pointers to map the original blocks to copy.  */
  FOR_EACH_BB_FN (bb, cfun_to_copy)
    if (!blocks_to_copy || bitmap_bit_p (blocks_to_copy, bb->index))
      {
	basic_block new_bb = copy_bb (id, bb, frequency_scale, count_scale);
	bb->aux = new_bb;
	new_bb->aux = bb;
      }

  last = last_basic_block;

  /* Now that we've duplicated the blocks, duplicate their edges.  */
  FOR_ALL_BB_FN (bb, cfun_to_copy)
    if (!blocks_to_copy
	|| (bb->index > 0 && bitmap_bit_p (blocks_to_copy, bb->index)))
      need_debug_cleanup |= copy_edges_for_bb (bb, count_scale, exit_block_map);

  if (new_entry)
    {
      edge e = make_edge (entry_block_map, (basic_block)new_entry->aux, EDGE_FALLTHRU);
      e->probability = REG_BR_PROB_BASE;
      e->count = incoming_count;
    }

  if (gimple_in_ssa_p (cfun))
    FOR_ALL_BB_FN (bb, cfun_to_copy)
      if (!blocks_to_copy
	  || (bb->index > 0 && bitmap_bit_p (blocks_to_copy, bb->index)))
	copy_phis_for_bb (bb, id);

  FOR_ALL_BB_FN (bb, cfun_to_copy)
    if (bb->aux)
      {
	if (need_debug_cleanup
	    && bb->index != ENTRY_BLOCK
	    && bb->index != EXIT_BLOCK)
	  maybe_move_debug_stmts_to_successors (id, (basic_block) bb->aux);
	((basic_block)bb->aux)->aux = NULL;
	bb->aux = NULL;
      }

  /* Zero out AUX fields of newly created blocks during EH edge
     insertion.  */
  for (; last < last_basic_block; last++)
    {
      if (need_debug_cleanup)
	maybe_move_debug_stmts_to_successors (id, BASIC_BLOCK (last));
      BASIC_BLOCK (last)->aux = NULL;
    }
  entry_block_map->aux = NULL;
  exit_block_map->aux = NULL;

  if (id->eh_map)
    {
      pointer_map_destroy (id->eh_map);
      id->eh_map = NULL;
    }

  return new_fndecl;
}
/* Copy the debug STMT using ID.  We deal with these statements in a
   special way: if any variable in their VALUE expression wasn't
   remapped yet, we won't remap it, because that would get decl uids
   out of sync, causing codegen differences between -g and -g0.  If
   this arises, we drop the VALUE expression altogether.  */

static void
copy_debug_stmt (gimple stmt, copy_body_data *id)
{
  tree t, *n;
  struct walk_stmt_info wi;

  t = id->block;
  if (gimple_block (stmt))
    {
      n = (tree *) pointer_map_contains (id->decl_map, gimple_block (stmt));
      if (n)
	t = *n;
    }
  gimple_set_block (stmt, t);

  /* Remap all the operands in COPY.  */
  memset (&wi, 0, sizeof (wi));
  wi.info = id;

  processing_debug_stmt = 1;

  if (gimple_debug_source_bind_p (stmt))
    t = gimple_debug_source_bind_get_var (stmt);
  else
    t = gimple_debug_bind_get_var (stmt);

  if (TREE_CODE (t) == PARM_DECL && id->debug_map
      && (n = (tree *) pointer_map_contains (id->debug_map, t)))
    {
      gcc_assert (TREE_CODE (*n) == VAR_DECL);
      t = *n;
    }
  else if (TREE_CODE (t) == VAR_DECL
	   && !TREE_STATIC (t)
	   && gimple_in_ssa_p (cfun)
	   && !pointer_map_contains (id->decl_map, t)
	   && !var_ann (t))
    /* T is a non-localized variable.  */;
  else
    walk_tree (&t, remap_gimple_op_r, &wi, NULL);

  if (gimple_debug_bind_p (stmt))
    {
      gimple_debug_bind_set_var (stmt, t);

      if (gimple_debug_bind_has_value_p (stmt))
	walk_tree (gimple_debug_bind_get_value_ptr (stmt),
		   remap_gimple_op_r, &wi, NULL);

      /* Punt if any decl couldn't be remapped.  */
      if (processing_debug_stmt < 0)
	gimple_debug_bind_reset_value (stmt);
    }
  else if (gimple_debug_source_bind_p (stmt))
    {
      gimple_debug_source_bind_set_var (stmt, t);
      walk_tree (gimple_debug_source_bind_get_value_ptr (stmt),
		 remap_gimple_op_r, &wi, NULL);
    }

  processing_debug_stmt = 0;

  update_stmt (stmt);
  if (gimple_in_ssa_p (cfun))
    mark_symbols_for_renaming (stmt);
}
/* Process deferred debug stmts.  In order to give values better odds
   of being successfully remapped, we delay the processing of debug
   stmts until all other stmts that might require remapping are
   processed.  */

static void
copy_debug_stmts (copy_body_data *id)
{
  size_t i;
  gimple stmt;

  if (!id->debug_stmts)
    return;

  FOR_EACH_VEC_ELT (gimple, id->debug_stmts, i, stmt)
    copy_debug_stmt (stmt, id);

  VEC_free (gimple, heap, id->debug_stmts);
}
/* Make a copy of the body of SRC_FN so that it can be inserted inline in
   another function.  */

static tree
copy_tree_body (copy_body_data *id)
{
  tree fndecl = id->src_fn;
  tree body = DECL_SAVED_TREE (fndecl);

  walk_tree (&body, copy_tree_body_r, id, NULL);

  return body;
}
/* Make a copy of the body of FN so that it can be inserted inline in
   another function.  */

static tree
copy_body (copy_body_data *id, gcov_type count, int frequency_scale,
	   basic_block entry_block_map, basic_block exit_block_map,
	   bitmap blocks_to_copy, basic_block new_entry)
{
  tree fndecl = id->src_fn;
  tree body;

  /* If this body has a CFG, walk CFG and copy.  */
  gcc_assert (ENTRY_BLOCK_PTR_FOR_FUNCTION (DECL_STRUCT_FUNCTION (fndecl)));
  body = copy_cfg_body (id, count, frequency_scale, entry_block_map, exit_block_map,
			blocks_to_copy, new_entry);
  copy_debug_stmts (id);

  return body;
}
/* Return true if VALUE is an ADDR_EXPR of an automatic variable
   defined in function FN, or of a data member thereof.  */

static bool
self_inlining_addr_expr (tree value, tree fn)
{
  tree var;

  if (TREE_CODE (value) != ADDR_EXPR)
    return false;

  var = get_base_address (TREE_OPERAND (value, 0));

  return var && auto_var_in_fn_p (var, fn);
}
/* Append to BB a debug annotation that binds VAR to VALUE, inheriting
   lexical block and line number information from BASE_STMT, if given,
   or from the last stmt of the block otherwise.  */

static gimple
insert_init_debug_bind (copy_body_data *id,
			basic_block bb, tree var, tree value,
			gimple base_stmt)
{
  gimple note;
  gimple_stmt_iterator gsi;
  tree tracked_var;

  if (!gimple_in_ssa_p (id->src_cfun))
    return NULL;

  if (!MAY_HAVE_DEBUG_STMTS)
    return NULL;

  tracked_var = target_for_debug_bind (var);
  if (!tracked_var)
    return NULL;

  if (bb)
    {
      gsi = gsi_last_bb (bb);
      if (!base_stmt && !gsi_end_p (gsi))
	base_stmt = gsi_stmt (gsi);
    }

  note = gimple_build_debug_bind (tracked_var, value, base_stmt);

  if (bb)
    {
      if (!gsi_end_p (gsi))
	gsi_insert_after (&gsi, note, GSI_SAME_STMT);
      else
	gsi_insert_before (&gsi, note, GSI_SAME_STMT);
    }

  return note;
}
static void
insert_init_stmt (copy_body_data *id, basic_block bb, gimple init_stmt)
{
  /* If VAR represents a zero-sized variable, it's possible that the
     assignment statement may result in no gimple statements.  */
  if (init_stmt)
    {
      gimple_stmt_iterator si = gsi_last_bb (bb);

      /* We can end up with init statements that store to a non-register
	 from a rhs with a conversion.  Handle that here by forcing the
	 rhs into a temporary.  gimple_regimplify_operands is not
	 prepared to do this for us.  */
      if (!is_gimple_debug (init_stmt)
	  && !is_gimple_reg (gimple_assign_lhs (init_stmt))
	  && is_gimple_reg_type (TREE_TYPE (gimple_assign_lhs (init_stmt)))
	  && gimple_assign_rhs_class (init_stmt) == GIMPLE_UNARY_RHS)
	{
	  tree rhs = build1 (gimple_assign_rhs_code (init_stmt),
			     gimple_expr_type (init_stmt),
			     gimple_assign_rhs1 (init_stmt));
	  rhs = force_gimple_operand_gsi (&si, rhs, true, NULL_TREE, false,
					  GSI_NEW_STMT);
	  gimple_assign_set_rhs_code (init_stmt, TREE_CODE (rhs));
	  gimple_assign_set_rhs1 (init_stmt, rhs);
	}
      gsi_insert_after (&si, init_stmt, GSI_NEW_STMT);
      gimple_regimplify_operands (init_stmt, &si);
      mark_symbols_for_renaming (init_stmt);

      if (!is_gimple_debug (init_stmt) && MAY_HAVE_DEBUG_STMTS)
	{
	  tree var, def = gimple_assign_lhs (init_stmt);

	  if (TREE_CODE (def) == SSA_NAME)
	    var = SSA_NAME_VAR (def);
	  else
	    var = def;

	  insert_init_debug_bind (id, bb, var, def, init_stmt);
	}
    }
}
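
/* A sketch of the regimplification case handled above (identifiers are
   hypothetical): an init statement like

       D.1234 = (struct S *) p_1;    <-- LHS lives in memory

   is rewritten as

       tmp_2 = (struct S *) p_1;
       D.1234 = tmp_2;

   by forcing the converting RHS into a temporary first, since
   gimple_regimplify_operands does not introduce that temporary itself.  */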
/* Initialize parameter P with VALUE.  If needed, produce an init statement
   at the end of BB.  When BB is NULL, we return the init statement to be
   output later.  */
static gimple
setup_one_parameter (copy_body_data *id, tree p, tree value, tree fn,
		     basic_block bb, tree *vars)
{
  gimple init_stmt = NULL;
  tree var;
  tree rhs = value;
  tree def = (gimple_in_ssa_p (cfun)
	      ? gimple_default_def (id->src_cfun, p) : NULL);

  if (value
      && value != error_mark_node
      && !useless_type_conversion_p (TREE_TYPE (p), TREE_TYPE (value)))
    {
      if (fold_convertible_p (TREE_TYPE (p), value))
	rhs = fold_build1 (NOP_EXPR, TREE_TYPE (p), value);
      else
	/* ???  For valid (GIMPLE) programs we should not end up here.
	   Still if something has gone wrong and we end up with truly
	   mismatched types here, fall back to using a VIEW_CONVERT_EXPR
	   to not leak invalid GIMPLE to the following passes.  */
	rhs = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (p), value);
    }

  /* Make an equivalent VAR_DECL.  Note that we must NOT remap the type
     here since the type of this decl must be visible to the calling
     function.  */
  var = copy_decl_to_var (p, id);

  /* We're actually using the newly-created var.  */
  if (gimple_in_ssa_p (cfun) && TREE_CODE (var) == VAR_DECL)
    add_referenced_var (var);

  /* Declare this new variable.  */
  DECL_CHAIN (var) = *vars;
  *vars = var;

  /* Make gimplifier happy about this variable.  */
  DECL_SEEN_IN_BIND_EXPR_P (var) = 1;

  /* If the parameter is never assigned to, has no SSA_NAMEs created,
     we would not need to create a new variable here at all, if it
     weren't for debug info.  Still, we can just use the argument
     value.  */
  if (TREE_READONLY (p)
      && !TREE_ADDRESSABLE (p)
      && value && !TREE_SIDE_EFFECTS (value)
      && !def)
    {
      /* We may produce non-gimple trees by adding NOPs or introduce
	 invalid sharing when the operand is not really constant.
	 It is not a big deal to prohibit constant propagation here as
	 we will constant propagate in the DOM1 pass anyway.  */
      if (is_gimple_min_invariant (value)
	  && useless_type_conversion_p (TREE_TYPE (p),
					TREE_TYPE (value))
	  /* We have to be very careful about ADDR_EXPR.  Make sure
	     the base variable isn't a local variable of the inlined
	     function, e.g., when doing recursive inlining, direct or
	     mutually-recursive or whatever, which is why we don't
	     just test whether fn == current_function_decl.  */
	  && ! self_inlining_addr_expr (value, fn))
	{
	  insert_decl_map (id, p, value);
	  insert_debug_decl_map (id, p, var);
	  return insert_init_debug_bind (id, bb, var, value, NULL);
	}
    }

  /* Register the VAR_DECL as the equivalent for the PARM_DECL;
     that way, when the PARM_DECL is encountered, it will be
     automatically replaced by the VAR_DECL.  */
  insert_decl_map (id, p, var);

  /* Even if P was TREE_READONLY, the new VAR should not be.
     In the original code, we would have constructed a
     temporary, and then the function body would have never
     changed the value of P.  However, now, we will be
     constructing VAR directly.  The constructor body may
     change its value multiple times as it is being
     constructed.  Therefore, it must not be TREE_READONLY;
     the back-end assumes that TREE_READONLY variable is
     assigned to only once.  */
  if (TYPE_NEEDS_CONSTRUCTING (TREE_TYPE (p)))
    TREE_READONLY (var) = 0;

  /* If there is no setup required and we are in SSA, take the easy route
     replacing all SSA names representing the function parameter by the
     SSA name passed to function.

     We need to construct a map for the variable anyway as it might be used
     in different SSA names when the parameter is set in the function.

     Do replacement at -O0 for const arguments replaced by constant.
     This is important for builtin_constant_p and other constructs requiring
     constant argument to be visible in inlined function body.  */
  if (gimple_in_ssa_p (cfun) && rhs && def && is_gimple_reg (p)
      && (optimize
	  || (TREE_READONLY (p)
	      && is_gimple_min_invariant (rhs)))
      && (TREE_CODE (rhs) == SSA_NAME
	  || is_gimple_min_invariant (rhs))
      && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (def))
    {
      insert_decl_map (id, def, rhs);
      return insert_init_debug_bind (id, bb, var, rhs, NULL);
    }

  /* If the value of argument is never used, don't care about initializing
     it.  */
  if (optimize && gimple_in_ssa_p (cfun) && !def && is_gimple_reg (p))
    {
      gcc_assert (!value || !TREE_SIDE_EFFECTS (value));
      return insert_init_debug_bind (id, bb, var, rhs, NULL);
    }

  /* Initialize this VAR_DECL from the equivalent argument.  Convert
     the argument to the proper type in case it was promoted.  */
  if (value)
    {
      if (rhs == error_mark_node)
	{
	  insert_decl_map (id, p, var);
	  return insert_init_debug_bind (id, bb, var, rhs, NULL);
	}

      STRIP_USELESS_TYPE_CONVERSION (rhs);

      /* We want to use MODIFY_EXPR, not INIT_EXPR here so that we
	 keep our trees in gimple form.  */
      if (def && gimple_in_ssa_p (cfun) && is_gimple_reg (p))
	{
	  def = remap_ssa_name (def, id);
	  init_stmt = gimple_build_assign (def, rhs);
	  SSA_NAME_IS_DEFAULT_DEF (def) = 0;
	  set_default_def (var, NULL);
	}
      else
	init_stmt = gimple_build_assign (var, rhs);

      if (bb && init_stmt)
	insert_init_stmt (id, bb, init_stmt);
    }
  return init_stmt;
}
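
/* Illustrating the shortcuts above with invented names: for a call
   foo (5) where foo's parameter p is read-only and in SSA form, p's
   default definition p_1(D) is simply mapped to the constant 5 and no
   assignment is emitted; for foo (x_7) it is mapped to x_7 the same
   way.  Only when no shortcut applies is an explicit `var = rhs'
   statement built and appended to BB.  */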
/* Generate code to initialize the parameters of the function at the
   top of the stack in ID from the GIMPLE_CALL STMT.  */

static void
initialize_inlined_parameters (copy_body_data *id, gimple stmt,
			       tree fn, basic_block bb)
{
  tree parms;
  size_t i;
  tree p;
  tree vars = NULL_TREE;
  tree static_chain = gimple_call_chain (stmt);

  /* Figure out what the parameters are.  */
  parms = DECL_ARGUMENTS (fn);

  /* Loop through the parameter declarations, replacing each with an
     equivalent VAR_DECL, appropriately initialized.  */
  for (p = parms, i = 0; p; p = DECL_CHAIN (p), i++)
    {
      tree val;
      val = i < gimple_call_num_args (stmt) ? gimple_call_arg (stmt, i) : NULL;
      setup_one_parameter (id, p, val, fn, bb, &vars);
    }
  /* After remapping parameters remap their types.  This has to be done
     in a second loop over all parameters to appropriately remap
     variable sized arrays when the size is specified in a
     parameter following the array.  */
  for (p = parms, i = 0; p; p = DECL_CHAIN (p), i++)
    {
      tree *varp = (tree *) pointer_map_contains (id->decl_map, p);
      if (varp
	  && TREE_CODE (*varp) == VAR_DECL)
	{
	  tree def = (gimple_in_ssa_p (cfun) && is_gimple_reg (p)
		      ? gimple_default_def (id->src_cfun, p) : NULL);
	  tree var = *varp;
	  TREE_TYPE (var) = remap_type (TREE_TYPE (var), id);
	  /* Also remap the default definition if it was remapped
	     to the default definition of the parameter replacement
	     by the parameter setup.  */
	  if (def)
	    {
	      tree *defp = (tree *) pointer_map_contains (id->decl_map, def);
	      if (defp
		  && TREE_CODE (*defp) == SSA_NAME
		  && SSA_NAME_VAR (*defp) == var)
		TREE_TYPE (*defp) = TREE_TYPE (var);
	    }
	}
    }

  /* Initialize the static chain.  */
  p = DECL_STRUCT_FUNCTION (fn)->static_chain_decl;
  gcc_assert (fn != current_function_decl);
  if (p)
    {
      /* No static chain?  Seems like a bug in tree-nested.c.  */
      gcc_assert (static_chain);

      setup_one_parameter (id, p, static_chain, fn, bb, &vars);
    }

  declare_inline_vars (id->block, vars);
}
/* Declare a return variable to replace the RESULT_DECL for the
   function we are calling.  An appropriate DECL_STMT is returned.
   The USE_STMT is filled to contain a use of the declaration to
   indicate the return value of the function.

   RETURN_SLOT, if non-null, is the place where to store the result.
   It is set only for CALL_EXPR_RETURN_SLOT_OPT.  MODIFY_DEST, if
   non-null, was the LHS of the MODIFY_EXPR to which this call is the RHS.

   The return value is a (possibly null) value that holds the result
   as seen by the caller.  */

static tree
declare_return_variable (copy_body_data *id, tree return_slot, tree modify_dest,
			 basic_block entry_bb)
{
  tree callee = id->src_fn;
  tree result = DECL_RESULT (callee);
  tree callee_type = TREE_TYPE (result);
  tree caller_type;
  tree var, use;

  /* Handle type-mismatches in the function declaration return type
     vs. the call expression.  */
  if (modify_dest)
    caller_type = TREE_TYPE (modify_dest);
  else
    caller_type = TREE_TYPE (TREE_TYPE (callee));

  /* We don't need to do anything for functions that don't return
     anything.  */
  if (!result || VOID_TYPE_P (callee_type))
    return NULL_TREE;

  /* If there was a return slot, then the return value is the
     dereferenced address of that object.  */
  if (return_slot)
    {
      /* The front end shouldn't have used both return_slot and
	 a modify expression.  */
      gcc_assert (!modify_dest);
      if (DECL_BY_REFERENCE (result))
	{
	  tree return_slot_addr = build_fold_addr_expr (return_slot);
	  STRIP_USELESS_TYPE_CONVERSION (return_slot_addr);

	  /* We are going to construct *&return_slot and we can't do that
	     for variables believed to be not addressable.

	     FIXME: This check possibly can match, because values returned
	     via return slot optimization are not believed to have address
	     taken by alias analysis.  */
	  gcc_assert (TREE_CODE (return_slot) != SSA_NAME);
	  var = return_slot_addr;
	}
      else
	{
	  var = return_slot;
	  gcc_assert (TREE_CODE (var) != SSA_NAME);
	  TREE_ADDRESSABLE (var) |= TREE_ADDRESSABLE (result);
	}
      if ((TREE_CODE (TREE_TYPE (result)) == COMPLEX_TYPE
	   || TREE_CODE (TREE_TYPE (result)) == VECTOR_TYPE)
	  && !DECL_GIMPLE_REG_P (result)
	  && DECL_P (var))
	DECL_GIMPLE_REG_P (var) = 0;
      use = NULL;
      goto done;
    }

  /* All types requiring non-trivial constructors should have been handled.  */
  gcc_assert (!TREE_ADDRESSABLE (callee_type));

  /* Attempt to avoid creating a new temporary variable.  */
  if (modify_dest
      && TREE_CODE (modify_dest) != SSA_NAME)
    {
      bool use_it = false;

      /* We can't use MODIFY_DEST if there's type promotion involved.  */
      if (!useless_type_conversion_p (callee_type, caller_type))
	use_it = false;

      /* ??? If we're assigning to a variable sized type, then we must
	 reuse the destination variable, because we've no good way to
	 create variable sized temporaries at this point.  */
      else if (TREE_CODE (TYPE_SIZE_UNIT (caller_type)) != INTEGER_CST)
	use_it = true;

      /* If the callee cannot possibly modify MODIFY_DEST, then we can
	 reuse it as the result of the call directly.  Don't do this if
	 it would promote MODIFY_DEST to addressable.  */
      else if (TREE_ADDRESSABLE (result))
	use_it = false;
      else
	{
	  tree base_m = get_base_address (modify_dest);

	  /* If the base isn't a decl, then it's a pointer, and we don't
	     know where that's going to go.  */
	  if (!DECL_P (base_m))
	    use_it = false;
	  else if (is_global_var (base_m))
	    use_it = false;
	  else if ((TREE_CODE (TREE_TYPE (result)) == COMPLEX_TYPE
		    || TREE_CODE (TREE_TYPE (result)) == VECTOR_TYPE)
		   && !DECL_GIMPLE_REG_P (result)
		   && DECL_GIMPLE_REG_P (base_m))
	    use_it = false;
	  else if (!TREE_ADDRESSABLE (base_m))
	    use_it = true;
	}

      if (use_it)
	{
	  var = modify_dest;
	  use = NULL;
	  goto done;
	}
    }

  gcc_assert (TREE_CODE (TYPE_SIZE_UNIT (callee_type)) == INTEGER_CST);

  var = copy_result_decl_to_var (result, id);
  if (gimple_in_ssa_p (cfun))
    add_referenced_var (var);

  DECL_SEEN_IN_BIND_EXPR_P (var) = 1;

  /* Do not have the rest of GCC warn about this variable as it should
     not be visible to the user.  */
  TREE_NO_WARNING (var) = 1;

  declare_inline_vars (id->block, var);

  /* Build the use expr.  If the return type of the function was
     promoted, convert it back to the expected type.  */
  use = var;
  if (!useless_type_conversion_p (caller_type, TREE_TYPE (var)))
    use = fold_convert (caller_type, var);

  STRIP_USELESS_TYPE_CONVERSION (use);

  if (DECL_BY_REFERENCE (result))
    {
      TREE_ADDRESSABLE (var) = 1;
      var = build_fold_addr_expr (var);
    }

 done:
  /* Register the VAR_DECL as the equivalent for the RESULT_DECL; that
     way, when the RESULT_DECL is encountered, it will be
     automatically replaced by the VAR_DECL.

     When returning by reference, ensure that RESULT_DECL remaps to
     gimple_val.  */
  if (DECL_BY_REFERENCE (result)
      && !is_gimple_val (var))
    {
      tree temp = create_tmp_var (TREE_TYPE (result), "retvalptr");
      if (gimple_in_ssa_p (id->src_cfun))
	add_referenced_var (temp);
      insert_decl_map (id, result, temp);
      /* When RESULT_DECL is in SSA form, we need to use its default_def
	 SSA_NAME.  */
      if (gimple_in_ssa_p (id->src_cfun) && gimple_default_def (id->src_cfun, result))
	temp = remap_ssa_name (gimple_default_def (id->src_cfun, result), id);
      insert_init_stmt (id, entry_bb, gimple_build_assign (temp, var));
    }
  else
    insert_decl_map (id, result, var);

  /* Remember this so we can ignore it in remap_decls.  */
  id->retvar = var;

  return use;
}
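
/* Example of the mapping established above (names are illustrative):
   inlining `a = foo ()' where foo's body is `return b + c' remaps foo's
   RESULT_DECL either to `a' itself, when it is safe to reuse, or to a
   fresh `retval' variable, so the copied body stores `retval = b + c'
   and the returned USE lets the caller read it back as `a = retval'.  */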
/* Callback through walk_tree.  Determine if a DECL_INITIAL makes reference
   to a local label.  */

static tree
has_label_address_in_static_1 (tree *nodep, int *walk_subtrees, void *fnp)
{
  tree node = *nodep;
  tree fn = (tree) fnp;

  if (TREE_CODE (node) == LABEL_DECL && DECL_CONTEXT (node) == fn)
    return node;

  if (TYPE_P (node))
    *walk_subtrees = 0;

  return NULL_TREE;
}
/* Determine if the function can be copied.  If so return NULL.  If
   not return a string describing the reason for failure.  */

static const char *
copy_forbidden (struct function *fun, tree fndecl)
{
  const char *reason = fun->cannot_be_copied_reason;
  tree decl;
  unsigned ix;

  /* Only examine the function once.  */
  if (fun->cannot_be_copied_set)
    return reason;

  /* We cannot copy a function that receives a non-local goto
     because we cannot remap the destination label used in the
     function that is performing the non-local goto.  */
  /* ??? Actually, this should be possible, if we work at it.
     No doubt there's just a handful of places that simply
     assume it doesn't happen and don't substitute properly.  */
  if (fun->has_nonlocal_label)
    {
      reason = G_("function %q+F can never be copied "
		  "because it receives a non-local goto");
      goto fail;
    }

  FOR_EACH_LOCAL_DECL (fun, ix, decl)
    if (TREE_CODE (decl) == VAR_DECL
	&& TREE_STATIC (decl)
	&& !DECL_EXTERNAL (decl)
	&& DECL_INITIAL (decl)
	&& walk_tree_without_duplicates (&DECL_INITIAL (decl),
					 has_label_address_in_static_1,
					 fndecl))
      {
	reason = G_("function %q+F can never be copied because it saves "
		    "address of local label in a static variable");
	goto fail;
      }

 fail:
  fun->cannot_be_copied_reason = reason;
  fun->cannot_be_copied_set = true;
  return reason;
}
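
/* The static-initializer check above rejects, for instance, this
   (illustrative) function using GCC's label-address extension:

       void f (void) { lab: ; static void *p = &&lab; }

   since a copied body would still reference the original label.  */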
static const char *inline_forbidden_reason;

/* A callback for walk_gimple_seq to handle statements.  Returns non-null
   iff a function can not be inlined.  Also sets the reason why.  */

static tree
inline_forbidden_p_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
			 struct walk_stmt_info *wip)
{
  tree fn = (tree) wip->info;
  tree t;
  gimple stmt = gsi_stmt (*gsi);

  switch (gimple_code (stmt))
    {
    case GIMPLE_CALL:
      /* Refuse to inline alloca call unless user explicitly forced so, as
	 this may change the program's memory overhead drastically when the
	 function using alloca is called in a loop.  In GCC, inlining into
	 schedule_block in SPEC2000 caused it to require 2GB of RAM instead
	 of 256MB.  Don't do so for alloca calls emitted for VLA objects as
	 those can't cause unbounded growth (they're always wrapped inside
	 stack_save/stack_restore regions).  */
      if (gimple_alloca_call_p (stmt)
	  && !gimple_call_alloca_for_var_p (stmt)
	  && !lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn)))
	{
	  inline_forbidden_reason
	    = G_("function %q+F can never be inlined because it uses "
		 "alloca (override using the always_inline attribute)");
	  *handled_ops_p = true;
	  return fn;
	}

      t = gimple_call_fndecl (stmt);
      if (t == NULL_TREE)
	break;

      /* We cannot inline functions that call setjmp.  */
      if (setjmp_call_p (t))
	{
	  inline_forbidden_reason
	    = G_("function %q+F can never be inlined because it uses setjmp");
	  *handled_ops_p = true;
	  return t;
	}

      if (DECL_BUILT_IN_CLASS (t) == BUILT_IN_NORMAL)
	switch (DECL_FUNCTION_CODE (t))
	  {
	    /* We cannot inline functions that take a variable number of
	       arguments.  */
	  case BUILT_IN_VA_START:
	  case BUILT_IN_NEXT_ARG:
	  case BUILT_IN_VA_END:
	    inline_forbidden_reason
	      = G_("function %q+F can never be inlined because it "
		   "uses variable argument lists");
	    *handled_ops_p = true;
	    return t;

	  case BUILT_IN_LONGJMP:
	    /* We can't inline functions that call __builtin_longjmp at
	       all.  The non-local goto machinery really requires the
	       destination be in a different function.  If we allow the
	       function calling __builtin_longjmp to be inlined into the
	       function calling __builtin_setjmp, Things will Go Awry.  */
	    inline_forbidden_reason
	      = G_("function %q+F can never be inlined because "
		   "it uses setjmp-longjmp exception handling");
	    *handled_ops_p = true;
	    return t;

	  case BUILT_IN_NONLOCAL_GOTO:
	    /* Similarly.  */
	    inline_forbidden_reason
	      = G_("function %q+F can never be inlined because "
		   "it uses non-local goto");
	    *handled_ops_p = true;
	    return t;

	  case BUILT_IN_RETURN:
	  case BUILT_IN_APPLY_ARGS:
	    /* If a __builtin_apply_args caller would be inlined,
	       it would be saving arguments of the function it has
	       been inlined into.  Similarly __builtin_return would
	       return from the function the inline has been inlined into.  */
	    inline_forbidden_reason
	      = G_("function %q+F can never be inlined because "
		   "it uses __builtin_return or __builtin_apply_args");
	    *handled_ops_p = true;
	    return t;

	  default:
	    break;
	  }
      break;

    case GIMPLE_GOTO:
      t = gimple_goto_dest (stmt);

      /* We will not inline a function which uses computed goto.  The
	 addresses of its local labels, which may be tucked into
	 global storage, are of course not constant across
	 instantiations, which causes unexpected behavior.  */
      if (TREE_CODE (t) != LABEL_DECL)
	{
	  inline_forbidden_reason
	    = G_("function %q+F can never be inlined "
		 "because it contains a computed goto");
	  *handled_ops_p = true;
	  return t;
	}
      break;

    default:
      break;
    }

  *handled_ops_p = false;
  return NULL_TREE;
}
/* Return true if FNDECL is a function that cannot be inlined into
   another one.  */

static bool
inline_forbidden_p (tree fndecl)
{
  struct function *fun = DECL_STRUCT_FUNCTION (fndecl);
  struct walk_stmt_info wi;
  struct pointer_set_t *visited_nodes;
  basic_block bb;
  bool forbidden_p = false;

  /* First check for shared reasons not to copy the code.  */
  inline_forbidden_reason = copy_forbidden (fun, fndecl);
  if (inline_forbidden_reason != NULL)
    return true;

  /* Next, walk the statements of the function looking for
     constructs we can't handle, or are non-optimal for inlining.  */
  visited_nodes = pointer_set_create ();
  memset (&wi, 0, sizeof (wi));
  wi.info = (void *) fndecl;
  wi.pset = visited_nodes;

  FOR_EACH_BB_FN (bb, fun)
    {
      gimple ret;
      gimple_seq seq = bb_seq (bb);
      ret = walk_gimple_seq (seq, inline_forbidden_p_stmt, NULL, &wi);
      forbidden_p = (ret != NULL);
      if (forbidden_p)
	break;
    }

  pointer_set_destroy (visited_nodes);
  return forbidden_p;
}
/* Returns nonzero if FN is a function that does not have any
   fundamental inline blocking properties.  */

bool
tree_inlinable_function_p (tree fn)
{
  bool inlinable = true;
  bool do_warning;
  tree always_inline;

  /* If we've already decided this function shouldn't be inlined,
     there's no need to check again.  */
  if (DECL_UNINLINABLE (fn))
    return false;

  /* We only warn for functions declared `inline' by the user.  */
  do_warning = (warn_inline
		&& DECL_DECLARED_INLINE_P (fn)
		&& !DECL_NO_INLINE_WARNING_P (fn)
		&& !DECL_IN_SYSTEM_HEADER (fn));

  always_inline = lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn));

  if (flag_no_inline
      && always_inline == NULL)
    {
      if (do_warning)
	warning (OPT_Winline, "function %q+F can never be inlined because it "
		 "is suppressed using -fno-inline", fn);
      inlinable = false;
    }

  else if (!function_attribute_inlinable_p (fn))
    {
      if (do_warning)
	warning (OPT_Winline, "function %q+F can never be inlined because it "
		 "uses attributes conflicting with inlining", fn);
      inlinable = false;
    }

  else if (inline_forbidden_p (fn))
    {
      /* See if we should warn about uninlinable functions.  Previously,
	 some of these warnings would be issued while trying to expand
	 the function inline, but that would cause multiple warnings
	 about functions that would for example call alloca.  But since
	 this is a property of the function, just one warning is enough.
	 As a bonus we can now give more details about the reason why a
	 function is not inlinable.  */
      if (always_inline)
	error (inline_forbidden_reason, fn);
      else if (do_warning)
	warning (OPT_Winline, inline_forbidden_reason, fn);

      inlinable = false;
    }

  /* Squirrel away the result so that we don't have to check again.  */
  DECL_UNINLINABLE (fn) = !inlinable;

  return inlinable;
}
/* Estimate the cost of a memory move.  Use machine dependent
   word size and take possible memcpy call into account.  */

int
estimate_move_cost (tree type)
{
  HOST_WIDE_INT size;

  gcc_assert (!VOID_TYPE_P (type));

  if (TREE_CODE (type) == VECTOR_TYPE)
    {
      enum machine_mode inner = TYPE_MODE (TREE_TYPE (type));
      enum machine_mode simd
	= targetm.vectorize.preferred_simd_mode (inner);
      int simd_mode_size = GET_MODE_SIZE (simd);
      return ((GET_MODE_SIZE (TYPE_MODE (type)) + simd_mode_size - 1)
	      / simd_mode_size);
    }

  size = int_size_in_bytes (type);

  if (size < 0 || size > MOVE_MAX_PIECES * MOVE_RATIO (!optimize_size))
    /* Cost of a memcpy call, 3 arguments and the call.  */
    return 4;
  else
    return ((size + MOVE_MAX_PIECES - 1) / MOVE_MAX_PIECES);
}
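
/* A worked example under assumed target parameters (both values are
   hypothetical): with MOVE_MAX_PIECES == 8 and MOVE_RATIO == 4, moving
   a 24-byte aggregate costs (24 + 8 - 1) / 8 == 3, while a 40-byte one
   exceeds the 8 * 4 == 32 byte threshold and is charged the flat memcpy
   cost of 4.  */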
/* Returns the cost of operation CODE, according to WEIGHTS.  */

static int
estimate_operator_cost (enum tree_code code, eni_weights *weights,
			tree op1 ATTRIBUTE_UNUSED, tree op2)
{
  switch (code)
    {
    /* These are "free" conversions, or their presumed cost
       is folded into other operations.  */
    case RANGE_EXPR:
    CASE_CONVERT:
    case COMPLEX_EXPR:
    case PAREN_EXPR:
    case VIEW_CONVERT_EXPR:
      return 0;

    /* Assign cost of 1 to usual operations.
       ??? We may consider mapping RTL costs to this.  */
    case COND_EXPR:
    case VEC_COND_EXPR:

    case PLUS_EXPR:
    case POINTER_PLUS_EXPR:
    case MINUS_EXPR:
    case MULT_EXPR:
    case FMA_EXPR:

    case ADDR_SPACE_CONVERT_EXPR:
    case FIXED_CONVERT_EXPR:
    case FIX_TRUNC_EXPR:

    case NEGATE_EXPR:
    case FLOAT_EXPR:
    case MIN_EXPR:
    case MAX_EXPR:
    case ABS_EXPR:

    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
    case VEC_LSHIFT_EXPR:
    case VEC_RSHIFT_EXPR:

    case BIT_IOR_EXPR:
    case BIT_XOR_EXPR:
    case BIT_AND_EXPR:
    case BIT_NOT_EXPR:

    case TRUTH_ANDIF_EXPR:
    case TRUTH_ORIF_EXPR:
    case TRUTH_AND_EXPR:
    case TRUTH_OR_EXPR:
    case TRUTH_XOR_EXPR:
    case TRUTH_NOT_EXPR:

    case LT_EXPR:
    case LE_EXPR:
    case GT_EXPR:
    case GE_EXPR:
    case EQ_EXPR:
    case NE_EXPR:
    case ORDERED_EXPR:
    case UNORDERED_EXPR:

    case UNLT_EXPR:
    case UNLE_EXPR:
    case UNGT_EXPR:
    case UNGE_EXPR:
    case UNEQ_EXPR:
    case LTGT_EXPR:

    case CONJ_EXPR:

    case PREDECREMENT_EXPR:
    case PREINCREMENT_EXPR:
    case POSTDECREMENT_EXPR:
    case POSTINCREMENT_EXPR:

    case REALIGN_LOAD_EXPR:

    case REDUC_MAX_EXPR:
    case REDUC_MIN_EXPR:
    case REDUC_PLUS_EXPR:
    case WIDEN_SUM_EXPR:
    case WIDEN_MULT_EXPR:
    case DOT_PROD_EXPR:
    case WIDEN_MULT_PLUS_EXPR:
    case WIDEN_MULT_MINUS_EXPR:

    case VEC_WIDEN_MULT_HI_EXPR:
    case VEC_WIDEN_MULT_LO_EXPR:
    case VEC_UNPACK_HI_EXPR:
    case VEC_UNPACK_LO_EXPR:
    case VEC_UNPACK_FLOAT_HI_EXPR:
    case VEC_UNPACK_FLOAT_LO_EXPR:
    case VEC_PACK_TRUNC_EXPR:
    case VEC_PACK_SAT_EXPR:
    case VEC_PACK_FIX_TRUNC_EXPR:
    case VEC_EXTRACT_EVEN_EXPR:
    case VEC_EXTRACT_ODD_EXPR:
    case VEC_INTERLEAVE_HIGH_EXPR:
    case VEC_INTERLEAVE_LOW_EXPR:

      return 1;

    /* A few special cases of expensive operations.  This is useful
       to avoid inlining on functions having too many of these.  */
    case TRUNC_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case EXACT_DIV_EXPR:
    case TRUNC_MOD_EXPR:
    case CEIL_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case ROUND_MOD_EXPR:
    case RDIV_EXPR:
      if (TREE_CODE (op2) != INTEGER_CST)
	return weights->div_mod_cost;
      return 1;

    default:
      /* We expect a copy assignment with no operator.  */
      gcc_assert (get_gimple_rhs_class (code) == GIMPLE_SINGLE_RHS);
      return 0;
    }
}
/* Estimate number of instructions that will be created by expanding
   the statements in the statement sequence STMTS.
   WEIGHTS contains weights attributed to various constructs.  */

int
estimate_num_insns_seq (gimple_seq stmts, eni_weights *weights)
{
  int cost;
  gimple_stmt_iterator gsi;

  cost = 0;
  for (gsi = gsi_start (stmts); !gsi_end_p (gsi); gsi_next (&gsi))
    cost += estimate_num_insns (gsi_stmt (gsi), weights);

  return cost;
}
/* Estimate number of instructions that will be created by expanding STMT.
   WEIGHTS contains weights attributed to various constructs.  */

int
estimate_num_insns (gimple stmt, eni_weights *weights)
{
  unsigned cost, i;
  enum gimple_code code = gimple_code (stmt);
  tree lhs;
  tree rhs;

  switch (code)
    {
    case GIMPLE_ASSIGN:
      /* Try to estimate the cost of assignments.  We have three cases to
	 deal with:
	 1) Simple assignments to registers;
	 2) Stores to things that must live in memory.  This includes
	    "normal" stores to scalars, but also assignments of large
	    structures, or constructors of big arrays;
	 3) TARGET_EXPRs.

	 Let us look at the first two cases, assuming we have "a = b + C":
	 <GIMPLE_ASSIGN <var_decl "a">
	        <plus_expr <var_decl "b"> <constant C>>
	 If "a" is a GIMPLE register, the assignment to it is free on almost
	 any target, because "a" usually ends up in a real register.  Hence
	 the only cost of this expression comes from the PLUS_EXPR, and we
	 can ignore the GIMPLE_ASSIGN.
	 If "a" is not a GIMPLE register, the assignment to "a" will most
	 likely be a real store, so the cost of the GIMPLE_ASSIGN is the cost
	 of moving something into "a", which we compute using the function
	 estimate_move_cost.  */
      lhs = gimple_assign_lhs (stmt);
      rhs = gimple_assign_rhs1 (stmt);

      if (is_gimple_reg (lhs))
	cost = 0;
      else
	cost = estimate_move_cost (TREE_TYPE (lhs));

      if (!is_gimple_reg (rhs) && !is_gimple_min_invariant (rhs))
	cost += estimate_move_cost (TREE_TYPE (rhs));

      cost += estimate_operator_cost (gimple_assign_rhs_code (stmt), weights,
				      gimple_assign_rhs1 (stmt),
				      get_gimple_rhs_class (gimple_assign_rhs_code (stmt))
				      == GIMPLE_BINARY_RHS
				      ? gimple_assign_rhs2 (stmt) : NULL);
      break;

    case GIMPLE_COND:
      cost = 1 + estimate_operator_cost (gimple_cond_code (stmt), weights,
					 gimple_op (stmt, 0),
					 gimple_op (stmt, 1));
      break;

    case GIMPLE_SWITCH:
      /* Take into account cost of the switch + guess 2 conditional jumps for
	 each case label.

	 TODO: once the switch expansion logic is sufficiently separated, we can
	 do a better job on estimating the cost of the switch.  */
      if (weights->time_based)
	cost = floor_log2 (gimple_switch_num_labels (stmt)) * 2;
      else
	cost = gimple_switch_num_labels (stmt) * 2;
      break;

    case GIMPLE_CALL:
      {
	tree decl = gimple_call_fndecl (stmt);
	struct cgraph_node *node;

	/* Do not special case builtins where we see the body.
	   This just confuses the inliner.  */
	if (!decl || !(node = cgraph_get_node (decl)) || node->analyzed)
	  ;
	/* For builtins that are likely expanded to nothing or
	   inlined do not account operand costs.  */
	else if (is_simple_builtin (decl))
	  return 0;
	else if (is_inexpensive_builtin (decl))
	  return weights->target_builtin_call_cost;
	else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
	  {
	    /* We canonicalize x * x to pow (x, 2.0) with -ffast-math, so
	       specialize the cheap expansion we do here.
	       ???  This asks for a more general solution.  */
	    switch (DECL_FUNCTION_CODE (decl))
	      {
	      case BUILT_IN_POW:
	      case BUILT_IN_POWF:
	      case BUILT_IN_POWL:
		if (TREE_CODE (gimple_call_arg (stmt, 1)) == REAL_CST
		    && REAL_VALUES_EQUAL
			 (TREE_REAL_CST (gimple_call_arg (stmt, 1)), dconst2))
		  return estimate_operator_cost (MULT_EXPR, weights,
						 gimple_call_arg (stmt, 0),
						 gimple_call_arg (stmt, 0));
		break;

	      default:
		break;
	      }
	  }

	cost = weights->call_cost;
	if (gimple_call_lhs (stmt))
	  cost += estimate_move_cost (TREE_TYPE (gimple_call_lhs (stmt)));
	for (i = 0; i < gimple_call_num_args (stmt); i++)
	  {
	    tree arg = gimple_call_arg (stmt, i);
	    cost += estimate_move_cost (TREE_TYPE (arg));
	  }
	break;
      }

    case GIMPLE_RETURN:
      return weights->return_cost;

    case GIMPLE_GOTO:
    case GIMPLE_LABEL:
    case GIMPLE_NOP:
    case GIMPLE_PHI:
    case GIMPLE_PREDICT:
    case GIMPLE_DEBUG:
      return 0;

    case GIMPLE_ASM:
      return asm_str_count (gimple_asm_string (stmt));

    case GIMPLE_RESX:
      /* This is either going to be an external function call with one
	 argument, or two register copy statements plus a goto.  */
      return 2;

    case GIMPLE_EH_DISPATCH:
      /* ???  This is going to turn into a switch statement.  Ideally
	 we'd have a look at the eh region and estimate the number of
	 edges.  */
      return 10;

    case GIMPLE_BIND:
      return estimate_num_insns_seq (gimple_bind_body (stmt), weights);

    case GIMPLE_EH_FILTER:
      return estimate_num_insns_seq (gimple_eh_filter_failure (stmt), weights);

    case GIMPLE_CATCH:
      return estimate_num_insns_seq (gimple_catch_handler (stmt), weights);

    case GIMPLE_TRY:
      return (estimate_num_insns_seq (gimple_try_eval (stmt), weights)
	      + estimate_num_insns_seq (gimple_try_cleanup (stmt), weights));

    /* OpenMP directives are generally very expensive.  */

    case GIMPLE_OMP_RETURN:
    case GIMPLE_OMP_SECTIONS_SWITCH:
    case GIMPLE_OMP_ATOMIC_STORE:
    case GIMPLE_OMP_CONTINUE:
      /* ...except these, which are cheap.  */
      return 0;

    case GIMPLE_OMP_ATOMIC_LOAD:
      return weights->omp_cost;

    case GIMPLE_OMP_FOR:
      return (weights->omp_cost
	      + estimate_num_insns_seq (gimple_omp_body (stmt), weights)
	      + estimate_num_insns_seq (gimple_omp_for_pre_body (stmt), weights));

    case GIMPLE_OMP_PARALLEL:
    case GIMPLE_OMP_TASK:
    case GIMPLE_OMP_CRITICAL:
    case GIMPLE_OMP_MASTER:
    case GIMPLE_OMP_ORDERED:
    case GIMPLE_OMP_SECTION:
    case GIMPLE_OMP_SECTIONS:
    case GIMPLE_OMP_SINGLE:
      return (weights->omp_cost
	      + estimate_num_insns_seq (gimple_omp_body (stmt), weights));

    default:
      gcc_unreachable ();
    }

  return cost;
}
/* Estimate number of instructions that will be created by expanding
   function FNDECL.  WEIGHTS contains weights attributed to various
   constructs.  */

int
estimate_num_insns_fn (tree fndecl, eni_weights *weights)
{
  struct function *my_function = DECL_STRUCT_FUNCTION (fndecl);
  gimple_stmt_iterator bsi;
  basic_block bb;
  int n = 0;

  gcc_assert (my_function && my_function->cfg);
  FOR_EACH_BB_FN (bb, my_function)
    {
      for (bsi = gsi_start_bb (bb); !gsi_end_p (bsi); gsi_next (&bsi))
	n += estimate_num_insns (gsi_stmt (bsi), weights);
    }

  return n;
}
/* Initializes weights used by estimate_num_insns.  */

void
init_inline_once (void)
{
  eni_size_weights.call_cost = 1;
  eni_size_weights.target_builtin_call_cost = 1;
  eni_size_weights.div_mod_cost = 1;
  eni_size_weights.omp_cost = 40;
  eni_size_weights.time_based = false;
  eni_size_weights.return_cost = 1;

  /* Estimating time for call is difficult, since we have no idea what the
     called function does.  In the current uses of eni_time_weights,
     underestimating the cost does less harm than overestimating it, so
     we choose a rather small value here.  */
  eni_time_weights.call_cost = 10;
  eni_time_weights.target_builtin_call_cost = 1;
  eni_time_weights.div_mod_cost = 10;
  eni_time_weights.omp_cost = 40;
  eni_time_weights.time_based = true;
  eni_time_weights.return_cost = 2;
}
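
/* Usage note: callers evaluate the same statement under both weight
   sets, e.g. estimate_num_insns (stmt, &eni_size_weights) when limiting
   code growth and estimate_num_insns (stmt, &eni_time_weights) when
   estimating speed, which is why a call is cheap for size (1) but
   expensive for time (10) above.  */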
/* Estimate the number of instructions in a gimple_seq.  */

int
count_insns_seq (gimple_seq seq, eni_weights *weights)
{
  gimple_stmt_iterator gsi;
  int n = 0;
  for (gsi = gsi_start (seq); !gsi_end_p (gsi); gsi_next (&gsi))
    n += estimate_num_insns (gsi_stmt (gsi), weights);

  return n;
}


/* Install new lexical TREE_BLOCK underneath 'current_block'.  */

static void
prepend_lexical_block (tree current_block, tree new_block)
{
  BLOCK_CHAIN (new_block) = BLOCK_SUBBLOCKS (current_block);
  BLOCK_SUBBLOCKS (current_block) = new_block;
  BLOCK_SUPERCONTEXT (new_block) = current_block;
}
/* Add local variables from CALLEE to CALLER.  */

static inline void
add_local_variables (struct function *callee, struct function *caller,
		     copy_body_data *id, bool check_var_ann)
{
  tree var;
  unsigned ix;

  FOR_EACH_LOCAL_DECL (callee, ix, var)
    if (TREE_STATIC (var) && !TREE_ASM_WRITTEN (var))
      {
	if (!check_var_ann
	    || (var_ann (var) && add_referenced_var (var)))
	  add_local_decl (caller, var);
      }
    else if (!can_be_nonlocal (var, id))
      {
	tree new_var = remap_decl (var, id);

	/* Remap debug-expressions.  */
	if (TREE_CODE (new_var) == VAR_DECL
	    && DECL_DEBUG_EXPR_IS_FROM (new_var)
	    && new_var != var)
	  {
	    tree tem = DECL_DEBUG_EXPR (var);
	    bool old_regimplify = id->regimplify;
	    id->remapping_type_depth++;
	    walk_tree (&tem, copy_tree_body_r, id, NULL);
	    id->remapping_type_depth--;
	    id->regimplify = old_regimplify;
	    SET_DECL_DEBUG_EXPR (new_var, tem);
	  }
	add_local_decl (caller, new_var);
      }
}
3741 /* If STMT is a GIMPLE_CALL, replace it with its inline expansion. */
3744 expand_call_inline (basic_block bb
, gimple stmt
, copy_body_data
*id
)
3748 struct pointer_map_t
*st
, *dst
;
3751 location_t saved_location
;
3752 struct cgraph_edge
*cg_edge
;
3753 cgraph_inline_failed_t reason
;
3754 basic_block return_block
;
3756 gimple_stmt_iterator gsi
, stmt_gsi
;
3757 bool successfully_inlined
= FALSE
;
3758 bool purge_dead_abnormal_edges
;
3760 /* Set input_location here so we get the right instantiation context
3761 if we call instantiate_decl from inlinable_function_p. */
3762 saved_location
= input_location
;
3763 if (gimple_has_location (stmt
))
3764 input_location
= gimple_location (stmt
);
3766 /* From here on, we're only interested in CALL_EXPRs. */
3767 if (gimple_code (stmt
) != GIMPLE_CALL
)
3770 cg_edge
= cgraph_edge (id
->dst_node
, stmt
);
3771 gcc_checking_assert (cg_edge
);
3772 /* First, see if we can figure out what function is being called.
3773 If we cannot, then there is no hope of inlining the function. */
3774 if (cg_edge
->indirect_unknown_callee
)
3776 fn
= cg_edge
->callee
->decl
;
3777 gcc_checking_assert (fn
);
3779 /* If FN is a declaration of a function in a nested scope that was
3780 globally declared inline, we don't set its DECL_INITIAL.
3781 However, we can't blindly follow DECL_ABSTRACT_ORIGIN because the
3782 C++ front-end uses it for cdtors to refer to their internal
3783 declarations, that are not real functions. Fortunately those
3784 don't have trees to be saved, so we can tell by checking their
3786 if (!DECL_INITIAL (fn
)
3787 && DECL_ABSTRACT_ORIGIN (fn
)
3788 && gimple_has_body_p (DECL_ABSTRACT_ORIGIN (fn
)))
3789 fn
= DECL_ABSTRACT_ORIGIN (fn
);
3791 /* Don't try to inline functions that are not well-suited to inlining. */
3792 if (!cgraph_inline_p (cg_edge
, &reason
))
3794 /* If this call was originally indirect, we do not want to emit any
3795 inlining related warnings or sorry messages because there are no
3796 guarantees regarding those. */
3797 if (cg_edge
->indirect_inlining_edge
)
3800 if (lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn
))
3801 /* Avoid warnings during early inline pass. */
3802 && cgraph_global_info_ready
3803 /* PR 20090218-1_0.c. Body can be provided by another module. */
3804 && (reason
!= CIF_BODY_NOT_AVAILABLE
|| !flag_generate_lto
))
3806 error ("inlining failed in call to always_inline %q+F: %s", fn
,
3807 cgraph_inline_failed_string (reason
));
3808 error ("called from here");
3810 else if (warn_inline
3811 && DECL_DECLARED_INLINE_P (fn
)
3812 && !DECL_NO_INLINE_WARNING_P (fn
)
3813 && !DECL_IN_SYSTEM_HEADER (fn
)
3814 && reason
!= CIF_UNSPECIFIED
3815 && !lookup_attribute ("noinline", DECL_ATTRIBUTES (fn
))
3816 /* Do not warn about not inlined recursive calls. */
3817 && !cgraph_edge_recursive_p (cg_edge
)
3818 /* Avoid warnings during early inline pass. */
3819 && cgraph_global_info_ready
)
3821 warning (OPT_Winline
, "inlining failed in call to %q+F: %s",
3822 fn
, _(cgraph_inline_failed_string (reason
)));
3823 warning (OPT_Winline
, "called from here");
3827 fn
= cg_edge
->callee
->decl
;
3829 #ifdef ENABLE_CHECKING
3830 if (cg_edge
->callee
->decl
!= id
->dst_node
->decl
)
3831 verify_cgraph_node (cg_edge
->callee
);
3834 /* We will be inlining this callee. */
3835 id
->eh_lp_nr
= lookup_stmt_eh_lp (stmt
);
3837 /* Update the callers EH personality. */
3838 if (DECL_FUNCTION_PERSONALITY (cg_edge
->callee
->decl
))
3839 DECL_FUNCTION_PERSONALITY (cg_edge
->caller
->decl
)
3840 = DECL_FUNCTION_PERSONALITY (cg_edge
->callee
->decl
);
3842 /* Split the block holding the GIMPLE_CALL. */
3843 e
= split_block (bb
, stmt
);
3845 return_block
= e
->dest
;
3848 /* split_block splits after the statement; work around this by
3849 moving the call into the second block manually. Not pretty,
3850 but seems easier than doing the CFG manipulation by hand
3851 when the GIMPLE_CALL is in the last statement of BB. */
3852 stmt_gsi
= gsi_last_bb (bb
);
3853 gsi_remove (&stmt_gsi
, false);
3855 /* If the GIMPLE_CALL was in the last statement of BB, it may have
3856 been the source of abnormal edges. In this case, schedule
3857 the removal of dead abnormal edges. */
3858 gsi
= gsi_start_bb (return_block
);
3859 if (gsi_end_p (gsi
))
3861 gsi_insert_after (&gsi
, stmt
, GSI_NEW_STMT
);
3862 purge_dead_abnormal_edges
= true;
3866 gsi_insert_before (&gsi
, stmt
, GSI_NEW_STMT
);
3867 purge_dead_abnormal_edges
= false;
3870 stmt_gsi
= gsi_start_bb (return_block
);
3872 /* Build a block containing code to initialize the arguments, the
3873 actual inline expansion of the body, and a label for the return
3874 statements within the function to jump to. The type of the
3875 statement expression is the return type of the function call. */
3876 id
->block
= make_node (BLOCK
);
3877 BLOCK_ABSTRACT_ORIGIN (id
->block
) = fn
;
3878 BLOCK_SOURCE_LOCATION (id
->block
) = input_location
;
3879 prepend_lexical_block (gimple_block (stmt
), id
->block
);
3881 /* Local declarations will be replaced by their equivalents in this
3884 id
->decl_map
= pointer_map_create ();
3885 dst
= id
->debug_map
;
3886 id
->debug_map
= NULL
;
3888 /* Record the function we are about to inline. */
3890 id
->src_node
= cg_edge
->callee
;
3891 id
->src_cfun
= DECL_STRUCT_FUNCTION (fn
);
3892 id
->gimple_call
= stmt
;
3894 gcc_assert (!id
->src_cfun
->after_inlining
);
3897 if (lookup_attribute ("cold", DECL_ATTRIBUTES (fn
)))
3899 gimple_stmt_iterator si
= gsi_last_bb (bb
);
3900 gsi_insert_after (&si
, gimple_build_predict (PRED_COLD_FUNCTION
,
3904 initialize_inlined_parameters (id
, stmt
, fn
, bb
);
3906 if (DECL_INITIAL (fn
))
3907 prepend_lexical_block (id
->block
, remap_blocks (DECL_INITIAL (fn
), id
));
3909 /* Return statements in the function body will be replaced by jumps
3910 to the RET_LABEL. */
3911 gcc_assert (DECL_INITIAL (fn
));
3912 gcc_assert (TREE_CODE (DECL_INITIAL (fn
)) == BLOCK
);
3914 /* Find the LHS to which the result of this call is assigned. */
3916 if (gimple_call_lhs (stmt
))
3918 modify_dest
= gimple_call_lhs (stmt
);
3920 /* The function which we are inlining might not return a value,
3921 in which case we should issue a warning that the function
3922 does not return a value. In that case the optimizers will
3923 see that the variable to which the value is assigned was not
3924 initialized. We do not want to issue a warning about that
3925 uninitialized variable. */
3926 if (DECL_P (modify_dest
))
3927 TREE_NO_WARNING (modify_dest
) = 1;
3929 if (gimple_call_return_slot_opt_p (stmt
))
3931 return_slot
= modify_dest
;
3938 /* If we are inlining a call to the C++ operator new, we don't want
3939 to use type based alias analysis on the return value. Otherwise
3940 we may get confused if the compiler sees that the inlined new
3941 function returns a pointer which was just deleted. See bug
3943 if (DECL_IS_OPERATOR_NEW (fn
))
3949 /* Declare the return variable for the function. */
3950 use_retvar
= declare_return_variable (id
, return_slot
, modify_dest
, bb
);
3952 /* Add local vars in this inlined callee to caller. */
3953 add_local_variables (id
->src_cfun
, cfun
, id
, true);
3955 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
3957 fprintf (dump_file
, "Inlining ");
3958 print_generic_expr (dump_file
, id
->src_fn
, 0);
3959 fprintf (dump_file
, " to ");
3960 print_generic_expr (dump_file
, id
->dst_fn
, 0);
3961 fprintf (dump_file
, " with frequency %i\n", cg_edge
->frequency
);
3964 /* This is it. Duplicate the callee body. Assume callee is
3965 pre-gimplified. Note that we must not alter the caller
3966 function in any way before this point, as this CALL_EXPR may be
3967 a self-referential call; if we're calling ourselves, we need to
3968 duplicate our body before altering anything. */
3969 copy_body (id
, bb
->count
,
3970 cg_edge
->frequency
* REG_BR_PROB_BASE
/ CGRAPH_FREQ_BASE
,
3971 bb
, return_block
, NULL
, NULL
);
3973 /* Reset the escaped solution. */
3974 if (cfun
->gimple_df
)
3975 pt_solution_reset (&cfun
->gimple_df
->escaped
);
3980 pointer_map_destroy (id
->debug_map
);
3981 id
->debug_map
= dst
;
3983 pointer_map_destroy (id
->decl_map
);
3986 /* Unlink the calls virtual operands before replacing it. */
3987 unlink_stmt_vdef (stmt
);
3989 /* If the inlined function returns a result that we care about,
3990 substitute the GIMPLE_CALL with an assignment of the return
3991 variable to the LHS of the call. That is, if STMT was
3992 'a = foo (...)', substitute the call with 'a = USE_RETVAR'. */
3993 if (use_retvar
&& gimple_call_lhs (stmt
))
3995 gimple old_stmt
= stmt
;
3996 stmt
= gimple_build_assign (gimple_call_lhs (stmt
), use_retvar
);
3997 gsi_replace (&stmt_gsi
, stmt
, false);
3998 if (gimple_in_ssa_p (cfun
))
3999 mark_symbols_for_renaming (stmt
);
4000 maybe_clean_or_replace_eh_stmt (old_stmt
, stmt
);
      /* Handle the case of inlining a function with no return
         statement, which causes the return value to become undefined.  */
      if (gimple_call_lhs (stmt)
          && TREE_CODE (gimple_call_lhs (stmt)) == SSA_NAME)
        {
          tree name = gimple_call_lhs (stmt);
          tree var = SSA_NAME_VAR (name);
          tree def = gimple_default_def (cfun, var);

          if (def)
            {
              /* If the variable is used undefined, make this name
                 undefined via a move.  */
              stmt = gimple_build_assign (gimple_call_lhs (stmt), def);
              gsi_replace (&stmt_gsi, stmt, true);
            }
          else
            {
              /* Otherwise make this variable undefined.  */
              gsi_remove (&stmt_gsi, true);
              set_default_def (var, name);
              SSA_NAME_DEF_STMT (name) = gimple_build_nop ();
            }
        }
      else
        gsi_remove (&stmt_gsi, true);
    }
  if (purge_dead_abnormal_edges)
    {
      gimple_purge_dead_eh_edges (return_block);
      gimple_purge_dead_abnormal_call_edges (return_block);
    }

  /* If the value of the new expression is ignored, that's OK.  We
     don't warn about this for CALL_EXPRs, so we shouldn't warn about
     the equivalent inlined version either.  */
  if (is_gimple_assign (stmt))
    {
      gcc_assert (gimple_assign_single_p (stmt)
                  || CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt)));
      TREE_USED (gimple_assign_rhs1 (stmt)) = 1;
    }
  /* Output the inlining info for this abstract function, since it has been
     inlined.  If we don't do this now, we can lose the information about the
     variables in the function when the blocks get blown away as soon as we
     remove the cgraph node.  */
  (*debug_hooks->outlining_inline_function) (cg_edge->callee->decl);

  /* Update callgraph if needed.  */
  cgraph_remove_node (cg_edge->callee);

  id->block = NULL_TREE;
  successfully_inlined = TRUE;
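  /* The failure paths earlier in this function jump here with
     SUCCESSFULLY_INLINED still false.  */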
 egress:
  input_location = saved_location;
  return successfully_inlined;
}
/* Expand call statements in basic block BB.
   We can only have CALL_EXPRs as the "toplevel" tree code or nested
   in a MODIFY_EXPR.  See gimple.c:get_call_expr_in().  We can
   unfortunately not use that function here because we need a pointer
   to the CALL_EXPR, not the tree itself.  */

static bool
gimple_expand_calls_inline (basic_block bb, copy_body_data *id)
{
  gimple_stmt_iterator gsi;

  for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      gimple stmt = gsi_stmt (gsi);

      if (is_gimple_call (stmt)
          && expand_call_inline (bb, stmt, id))
        return true;
    }

  return false;
}
/* Walk all basic blocks created after FIRST and try to fold every statement
   in the STATEMENTS pointer set.  */

static void
fold_marked_statements (int first, struct pointer_set_t *statements)
{
  for (; first < n_basic_blocks; first++)
    if (BASIC_BLOCK (first))
      {
        gimple_stmt_iterator gsi;

        for (gsi = gsi_start_bb (BASIC_BLOCK (first));
             !gsi_end_p (gsi);
             gsi_next (&gsi))
          if (pointer_set_contains (statements, gsi_stmt (gsi)))
            {
              gimple old_stmt = gsi_stmt (gsi);
              tree old_decl
                = is_gimple_call (old_stmt) ? gimple_call_fndecl (old_stmt) : 0;

              if (old_decl && DECL_BUILT_IN (old_decl))
                {
                  /* Folding builtins can create multiple instructions,
                     we need to look at all of them.  */
                  gimple_stmt_iterator i2 = gsi;
                  gsi_prev (&i2);
                  if (fold_stmt (&gsi))
                    {
                      gimple new_stmt;
                      /* If a builtin at the end of a bb folded into nothing,
                         the following loop won't work.  */
                      if (gsi_end_p (gsi))
                        {
                          cgraph_update_edges_for_call_stmt (old_stmt,
                                                             old_decl, NULL);
                          break;
                        }
                      if (gsi_end_p (i2))
                        i2 = gsi_start_bb (BASIC_BLOCK (first));
                      else
                        gsi_next (&i2);
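                      /* Visit every statement the folding produced and
                         update the callgraph edges for each one; GSI still
                         points at the last statement that replaced OLD_STMT,
                         so stop once we reach it.  */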
                      while (1)
                        {
                          new_stmt = gsi_stmt (i2);
                          update_stmt (new_stmt);
                          cgraph_update_edges_for_call_stmt (old_stmt, old_decl,
                                                             new_stmt);

                          if (new_stmt == gsi_stmt (gsi))
                            {
                              /* It is okay to check only for the very last
                                 of these statements.  If it is a throwing
                                 statement nothing will change.  If it isn't
                                 this can remove EH edges.  That could only
                                 be wrong if some intermediate statement
                                 could throw while the last one does not,
                                 which would force us to split the block --
                                 something we cannot do here.  And as
                                 builtins probably never throw, this is all
                                 moot anyway.  */
                              if (maybe_clean_or_replace_eh_stmt (old_stmt,
                                                                  new_stmt))
                                gimple_purge_dead_eh_edges (BASIC_BLOCK (first));
                              break;
                            }
                          gsi_next (&i2);
                        }
                    }
                }
              else if (fold_stmt (&gsi))
                {
                  /* Re-read the statement from GSI as fold_stmt() may
                     have changed it.  */
                  gimple new_stmt = gsi_stmt (gsi);
                  update_stmt (new_stmt);

                  if (is_gimple_call (old_stmt)
                      || is_gimple_call (new_stmt))
                    cgraph_update_edges_for_call_stmt (old_stmt, old_decl,
                                                       new_stmt);

                  if (maybe_clean_or_replace_eh_stmt (old_stmt, new_stmt))
                    gimple_purge_dead_eh_edges (BASIC_BLOCK (first));
                }
            }
      }
}
/* Return true if BB has at least one abnormal outgoing edge.  */

static inline bool
has_abnormal_outgoing_edge_p (basic_block bb)
{
  edge e;
  edge_iterator ei;

  FOR_EACH_EDGE (e, ei, bb->succs)
    if (e->flags & EDGE_ABNORMAL)
      return true;

  return false;
}
/* Expand calls to inline functions in the body of FN.  */

unsigned int
optimize_inline_calls (tree fn)
{
  copy_body_data id;
  basic_block bb;
  int last = n_basic_blocks;
  struct gimplify_ctx gctx;
  bool inlined_p = false;

  /* There is no point in performing inlining if errors have already
     occurred -- and we might crash if we try to inline invalid
     code.  */
  if (seen_error ())
    return 0;

  /* Clear out ID.  */
  memset (&id, 0, sizeof (id));
  id.src_node = id.dst_node = cgraph_get_node (fn);
  gcc_assert (id.dst_node->analyzed);
  id.dst_fn = fn;
  /* Or any functions that aren't finished yet.  */
  if (current_function_decl)
    id.dst_fn = current_function_decl;

  id.copy_decl = copy_decl_maybe_to_var;
  id.transform_call_graph_edges = CB_CGE_DUPLICATE;
  id.transform_new_cfg = false;
  id.transform_return_to_modify = true;
  id.transform_lang_insert_block = NULL;
  id.statements_to_fold = pointer_set_create ();
  push_gimplify_context (&gctx);

  /* We make no attempts to keep dominance info up-to-date.  */
  free_dominance_info (CDI_DOMINATORS);
  free_dominance_info (CDI_POST_DOMINATORS);

  /* Register specific gimple functions.  */
  gimple_register_cfg_hooks ();

  /* Reach the trees by walking over the CFG, and note the
     enclosing basic-blocks in the call edges.  */
  /* We walk the blocks going forward, because inlined function bodies
     will split id->current_basic_block, and the new blocks will
     follow it; we'll trudge through them, processing their CALL_EXPRs
     along the way.  */
  FOR_EACH_BB (bb)
    inlined_p |= gimple_expand_calls_inline (bb, &id);
  pop_gimplify_context (NULL);

#ifdef ENABLE_CHECKING
    {
      struct cgraph_edge *e;

      verify_cgraph_node (id.dst_node);

      /* Double check that we inlined everything we are supposed to inline.  */
      for (e = id.dst_node->callees; e; e = e->next_callee)
        gcc_assert (e->inline_failed);
    }
#endif
  /* Fold queued statements.  */
  fold_marked_statements (last, id.statements_to_fold);
  pointer_set_destroy (id.statements_to_fold);

  gcc_assert (!id.debug_stmts);
  /* If we didn't inline into the function there is nothing to do.  */
  if (!inlined_p)
    return 0;

  /* Renumber the lexical scoping (non-code) blocks consecutively.  */
  number_blocks (fn);

  delete_unreachable_blocks_update_callgraph (&id);
#ifdef ENABLE_CHECKING
  verify_cgraph_node (id.dst_node);
#endif

  /* It would be nice to check SSA/CFG/statement consistency here, but it is
     not possible yet -- the IPA passes might make various functions not
     throw, and they don't care to proactively update local EH info.  This is
     done later in the fixup_cfg pass that also executes the verification.  */
  return (TODO_update_ssa
          | TODO_cleanup_cfg
          | (gimple_in_ssa_p (cfun) ? TODO_remove_unused_locals : 0)
          | (gimple_in_ssa_p (cfun) ? TODO_update_address_taken : 0)
          | (profile_status != PROFILE_ABSENT ? TODO_rebuild_frequencies : 0));
}
/* Passed to walk_tree.  Copies the node pointed to, if appropriate.  */

tree
copy_tree_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
{
  enum tree_code code = TREE_CODE (*tp);
  enum tree_code_class cl = TREE_CODE_CLASS (code);
  /* We make copies of most nodes.  */
  if (IS_EXPR_CODE_CLASS (cl)
      || code == TREE_LIST
      || code == TREE_VEC
      || code == TYPE_DECL
      || code == OMP_CLAUSE)
    {
      /* Because the chain gets clobbered when we make a copy, we save it
         here.  */
      tree chain = NULL_TREE, new_tree;

      if (CODE_CONTAINS_STRUCT (code, TS_COMMON))
        chain = TREE_CHAIN (*tp);

      /* Copy the node.  */
      new_tree = copy_node (*tp);

      /* Propagate mudflap marked-ness.  */
      if (flag_mudflap && mf_marked_p (*tp))
        mf_mark (new_tree);

      *tp = new_tree;

      /* Now, restore the chain, if appropriate.  That will cause
         walk_tree to walk into the chain as well.  */
      if (code == PARM_DECL
          || code == TREE_LIST
          || code == OMP_CLAUSE)
        TREE_CHAIN (*tp) = chain;

      /* For now, we don't update BLOCKs when we make copies.  So, we
         have to nullify all BIND_EXPRs.  */
      if (TREE_CODE (*tp) == BIND_EXPR)
        BIND_EXPR_BLOCK (*tp) = NULL_TREE;
    }
  else if (code == CONSTRUCTOR)
    {
      /* CONSTRUCTOR nodes need special handling because
         we need to duplicate the vector of elements.  */
      tree new_tree;

      new_tree = copy_node (*tp);

      /* Propagate mudflap marked-ness.  */
      if (flag_mudflap && mf_marked_p (*tp))
        mf_mark (new_tree);

      CONSTRUCTOR_ELTS (new_tree) = VEC_copy (constructor_elt, gc,
                                              CONSTRUCTOR_ELTS (*tp));
      *tp = new_tree;
    }
  else if (code == STATEMENT_LIST)
    /* We used to just abort on STATEMENT_LIST, but we can run into them
       with statement-expressions (c++/40975).  */
    copy_statement_list (tp);
  else if (TREE_CODE_CLASS (code) == tcc_type)
    *walk_subtrees = 0;
  else if (TREE_CODE_CLASS (code) == tcc_declaration)
    *walk_subtrees = 0;
  else if (TREE_CODE_CLASS (code) == tcc_constant)
    *walk_subtrees = 0;
  return NULL_TREE;
}
/* The SAVE_EXPR pointed to by TP is being copied.  If ST contains
   information indicating to what new SAVE_EXPR this one should be mapped,
   use that one.  Otherwise, create a new node and enter it in ST.  FN is
   the function into which the copy will be placed.  */

static void
remap_save_expr (tree *tp, void *st_, int *walk_subtrees)
{
  struct pointer_map_t *st = (struct pointer_map_t *) st_;
  tree *n;
  tree t;

  /* See if we already encountered this SAVE_EXPR.  */
  n = (tree *) pointer_map_contains (st, *tp);

  /* If we didn't already remap this SAVE_EXPR, do so now.  */
  if (!n)
    {
      t = copy_node (*tp);

      /* Remember this SAVE_EXPR.  */
      *pointer_map_insert (st, *tp) = t;
      /* Make sure we don't remap an already-remapped SAVE_EXPR.  */
      *pointer_map_insert (st, t) = t;
    }
  else
    {
      /* We've already walked into this SAVE_EXPR; don't do it again.  */
      *walk_subtrees = 0;
      t = *n;
    }

  /* Replace this SAVE_EXPR with the copy.  */
  *tp = t;
}
/* Called via walk_tree.  If *TP points to a DECL_STMT for a local label,
   copies the declaration and enters it in the splay_tree in DATA (which is
   really a `copy_body_data *').  */

static tree
mark_local_for_remap_r (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED,
                        void *data)
{
  copy_body_data *id = (copy_body_data *) data;

  /* Don't walk into types.  */
  if (TYPE_P (*tp))
    *walk_subtrees = 0;

  else if (TREE_CODE (*tp) == LABEL_EXPR)
    {
      tree decl = TREE_OPERAND (*tp, 0);

      /* Copy the decl and remember the copy.  */
      insert_decl_map (id, decl, id->copy_decl (decl, id));
    }

  return NULL_TREE;
}
/* Perform any modifications to EXPR required when it is unsaved.  Does
   not recurse into EXPR's subtrees.  */

static void
unsave_expr_1 (tree expr)
{
  switch (TREE_CODE (expr))
    {
    case TARGET_EXPR:
      /* Don't mess with a TARGET_EXPR that hasn't been expanded.
         It's OK for this to happen if it was part of a subtree that
         isn't immediately expanded, such as operand 2 of another
         TARGET_EXPR.  */
      if (TREE_OPERAND (expr, 1))
        break;

      TREE_OPERAND (expr, 1) = TREE_OPERAND (expr, 3);
      TREE_OPERAND (expr, 3) = NULL_TREE;
      break;

    default:
      break;
    }
}
/* Called via walk_tree when an expression is unsaved.  Using the
   map pointed to by ST (which is really a `struct pointer_map_t *'),
   remaps all local declarations to appropriate replacements.  */

static tree
unsave_r (tree *tp, int *walk_subtrees, void *data)
{
  copy_body_data *id = (copy_body_data *) data;
  struct pointer_map_t *st = id->decl_map;
  tree *n;

  /* Only a local declaration (variable or label).  */
  if ((TREE_CODE (*tp) == VAR_DECL && !TREE_STATIC (*tp))
      || TREE_CODE (*tp) == LABEL_DECL)
    {
      /* Lookup the declaration.  */
      n = (tree *) pointer_map_contains (st, *tp);

      /* If it's there, remap it.  */
      if (n)
        *tp = *n;
    }
  else if (TREE_CODE (*tp) == STATEMENT_LIST)
    gcc_unreachable ();
  else if (TREE_CODE (*tp) == BIND_EXPR)
    copy_bind_expr (tp, walk_subtrees, id);
  else if (TREE_CODE (*tp) == SAVE_EXPR
           || TREE_CODE (*tp) == TARGET_EXPR)
    remap_save_expr (tp, st, walk_subtrees);
  else
    {
      copy_tree_r (tp, walk_subtrees, NULL);

      /* Do whatever unsaving is required.  */
      unsave_expr_1 (*tp);
    }

  /* Keep iterating.  */
  return NULL_TREE;
}
/* Copies everything in EXPR and replaces variables, labels
   and SAVE_EXPRs local to EXPR.  */

tree
unsave_expr_now (tree expr)
{
  copy_body_data id;

  /* There's nothing to do for NULL_TREE.  */
  if (expr == 0)
    return expr;

  /* Set up ID.  */
  memset (&id, 0, sizeof (id));
  id.src_fn = current_function_decl;
  id.dst_fn = current_function_decl;
  id.decl_map = pointer_map_create ();
  id.debug_map = NULL;

  id.copy_decl = copy_decl_no_change;
  id.transform_call_graph_edges = CB_CGE_DUPLICATE;
  id.transform_new_cfg = false;
  id.transform_return_to_modify = false;
  id.transform_lang_insert_block = NULL;

  /* Walk the tree once to find local labels.  */
  walk_tree_without_duplicates (&expr, mark_local_for_remap_r, &id);

  /* Walk the tree again, copying, remapping, and unsaving.  */
  walk_tree (&expr, unsave_r, &id, NULL);

  /* Clean up.  */
  pointer_map_destroy (id.decl_map);
  if (id.debug_map)
    pointer_map_destroy (id.debug_map);

  return expr;
}
/* Called via walk_gimple_seq.  If *GSIP points to a GIMPLE_LABEL for a local
   label, copies the declaration and enters it in the splay_tree in DATA
   (which is really a 'copy_body_data *').  */

static tree
mark_local_labels_stmt (gimple_stmt_iterator *gsip,
                        bool *handled_ops_p ATTRIBUTE_UNUSED,
                        struct walk_stmt_info *wi)
{
  copy_body_data *id = (copy_body_data *) wi->info;
  gimple stmt = gsi_stmt (*gsip);

  if (gimple_code (stmt) == GIMPLE_LABEL)
    {
      tree decl = gimple_label_label (stmt);

      /* Copy the decl and remember the copy.  */
      insert_decl_map (id, decl, id->copy_decl (decl, id));
    }

  return NULL_TREE;
}
/* Called via walk_gimple_seq by copy_gimple_seq_and_replace_locals.
   Using the map pointed to by ST (which is really a `struct
   pointer_map_t *'), remaps all local declarations to appropriate
   replacements in gimple operands.  */

static tree
replace_locals_op (tree *tp, int *walk_subtrees, void *data)
{
  struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
  copy_body_data *id = (copy_body_data *) wi->info;
  struct pointer_map_t *st = id->decl_map;
  tree *n;
  tree expr = *tp;

  /* Only a local declaration (variable or label).  */
  if ((TREE_CODE (expr) == VAR_DECL
       && !TREE_STATIC (expr))
      || TREE_CODE (expr) == LABEL_DECL)
    {
      /* Lookup the declaration.  */
      n = (tree *) pointer_map_contains (st, expr);

      /* If it's there, remap it.  */
      if (n)
        *tp = *n;
      *walk_subtrees = 0;
    }
  else if (TREE_CODE (expr) == STATEMENT_LIST
           || TREE_CODE (expr) == BIND_EXPR
           || TREE_CODE (expr) == SAVE_EXPR)
    gcc_unreachable ();
  else if (TREE_CODE (expr) == TARGET_EXPR)
    {
      /* Don't mess with a TARGET_EXPR that hasn't been expanded.
         It's OK for this to happen if it was part of a subtree that
         isn't immediately expanded, such as operand 2 of another
         TARGET_EXPR.  */
      if (!TREE_OPERAND (expr, 1))
        {
          TREE_OPERAND (expr, 1) = TREE_OPERAND (expr, 3);
          TREE_OPERAND (expr, 3) = NULL_TREE;
        }
    }

  /* Keep iterating.  */
  return NULL_TREE;
}
/* Called via walk_gimple_seq by copy_gimple_seq_and_replace_locals.
   Using the map pointed to by ST (which is really a `struct
   pointer_map_t *'), remaps all local declarations to appropriate
   replacements in gimple statements.  */

static tree
replace_locals_stmt (gimple_stmt_iterator *gsip,
                     bool *handled_ops_p ATTRIBUTE_UNUSED,
                     struct walk_stmt_info *wi)
{
  copy_body_data *id = (copy_body_data *) wi->info;
  gimple stmt = gsi_stmt (*gsip);

  if (gimple_code (stmt) == GIMPLE_BIND)
    {
      tree block = gimple_bind_block (stmt);

      if (block)
        {
          remap_block (&block, id);
          gimple_bind_set_block (stmt, block);
        }

      /* This will remap a lot of the same decls again, but this should be
         harmless.  */
      if (gimple_bind_vars (stmt))
        gimple_bind_set_vars (stmt,
                              remap_decls (gimple_bind_vars (stmt), NULL, id));
    }

  /* Keep iterating.  */
  return NULL_TREE;
}
/* Copies everything in SEQ and replaces variables and labels local to
   current_function_decl.  */

gimple_seq
copy_gimple_seq_and_replace_locals (gimple_seq seq)
{
  copy_body_data id;
  struct walk_stmt_info wi;
  struct pointer_set_t *visited;
  gimple_seq copy;

  /* There's nothing to do for a NULL sequence.  */
  if (seq == NULL)
    return seq;

  /* Set up ID.  */
  memset (&id, 0, sizeof (id));
  id.src_fn = current_function_decl;
  id.dst_fn = current_function_decl;
  id.decl_map = pointer_map_create ();
  id.debug_map = NULL;

  id.copy_decl = copy_decl_no_change;
  id.transform_call_graph_edges = CB_CGE_DUPLICATE;
  id.transform_new_cfg = false;
  id.transform_return_to_modify = false;
  id.transform_lang_insert_block = NULL;

  /* Walk the tree once to find local labels.  */
  memset (&wi, 0, sizeof (wi));
  visited = pointer_set_create ();
  wi.info = &id;
  wi.pset = visited;
  walk_gimple_seq (seq, mark_local_labels_stmt, NULL, &wi);
  pointer_set_destroy (visited);

  copy = gimple_seq_copy (seq);

  /* Walk the copy, remapping decls.  */
  memset (&wi, 0, sizeof (wi));
  wi.info = &id;
  walk_gimple_seq (copy, replace_locals_stmt, replace_locals_op, &wi);

  /* Clean up.  */
  pointer_map_destroy (id.decl_map);
  if (id.debug_map)
    pointer_map_destroy (id.debug_map);

  return copy;
}
/* Allow someone to determine if SEARCH is a child of TOP from gdb.  */

static tree
debug_find_tree_1 (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED, void *data)
{
  if (*tp == data)
    return (tree) data;
  else
    return NULL;
}

DEBUG_FUNCTION bool
debug_find_tree (tree top, tree search)
{
  return walk_tree_without_duplicates (&top, debug_find_tree_1, search) != 0;
}
/* Declare the variables created by the inliner.  Add all the variables in
   VARS to BLOCK.  */

static void
declare_inline_vars (tree block, tree vars)
{
  tree t;
  for (t = vars; t; t = DECL_CHAIN (t))
    {
      DECL_SEEN_IN_BIND_EXPR_P (t) = 1;
      gcc_assert (!TREE_STATIC (t) && !TREE_ASM_WRITTEN (t));
      add_local_decl (cfun, t);
    }

  if (block)
    BLOCK_VARS (block) = chainon (BLOCK_VARS (block), vars);
}
/* Copy NODE (which must be a DECL).  The DECL originally was in the FROM_FN,
   but now it will be in the TO_FN.  PARM_TO_VAR means enable PARM_DECL to
   VAR_DECL translation.  */

static tree
copy_decl_for_dup_finish (copy_body_data *id, tree decl, tree copy)
{
  /* Don't generate debug information for the copy if we wouldn't have
     generated it for the original either.  */
  DECL_ARTIFICIAL (copy) = DECL_ARTIFICIAL (decl);
  DECL_IGNORED_P (copy) = DECL_IGNORED_P (decl);

  /* Set the DECL_ABSTRACT_ORIGIN so the debugging routines know what
     declaration inspired this copy.  */
  DECL_ABSTRACT_ORIGIN (copy) = DECL_ORIGIN (decl);

  /* The new variable/label has no RTL, yet.  */
  if (CODE_CONTAINS_STRUCT (TREE_CODE (copy), TS_DECL_WRTL)
      && !TREE_STATIC (copy) && !DECL_EXTERNAL (copy))
    SET_DECL_RTL (copy, 0);

  /* These args would always appear unused, if not for this.  */
  TREE_USED (copy) = 1;

  /* Set the context for the new declaration.  */
  if (!DECL_CONTEXT (decl))
    /* Globals stay global.  */
    ;
  else if (DECL_CONTEXT (decl) != id->src_fn)
    /* Things that weren't in the scope of the function we're inlining
       from aren't in the scope we're inlining to, either.  */
    ;
  else if (TREE_STATIC (decl))
    /* Function-scoped static variables should stay in the original
       function.  */
    ;
  else
    /* Ordinary automatic local variables are now in the scope of the
       new function.  */
    DECL_CONTEXT (copy) = id->dst_fn;

  if (TREE_CODE (decl) == VAR_DECL
      /* C++ clones functions during parsing, before
         referenced variables are set up.  */
      && gimple_referenced_vars (DECL_STRUCT_FUNCTION (id->src_fn))
      && referenced_var_lookup (DECL_STRUCT_FUNCTION (id->src_fn),
                                DECL_UID (decl)))
    add_referenced_var (copy);

  return copy;
}
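/* Make a copy of PARM_DECL or RESULT_DECL DECL as a VAR_DECL suitable
   for use in ID->dst_fn.  */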
static tree
copy_decl_to_var (tree decl, copy_body_data *id)
{
  tree copy, type;

  gcc_assert (TREE_CODE (decl) == PARM_DECL
              || TREE_CODE (decl) == RESULT_DECL);

  type = TREE_TYPE (decl);

  copy = build_decl (DECL_SOURCE_LOCATION (id->dst_fn),
                     VAR_DECL, DECL_NAME (decl), type);
  if (DECL_PT_UID_SET_P (decl))
    SET_DECL_PT_UID (copy, DECL_PT_UID (decl));
  TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (decl);
  TREE_READONLY (copy) = TREE_READONLY (decl);
  TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (decl);
  DECL_GIMPLE_REG_P (copy) = DECL_GIMPLE_REG_P (decl);

  return copy_decl_for_dup_finish (id, decl, copy);
}
/* Like copy_decl_to_var, but create a return slot object instead of a
   pointer variable for return by invisible reference.  */

static tree
copy_result_decl_to_var (tree decl, copy_body_data *id)
{
  tree copy, type;

  gcc_assert (TREE_CODE (decl) == PARM_DECL
              || TREE_CODE (decl) == RESULT_DECL);

  type = TREE_TYPE (decl);
  if (DECL_BY_REFERENCE (decl))
    type = TREE_TYPE (type);

  copy = build_decl (DECL_SOURCE_LOCATION (id->dst_fn),
                     VAR_DECL, DECL_NAME (decl), type);
  if (DECL_PT_UID_SET_P (decl))
    SET_DECL_PT_UID (copy, DECL_PT_UID (decl));
  TREE_READONLY (copy) = TREE_READONLY (decl);
  TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (decl);
  if (!DECL_BY_REFERENCE (decl))
    {
      TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (decl);
      DECL_GIMPLE_REG_P (copy) = DECL_GIMPLE_REG_P (decl);
    }

  return copy_decl_for_dup_finish (id, decl, copy);
}
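/* Copy DECL without changing its kind (no PARM_DECL or RESULT_DECL to
   VAR_DECL translation), finishing it for ID->dst_fn.  */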
tree
copy_decl_no_change (tree decl, copy_body_data *id)
{
  tree copy;

  copy = copy_node (decl);

  /* The COPY is not abstract; it will be generated in DST_FN.  */
  DECL_ABSTRACT (copy) = 0;
  lang_hooks.dup_lang_specific_decl (copy);

  /* TREE_ADDRESSABLE isn't used to indicate that a label's address has
     been taken; it's for internal bookkeeping in expand_goto_internal.  */
  if (TREE_CODE (copy) == LABEL_DECL)
    {
      TREE_ADDRESSABLE (copy) = 0;
      LABEL_DECL_UID (copy) = -1;
    }

  return copy_decl_for_dup_finish (id, decl, copy);
}
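/* Copy DECL for ID->dst_fn, turning PARM_DECLs and RESULT_DECLs into
   VAR_DECLs and leaving every other kind of declaration unchanged.  */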
static tree
copy_decl_maybe_to_var (tree decl, copy_body_data *id)
{
  if (TREE_CODE (decl) == PARM_DECL || TREE_CODE (decl) == RESULT_DECL)
    return copy_decl_to_var (decl, id);
  else
    return copy_decl_no_change (decl, id);
}
/* Return a copy of the function's argument tree.  */
static tree
copy_arguments_for_versioning (tree orig_parm, copy_body_data * id,
                               bitmap args_to_skip, tree *vars)
{
  tree arg, *parg;
  tree new_parm = NULL;
  int i = 0;

  parg = &new_parm;

  for (arg = orig_parm; arg; arg = DECL_CHAIN (arg), i++)
    if (!args_to_skip || !bitmap_bit_p (args_to_skip, i))
      {
        tree new_tree = remap_decl (arg, id);
        lang_hooks.dup_lang_specific_decl (new_tree);
        *parg = new_tree;
        parg = &DECL_CHAIN (new_tree);
      }
    else if (!pointer_map_contains (id->decl_map, arg))
      {
        /* Make an equivalent VAR_DECL.  If the argument was used
           as a temporary variable later in the function, the uses will be
           replaced by a local variable.  */
        tree var = copy_decl_to_var (arg, id);
        add_referenced_var (var);
        insert_decl_map (id, arg, var);
        /* Declare this new variable.  */
        DECL_CHAIN (var) = *vars;
        *vars = var;
      }
  return new_parm;
}
/* Return a copy of the function's static chain.  */
static tree
copy_static_chain (tree static_chain, copy_body_data * id)
{
  tree *chain_copy, *pvar;

  chain_copy = &static_chain;
  for (pvar = chain_copy; *pvar; pvar = &DECL_CHAIN (*pvar))
    {
      tree new_tree = remap_decl (*pvar, id);
      lang_hooks.dup_lang_specific_decl (new_tree);
      DECL_CHAIN (new_tree) = DECL_CHAIN (*pvar);
      *pvar = new_tree;
    }
  return static_chain;
}
/* Return true if the function is allowed to be versioned.
   This is a guard for the versioning functionality.  */

bool
tree_versionable_function_p (tree fndecl)
{
  return (!lookup_attribute ("noclone", DECL_ATTRIBUTES (fndecl))
          && copy_forbidden (DECL_STRUCT_FUNCTION (fndecl), fndecl) == NULL);
}
/* Delete all unreachable basic blocks and update the callgraph.
   Doing so is somewhat nontrivial because we need to update all clones and
   remove inline functions that become unreachable.  */

static bool
delete_unreachable_blocks_update_callgraph (copy_body_data *id)
{
  bool changed = false;
  basic_block b, next_bb;

  find_unreachable_blocks ();

  /* Delete all unreachable basic blocks.  */

  for (b = ENTRY_BLOCK_PTR->next_bb; b != EXIT_BLOCK_PTR; b = next_bb)
    {
      next_bb = b->next_bb;

      if (!(b->flags & BB_REACHABLE))
        {
          gimple_stmt_iterator bsi;

          for (bsi = gsi_start_bb (b); !gsi_end_p (bsi); gsi_next (&bsi))
            if (gimple_code (gsi_stmt (bsi)) == GIMPLE_CALL)
              {
                struct cgraph_edge *e;
                struct cgraph_node *node;

                if ((e = cgraph_edge (id->dst_node, gsi_stmt (bsi))) != NULL)
                  {
                    if (!e->inline_failed)
                      cgraph_remove_node_and_inline_clones (e->callee);
                    else
                      cgraph_remove_edge (e);
                  }
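                /* When clone callgraph edges are being moved, the dead
                   call also has to be removed from every clone of
                   DST_NODE; walk the whole clone tree so each clone is
                   visited exactly once.  */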
                if (id->transform_call_graph_edges == CB_CGE_MOVE_CLONES
                    && id->dst_node->clones)
                  for (node = id->dst_node->clones; node != id->dst_node;)
                    {
                      if ((e = cgraph_edge (node, gsi_stmt (bsi))) != NULL)
                        {
                          if (!e->inline_failed)
                            cgraph_remove_node_and_inline_clones (e->callee);
                          else
                            cgraph_remove_edge (e);
                        }

                      if (node->clones)
                        node = node->clones;
                      else if (node->next_sibling_clone)
                        node = node->next_sibling_clone;
                      else
                        {
                          while (node != id->dst_node
                                 && !node->next_sibling_clone)
                            node = node->clone_of;
                          if (node != id->dst_node)
                            node = node->next_sibling_clone;
                        }
                    }
              }
          delete_basic_block (b);
          changed = true;
        }
    }

  return changed;
}
/* Update clone info after duplication.  */

static void
update_clone_info (copy_body_data * id)
{
  struct cgraph_node *node;
  if (!id->dst_node->clones)
    return;
  for (node = id->dst_node->clones; node != id->dst_node;)
    {
      /* First update replace maps to match the new body.  */
      if (node->clone.tree_map)
        {
          unsigned int i;
          for (i = 0;
               i < VEC_length (ipa_replace_map_p, node->clone.tree_map); i++)
            {
              struct ipa_replace_map *replace_info;
              replace_info = VEC_index (ipa_replace_map_p,
                                        node->clone.tree_map, i);
              walk_tree (&replace_info->old_tree, copy_tree_body_r, id, NULL);
              walk_tree (&replace_info->new_tree, copy_tree_body_r, id, NULL);
            }
        }
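      /* Continue the pre-order walk over the clone tree: descend into
         NODE's clones first, then move to the next sibling, and
         otherwise climb back up until a sibling is found.  */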
      if (node->clones)
        node = node->clones;
      else if (node->next_sibling_clone)
        node = node->next_sibling_clone;
      else
        {
          while (node != id->dst_node && !node->next_sibling_clone)
            node = node->clone_of;
          if (node != id->dst_node)
            node = node->next_sibling_clone;
        }
    }
}
/* Create a copy of a function's tree.
   OLD_DECL and NEW_DECL are FUNCTION_DECL tree nodes
   of the original function and the new copied function
   respectively.  In case we want to replace a DECL
   tree with another tree while duplicating the function's
   body, TREE_MAP represents the mapping between these
   trees.  If UPDATE_CLONES is set, the call_stmt fields
   of edges of clones of the function will be updated.

   If non-NULL, ARGS_TO_SKIP determines which function parameters
   to remove from the new version.
   If non-NULL, BLOCKS_TO_COPY determines which basic blocks to copy.
   If non-NULL, NEW_ENTRY determines the new entry BB of the clone.  */

void
tree_function_versioning (tree old_decl, tree new_decl,
                          VEC(ipa_replace_map_p,gc)* tree_map,
                          bool update_clones, bitmap args_to_skip,
                          bitmap blocks_to_copy, basic_block new_entry)
{
  struct cgraph_node *old_version_node;
  struct cgraph_node *new_version_node;
  copy_body_data id;
  tree p;
  unsigned i;
  struct ipa_replace_map *replace_info;
  basic_block old_entry_block, bb;
  VEC (gimple, heap) *init_stmts = VEC_alloc (gimple, heap, 10);

  tree old_current_function_decl = current_function_decl;
  tree vars = NULL_TREE;

  gcc_assert (TREE_CODE (old_decl) == FUNCTION_DECL
              && TREE_CODE (new_decl) == FUNCTION_DECL);
  DECL_POSSIBLY_INLINED (old_decl) = 1;

  old_version_node = cgraph_get_node (old_decl);
  gcc_checking_assert (old_version_node);
  new_version_node = cgraph_get_node (new_decl);
  gcc_checking_assert (new_version_node);
  /* Copy over debug args.  */
  if (DECL_HAS_DEBUG_ARGS_P (old_decl))
    {
      VEC(tree, gc) **new_debug_args, **old_debug_args;
      gcc_checking_assert (decl_debug_args_lookup (new_decl) == NULL);
      DECL_HAS_DEBUG_ARGS_P (new_decl) = 0;
      old_debug_args = decl_debug_args_lookup (old_decl);
      if (old_debug_args)
        {
          new_debug_args = decl_debug_args_insert (new_decl);
          *new_debug_args = VEC_copy (tree, gc, *old_debug_args);
        }
    }
  /* Output the inlining info for this abstract function, since it has been
     inlined.  If we don't do this now, we can lose the information about the
     variables in the function when the blocks get blown away as soon as we
     remove the cgraph node.  */
  (*debug_hooks->outlining_inline_function) (old_decl);

  DECL_ARTIFICIAL (new_decl) = 1;
  DECL_ABSTRACT_ORIGIN (new_decl) = DECL_ORIGIN (old_decl);
  DECL_FUNCTION_PERSONALITY (new_decl) = DECL_FUNCTION_PERSONALITY (old_decl);

  /* Prepare the data structures for the tree copy.  */
  memset (&id, 0, sizeof (id));

  /* Generate a new name for the new version.  */
  id.statements_to_fold = pointer_set_create ();
  id.decl_map = pointer_map_create ();
  id.debug_map = NULL;
  id.src_fn = old_decl;
  id.dst_fn = new_decl;
  id.src_node = old_version_node;
  id.dst_node = new_version_node;
  id.src_cfun = DECL_STRUCT_FUNCTION (old_decl);
  if (id.src_node->ipa_transforms_to_apply)
    {
      VEC(ipa_opt_pass,heap) * old_transforms_to_apply
        = id.dst_node->ipa_transforms_to_apply;
      unsigned int i;

      id.dst_node->ipa_transforms_to_apply
        = VEC_copy (ipa_opt_pass, heap,
                    id.src_node->ipa_transforms_to_apply);
      for (i = 0; i < VEC_length (ipa_opt_pass, old_transforms_to_apply); i++)
        VEC_safe_push (ipa_opt_pass, heap,
                       id.dst_node->ipa_transforms_to_apply,
                       VEC_index (ipa_opt_pass,
                                  old_transforms_to_apply,
                                  i));
    }
  id.copy_decl = copy_decl_no_change;
  id.transform_call_graph_edges
    = update_clones ? CB_CGE_MOVE_CLONES : CB_CGE_MOVE;
  id.transform_new_cfg = true;
  id.transform_return_to_modify = false;
  id.transform_lang_insert_block = NULL;
  current_function_decl = new_decl;
  old_entry_block = ENTRY_BLOCK_PTR_FOR_FUNCTION
    (DECL_STRUCT_FUNCTION (old_decl));
  initialize_cfun (new_decl, old_decl,
                   old_entry_block->count);
  DECL_STRUCT_FUNCTION (new_decl)->gimple_df->ipa_pta
    = id.src_cfun->gimple_df->ipa_pta;
  push_cfun (DECL_STRUCT_FUNCTION (new_decl));
  /* Copy the function's static chain.  */
  p = DECL_STRUCT_FUNCTION (old_decl)->static_chain_decl;
  if (p)
    DECL_STRUCT_FUNCTION (new_decl)->static_chain_decl =
      copy_static_chain (DECL_STRUCT_FUNCTION (old_decl)->static_chain_decl,
                         &id);
  /* If there's a tree_map, prepare for substitution.  */
  if (tree_map)
    for (i = 0; i < VEC_length (ipa_replace_map_p, tree_map); i++)
      {
        gimple init;
        replace_info = VEC_index (ipa_replace_map_p, tree_map, i);
        if (replace_info->replace_p)
          {
            tree op = replace_info->new_tree;
            if (!replace_info->old_tree)
              {
                int i = replace_info->parm_num;
                tree parm;
                for (parm = DECL_ARGUMENTS (old_decl); i;
                     parm = DECL_CHAIN (parm))
                  i--;
                replace_info->old_tree = parm;
              }

            STRIP_NOPS (op);

            if (TREE_CODE (op) == VIEW_CONVERT_EXPR)
              op = TREE_OPERAND (op, 0);

            if (TREE_CODE (op) == ADDR_EXPR)
              {
                op = TREE_OPERAND (op, 0);
                while (handled_component_p (op))
                  op = TREE_OPERAND (op, 0);
                if (TREE_CODE (op) == VAR_DECL)
                  add_referenced_var (op);
              }
            gcc_assert (TREE_CODE (replace_info->old_tree) == PARM_DECL);
            init = setup_one_parameter (&id, replace_info->old_tree,
                                        replace_info->new_tree, id.src_fn,
                                        NULL,
                                        &vars);
            if (init)
              VEC_safe_push (gimple, heap, init_stmts, init);
          }
      }
  /* Copy the function's arguments.  */
  if (DECL_ARGUMENTS (old_decl) != NULL_TREE)
    DECL_ARGUMENTS (new_decl) =
      copy_arguments_for_versioning (DECL_ARGUMENTS (old_decl), &id,
                                     args_to_skip, &vars);
  DECL_INITIAL (new_decl) = remap_blocks (DECL_INITIAL (id.src_fn), &id);
  BLOCK_SUPERCONTEXT (DECL_INITIAL (new_decl)) = new_decl;

  declare_inline_vars (DECL_INITIAL (new_decl), vars);

  if (!VEC_empty (tree, DECL_STRUCT_FUNCTION (old_decl)->local_decls))
    /* Add local vars.  */
    add_local_variables (DECL_STRUCT_FUNCTION (old_decl), cfun, &id, false);
  if (DECL_RESULT (old_decl) != NULL_TREE)
    {
      tree old_name;
      DECL_RESULT (new_decl) = remap_decl (DECL_RESULT (old_decl), &id);
      lang_hooks.dup_lang_specific_decl (DECL_RESULT (new_decl));
      if (gimple_in_ssa_p (id.src_cfun)
          && DECL_BY_REFERENCE (DECL_RESULT (old_decl))
          && (old_name
              = gimple_default_def (id.src_cfun, DECL_RESULT (old_decl))))
        {
          tree new_name = make_ssa_name (DECL_RESULT (new_decl), NULL);
          insert_decl_map (&id, old_name, new_name);
          SSA_NAME_DEF_STMT (new_name) = gimple_build_nop ();
          set_default_def (DECL_RESULT (new_decl), new_name);
        }
    }
  /* Copy the function's body.  */
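  /* The old entry count together with a frequency scale of
     REG_BR_PROB_BASE (a factor of 1.0) copies the body with its
     original profile intact.  */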
  copy_body (&id, old_entry_block->count, REG_BR_PROB_BASE,
             ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR, blocks_to_copy, new_entry);
  /* Renumber the lexical scoping (non-code) blocks consecutively.  */
  number_blocks (new_decl);

  /* We want to create the BB unconditionally, so that the addition of
     debug stmts doesn't affect BB count, which may in the end cause
     codegen differences.  */
  bb = split_edge (single_succ_edge (ENTRY_BLOCK_PTR));
  while (VEC_length (gimple, init_stmts))
    insert_init_stmt (&id, bb, VEC_pop (gimple, init_stmts));
  update_clone_info (&id);
  /* Remap the nonlocal_goto_save_area, if any.  */
  if (cfun->nonlocal_goto_save_area)
    {
      struct walk_stmt_info wi;

      memset (&wi, 0, sizeof (wi));
      wi.info = &id;
      walk_tree (&cfun->nonlocal_goto_save_area, remap_gimple_op_r, &wi, NULL);
    }
  /* Clean up.  */
  pointer_map_destroy (id.decl_map);
  if (id.debug_map)
    pointer_map_destroy (id.debug_map);
  free_dominance_info (CDI_DOMINATORS);
  free_dominance_info (CDI_POST_DOMINATORS);

  fold_marked_statements (0, id.statements_to_fold);
  pointer_set_destroy (id.statements_to_fold);
  fold_cond_expr_cond ();
  delete_unreachable_blocks_update_callgraph (&id);
  if (id.dst_node->analyzed)
    cgraph_rebuild_references ();
  update_ssa (TODO_update_ssa);
  /* After partial cloning we need to rescale frequencies, so they are
     within proper range in the cloned function.  */
  if (new_entry)
    {
      struct cgraph_edge *e;
      rebuild_frequencies ();

      new_version_node->count = ENTRY_BLOCK_PTR->count;
      for (e = new_version_node->callees; e; e = e->next_callee)
        {
          basic_block bb = gimple_bb (e->call_stmt);
          e->frequency = compute_call_stmt_bb_frequency (current_function_decl,
                                                         bb);
          e->count = bb->count;
        }
      for (e = new_version_node->indirect_calls; e; e = e->next_callee)
        {
          basic_block bb = gimple_bb (e->call_stmt);
          e->frequency = compute_call_stmt_bb_frequency (current_function_decl,
                                                         bb);
          e->count = bb->count;
        }
    }
  free_dominance_info (CDI_DOMINATORS);
  free_dominance_info (CDI_POST_DOMINATORS);

  gcc_assert (!id.debug_stmts);
  VEC_free (gimple, heap, init_stmts);
  pop_cfun ();
  current_function_decl = old_current_function_decl;
  gcc_assert (!current_function_decl
              || DECL_STRUCT_FUNCTION (current_function_decl) == cfun);
  return;
}
/* EXP is a CALL_EXPR present in a GENERIC expression tree.  Try to integrate
   the callee and return the inlined body on success.  */

tree
maybe_inline_call_in_expr (tree exp)
{
  tree fn = get_callee_fndecl (exp);

  /* We can only try to inline "const" functions.  */
  if (fn && TREE_READONLY (fn) && DECL_SAVED_TREE (fn))
    {
      struct pointer_map_t *decl_map = pointer_map_create ();
      call_expr_arg_iterator iter;
      copy_body_data id;
      tree param, arg, t;

      /* Remap the parameters.  */
      for (param = DECL_ARGUMENTS (fn), arg = first_call_expr_arg (exp, &iter);
           param;
           param = DECL_CHAIN (param), arg = next_call_expr_arg (&iter))
        *pointer_map_insert (decl_map, param) = arg;

      memset (&id, 0, sizeof (id));
      id.src_fn = fn;
      id.dst_fn = current_function_decl;
      id.src_cfun = DECL_STRUCT_FUNCTION (fn);
      id.decl_map = decl_map;

      id.copy_decl = copy_decl_no_change;
      id.transform_call_graph_edges = CB_CGE_DUPLICATE;
      id.transform_new_cfg = false;
      id.transform_return_to_modify = true;
      id.transform_lang_insert_block = NULL;

      /* Make sure not to unshare trees behind the front-end's back
         since front-end specific mechanisms may rely on sharing.  */
      id.regimplify = false;
      id.do_not_unshare = true;

      /* We're not inside any EH region.  */
      id.eh_lp_nr = 0;

      t = copy_tree_body (&id);
      pointer_map_destroy (decl_map);

      /* We can only return something suitable for use in a GENERIC
         expression tree.  */
      if (TREE_CODE (t) == MODIFY_EXPR)
        return TREE_OPERAND (t, 1);
    }

  return NULL_TREE;
}
/* Duplicate a type, fields and all.  */

tree
build_duplicate_type (tree type)
{
  struct copy_body_data id;

  memset (&id, 0, sizeof (id));
  id.src_fn = current_function_decl;
  id.dst_fn = current_function_decl;
  id.src_cfun = cfun;
  id.decl_map = pointer_map_create ();
  id.debug_map = NULL;
  id.copy_decl = copy_decl_no_change;

  type = remap_type_1 (type, &id);

  pointer_map_destroy (id.decl_map);
  if (id.debug_map)
    pointer_map_destroy (id.debug_map);

  TYPE_CANONICAL (type) = type;