/* Tree inlining.
   Copyright (C) 2001-2015 Free Software Foundation, Inc.
   Contributed by Alexandre Oliva <aoliva@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "coretypes.h"
#include "diagnostic-core.h"
#include "fold-const.h"
#include "stor-layout.h"
#include "tree-inline.h"
#include "insn-config.h"
#include "langhooks.h"
#include "hard-reg-set.h"
#include "dominance.h"
#include "basic-block.h"
#include "tree-iterator.h"
#include "tree-ssa-alias.h"
#include "internal-fn.h"
#include "gimple-fold.h"
#include "gimple-expr.h"
#include "gimple-iterator.h"
#include "gimplify-me.h"
#include "gimple-walk.h"
#include "gimple-ssa.h"
#include "tree-phinodes.h"
#include "ssa-iterators.h"
#include "stringpool.h"
#include "tree-ssanames.h"
#include "tree-into-ssa.h"
#include "tree-pretty-print.h"
#include "plugin-api.h"
#include "alloc-pool.h"
#include "symbol-summary.h"
#include "value-prof.h"
#include "tree-pass.h"
#include "tree-chkp.h"
#include "rtl.h"	/* FIXME: For asm_str_count.  */
/* I'm not real happy about this, but we need to handle gimple and
   non-gimple trees.  */
/* Inlining, Cloning, Versioning, Parallelization

   Inlining: a function body is duplicated, but the PARM_DECLs are
   remapped into VAR_DECLs, and non-void RETURN_EXPRs become
   MODIFY_EXPRs that store to a dedicated returned-value variable.
   The duplicated eh_region info of the copy will later be appended
   to the info for the caller; the eh_region info in copied throwing
   statements and RESX statements is adjusted accordingly.

   Cloning: (only in C++) We have one body for a con/de/structor, and
   multiple function decls, each with a unique parameter list.
   Duplicate the body, using the given splay tree; some parameters
   will become constants (like 0 or 1).

   Versioning: a function body is duplicated and the result is a new
   function, rather than being inlined into the blocks of an existing
   function as with inlining.  Some parameters will become constants.

   Parallelization: a region of a function is duplicated resulting in
   a new function.  Variables may be replaced with complex expressions
   to enable shared variable semantics.

   All of these simultaneously look up any callgraph edges.  If
   we're going to inline the duplicated function body, and the given
   function has some cloned callgraph nodes (one for each place this
   function will be inlined) those callgraph edges will be duplicated.
   If we're cloning the body, those callgraph edges will be
   updated to point into the new body.  (Note that the original
   callgraph node and edge list will not be altered.)

   See the CALL_EXPR handling case in copy_tree_body_r ().  */
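/* Illustrative example (not from the original sources; names are
   hypothetical): under the "return becomes modify" scheme above,
   inlining

       int square (int x) { return x * x; }

   at the call "y = square (3);" effectively produces

       retval.1 = 3 * 3;   -- RETURN_EXPR turned into a MODIFY_EXPR
       y = retval.1;

   with the branch-to-exit semantics of the return expressed as CFG
   edges rather than statements.  */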
/* To Do:

   o In order to make inlining-on-trees work, we pessimized
     function-local static constants.  In particular, they are now
     always output, even when not addressed.  Fix this by treating
     function-local static constants just like global static
     constants; the back-end already knows not to output them if they
     are not needed.

   o Provide heuristics to clamp inlining of recursive template
     calls?  */
/* Weights that estimate_num_insns uses to estimate the size of the
   produced code.  */

eni_weights eni_size_weights;

/* Weights that estimate_num_insns uses to estimate the time necessary
   to execute the produced code.  */

eni_weights eni_time_weights;
/* Prototypes.  */

static tree declare_return_variable (copy_body_data *, tree, tree, tree,
				     basic_block);
static void remap_block (tree *, copy_body_data *);
static void copy_bind_expr (tree *, int *, copy_body_data *);
static void declare_inline_vars (tree, tree);
static void remap_save_expr (tree *, hash_map<tree, tree> *, int *);
static void prepend_lexical_block (tree current_block, tree new_block);
static tree copy_decl_to_var (tree, copy_body_data *);
static tree copy_result_decl_to_var (tree, copy_body_data *);
static tree copy_decl_maybe_to_var (tree, copy_body_data *);
static gimple_seq remap_gimple_stmt (gimple, copy_body_data *);
static bool delete_unreachable_blocks_update_callgraph (copy_body_data *id);
static void insert_init_stmt (copy_body_data *, basic_block, gimple);
/* Insert a tree->tree mapping for ID.  Despite the name suggesting
   that the trees should be variables, the map is used for more than
   that.  */

void
insert_decl_map (copy_body_data *id, tree key, tree value)
{
  id->decl_map->put (key, value);

  /* Always insert an identity map as well.  If we see this same new
     node again, we won't want to duplicate it a second time.  */
  if (key != value)
    id->decl_map->put (value, value);
}
/* Insert a tree->tree mapping for ID.  This is only used for
   variables.  */

static void
insert_debug_decl_map (copy_body_data *id, tree key, tree value)
{
  if (!gimple_in_ssa_p (id->src_cfun))
    return;

  if (!opt_for_fn (id->dst_fn, flag_var_tracking_assignments))
    return;

  if (!target_for_debug_bind (key))
    return;

  gcc_assert (TREE_CODE (key) == PARM_DECL);
  gcc_assert (TREE_CODE (value) == VAR_DECL);

  if (!id->debug_map)
    id->debug_map = new hash_map<tree, tree>;

  id->debug_map->put (key, value);
}
/* If nonzero, we're remapping the contents of inlined debug
   statements.  If negative, an error has occurred, such as a
   reference to a variable that isn't available in the inlined
   version.  */
static int processing_debug_stmt = 0;
/* Construct new SSA name for old NAME.  ID is the inline context.  */

static tree
remap_ssa_name (tree name, copy_body_data *id)
{
  tree new_tree, var;
  tree *n;

  gcc_assert (TREE_CODE (name) == SSA_NAME);

  n = id->decl_map->get (name);
  if (n)
    return unshare_expr (*n);

  if (processing_debug_stmt)
    {
      if (SSA_NAME_IS_DEFAULT_DEF (name)
	  && TREE_CODE (SSA_NAME_VAR (name)) == PARM_DECL
	  && id->entry_bb == NULL
	  && single_succ_p (ENTRY_BLOCK_PTR_FOR_FN (cfun)))
	{
	  tree vexpr = make_node (DEBUG_EXPR_DECL);
	  gimple def_temp;
	  gimple_stmt_iterator gsi;
	  tree val = SSA_NAME_VAR (name);

	  n = id->decl_map->get (val);
	  if (n != NULL)
	    val = *n;
	  if (TREE_CODE (val) != PARM_DECL)
	    {
	      processing_debug_stmt = -1;
	      return name;
	    }
	  def_temp = gimple_build_debug_source_bind (vexpr, val, NULL);
	  DECL_ARTIFICIAL (vexpr) = 1;
	  TREE_TYPE (vexpr) = TREE_TYPE (name);
	  DECL_MODE (vexpr) = DECL_MODE (SSA_NAME_VAR (name));
	  gsi = gsi_after_labels (single_succ (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
	  gsi_insert_before (&gsi, def_temp, GSI_SAME_STMT);
	  return vexpr;
	}

      processing_debug_stmt = -1;
      return name;
    }

  /* Remap anonymous SSA names or SSA names of anonymous decls.  */
  var = SSA_NAME_VAR (name);
  if (!var
      || (!SSA_NAME_IS_DEFAULT_DEF (name)
	  && TREE_CODE (var) == VAR_DECL
	  && !VAR_DECL_IS_VIRTUAL_OPERAND (var)
	  && DECL_ARTIFICIAL (var)
	  && DECL_IGNORED_P (var)
	  && !DECL_NAME (var)))
    {
      struct ptr_info_def *pi;
      new_tree = make_ssa_name (remap_type (TREE_TYPE (name), id));
      if (!var && SSA_NAME_IDENTIFIER (name))
	SET_SSA_NAME_VAR_OR_IDENTIFIER (new_tree, SSA_NAME_IDENTIFIER (name));
      insert_decl_map (id, name, new_tree);
      SSA_NAME_OCCURS_IN_ABNORMAL_PHI (new_tree)
	= SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name);
      /* At least IPA points-to info can be directly transferred.  */
      if (id->src_cfun->gimple_df
	  && id->src_cfun->gimple_df->ipa_pta
	  && (pi = SSA_NAME_PTR_INFO (name))
	  && !pi->pt.anything)
	{
	  struct ptr_info_def *new_pi = get_ptr_info (new_tree);
	  new_pi->pt = pi->pt;
	}
      return new_tree;
    }

  /* Do not set DEF_STMT yet as statement is not copied yet.  We do that
     in copy_bb.  */
  new_tree = remap_decl (var, id);

  /* We might've substituted constant or another SSA_NAME for
     the variable.

     Replace the SSA name representing RESULT_DECL by variable during
     inlining:  this saves us from need to introduce PHI node in a case
     return value is just partly initialized.  */
  if ((TREE_CODE (new_tree) == VAR_DECL || TREE_CODE (new_tree) == PARM_DECL)
      && (!SSA_NAME_VAR (name)
	  || TREE_CODE (SSA_NAME_VAR (name)) != RESULT_DECL
	  || !id->transform_return_to_modify))
    {
      struct ptr_info_def *pi;
      new_tree = make_ssa_name (new_tree);
      insert_decl_map (id, name, new_tree);
      SSA_NAME_OCCURS_IN_ABNORMAL_PHI (new_tree)
	= SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name);
      /* At least IPA points-to info can be directly transferred.  */
      if (id->src_cfun->gimple_df
	  && id->src_cfun->gimple_df->ipa_pta
	  && (pi = SSA_NAME_PTR_INFO (name))
	  && !pi->pt.anything)
	{
	  struct ptr_info_def *new_pi = get_ptr_info (new_tree);
	  new_pi->pt = pi->pt;
	}
      if (SSA_NAME_IS_DEFAULT_DEF (name))
	{
	  /* By inlining function having uninitialized variable, we might
	     extend the lifetime (variable might get reused).  This cause
	     ICE in the case we end up extending lifetime of SSA name across
	     abnormal edge, but also increase register pressure.

	     We simply initialize all uninitialized vars by 0 except
	     for case we are inlining to very first BB.  We can avoid
	     this for all BBs that are not inside strongly connected
	     regions of the CFG, but this is expensive to test.  */
	  if (id->entry_bb
	      && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name)
	      && (!SSA_NAME_VAR (name)
		  || TREE_CODE (SSA_NAME_VAR (name)) != PARM_DECL)
	      && (id->entry_bb != EDGE_SUCC (ENTRY_BLOCK_PTR_FOR_FN (cfun),
					     0)->dest
		  || EDGE_COUNT (id->entry_bb->preds) != 1))
	    {
	      gimple_stmt_iterator gsi = gsi_last_bb (id->entry_bb);
	      gimple init_stmt;
	      tree zero = build_zero_cst (TREE_TYPE (new_tree));

	      init_stmt = gimple_build_assign (new_tree, zero);
	      gsi_insert_after (&gsi, init_stmt, GSI_NEW_STMT);
	      SSA_NAME_IS_DEFAULT_DEF (new_tree) = 0;
	    }
	  else
	    {
	      SSA_NAME_DEF_STMT (new_tree) = gimple_build_nop ();
	      set_ssa_default_def (cfun, SSA_NAME_VAR (new_tree), new_tree);
	    }
	}
    }
  else
    insert_decl_map (id, name, new_tree);
  return new_tree;
}
/* Remap DECL during the copying of the BLOCK tree for the function.  */

tree
remap_decl (tree decl, copy_body_data *id)
{
  tree *n;

  /* We only remap local variables in the current function.  */

  /* See if we have remapped this declaration.  */

  n = id->decl_map->get (decl);

  if (!n && processing_debug_stmt)
    {
      processing_debug_stmt = -1;
      return decl;
    }

  /* If we didn't already have an equivalent for this declaration,
     create one now.  */
  if (!n)
    {
      /* Make a copy of the variable or label.  */
      tree t = id->copy_decl (decl, id);

      /* Remember it, so that if we encounter this local entity again
	 we can reuse this copy.  Do this early because remap_type may
	 need this decl for TYPE_STUB_DECL.  */
      insert_decl_map (id, decl, t);

      if (!DECL_P (t))
	return t;

      /* Remap types, if necessary.  */
      TREE_TYPE (t) = remap_type (TREE_TYPE (t), id);
      if (TREE_CODE (t) == TYPE_DECL)
	DECL_ORIGINAL_TYPE (t) = remap_type (DECL_ORIGINAL_TYPE (t), id);

      /* Remap sizes as necessary.  */
      walk_tree (&DECL_SIZE (t), copy_tree_body_r, id, NULL);
      walk_tree (&DECL_SIZE_UNIT (t), copy_tree_body_r, id, NULL);

      /* If fields, do likewise for offset and qualifier.  */
      if (TREE_CODE (t) == FIELD_DECL)
	{
	  walk_tree (&DECL_FIELD_OFFSET (t), copy_tree_body_r, id, NULL);
	  if (TREE_CODE (DECL_CONTEXT (t)) == QUAL_UNION_TYPE)
	    walk_tree (&DECL_QUALIFIER (t), copy_tree_body_r, id, NULL);
	}

      return t;
    }

  if (id->do_not_unshare)
    return *n;
  else
    return unshare_expr (*n);
}
static tree
remap_type_1 (tree type, copy_body_data *id)
{
  tree new_tree, t;

  /* We do need a copy.  build and register it now.  If this is a pointer or
     reference type, remap the designated type and make a new pointer or
     reference type.  */
  if (TREE_CODE (type) == POINTER_TYPE)
    {
      new_tree = build_pointer_type_for_mode (remap_type (TREE_TYPE (type), id),
					      TYPE_MODE (type),
					      TYPE_REF_CAN_ALIAS_ALL (type));
      if (TYPE_ATTRIBUTES (type) || TYPE_QUALS (type))
	new_tree = build_type_attribute_qual_variant (new_tree,
						      TYPE_ATTRIBUTES (type),
						      TYPE_QUALS (type));
      insert_decl_map (id, type, new_tree);
      return new_tree;
    }
  else if (TREE_CODE (type) == REFERENCE_TYPE)
    {
      new_tree = build_reference_type_for_mode (remap_type (TREE_TYPE (type), id),
						TYPE_MODE (type),
						TYPE_REF_CAN_ALIAS_ALL (type));
      if (TYPE_ATTRIBUTES (type) || TYPE_QUALS (type))
	new_tree = build_type_attribute_qual_variant (new_tree,
						      TYPE_ATTRIBUTES (type),
						      TYPE_QUALS (type));
      insert_decl_map (id, type, new_tree);
      return new_tree;
    }
  else
    new_tree = copy_node (type);

  insert_decl_map (id, type, new_tree);

  /* This is a new type, not a copy of an old type.  Need to reassociate
     variants.  We can handle everything except the main variant lazily.  */
  t = TYPE_MAIN_VARIANT (type);
  if (type != t)
    {
      t = remap_type (t, id);
      TYPE_MAIN_VARIANT (new_tree) = t;
      TYPE_NEXT_VARIANT (new_tree) = TYPE_NEXT_VARIANT (t);
      TYPE_NEXT_VARIANT (t) = new_tree;
    }
  else
    {
      TYPE_MAIN_VARIANT (new_tree) = new_tree;
      TYPE_NEXT_VARIANT (new_tree) = NULL;
    }

  if (TYPE_STUB_DECL (type))
    TYPE_STUB_DECL (new_tree) = remap_decl (TYPE_STUB_DECL (type), id);

  /* Lazily create pointer and reference types.  */
  TYPE_POINTER_TO (new_tree) = NULL;
  TYPE_REFERENCE_TO (new_tree) = NULL;

  /* Copy all types that may contain references to local variables; be sure to
     preserve sharing between the type and its main variant when possible.  */
  switch (TREE_CODE (new_tree))
    {
    case INTEGER_TYPE:
    case REAL_TYPE:
    case FIXED_POINT_TYPE:
    case ENUMERAL_TYPE:
    case BOOLEAN_TYPE:
      if (TYPE_MAIN_VARIANT (new_tree) != new_tree)
	{
	  gcc_checking_assert (TYPE_MIN_VALUE (type) == TYPE_MIN_VALUE (TYPE_MAIN_VARIANT (type)));
	  gcc_checking_assert (TYPE_MAX_VALUE (type) == TYPE_MAX_VALUE (TYPE_MAIN_VARIANT (type)));

	  TYPE_MIN_VALUE (new_tree) = TYPE_MIN_VALUE (TYPE_MAIN_VARIANT (new_tree));
	  TYPE_MAX_VALUE (new_tree) = TYPE_MAX_VALUE (TYPE_MAIN_VARIANT (new_tree));
	}
      else
	{
	  t = TYPE_MIN_VALUE (new_tree);
	  if (t && TREE_CODE (t) != INTEGER_CST)
	    walk_tree (&TYPE_MIN_VALUE (new_tree), copy_tree_body_r, id, NULL);

	  t = TYPE_MAX_VALUE (new_tree);
	  if (t && TREE_CODE (t) != INTEGER_CST)
	    walk_tree (&TYPE_MAX_VALUE (new_tree), copy_tree_body_r, id, NULL);
	}
      return new_tree;

    case FUNCTION_TYPE:
      if (TYPE_MAIN_VARIANT (new_tree) != new_tree
	  && TREE_TYPE (type) == TREE_TYPE (TYPE_MAIN_VARIANT (type)))
	TREE_TYPE (new_tree) = TREE_TYPE (TYPE_MAIN_VARIANT (new_tree));
      else
	TREE_TYPE (new_tree) = remap_type (TREE_TYPE (new_tree), id);
      if (TYPE_MAIN_VARIANT (new_tree) != new_tree
	  && TYPE_ARG_TYPES (type) == TYPE_ARG_TYPES (TYPE_MAIN_VARIANT (type)))
	TYPE_ARG_TYPES (new_tree) = TYPE_ARG_TYPES (TYPE_MAIN_VARIANT (new_tree));
      else
	walk_tree (&TYPE_ARG_TYPES (new_tree), copy_tree_body_r, id, NULL);
      return new_tree;

    case ARRAY_TYPE:
      if (TYPE_MAIN_VARIANT (new_tree) != new_tree
	  && TREE_TYPE (type) == TREE_TYPE (TYPE_MAIN_VARIANT (type)))
	TREE_TYPE (new_tree) = TREE_TYPE (TYPE_MAIN_VARIANT (new_tree));
      else
	TREE_TYPE (new_tree) = remap_type (TREE_TYPE (new_tree), id);

      if (TYPE_MAIN_VARIANT (new_tree) != new_tree)
	{
	  gcc_checking_assert (TYPE_DOMAIN (type) == TYPE_DOMAIN (TYPE_MAIN_VARIANT (type)));
	  TYPE_DOMAIN (new_tree) = TYPE_DOMAIN (TYPE_MAIN_VARIANT (new_tree));
	}
      else
	TYPE_DOMAIN (new_tree) = remap_type (TYPE_DOMAIN (new_tree), id);
      break;

    case RECORD_TYPE:
    case UNION_TYPE:
    case QUAL_UNION_TYPE:
      if (TYPE_MAIN_VARIANT (type) != type
	  && TYPE_FIELDS (type) == TYPE_FIELDS (TYPE_MAIN_VARIANT (type)))
	TYPE_FIELDS (new_tree) = TYPE_FIELDS (TYPE_MAIN_VARIANT (new_tree));
      else
	{
	  tree f, nf = NULL;

	  for (f = TYPE_FIELDS (new_tree); f ; f = DECL_CHAIN (f))
	    {
	      t = remap_decl (f, id);
	      DECL_CONTEXT (t) = new_tree;
	      DECL_CHAIN (t) = nf;
	      nf = t;
	    }
	  TYPE_FIELDS (new_tree) = nreverse (nf);
	}
      break;

    default:
      /* Shouldn't have been thought variable sized.  */
      gcc_unreachable ();
    }

  /* All variants of type share the same size, so use the already remapped
     data.  */
  if (TYPE_MAIN_VARIANT (new_tree) != new_tree)
    {
      gcc_checking_assert (TYPE_SIZE (type) == TYPE_SIZE (TYPE_MAIN_VARIANT (type)));
      gcc_checking_assert (TYPE_SIZE_UNIT (type) == TYPE_SIZE_UNIT (TYPE_MAIN_VARIANT (type)));

      TYPE_SIZE (new_tree) = TYPE_SIZE (TYPE_MAIN_VARIANT (new_tree));
      TYPE_SIZE_UNIT (new_tree) = TYPE_SIZE_UNIT (TYPE_MAIN_VARIANT (new_tree));
    }
  else
    {
      walk_tree (&TYPE_SIZE (new_tree), copy_tree_body_r, id, NULL);
      walk_tree (&TYPE_SIZE_UNIT (new_tree), copy_tree_body_r, id, NULL);
    }

  return new_tree;
}
tree
remap_type (tree type, copy_body_data *id)
{
  tree *node;
  tree tmp;

  if (type == NULL)
    return type;

  /* See if we have remapped this type.  */
  node = id->decl_map->get (type);
  if (node)
    return *node;

  /* The type only needs remapping if it's variably modified.  */
  if (! variably_modified_type_p (type, id->src_fn))
    {
      insert_decl_map (id, type, type);
      return type;
    }

  id->remapping_type_depth++;
  tmp = remap_type_1 (type, id);
  id->remapping_type_depth--;

  return tmp;
}
/* Decide if DECL can be put into BLOCK_NONLOCAL_VARs.  */

static bool
can_be_nonlocal (tree decl, copy_body_data *id)
{
  /* We cannot duplicate function decls.  */
  if (TREE_CODE (decl) == FUNCTION_DECL)
    return true;

  /* Local static vars must be non-local or we get multiple declaration
     problems.  */
  if (TREE_CODE (decl) == VAR_DECL
      && !auto_var_in_fn_p (decl, id->src_fn))
    return true;

  return false;
}
static tree
remap_decls (tree decls, vec<tree, va_gc> **nonlocalized_list,
	     copy_body_data *id)
{
  tree old_var;
  tree new_decls = NULL_TREE;

  /* Remap its variables.  */
  for (old_var = decls; old_var; old_var = DECL_CHAIN (old_var))
    {
      tree new_var;

      if (can_be_nonlocal (old_var, id))
	{
	  /* We need to add this variable to the local decls as otherwise
	     nothing else will do so.  */
	  if (TREE_CODE (old_var) == VAR_DECL
	      && ! DECL_EXTERNAL (old_var))
	    add_local_decl (cfun, old_var);
	  if ((!optimize || debug_info_level > DINFO_LEVEL_TERSE)
	      && !DECL_IGNORED_P (old_var)
	      && nonlocalized_list)
	    vec_safe_push (*nonlocalized_list, old_var);
	  continue;
	}

      /* Remap the variable.  */
      new_var = remap_decl (old_var, id);

      /* If we didn't remap this variable, we can't mess with its
	 TREE_CHAIN.  If we remapped this variable to the return slot, it's
	 already declared somewhere else, so don't declare it here.  */

      if (new_var == id->retvar)
	;
      else if (!new_var)
	{
	  if ((!optimize || debug_info_level > DINFO_LEVEL_TERSE)
	      && !DECL_IGNORED_P (old_var)
	      && nonlocalized_list)
	    vec_safe_push (*nonlocalized_list, old_var);
	}
      else
	{
	  gcc_assert (DECL_P (new_var));
	  DECL_CHAIN (new_var) = new_decls;
	  new_decls = new_var;

	  /* Also copy value-expressions.  */
	  if (TREE_CODE (new_var) == VAR_DECL
	      && DECL_HAS_VALUE_EXPR_P (new_var))
	    {
	      tree tem = DECL_VALUE_EXPR (new_var);
	      bool old_regimplify = id->regimplify;
	      id->remapping_type_depth++;
	      walk_tree (&tem, copy_tree_body_r, id, NULL);
	      id->remapping_type_depth--;
	      id->regimplify = old_regimplify;
	      SET_DECL_VALUE_EXPR (new_var, tem);
	    }
	}
    }

  return nreverse (new_decls);
}
/* Copy the BLOCK to contain remapped versions of the variables
   therein.  And hook the new block into the block-tree.  */

static void
remap_block (tree *block, copy_body_data *id)
{
  tree old_block;
  tree new_block;

  /* Make the new block.  */
  old_block = *block;
  new_block = make_node (BLOCK);
  TREE_USED (new_block) = TREE_USED (old_block);
  BLOCK_ABSTRACT_ORIGIN (new_block) = old_block;
  BLOCK_SOURCE_LOCATION (new_block) = BLOCK_SOURCE_LOCATION (old_block);
  BLOCK_NONLOCALIZED_VARS (new_block)
    = vec_safe_copy (BLOCK_NONLOCALIZED_VARS (old_block));
  *block = new_block;

  /* Remap its variables.  */
  BLOCK_VARS (new_block) = remap_decls (BLOCK_VARS (old_block),
					&BLOCK_NONLOCALIZED_VARS (new_block),
					id);

  if (id->transform_lang_insert_block)
    id->transform_lang_insert_block (new_block);

  /* Remember the remapped block.  */
  insert_decl_map (id, old_block, new_block);
}
/* Copy the whole block tree and root it in id->block.  */

static tree
remap_blocks (tree block, copy_body_data *id)
{
  tree t;
  tree new_tree = block;

  if (!block)
    return NULL;

  remap_block (&new_tree, id);
  gcc_assert (new_tree != block);
  for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
    prepend_lexical_block (new_tree, remap_blocks (t, id));
  /* Blocks are in arbitrary order, but make things slightly prettier and do
     not swap order when producing a copy.  */
  BLOCK_SUBBLOCKS (new_tree) = blocks_nreverse (BLOCK_SUBBLOCKS (new_tree));
  return new_tree;
}
/* Remap the block tree rooted at BLOCK to nothing.  */

static void
remap_blocks_to_null (tree block, copy_body_data *id)
{
  tree t;
  insert_decl_map (id, block, NULL_TREE);
  for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
    remap_blocks_to_null (t, id);
}
static void
copy_statement_list (tree *tp)
{
  tree_stmt_iterator oi, ni;
  tree new_tree;

  new_tree = alloc_stmt_list ();
  ni = tsi_start (new_tree);
  oi = tsi_start (*tp);
  TREE_TYPE (new_tree) = TREE_TYPE (*tp);
  *tp = new_tree;

  for (; !tsi_end_p (oi); tsi_next (&oi))
    {
      tree stmt = tsi_stmt (oi);
      if (TREE_CODE (stmt) == STATEMENT_LIST)
	/* This copy is not redundant; tsi_link_after will smash this
	   STATEMENT_LIST into the end of the one we're building, and we
	   don't want to do that with the original.  */
	copy_statement_list (&stmt);
      tsi_link_after (&ni, stmt, TSI_CONTINUE_LINKING);
    }
}
static void
copy_bind_expr (tree *tp, int *walk_subtrees, copy_body_data *id)
{
  tree block = BIND_EXPR_BLOCK (*tp);
  /* Copy (and replace) the statement.  */
  copy_tree_r (tp, walk_subtrees, NULL);
  if (block)
    {
      remap_block (&block, id);
      BIND_EXPR_BLOCK (*tp) = block;
    }

  if (BIND_EXPR_VARS (*tp))
    /* This will remap a lot of the same decls again, but this should be
       harmless.  */
    BIND_EXPR_VARS (*tp) = remap_decls (BIND_EXPR_VARS (*tp), NULL, id);
}
/* Create a new gimple_seq by remapping all the statements in BODY
   using the inlining information in ID.  */

static gimple_seq
remap_gimple_seq (gimple_seq body, copy_body_data *id)
{
  gimple_stmt_iterator si;
  gimple_seq new_body = NULL;

  for (si = gsi_start (body); !gsi_end_p (si); gsi_next (&si))
    {
      gimple_seq new_stmts = remap_gimple_stmt (gsi_stmt (si), id);
      gimple_seq_add_seq (&new_body, new_stmts);
    }

  return new_body;
}
/* Copy a GIMPLE_BIND statement STMT, remapping all the symbols in its
   block using the mapping information in ID.  */

static gimple
copy_gimple_bind (gbind *stmt, copy_body_data *id)
{
  gimple new_bind;
  tree new_block, new_vars;
  gimple_seq body, new_body;

  /* Copy the statement.  Note that we purposely don't use copy_stmt
     here because we need to remap statements as we copy.  */
  body = gimple_bind_body (stmt);
  new_body = remap_gimple_seq (body, id);

  new_block = gimple_bind_block (stmt);
  if (new_block)
    remap_block (&new_block, id);

  /* This will remap a lot of the same decls again, but this should be
     harmless.  */
  new_vars = gimple_bind_vars (stmt);
  if (new_vars)
    new_vars = remap_decls (new_vars, NULL, id);

  new_bind = gimple_build_bind (new_vars, new_body, new_block);

  return new_bind;
}
/* Return true if DECL is a parameter or a SSA_NAME for a parameter.  */

static bool
is_parm (tree decl)
{
  if (TREE_CODE (decl) == SSA_NAME)
    {
      decl = SSA_NAME_VAR (decl);
      if (!decl)
	return false;
    }

  return (TREE_CODE (decl) == PARM_DECL);
}
/* Remap the dependence CLIQUE from the source to the destination function
   as specified in ID.  */

static unsigned short
remap_dependence_clique (copy_body_data *id, unsigned short clique)
{
  if (clique == 0)
    return 0;
  if (!id->dependence_map)
    id->dependence_map
      = new hash_map<unsigned short, unsigned short, dependence_hasher>;
  bool existed;
  unsigned short &newc = id->dependence_map->get_or_insert (clique, &existed);
  if (!existed)
    newc = ++cfun->last_clique;
  return newc;
}
/* Remap the GIMPLE operand pointed to by *TP.  DATA is really a
   'struct walk_stmt_info *'.  DATA->INFO is a 'copy_body_data *'.
   WALK_SUBTREES is used to indicate walk_gimple_op whether to keep
   recursing into the children nodes of *TP.  */

static tree
remap_gimple_op_r (tree *tp, int *walk_subtrees, void *data)
{
  struct walk_stmt_info *wi_p = (struct walk_stmt_info *) data;
  copy_body_data *id = (copy_body_data *) wi_p->info;
  tree fn = id->src_fn;

  if (TREE_CODE (*tp) == SSA_NAME)
    {
      *tp = remap_ssa_name (*tp, id);
      *walk_subtrees = 0;
    }
  else if (auto_var_in_fn_p (*tp, fn))
    {
      /* Local variables and labels need to be replaced by equivalent
	 variables.  We don't want to copy static variables; there's
	 only one of those, no matter how many times we inline the
	 containing function.  Similarly for globals from an outer
	 function.  */
      tree new_decl;

      /* Remap the declaration.  */
      new_decl = remap_decl (*tp, id);
      gcc_assert (new_decl);
      /* Replace this variable with the copy.  */
      STRIP_TYPE_NOPS (new_decl);
      /* ???  The C++ frontend uses void * pointer zero to initialize
         any other type.  This confuses the middle-end type verification.
         As cloned bodies do not go through gimplification again the fixup
	 there doesn't trigger.  */
      if (TREE_CODE (new_decl) == INTEGER_CST
	  && !useless_type_conversion_p (TREE_TYPE (*tp), TREE_TYPE (new_decl)))
	new_decl = fold_convert (TREE_TYPE (*tp), new_decl);
      *tp = new_decl;
    }
  else if (TREE_CODE (*tp) == STATEMENT_LIST)
    gcc_unreachable ();
  else if (TREE_CODE (*tp) == SAVE_EXPR)
    gcc_unreachable ();
  else if (TREE_CODE (*tp) == LABEL_DECL
	   && (!DECL_CONTEXT (*tp)
	       || decl_function_context (*tp) == id->src_fn))
    /* These may need to be remapped for EH handling.  */
    *tp = remap_decl (*tp, id);
  else if (TREE_CODE (*tp) == FIELD_DECL)
    {
      /* If the enclosing record type is variably_modified_type_p, the field
	 has already been remapped.  Otherwise, it need not be.  */
      tree *n = id->decl_map->get (*tp);
      if (n)
	*tp = *n;
    }
  else if (TYPE_P (*tp))
    /* Types may need remapping as well.  */
    *tp = remap_type (*tp, id);
  else if (CONSTANT_CLASS_P (*tp))
    {
      /* If this is a constant, we have to copy the node iff the type
	 will be remapped.  copy_tree_r will not copy a constant.  */
      tree new_type = remap_type (TREE_TYPE (*tp), id);

      if (new_type == TREE_TYPE (*tp))
	*walk_subtrees = 0;

      else if (TREE_CODE (*tp) == INTEGER_CST)
	*tp = wide_int_to_tree (new_type, *tp);
      else
	{
	  *tp = copy_node (*tp);
	  TREE_TYPE (*tp) = new_type;
	}
    }
  else
    {
      /* Otherwise, just copy the node.  Note that copy_tree_r already
	 knows not to copy VAR_DECLs, etc., so this is safe.  */

      if (TREE_CODE (*tp) == MEM_REF)
	{
	  /* We need to re-canonicalize MEM_REFs from inline substitutions
	     that can happen when a pointer argument is an ADDR_EXPR.
	     Recurse here manually to allow that.  */
	  tree ptr = TREE_OPERAND (*tp, 0);
	  tree type = remap_type (TREE_TYPE (*tp), id);
	  tree old = *tp;
	  walk_tree (&ptr, remap_gimple_op_r, data, NULL);
	  *tp = fold_build2 (MEM_REF, type, ptr, TREE_OPERAND (*tp, 1));
	  TREE_THIS_VOLATILE (*tp) = TREE_THIS_VOLATILE (old);
	  TREE_SIDE_EFFECTS (*tp) = TREE_SIDE_EFFECTS (old);
	  TREE_NO_WARNING (*tp) = TREE_NO_WARNING (old);
	  if (MR_DEPENDENCE_CLIQUE (old) != 0)
	    {
	      MR_DEPENDENCE_CLIQUE (*tp)
		= remap_dependence_clique (id, MR_DEPENDENCE_CLIQUE (old));
	      MR_DEPENDENCE_BASE (*tp) = MR_DEPENDENCE_BASE (old);
	    }
	  /* We cannot propagate the TREE_THIS_NOTRAP flag if we have
	     remapped a parameter as the property might be valid only
	     for the parameter itself.  */
	  if (TREE_THIS_NOTRAP (old)
	      && (!is_parm (TREE_OPERAND (old, 0))
		  || (!id->transform_parameter && is_parm (ptr))))
	    TREE_THIS_NOTRAP (*tp) = 1;
	  *walk_subtrees = 0;
	  return NULL;
	}

      /* Here is the "usual case".  Copy this tree node, and then
	 tweak some special cases.  */
      copy_tree_r (tp, walk_subtrees, NULL);

      if (TREE_CODE (*tp) != OMP_CLAUSE)
	TREE_TYPE (*tp) = remap_type (TREE_TYPE (*tp), id);

      if (TREE_CODE (*tp) == TARGET_EXPR && TREE_OPERAND (*tp, 3))
	{
	  /* The copied TARGET_EXPR has never been expanded, even if the
	     original node was expanded already.  */
	  TREE_OPERAND (*tp, 1) = TREE_OPERAND (*tp, 3);
	  TREE_OPERAND (*tp, 3) = NULL_TREE;
	}
      else if (TREE_CODE (*tp) == ADDR_EXPR)
	{
	  /* Variable substitution need not be simple.  In particular,
	     the MEM_REF substitution above.  Make sure that
	     TREE_CONSTANT and friends are up-to-date.  */
	  int invariant = is_gimple_min_invariant (*tp);
	  walk_tree (&TREE_OPERAND (*tp, 0), remap_gimple_op_r, data, NULL);
	  recompute_tree_invariant_for_addr_expr (*tp);

	  /* If this used to be invariant, but is not any longer,
	     then regimplification is probably needed.  */
	  if (invariant && !is_gimple_min_invariant (*tp))
	    id->regimplify = true;

	  *walk_subtrees = 0;
	}
    }

  /* Update the TREE_BLOCK for the cloned expr.  */
  if (EXPR_P (*tp))
    {
      tree new_block = id->remapping_type_depth == 0 ? id->block : NULL;
      tree old_block = TREE_BLOCK (*tp);
      if (old_block)
	{
	  tree *n;
	  n = id->decl_map->get (TREE_BLOCK (*tp));
	  if (n)
	    new_block = *n;
	}
      TREE_SET_BLOCK (*tp, new_block);
    }

  /* Keep iterating.  */
  return NULL;
}
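/* Illustrative example (hypothetical code): if the caller passes
   "&s.f" for pointer parameter P, the callee's "MEM[(int *)P]"
   becomes "MEM[(int *)&s.f]" after substitution; the fold_build2
   call above re-canonicalizes that into a direct reference to s.f
   where possible.  */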
/* Called from copy_body_id via walk_tree.  DATA is really a
   `copy_body_data *'.  */

tree
copy_tree_body_r (tree *tp, int *walk_subtrees, void *data)
{
  copy_body_data *id = (copy_body_data *) data;
  tree fn = id->src_fn;
  tree new_block;

  /* Begin by recognizing trees that we'll completely rewrite for the
     inlining context.  Our output for these trees is completely
     different from our input (e.g. RETURN_EXPR is deleted and morphs
     into an edge).  Further down, we'll handle trees that get
     duplicated and/or tweaked.  */

  /* When requested, RETURN_EXPRs should be transformed to just the
     contained MODIFY_EXPR.  The branch semantics of the return will
     be handled elsewhere by manipulating the CFG rather than a statement.  */
  if (TREE_CODE (*tp) == RETURN_EXPR && id->transform_return_to_modify)
    {
      tree assignment = TREE_OPERAND (*tp, 0);

      /* If we're returning something, just turn that into an
	 assignment into the equivalent of the original RESULT_DECL.
	 If the "assignment" is just the result decl, the result
	 decl has already been set (e.g. a recent "foo (&result_decl,
	 ...)"); just toss the entire RETURN_EXPR.  */
      if (assignment && TREE_CODE (assignment) == MODIFY_EXPR)
	{
	  /* Replace the RETURN_EXPR with (a copy of) the
	     MODIFY_EXPR hanging underneath.  */
	  *tp = copy_node (assignment);
	}
      else /* Else the RETURN_EXPR returns no value.  */
	{
	  *tp = NULL;
	  return (tree) (void *) 1;
	}
    }
  else if (TREE_CODE (*tp) == SSA_NAME)
    {
      *tp = remap_ssa_name (*tp, id);
      *walk_subtrees = 0;
      return NULL;
    }

  /* Local variables and labels need to be replaced by equivalent
     variables.  We don't want to copy static variables; there's only
     one of those, no matter how many times we inline the containing
     function.  Similarly for globals from an outer function.  */
  else if (auto_var_in_fn_p (*tp, fn))
    {
      tree new_decl;

      /* Remap the declaration.  */
      new_decl = remap_decl (*tp, id);
      gcc_assert (new_decl);
      /* Replace this variable with the copy.  */
      STRIP_TYPE_NOPS (new_decl);
      *tp = new_decl;
      *walk_subtrees = 0;
    }
  else if (TREE_CODE (*tp) == STATEMENT_LIST)
    copy_statement_list (tp);
  else if (TREE_CODE (*tp) == SAVE_EXPR
	   || TREE_CODE (*tp) == TARGET_EXPR)
    remap_save_expr (tp, id->decl_map, walk_subtrees);
  else if (TREE_CODE (*tp) == LABEL_DECL
	   && (! DECL_CONTEXT (*tp)
	       || decl_function_context (*tp) == id->src_fn))
    /* These may need to be remapped for EH handling.  */
    *tp = remap_decl (*tp, id);
  else if (TREE_CODE (*tp) == BIND_EXPR)
    copy_bind_expr (tp, walk_subtrees, id);
  /* Types may need remapping as well.  */
  else if (TYPE_P (*tp))
    *tp = remap_type (*tp, id);

  /* If this is a constant, we have to copy the node iff the type will be
     remapped.  copy_tree_r will not copy a constant.  */
  else if (CONSTANT_CLASS_P (*tp))
    {
      tree new_type = remap_type (TREE_TYPE (*tp), id);

      if (new_type == TREE_TYPE (*tp))
	*walk_subtrees = 0;

      else if (TREE_CODE (*tp) == INTEGER_CST)
	*tp = wide_int_to_tree (new_type, *tp);
      else
	{
	  *tp = copy_node (*tp);
	  TREE_TYPE (*tp) = new_type;
	}
    }

  /* Otherwise, just copy the node.  Note that copy_tree_r already
     knows not to copy VAR_DECLs, etc., so this is safe.  */
  else
    {
      /* Here we handle trees that are not completely rewritten.
	 First we detect some inlining-induced bogosities for
	 discarding.  */
      if (TREE_CODE (*tp) == MODIFY_EXPR
	  && TREE_OPERAND (*tp, 0) == TREE_OPERAND (*tp, 1)
	  && (auto_var_in_fn_p (TREE_OPERAND (*tp, 0), fn)))
	{
	  /* Some assignments VAR = VAR; don't generate any rtl code
	     and thus don't count as variable modification.  Avoid
	     keeping bogosities like 0 = 0.  */
	  tree decl = TREE_OPERAND (*tp, 0), value;
	  tree *n;

	  n = id->decl_map->get (decl);
	  if (n)
	    {
	      value = *n;
	      STRIP_TYPE_NOPS (value);
	      if (TREE_CONSTANT (value) || TREE_READONLY (value))
		{
		  *tp = build_empty_stmt (EXPR_LOCATION (*tp));
		  return copy_tree_body_r (tp, walk_subtrees, data);
		}
	    }
	}
      else if (TREE_CODE (*tp) == INDIRECT_REF)
	{
	  /* Get rid of *& from inline substitutions that can happen when a
	     pointer argument is an ADDR_EXPR.  */
	  tree decl = TREE_OPERAND (*tp, 0);
	  tree *n = id->decl_map->get (decl);
	  if (n)
	    {
	      /* If we happen to get an ADDR_EXPR in n->value, strip
	         it manually here as we'll eventually get ADDR_EXPRs
		 which lie about their types pointed to.  In this case
		 build_fold_indirect_ref wouldn't strip the INDIRECT_REF,
		 but we absolutely rely on that.  As fold_indirect_ref
	         does other useful transformations, try that first, though.  */
	      tree type = TREE_TYPE (*tp);
	      tree ptr = id->do_not_unshare ? *n : unshare_expr (*n);
	      tree old = *tp;
	      *tp = gimple_fold_indirect_ref (ptr);
	      if (! *tp)
	        {
		  if (TREE_CODE (ptr) == ADDR_EXPR)
		    {
		      *tp
		        = fold_indirect_ref_1 (EXPR_LOCATION (ptr), type, ptr);
		      /* ???  We should either assert here or build
			 a VIEW_CONVERT_EXPR instead of blindly leaking
			 incompatible types to our IL.  */
		      if (! *tp)
			*tp = TREE_OPERAND (ptr, 0);
		    }
	          else
		    {
	              *tp = build1 (INDIRECT_REF, type, ptr);
		      TREE_THIS_VOLATILE (*tp) = TREE_THIS_VOLATILE (old);
		      TREE_SIDE_EFFECTS (*tp) = TREE_SIDE_EFFECTS (old);
		      TREE_READONLY (*tp) = TREE_READONLY (old);
		      /* We cannot propagate the TREE_THIS_NOTRAP flag if we
			 have remapped a parameter as the property might be
			 valid only for the parameter itself.  */
		      if (TREE_THIS_NOTRAP (old)
			  && (!is_parm (TREE_OPERAND (old, 0))
			      || (!id->transform_parameter && is_parm (ptr))))
		        TREE_THIS_NOTRAP (*tp) = 1;
		    }
		}
	      *walk_subtrees = 0;
	      return NULL;
	    }
	}
      else if (TREE_CODE (*tp) == MEM_REF)
	{
	  /* We need to re-canonicalize MEM_REFs from inline substitutions
	     that can happen when a pointer argument is an ADDR_EXPR.
	     Recurse here manually to allow that.  */
	  tree ptr = TREE_OPERAND (*tp, 0);
	  tree type = remap_type (TREE_TYPE (*tp), id);
	  tree old = *tp;
	  walk_tree (&ptr, copy_tree_body_r, data, NULL);
	  *tp = fold_build2 (MEM_REF, type, ptr, TREE_OPERAND (*tp, 1));
	  TREE_THIS_VOLATILE (*tp) = TREE_THIS_VOLATILE (old);
	  TREE_SIDE_EFFECTS (*tp) = TREE_SIDE_EFFECTS (old);
	  TREE_NO_WARNING (*tp) = TREE_NO_WARNING (old);
	  if (MR_DEPENDENCE_CLIQUE (old) != 0)
	    {
	      MR_DEPENDENCE_CLIQUE (*tp)
		= remap_dependence_clique (id, MR_DEPENDENCE_CLIQUE (old));
	      MR_DEPENDENCE_BASE (*tp) = MR_DEPENDENCE_BASE (old);
	    }
	  /* We cannot propagate the TREE_THIS_NOTRAP flag if we have
	     remapped a parameter as the property might be valid only
	     for the parameter itself.  */
	  if (TREE_THIS_NOTRAP (old)
	      && (!is_parm (TREE_OPERAND (old, 0))
		  || (!id->transform_parameter && is_parm (ptr))))
	    TREE_THIS_NOTRAP (*tp) = 1;
	  *walk_subtrees = 0;
	  return NULL;
	}

      /* Here is the "usual case".  Copy this tree node, and then
	 tweak some special cases.  */
      copy_tree_r (tp, walk_subtrees, NULL);

      /* If EXPR has block defined, map it to newly constructed block.
         When inlining we want EXPRs without block appear in the block
	 of function call if we are not remapping a type.  */
      if (EXPR_P (*tp))
	{
	  new_block = id->remapping_type_depth == 0 ? id->block : NULL;
	  if (TREE_BLOCK (*tp))
	    {
	      tree *n;
	      n = id->decl_map->get (TREE_BLOCK (*tp));
	      if (n)
		new_block = *n;
	    }
	  TREE_SET_BLOCK (*tp, new_block);
	}

      if (TREE_CODE (*tp) != OMP_CLAUSE)
	TREE_TYPE (*tp) = remap_type (TREE_TYPE (*tp), id);

      /* The copied TARGET_EXPR has never been expanded, even if the
	 original node was expanded already.  */
      if (TREE_CODE (*tp) == TARGET_EXPR && TREE_OPERAND (*tp, 3))
	{
	  TREE_OPERAND (*tp, 1) = TREE_OPERAND (*tp, 3);
	  TREE_OPERAND (*tp, 3) = NULL_TREE;
	}

      /* Variable substitution need not be simple.  In particular, the
	 INDIRECT_REF substitution above.  Make sure that TREE_CONSTANT
	 and friends are up-to-date.  */
      else if (TREE_CODE (*tp) == ADDR_EXPR)
	{
	  int invariant = is_gimple_min_invariant (*tp);
	  walk_tree (&TREE_OPERAND (*tp, 0), copy_tree_body_r, id, NULL);

	  /* Handle the case where we substituted an INDIRECT_REF
	     into the operand of the ADDR_EXPR.  */
	  if (TREE_CODE (TREE_OPERAND (*tp, 0)) == INDIRECT_REF)
	    *tp = TREE_OPERAND (TREE_OPERAND (*tp, 0), 0);
	  else
	    recompute_tree_invariant_for_addr_expr (*tp);

	  /* If this used to be invariant, but is not any longer,
	     then regimplification is probably needed.  */
	  if (invariant && !is_gimple_min_invariant (*tp))
	    id->regimplify = true;

	  *walk_subtrees = 0;
	}
    }

  /* Keep iterating.  */
  return NULL;
}
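/* Illustrative example (hypothetical code): inlining
   "void set (int *p) { *p = 1; }" at the call "set (&x);" first
   substitutes the argument, yielding "*&x = 1;"; the INDIRECT_REF
   handling above folds this to the direct store "x = 1;".  */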
/* Helper for remap_gimple_stmt.  Given an EH region number for the
   source function, map that to the duplicate EH region number in
   the destination function.  */

static int
remap_eh_region_nr (int old_nr, copy_body_data *id)
{
  eh_region old_r, new_r;

  old_r = get_eh_region_from_number_fn (id->src_cfun, old_nr);
  new_r = static_cast<eh_region> (*id->eh_map->get (old_r));

  return new_r->index;
}

/* Similar, but operate on INTEGER_CSTs.  */

static tree
remap_eh_region_tree_nr (tree old_t_nr, copy_body_data *id)
{
  int old_nr, new_nr;

  old_nr = tree_to_shwi (old_t_nr);
  new_nr = remap_eh_region_nr (old_nr, id);

  return build_int_cst (integer_type_node, new_nr);
}
/* Helper for copy_bb.  Remap statement STMT using the inlining
   information in ID.  Return the new statement copy.  */

static gimple_seq
remap_gimple_stmt (gimple stmt, copy_body_data *id)
{
  gimple copy = NULL;
  struct walk_stmt_info wi;
  bool skip_first = false;
  gimple_seq stmts = NULL;

  if (is_gimple_debug (stmt)
      && !opt_for_fn (id->dst_fn, flag_var_tracking_assignments))
    return stmts;

  /* Begin by recognizing trees that we'll completely rewrite for the
     inlining context.  Our output for these trees is completely
     different from our input (e.g. RETURN_EXPR is deleted and morphs
     into an edge).  Further down, we'll handle trees that get
     duplicated and/or tweaked.  */

  /* When requested, GIMPLE_RETURNs should be transformed to just the
     contained GIMPLE_ASSIGN.  The branch semantics of the return will
     be handled elsewhere by manipulating the CFG rather than the
     statement.  */
  if (gimple_code (stmt) == GIMPLE_RETURN && id->transform_return_to_modify)
    {
      tree retval = gimple_return_retval (as_a <greturn *> (stmt));
      tree retbnd = gimple_return_retbnd (stmt);
      tree bndslot = id->retbnd;

      if (retbnd && bndslot)
	{
	  gimple bndcopy = gimple_build_assign (bndslot, retbnd);
	  memset (&wi, 0, sizeof (wi));
	  wi.info = id;
	  walk_gimple_op (bndcopy, remap_gimple_op_r, &wi);
	  gimple_seq_add_stmt (&stmts, bndcopy);
	}

      /* If we're returning something, just turn that into an
	 assignment into the equivalent of the original RESULT_DECL.
	 If RETVAL is just the result decl, the result decl has
	 already been set (e.g. a recent "foo (&result_decl, ...)");
	 just toss the entire GIMPLE_RETURN.  */
      if (retval
	  && (TREE_CODE (retval) != RESULT_DECL
	      && (TREE_CODE (retval) != SSA_NAME
		  || ! SSA_NAME_VAR (retval)
		  || TREE_CODE (SSA_NAME_VAR (retval)) != RESULT_DECL)))
	{
	  copy = gimple_build_assign (id->do_not_unshare
				      ? id->retvar : unshare_expr (id->retvar),
				      retval);
	  /* id->retvar is already substituted.  Skip it on later remapping.  */
	  skip_first = true;

	  /* We need to copy bounds if return structure with pointers into
	     instrumented function.  */
	  if (chkp_function_instrumented_p (id->dst_fn)
	      && !bndslot
	      && !BOUNDED_P (id->retvar)
	      && chkp_type_has_pointer (TREE_TYPE (id->retvar)))
	    id->assign_stmts.safe_push (copy);
	}
      else
	return stmts;
    }
  else if (gimple_has_substatements (stmt))
    {
      gimple_seq s1, s2;

      /* When cloning bodies from the C++ front end, we will be handed bodies
	 in High GIMPLE form.  Handle here all the High GIMPLE statements that
	 have embedded statements.  */
      switch (gimple_code (stmt))
	{
	case GIMPLE_BIND:
	  copy = copy_gimple_bind (as_a <gbind *> (stmt), id);
	  break;

	case GIMPLE_CATCH:
	  {
	    gcatch *catch_stmt = as_a <gcatch *> (stmt);
	    s1 = remap_gimple_seq (gimple_catch_handler (catch_stmt), id);
	    copy = gimple_build_catch (gimple_catch_types (catch_stmt), s1);
	  }
	  break;

	case GIMPLE_EH_FILTER:
	  s1 = remap_gimple_seq (gimple_eh_filter_failure (stmt), id);
	  copy = gimple_build_eh_filter (gimple_eh_filter_types (stmt), s1);
	  break;

	case GIMPLE_TRY:
	  s1 = remap_gimple_seq (gimple_try_eval (stmt), id);
	  s2 = remap_gimple_seq (gimple_try_cleanup (stmt), id);
	  copy = gimple_build_try (s1, s2, gimple_try_kind (stmt));
	  break;

	case GIMPLE_WITH_CLEANUP_EXPR:
	  s1 = remap_gimple_seq (gimple_wce_cleanup (stmt), id);
	  copy = gimple_build_wce (s1);
	  break;

	case GIMPLE_OMP_PARALLEL:
	  {
	    gomp_parallel *omp_par_stmt = as_a <gomp_parallel *> (stmt);
	    s1 = remap_gimple_seq (gimple_omp_body (omp_par_stmt), id);
	    copy = gimple_build_omp_parallel
		     (s1,
		      gimple_omp_parallel_clauses (omp_par_stmt),
		      gimple_omp_parallel_child_fn (omp_par_stmt),
		      gimple_omp_parallel_data_arg (omp_par_stmt));
	  }
	  break;

	case GIMPLE_OMP_TASK:
	  s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
	  copy = gimple_build_omp_task
		   (s1,
		    gimple_omp_task_clauses (stmt),
		    gimple_omp_task_child_fn (stmt),
		    gimple_omp_task_data_arg (stmt),
		    gimple_omp_task_copy_fn (stmt),
		    gimple_omp_task_arg_size (stmt),
		    gimple_omp_task_arg_align (stmt));
	  break;

	case GIMPLE_OMP_FOR:
	  s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
	  s2 = remap_gimple_seq (gimple_omp_for_pre_body (stmt), id);
	  copy = gimple_build_omp_for (s1, gimple_omp_for_kind (stmt),
				       gimple_omp_for_clauses (stmt),
				       gimple_omp_for_collapse (stmt), s2);
	  {
	    size_t i;
	    for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
	      {
		gimple_omp_for_set_index (copy, i,
					  gimple_omp_for_index (stmt, i));
		gimple_omp_for_set_initial (copy, i,
					    gimple_omp_for_initial (stmt, i));
		gimple_omp_for_set_final (copy, i,
					  gimple_omp_for_final (stmt, i));
		gimple_omp_for_set_incr (copy, i,
					 gimple_omp_for_incr (stmt, i));
		gimple_omp_for_set_cond (copy, i,
					 gimple_omp_for_cond (stmt, i));
	      }
	  }
	  break;

	case GIMPLE_OMP_MASTER:
	  s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
	  copy = gimple_build_omp_master (s1);
	  break;

	case GIMPLE_OMP_TASKGROUP:
	  s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
	  copy = gimple_build_omp_taskgroup (s1);
	  break;

	case GIMPLE_OMP_ORDERED:
	  s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
	  copy = gimple_build_omp_ordered (s1);
	  break;

	case GIMPLE_OMP_SECTION:
	  s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
	  copy = gimple_build_omp_section (s1);
	  break;

	case GIMPLE_OMP_SECTIONS:
	  s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
	  copy = gimple_build_omp_sections
		   (s1, gimple_omp_sections_clauses (stmt));
	  break;

	case GIMPLE_OMP_SINGLE:
	  s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
	  copy = gimple_build_omp_single
		   (s1, gimple_omp_single_clauses (stmt));
	  break;

	case GIMPLE_OMP_TARGET:
	  s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
	  copy = gimple_build_omp_target
		   (s1, gimple_omp_target_kind (stmt),
		    gimple_omp_target_clauses (stmt));
	  break;

	case GIMPLE_OMP_TEAMS:
	  s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
	  copy = gimple_build_omp_teams
		   (s1, gimple_omp_teams_clauses (stmt));
	  break;

	case GIMPLE_OMP_CRITICAL:
	  s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
	  copy = gimple_build_omp_critical (s1,
					    gimple_omp_critical_name (
					      as_a <gomp_critical *> (stmt)));
	  break;

	case GIMPLE_TRANSACTION:
	  {
	    gtransaction *old_trans_stmt = as_a <gtransaction *> (stmt);
	    gtransaction *new_trans_stmt;
	    s1 = remap_gimple_seq (gimple_transaction_body (old_trans_stmt),
				   id);
	    copy = new_trans_stmt
	      = gimple_build_transaction (
		  s1,
		  gimple_transaction_label (old_trans_stmt));
	    gimple_transaction_set_subcode (
	      new_trans_stmt,
	      gimple_transaction_subcode (old_trans_stmt));
	  }
	  break;

	default:
	  gcc_unreachable ();
	}
    }
  else
    {
      if (gimple_assign_copy_p (stmt)
	  && gimple_assign_lhs (stmt) == gimple_assign_rhs1 (stmt)
	  && auto_var_in_fn_p (gimple_assign_lhs (stmt), id->src_fn))
	{
	  /* Here we handle statements that are not completely rewritten.
	     First we detect some inlining-induced bogosities for
	     discarding.  */

	  /* Some assignments VAR = VAR; don't generate any rtl code
	     and thus don't count as variable modification.  Avoid
	     keeping bogosities like 0 = 0.  */
	  tree decl = gimple_assign_lhs (stmt), value;
	  tree *n;

	  n = id->decl_map->get (decl);
	  if (n)
	    {
	      value = *n;
	      STRIP_TYPE_NOPS (value);
	      if (TREE_CONSTANT (value) || TREE_READONLY (value))
		return NULL;
	    }
	}

      /* For *ptr_N ={v} {CLOBBER}, if ptr_N is SSA_NAME defined
	 in a block that we aren't copying during tree_function_versioning,
	 just drop the clobber stmt.  */
      if (id->blocks_to_copy && gimple_clobber_p (stmt))
	{
	  tree lhs = gimple_assign_lhs (stmt);
	  if (TREE_CODE (lhs) == MEM_REF
	      && TREE_CODE (TREE_OPERAND (lhs, 0)) == SSA_NAME)
	    {
	      gimple def_stmt = SSA_NAME_DEF_STMT (TREE_OPERAND (lhs, 0));
	      if (gimple_bb (def_stmt)
		  && !bitmap_bit_p (id->blocks_to_copy,
				    gimple_bb (def_stmt)->index))
		return NULL;
	    }
	}

      if (gimple_debug_bind_p (stmt))
	{
	  gdebug *copy
	    = gimple_build_debug_bind (gimple_debug_bind_get_var (stmt),
				       gimple_debug_bind_get_value (stmt),
				       stmt);
	  id->debug_stmts.safe_push (copy);
	  gimple_seq_add_stmt (&stmts, copy);
	  return stmts;
	}
      if (gimple_debug_source_bind_p (stmt))
	{
	  gdebug *copy = gimple_build_debug_source_bind
			   (gimple_debug_source_bind_get_var (stmt),
			    gimple_debug_source_bind_get_value (stmt),
			    stmt);
	  id->debug_stmts.safe_push (copy);
	  gimple_seq_add_stmt (&stmts, copy);
	  return stmts;
	}

      /* Create a new deep copy of the statement.  */
      copy = gimple_copy (stmt);

      /* Clear flags that need revisiting.  */
      if (gcall *call_stmt = dyn_cast <gcall *> (copy))
	{
	  if (gimple_call_tail_p (call_stmt))
	    gimple_call_set_tail (call_stmt, false);
	  if (gimple_call_from_thunk_p (call_stmt))
	    gimple_call_set_from_thunk (call_stmt, false);
	}

      /* Remap the region numbers for __builtin_eh_{pointer,filter},
	 RESX and EH_DISPATCH.  */
      if (id->eh_map)
	switch (gimple_code (copy))
	  {
	  case GIMPLE_CALL:
	    {
	      tree r, fndecl = gimple_call_fndecl (copy);
	      if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
		switch (DECL_FUNCTION_CODE (fndecl))
		  {
		  case BUILT_IN_EH_COPY_VALUES:
		    r = gimple_call_arg (copy, 1);
		    r = remap_eh_region_tree_nr (r, id);
		    gimple_call_set_arg (copy, 1, r);
		    /* FALLTHRU */

		  case BUILT_IN_EH_POINTER:
		  case BUILT_IN_EH_FILTER:
		    r = gimple_call_arg (copy, 0);
		    r = remap_eh_region_tree_nr (r, id);
		    gimple_call_set_arg (copy, 0, r);
		    break;

		  default:
		    break;
		  }

	      /* Reset alias info if we didn't apply measures to
		 keep it valid over inlining by setting DECL_PT_UID.  */
	      if (!id->src_cfun->gimple_df
		  || !id->src_cfun->gimple_df->ipa_pta)
		gimple_call_reset_alias_info (as_a <gcall *> (copy));
	    }
	    break;

	  case GIMPLE_RESX:
	    {
	      gresx *resx_stmt = as_a <gresx *> (copy);
	      int r = gimple_resx_region (resx_stmt);
	      r = remap_eh_region_nr (r, id);
	      gimple_resx_set_region (resx_stmt, r);
	    }
	    break;

	  case GIMPLE_EH_DISPATCH:
	    {
	      geh_dispatch *eh_dispatch = as_a <geh_dispatch *> (copy);
	      int r = gimple_eh_dispatch_region (eh_dispatch);
	      r = remap_eh_region_nr (r, id);
	      gimple_eh_dispatch_set_region (eh_dispatch, r);
	    }
	    break;

	  default:
	    break;
	  }
    }

  /* If STMT has a block defined, map it to the newly constructed
     block.  */
  if (gimple_block (copy))
    {
      tree *n;
      n = id->decl_map->get (gimple_block (copy));
      gcc_assert (n);
      gimple_set_block (copy, *n);
    }

  if (gimple_debug_bind_p (copy) || gimple_debug_source_bind_p (copy))
    {
      gimple_seq_add_stmt (&stmts, copy);
      return stmts;
    }

  /* Remap all the operands in COPY.  */
  memset (&wi, 0, sizeof (wi));
  wi.info = id;
  if (skip_first)
    walk_tree (gimple_op_ptr (copy, 1), remap_gimple_op_r, &wi, NULL);
  else
    walk_gimple_op (copy, remap_gimple_op_r, &wi);

  /* Clear the copied virtual operands.  We are not remapping them here
     but are going to recreate them from scratch.  */
  if (gimple_has_mem_ops (copy))
    {
      gimple_set_vdef (copy, NULL_TREE);
      gimple_set_vuse (copy, NULL_TREE);
    }

  gimple_seq_add_stmt (&stmts, copy);
  return stmts;
}
1733 /* Copy basic block, scale profile accordingly. Edges will be taken care of
1737 copy_bb (copy_body_data
*id
, basic_block bb
, int frequency_scale
,
1738 gcov_type count_scale
)
1740 gimple_stmt_iterator gsi
, copy_gsi
, seq_gsi
;
1741 basic_block copy_basic_block
;
1746 /* Search for previous copied basic block. */
1749 prev
= prev
->prev_bb
;
1751 /* create_basic_block() will append every new block to
1752 basic_block_info automatically. */
1753 copy_basic_block
= create_basic_block (NULL
, (basic_block
) prev
->aux
);
1754 copy_basic_block
->count
= apply_scale (bb
->count
, count_scale
);
1756 /* We are going to rebuild frequencies from scratch. These values
1757 have just small importance to drive canonicalize_loop_headers. */
1758 freq
= apply_scale ((gcov_type
)bb
->frequency
, frequency_scale
);
1760 /* We recompute frequencies after inlining, so this is quite safe. */
1761 if (freq
> BB_FREQ_MAX
)
1763 copy_basic_block
->frequency
= freq
;
1765 copy_gsi
= gsi_start_bb (copy_basic_block
);
1767 for (gsi
= gsi_start_bb (bb
); !gsi_end_p (gsi
); gsi_next (&gsi
))
1770 gimple stmt
= gsi_stmt (gsi
);
1771 gimple orig_stmt
= stmt
;
1772 gimple_stmt_iterator stmts_gsi
;
1773 bool stmt_added
= false;
1775 id
->regimplify
= false;
1776 stmts
= remap_gimple_stmt (stmt
, id
);
1778 if (gimple_seq_empty_p (stmts
))
1783 for (stmts_gsi
= gsi_start (stmts
);
1784 !gsi_end_p (stmts_gsi
); )
1786 stmt
= gsi_stmt (stmts_gsi
);
1788 /* Advance iterator now before stmt is moved to seq_gsi. */
1789 gsi_next (&stmts_gsi
);
1791 if (gimple_nop_p (stmt
))
1794 gimple_duplicate_stmt_histograms (cfun
, stmt
, id
->src_cfun
,
1797 /* With return slot optimization we can end up with
1798 non-gimple (foo *)&this->m, fix that here. */
1799 if (is_gimple_assign (stmt
)
1800 && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt
))
1801 && !is_gimple_val (gimple_assign_rhs1 (stmt
)))
1804 new_rhs
= force_gimple_operand_gsi (&seq_gsi
,
1805 gimple_assign_rhs1 (stmt
),
1807 GSI_CONTINUE_LINKING
);
1808 gimple_assign_set_rhs1 (stmt
, new_rhs
);
1809 id
->regimplify
= false;
1812 gsi_insert_after (&seq_gsi
, stmt
, GSI_NEW_STMT
);
1815 gimple_regimplify_operands (stmt
, &seq_gsi
);
1823 /* If copy_basic_block has been empty at the start of this iteration,
1824 call gsi_start_bb again to get at the newly added statements. */
1825 if (gsi_end_p (copy_gsi
))
1826 copy_gsi
= gsi_start_bb (copy_basic_block
);
1828 gsi_next (©_gsi
);
1830 /* Process the new statement. The call to gimple_regimplify_operands
1831 possibly turned the statement into multiple statements, we
1832 need to process all of them. */
1838 stmt
= gsi_stmt (copy_gsi
);
1839 call_stmt
= dyn_cast
<gcall
*> (stmt
);
1841 && gimple_call_va_arg_pack_p (call_stmt
)
1844 /* __builtin_va_arg_pack () should be replaced by
1845 all arguments corresponding to ... in the caller. */
1849 size_t nargs
= gimple_call_num_args (id
->call_stmt
);
1850 size_t n
, i
, nargs_to_copy
;
1851 bool remove_bounds
= false;
1853 for (p
= DECL_ARGUMENTS (id
->src_fn
); p
; p
= DECL_CHAIN (p
))
1856 /* Bounds should be removed from arg pack in case
1857 we handle not instrumented call in instrumented
1859 nargs_to_copy
= nargs
;
1860 if (gimple_call_with_bounds_p (id
->call_stmt
)
1861 && !gimple_call_with_bounds_p (stmt
))
1863 for (i
= gimple_call_num_args (id
->call_stmt
) - nargs
;
1864 i
< gimple_call_num_args (id
->call_stmt
);
1866 if (POINTER_BOUNDS_P (gimple_call_arg (id
->call_stmt
, i
)))
1868 remove_bounds
= true;
1871 /* Create the new array of arguments. */
1872 n
= nargs_to_copy
+ gimple_call_num_args (call_stmt
);
1873 argarray
.create (n
);
1874 argarray
.safe_grow_cleared (n
);
1876 /* Copy all the arguments before '...' */
1877 memcpy (argarray
.address (),
1878 gimple_call_arg_ptr (call_stmt
, 0),
1879 gimple_call_num_args (call_stmt
) * sizeof (tree
));
1883 /* Append the rest of arguments removing bounds. */
1884 unsigned cur
= gimple_call_num_args (call_stmt
);
1885 i
= gimple_call_num_args (id
->call_stmt
) - nargs
;
1886 for (i
= gimple_call_num_args (id
->call_stmt
) - nargs
;
1887 i
< gimple_call_num_args (id
->call_stmt
);
1889 if (!POINTER_BOUNDS_P (gimple_call_arg (id
->call_stmt
, i
)))
1890 argarray
[cur
++] = gimple_call_arg (id
->call_stmt
, i
);
1891 gcc_assert (cur
== n
);
1895 /* Append the arguments passed in '...' */
1896 memcpy (argarray
.address () + gimple_call_num_args (call_stmt
),
1897 gimple_call_arg_ptr (id
->call_stmt
, 0)
1898 + (gimple_call_num_args (id
->call_stmt
) - nargs
),
1899 nargs
* sizeof (tree
));
1902 new_call
= gimple_build_call_vec (gimple_call_fn (call_stmt
),
1905 argarray
.release ();
1907 /* Copy all GIMPLE_CALL flags, location and block, except
1908 GF_CALL_VA_ARG_PACK. */
1909 gimple_call_copy_flags (new_call
, call_stmt
);
1910 gimple_call_set_va_arg_pack (new_call
					 false);

	      gimple_set_location (new_call, gimple_location (stmt));
	      gimple_set_block (new_call, gimple_block (stmt));
	      gimple_call_set_lhs (new_call, gimple_call_lhs (call_stmt));

	      gsi_replace (&copy_gsi, new_call, false);
	      stmt = new_call;
	    }
	  else if (call_stmt
		   && id->call_stmt
		   && (decl = gimple_call_fndecl (stmt))
		   && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
		   && DECL_FUNCTION_CODE (decl) == BUILT_IN_VA_ARG_PACK_LEN)
	    {
	      /* __builtin_va_arg_pack_len () should be replaced by
		 the number of anonymous arguments.  */
	      size_t nargs = gimple_call_num_args (id->call_stmt), i;
	      tree count, p;
	      gimple new_stmt;

	      for (p = DECL_ARGUMENTS (id->src_fn); p; p = DECL_CHAIN (p))
		nargs--;

	      /* For instrumented calls we should ignore bounds.  */
	      for (i = gimple_call_num_args (id->call_stmt) - nargs;
		   i < gimple_call_num_args (id->call_stmt); i++)
		if (POINTER_BOUNDS_P (gimple_call_arg (id->call_stmt, i)))
		  nargs--;

	      count = build_int_cst (integer_type_node, nargs);
	      new_stmt = gimple_build_assign (gimple_call_lhs (stmt), count);
	      gsi_replace (&copy_gsi, new_stmt, false);
	      stmt = new_stmt;
	    }
	  else if (call_stmt
		   && id->call_stmt
		   && gimple_call_internal_p (stmt)
		   && gimple_call_internal_fn (stmt) == IFN_TSAN_FUNC_EXIT)
	    {
	      /* Drop TSAN_FUNC_EXIT () internal calls during inlining.  */
	      gsi_remove (&copy_gsi, false);
	      continue;
	    }

	  /* Statements produced by inlining can be unfolded, especially
	     when we constant propagated some operands.  We can't fold
	     them right now for two reasons:
	     1) folding requires SSA_NAME_DEF_STMTs to be correct
	     2) we can't change function calls to builtins.
	     So we just mark the statement for later folding.  We mark
	     all new statements, instead of just the statements that changed
	     through some nontrivial substitution, so that even statements
	     made foldable indirectly are updated.  If this turns out to be
	     expensive, copy_body can be told to watch for nontrivial
	     substitutions.  */
	  if (id->statements_to_fold)
	    id->statements_to_fold->add (stmt);

	  /* We're duplicating a CALL_EXPR.  Find any corresponding
	     callgraph edges and update or duplicate them.  */
	  if (gcall *call_stmt = dyn_cast <gcall *> (stmt))
	    {
	      struct cgraph_edge *edge;
	      tree fn;

	      switch (id->transform_call_graph_edges)
		{
		case CB_CGE_DUPLICATE:
		  edge = id->src_node->get_edge (orig_stmt);
		  if (edge)
		    {
		      int edge_freq = edge->frequency;
		      int new_freq;
		      struct cgraph_edge *old_edge = edge;
		      edge = edge->clone (id->dst_node, call_stmt,
					  gimple_uid (stmt),
					  REG_BR_PROB_BASE, CGRAPH_FREQ_BASE,
					  true);
		      /* We could also just rescale the frequency, but
			 doing so would introduce roundoff errors and make
			 verifier unhappy.  */
		      new_freq = compute_call_stmt_bb_frequency (id->dst_node->decl,
								 copy_basic_block);

		      /* Speculative calls consist of two edges - direct and indirect.
			 Duplicate the whole thing and distribute frequencies accordingly.  */
		      if (edge->speculative)
			{
			  struct cgraph_edge *direct, *indirect;
			  struct ipa_ref *ref;

			  gcc_assert (!edge->indirect_unknown_callee);
			  old_edge->speculative_call_info (direct, indirect, ref);
			  indirect = indirect->clone (id->dst_node, call_stmt,
						      gimple_uid (stmt),
						      REG_BR_PROB_BASE, CGRAPH_FREQ_BASE,
						      true);
			  if (old_edge->frequency + indirect->frequency)
			    {
			      edge->frequency
				= MIN (RDIV ((gcov_type)new_freq * old_edge->frequency,
					     (old_edge->frequency + indirect->frequency)),
				       CGRAPH_FREQ_MAX);
			      indirect->frequency
				= MIN (RDIV ((gcov_type)new_freq * indirect->frequency,
					     (old_edge->frequency + indirect->frequency)),
				       CGRAPH_FREQ_MAX);
			    }
			  id->dst_node->clone_reference (ref, stmt);
			}
		      else
			edge->frequency = new_freq;
		      if (dump_file
			  && profile_status_for_fn (cfun) != PROFILE_ABSENT
			  && (edge_freq > edge->frequency + 10
			      || edge_freq < edge->frequency - 10))
			{
			  fprintf (dump_file, "Edge frequency estimated by "
				   "cgraph %i diverges from inliner's estimate %i\n",
				   edge_freq, edge->frequency);
			  fprintf (dump_file,
				   "Orig bb: %i, orig bb freq %i, new bb freq %i\n",
				   bb->index, bb->frequency,
				   copy_basic_block->frequency);
			}
		    }
		  break;

		case CB_CGE_MOVE_CLONES:
		  id->dst_node->set_call_stmt_including_clones (orig_stmt,
								call_stmt);
		  edge = id->dst_node->get_edge (stmt);
		  break;

		case CB_CGE_MOVE:
		  edge = id->dst_node->get_edge (orig_stmt);
		  if (edge)
		    edge->set_call_stmt (call_stmt);
		  break;

		default:
		  gcc_unreachable ();
		}

	      /* Constant propagation on arguments done during inlining
		 may create new direct calls.  Produce an edge for them.  */
	      if ((!edge
		   || (edge->indirect_inlining_edge
		       && id->transform_call_graph_edges == CB_CGE_MOVE_CLONES))
		  && id->dst_node->definition
		  && (fn = gimple_call_fndecl (stmt)) != NULL)
		{
		  struct cgraph_node *dest = cgraph_node::get (fn);

		  /* We have a missing edge in the callgraph.  This can happen
		     when previous inlining turned an indirect call into a
		     direct call by constant propagating arguments or we are
		     producing a dead clone (for further cloning).  In all
		     other cases we hit a bug (incorrect node sharing is the
		     most common reason for missing edges).  */
		  gcc_assert (!dest->definition
			      || dest->address_taken
			      || !id->src_node->definition
			      || !id->dst_node->definition);
		  if (id->transform_call_graph_edges == CB_CGE_MOVE_CLONES)
		    id->dst_node->create_edge_including_clones
		      (dest, orig_stmt, call_stmt, bb->count,
		       compute_call_stmt_bb_frequency (id->dst_node->decl,
						       copy_basic_block),
		       CIF_ORIGINALLY_INDIRECT_CALL);
		  else
		    id->dst_node->create_edge (dest, call_stmt,
					       bb->count,
					       compute_call_stmt_bb_frequency
						 (id->dst_node->decl,
						  copy_basic_block))->inline_failed
		      = CIF_ORIGINALLY_INDIRECT_CALL;
		  if (dump_file)
		    fprintf (dump_file, "Created new direct edge to %s\n",
			     dest->name ());
		}

	      notice_special_calls (as_a <gcall *> (stmt));
	    }

	  maybe_duplicate_eh_stmt_fn (cfun, stmt, id->src_cfun, orig_stmt,
				      id->eh_map, id->eh_lp_nr);

	  if (gimple_in_ssa_p (cfun) && !is_gimple_debug (stmt))
	    {
	      ssa_op_iter i;
	      tree def;

	      FOR_EACH_SSA_TREE_OPERAND (def, stmt, i, SSA_OP_DEF)
		if (TREE_CODE (def) == SSA_NAME)
		  SSA_NAME_DEF_STMT (def) = stmt;
	    }

	  gsi_next (&copy_gsi);
	}
      while (!gsi_end_p (copy_gsi));

      copy_gsi = gsi_last_bb (copy_basic_block);
    }

  return copy_basic_block;
}
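
/* Illustrative sketch (not part of GCC itself): the
   __builtin_va_arg_pack_len handling above only triggers inside an
   always_inline variadic wrapper, e.g.

     extern int my_log (const char *fmt, ...);

     static inline __attribute__ ((always_inline)) int
     checked_log (const char *fmt, ...)
     {
       if (__builtin_va_arg_pack_len () > 8)
	 return -1;
       return my_log (fmt, __builtin_va_arg_pack ());
     }

   When checked_log ("%d %d", a, b) is inlined, the builtin is replaced
   by the constant 2, the number of anonymous arguments at the call
   site.  The names my_log/checked_log are hypothetical.  */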
/* Inserting a Single Entry Multiple Exit region in SSA form into code in
   SSA form is quite easy, since the dominator relationship for the old
   basic blocks does not change.

   There is however one exception: inlining might change the dominator
   relation across EH edges from a basic block within the inlined function
   destined for a landing pad in the function we inline into.

   The function fills in PHI_RESULTs of such PHI nodes if they refer
   to gimple regs.  Otherwise, the function marks the PHI_RESULT of such
   PHI nodes for renaming.  For non-gimple regs, renaming is safe: the
   EH edges are abnormal and SSA_NAME_OCCURS_IN_ABNORMAL_PHI must be
   set, and this means that there will be no overlapping live ranges
   for the underlying symbol.

   This might change in the future if we allow redirecting of EH edges;
   we might then want to change the way we build the CFG pre-inlining
   to include all the possible edges.  */

static void
update_ssa_across_abnormal_edges (basic_block bb, basic_block ret_bb,
				  bool can_throw, bool nonlocal_goto)
{
  edge e;
  edge_iterator ei;

  FOR_EACH_EDGE (e, ei, bb->succs)
    if (!e->dest->aux
	|| ((basic_block)e->dest->aux)->index == ENTRY_BLOCK)
      {
	gphi *phi;
	gphi_iterator si;

	if (!nonlocal_goto)
	  gcc_assert (e->flags & EDGE_EH);

	if (!can_throw)
	  gcc_assert (!(e->flags & EDGE_EH));

	for (si = gsi_start_phis (e->dest); !gsi_end_p (si); gsi_next (&si))
	  {
	    edge re;

	    phi = si.phi ();

	    /* For abnormal goto/call edges the receiver can be the
	       ENTRY_BLOCK.  Do not assert this cannot happen.  */

	    gcc_assert ((e->flags & EDGE_EH)
			|| SSA_NAME_OCCURS_IN_ABNORMAL_PHI (PHI_RESULT (phi)));

	    re = find_edge (ret_bb, e->dest);
	    gcc_checking_assert (re);
	    gcc_assert ((re->flags & (EDGE_EH | EDGE_ABNORMAL))
			== (e->flags & (EDGE_EH | EDGE_ABNORMAL)));

	    SET_USE (PHI_ARG_DEF_PTR_FROM_EDGE (phi, e),
		     USE_FROM_PTR (PHI_ARG_DEF_PTR_FROM_EDGE (phi, re)));
	  }
      }
}
/* Copy edges from BB into its copy constructed earlier, scale profile
   accordingly.  Edges will be taken care of later.  Assume aux
   pointers to point to the copies of each BB.  Return true if any
   debug stmts are left after a statement that must end the basic block.  */

static bool
copy_edges_for_bb (basic_block bb, gcov_type count_scale, basic_block ret_bb,
		   basic_block abnormal_goto_dest)
{
  basic_block new_bb = (basic_block) bb->aux;
  edge_iterator ei;
  edge old_edge;
  gimple_stmt_iterator si;
  int flags;
  bool need_debug_cleanup = false;

  /* Use the indices from the original blocks to create edges for the
     new ones.  */
  FOR_EACH_EDGE (old_edge, ei, bb->succs)
    if (!(old_edge->flags & EDGE_EH))
      {
	edge new_edge;

	flags = old_edge->flags;

	/* Return edges do get a FALLTHRU flag when they get inlined.  */
	if (old_edge->dest->index == EXIT_BLOCK
	    && !(old_edge->flags & (EDGE_TRUE_VALUE|EDGE_FALSE_VALUE|EDGE_FAKE))
	    && old_edge->dest->aux != EXIT_BLOCK_PTR_FOR_FN (cfun))
	  flags |= EDGE_FALLTHRU;
	new_edge = make_edge (new_bb, (basic_block) old_edge->dest->aux, flags);
	new_edge->count = apply_scale (old_edge->count, count_scale);
	new_edge->probability = old_edge->probability;
      }

  if (bb->index == ENTRY_BLOCK || bb->index == EXIT_BLOCK)
    return false;

  for (si = gsi_start_bb (new_bb); !gsi_end_p (si);)
    {
      gimple copy_stmt;
      bool can_throw, nonlocal_goto;

      copy_stmt = gsi_stmt (si);
      if (!is_gimple_debug (copy_stmt))
	update_stmt (copy_stmt);

      /* Do this before the possible split_block.  */
      gsi_next (&si);

      /* If this tree could throw an exception, there are two
	 cases where we need to add abnormal edge(s): the
	 tree wasn't in a region and there is a "current
	 region" in the caller; or the original tree had
	 EH edges.  In both cases split the block after the tree,
	 and add abnormal edge(s) as needed; we need both
	 those from the callee and the caller.
	 We check whether the copy can throw, because the const
	 propagation can change an INDIRECT_REF which throws
	 into a COMPONENT_REF which doesn't.  If the copy
	 can throw, the original could also throw.  */
      can_throw = stmt_can_throw_internal (copy_stmt);
      nonlocal_goto
	= (stmt_can_make_abnormal_goto (copy_stmt)
	   && !computed_goto_p (copy_stmt));

      if (can_throw || nonlocal_goto)
	{
	  if (!gsi_end_p (si))
	    {
	      while (!gsi_end_p (si) && is_gimple_debug (gsi_stmt (si)))
		gsi_next (&si);
	      if (gsi_end_p (si))
		need_debug_cleanup = true;
	    }
	  if (!gsi_end_p (si))
	    /* Note that bb's predecessor edges aren't necessarily
	       right at this point; split_block doesn't care.  */
	    {
	      edge e = split_block (new_bb, copy_stmt);

	      new_bb = e->dest;
	      new_bb->aux = e->src->aux;
	      si = gsi_start_bb (new_bb);
	    }
	}

      if (gimple_code (copy_stmt) == GIMPLE_EH_DISPATCH)
	make_eh_dispatch_edges (as_a <geh_dispatch *> (copy_stmt));
      else if (can_throw)
	make_eh_edges (copy_stmt);

      /* If the call we inline cannot make an abnormal goto, do not add
	 additional abnormal edges but only retain those already present
	 in the original function body.  */
      if (abnormal_goto_dest == NULL)
	nonlocal_goto = false;
      if (nonlocal_goto)
	{
	  basic_block copy_stmt_bb = gimple_bb (copy_stmt);

	  if (get_abnormal_succ_dispatcher (copy_stmt_bb))
	    nonlocal_goto = false;
	  /* ABNORMAL_DISPATCHER (1) is for longjmp/setjmp or nonlocal gotos
	     in OpenMP regions which aren't allowed to be left abnormally.
	     So, no need to add an abnormal edge in that case.  */
	  else if (is_gimple_call (copy_stmt)
		   && gimple_call_internal_p (copy_stmt)
		   && (gimple_call_internal_fn (copy_stmt)
		       == IFN_ABNORMAL_DISPATCHER)
		   && gimple_call_arg (copy_stmt, 0) == boolean_true_node)
	    nonlocal_goto = false;
	  else
	    make_edge (copy_stmt_bb, abnormal_goto_dest, EDGE_ABNORMAL);
	}

      if ((can_throw || nonlocal_goto)
	  && gimple_in_ssa_p (cfun))
	update_ssa_across_abnormal_edges (gimple_bb (copy_stmt), ret_bb,
					  can_throw, nonlocal_goto);
    }
  return need_debug_cleanup;
}
/* Copy the PHIs.  All blocks and edges are copied, some blocks
   were possibly split and new outgoing EH edges inserted.
   BB points to the block of the original function and AUX pointers
   link the original and newly copied blocks.  */

static void
copy_phis_for_bb (basic_block bb, copy_body_data *id)
{
  basic_block const new_bb = (basic_block) bb->aux;
  edge_iterator ei;
  gphi *phi;
  gphi_iterator si;
  edge new_edge;
  bool inserted = false;

  for (si = gsi_start_phis (bb); !gsi_end_p (si); gsi_next (&si))
    {
      tree res, new_res;
      gphi *new_phi;

      phi = si.phi ();
      res = PHI_RESULT (phi);
      new_res = res;
      if (!virtual_operand_p (res))
	{
	  walk_tree (&new_res, copy_tree_body_r, id, NULL);
	  new_phi = create_phi_node (new_res, new_bb);
	  FOR_EACH_EDGE (new_edge, ei, new_bb->preds)
	    {
	      edge old_edge = find_edge ((basic_block) new_edge->src->aux, bb);
	      tree arg;
	      tree new_arg;
	      edge_iterator ei2;
	      location_t locus;

	      /* When doing partial cloning, we allow PHIs on the entry block
		 as long as all the arguments are the same.  Find any input
		 edge to see the argument to copy.  */
	      if (!old_edge)
		FOR_EACH_EDGE (old_edge, ei2, bb->preds)
		  if (!old_edge->src->aux)
		    break;

	      arg = PHI_ARG_DEF_FROM_EDGE (phi, old_edge);
	      new_arg = arg;
	      walk_tree (&new_arg, copy_tree_body_r, id, NULL);
	      gcc_assert (new_arg);
	      /* With return slot optimization we can end up with
		 non-gimple (foo *)&this->m, fix that here.  */
	      if (TREE_CODE (new_arg) != SSA_NAME
		  && TREE_CODE (new_arg) != FUNCTION_DECL
		  && !is_gimple_val (new_arg))
		{
		  gimple_seq stmts = NULL;
		  new_arg = force_gimple_operand (new_arg, &stmts, true, NULL);
		  gsi_insert_seq_on_edge (new_edge, stmts);
		  inserted = true;
		}
	      locus = gimple_phi_arg_location_from_edge (phi, old_edge);
	      if (LOCATION_BLOCK (locus))
		{
		  tree *n;
		  n = id->decl_map->get (LOCATION_BLOCK (locus));
		  gcc_assert (n);
		  if (*n)
		    locus = COMBINE_LOCATION_DATA (line_table, locus, *n);
		  else
		    locus = LOCATION_LOCUS (locus);
		}
	      else
		locus = LOCATION_LOCUS (locus);

	      add_phi_arg (new_phi, new_arg, new_edge, locus);
	    }
	}
    }

  /* Commit the delayed edge insertions.  */
  if (inserted)
    FOR_EACH_EDGE (new_edge, ei, new_bb->preds)
      gsi_commit_one_edge_insert (new_edge, NULL);
}
/* Wrapper for remap_decl so it can be used as a callback.  */

static tree
remap_decl_1 (tree decl, void *data)
{
  return remap_decl (decl, (copy_body_data *) data);
}
/* Build struct function and associated datastructures for the new clone
   NEW_FNDECL to be built.  CALLEE_FNDECL is the original.  The function
   changes cfun to the function of new_fndecl (and current_function_decl
   too).  */

static void
initialize_cfun (tree new_fndecl, tree callee_fndecl, gcov_type count)
{
  struct function *src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
  gcov_type count_scale;

  if (!DECL_ARGUMENTS (new_fndecl))
    DECL_ARGUMENTS (new_fndecl) = DECL_ARGUMENTS (callee_fndecl);
  if (!DECL_RESULT (new_fndecl))
    DECL_RESULT (new_fndecl) = DECL_RESULT (callee_fndecl);

  if (ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count)
    count_scale
      = GCOV_COMPUTE_SCALE (count,
			    ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count);
  else
    count_scale = REG_BR_PROB_BASE;

  /* Register specific tree functions.  */
  gimple_register_cfg_hooks ();

  /* Get clean struct function.  */
  push_struct_function (new_fndecl);

  /* We will rebuild these, so just sanity check that they are empty.  */
  gcc_assert (VALUE_HISTOGRAMS (cfun) == NULL);
  gcc_assert (cfun->local_decls == NULL);
  gcc_assert (cfun->cfg == NULL);
  gcc_assert (cfun->decl == new_fndecl);

  /* Copy items we preserve during cloning.  */
  cfun->static_chain_decl = src_cfun->static_chain_decl;
  cfun->nonlocal_goto_save_area = src_cfun->nonlocal_goto_save_area;
  cfun->function_end_locus = src_cfun->function_end_locus;
  cfun->curr_properties = src_cfun->curr_properties;
  cfun->last_verified = src_cfun->last_verified;
  cfun->va_list_gpr_size = src_cfun->va_list_gpr_size;
  cfun->va_list_fpr_size = src_cfun->va_list_fpr_size;
  cfun->has_nonlocal_label = src_cfun->has_nonlocal_label;
  cfun->stdarg = src_cfun->stdarg;
  cfun->after_inlining = src_cfun->after_inlining;
  cfun->can_throw_non_call_exceptions
    = src_cfun->can_throw_non_call_exceptions;
  cfun->can_delete_dead_exceptions = src_cfun->can_delete_dead_exceptions;
  cfun->returns_struct = src_cfun->returns_struct;
  cfun->returns_pcc_struct = src_cfun->returns_pcc_struct;

  init_empty_tree_cfg ();

  profile_status_for_fn (cfun) = profile_status_for_fn (src_cfun);
  ENTRY_BLOCK_PTR_FOR_FN (cfun)->count =
    (ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count * count_scale /
     REG_BR_PROB_BASE);
  ENTRY_BLOCK_PTR_FOR_FN (cfun)->frequency
    = ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->frequency;
  EXIT_BLOCK_PTR_FOR_FN (cfun)->count =
    (EXIT_BLOCK_PTR_FOR_FN (src_cfun)->count * count_scale /
     REG_BR_PROB_BASE);
  EXIT_BLOCK_PTR_FOR_FN (cfun)->frequency =
    EXIT_BLOCK_PTR_FOR_FN (src_cfun)->frequency;
  if (src_cfun->eh)
    init_eh_for_function ();

  if (src_cfun->gimple_df)
    {
      init_tree_ssa (cfun);
      cfun->gimple_df->in_ssa_p = true;
      init_ssa_operands (cfun);
    }
}
/* Helper function for copy_cfg_body.  Move debug stmts from the end
   of NEW_BB to the beginning of successor basic blocks when needed.  If the
   successor has multiple predecessors, reset them, otherwise keep
   their value.  */

static void
maybe_move_debug_stmts_to_successors (copy_body_data *id, basic_block new_bb)
{
  edge e;
  edge_iterator ei;
  gimple_stmt_iterator si = gsi_last_nondebug_bb (new_bb);

  if (gsi_end_p (si)
      || gsi_one_before_end_p (si)
      || !(stmt_can_throw_internal (gsi_stmt (si))
	   || stmt_can_make_abnormal_goto (gsi_stmt (si))))
    return;

  FOR_EACH_EDGE (e, ei, new_bb->succs)
    {
      gimple_stmt_iterator ssi = gsi_last_bb (new_bb);
      gimple_stmt_iterator dsi = gsi_after_labels (e->dest);
      while (is_gimple_debug (gsi_stmt (ssi)))
	{
	  gimple stmt = gsi_stmt (ssi);
	  gdebug *new_stmt;
	  tree var;
	  tree value;

	  /* For the last edge move the debug stmts instead of copying
	     them.  */
	  if (ei_one_before_end_p (ei))
	    {
	      si = ssi;
	      gsi_prev (&ssi);
	      if (!single_pred_p (e->dest) && gimple_debug_bind_p (stmt))
		gimple_debug_bind_reset_value (stmt);
	      gsi_remove (&si, false);
	      gsi_insert_before (&dsi, stmt, GSI_SAME_STMT);
	      continue;
	    }

	  if (gimple_debug_bind_p (stmt))
	    {
	      var = gimple_debug_bind_get_var (stmt);
	      if (single_pred_p (e->dest))
		{
		  value = gimple_debug_bind_get_value (stmt);
		  value = unshare_expr (value);
		}
	      else
		value = NULL_TREE;
	      new_stmt = gimple_build_debug_bind (var, value, stmt);
	    }
	  else if (gimple_debug_source_bind_p (stmt))
	    {
	      var = gimple_debug_source_bind_get_var (stmt);
	      value = gimple_debug_source_bind_get_value (stmt);
	      new_stmt = gimple_build_debug_source_bind (var, value, stmt);
	    }
	  else
	    gcc_unreachable ();
	  gsi_insert_before (&dsi, new_stmt, GSI_SAME_STMT);
	  id->debug_stmts.safe_push (new_stmt);
	  gsi_prev (&ssi);
	}
    }
}
/* Make a copy of the sub-loops of SRC_PARENT and place them
   as siblings of DEST_PARENT.  */

static void
copy_loops (copy_body_data *id,
	    struct loop *dest_parent, struct loop *src_parent)
{
  struct loop *src_loop = src_parent->inner;
  while (src_loop)
    {
      if (!id->blocks_to_copy
	  || bitmap_bit_p (id->blocks_to_copy, src_loop->header->index))
	{
	  struct loop *dest_loop = alloc_loop ();

	  /* Assign the new loop its header and latch and associate
	     those with the new loop.  */
	  dest_loop->header = (basic_block)src_loop->header->aux;
	  dest_loop->header->loop_father = dest_loop;
	  if (src_loop->latch != NULL)
	    {
	      dest_loop->latch = (basic_block)src_loop->latch->aux;
	      dest_loop->latch->loop_father = dest_loop;
	    }

	  /* Copy loop meta-data.  */
	  copy_loop_info (src_loop, dest_loop);

	  /* Finally place it into the loop array and the loop tree.  */
	  place_new_loop (cfun, dest_loop);
	  flow_loop_tree_node_add (dest_parent, dest_loop);

	  dest_loop->safelen = src_loop->safelen;
	  dest_loop->dont_vectorize = src_loop->dont_vectorize;
	  if (src_loop->force_vectorize)
	    {
	      dest_loop->force_vectorize = true;
	      cfun->has_force_vectorize_loops = true;
	    }
	  if (src_loop->simduid)
	    {
	      dest_loop->simduid = remap_decl (src_loop->simduid, id);
	      cfun->has_simduid_loops = true;
	    }

	  /* Recurse.  */
	  copy_loops (id, dest_loop, src_loop);
	}
      src_loop = src_loop->next;
    }
}
/* Call cgraph_redirect_edge_call_stmt_to_callee on all calls in BB */

void
redirect_all_calls (copy_body_data * id, basic_block bb)
{
  gimple_stmt_iterator si;
  gimple last = last_stmt (bb);
  for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
    {
      gimple stmt = gsi_stmt (si);
      if (is_gimple_call (stmt))
	{
	  struct cgraph_edge *edge = id->dst_node->get_edge (stmt);
	  if (edge)
	    {
	      edge->redirect_call_stmt_to_callee ();
	      if (stmt == last && id->call_stmt && maybe_clean_eh_stmt (stmt))
		gimple_purge_dead_eh_edges (bb);
	    }
	}
    }
}
/* Convert estimated frequencies into counts for NODE, scaling COUNT
   with each bb's frequency.  Used when NODE has a 0-weight entry
   but we are about to inline it into a non-zero count call bb.
   See the comments for handle_missing_profiles() in predict.c for
   when this can happen for COMDATs.  */

static void
freqs_to_counts (struct cgraph_node *node, gcov_type count)
{
  basic_block bb;
  edge_iterator ei;
  edge e;
  struct function *fn = DECL_STRUCT_FUNCTION (node->decl);

  FOR_ALL_BB_FN(bb, fn)
    {
      bb->count = apply_scale (count,
			       GCOV_COMPUTE_SCALE (bb->frequency, BB_FREQ_MAX));
      FOR_EACH_EDGE (e, ei, bb->succs)
	e->count = apply_probability (e->src->count, e->probability);
    }
}
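
/* Worked example (illustrative only; GCC's BB_FREQ_MAX is 10000):
   for count == 1000 and a block with guessed frequency 2500, the code
   above sets bb->count = 1000 * 2500 / 10000 = 250; each successor
   edge then receives that 250 scaled by its branch probability.  */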
/* Make a copy of the body of FN so that it can be inserted inline in
   another function.  Walks FN via CFG, returns new fndecl.  */

static tree
copy_cfg_body (copy_body_data * id, gcov_type count, int frequency_scale,
	       basic_block entry_block_map, basic_block exit_block_map,
	       basic_block new_entry)
{
  tree callee_fndecl = id->src_fn;
  /* Original cfun for the callee, doesn't change.  */
  struct function *src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
  struct function *cfun_to_copy;
  basic_block bb;
  tree new_fndecl = NULL;
  bool need_debug_cleanup = false;
  gcov_type count_scale;
  int last;
  int incoming_frequency = 0;
  gcov_type incoming_count = 0;

  /* This can happen for COMDAT routines that end up with 0 counts
     despite being called (see the comments for handle_missing_profiles()
     in predict.c as to why).  Apply counts to the blocks in the callee
     before inlining, using the guessed edge frequencies, so that we don't
     end up with a 0-count inline body which can confuse downstream
     optimizations such as function splitting.  */
  if (!ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count && count)
    {
      /* Apply the larger of the call bb count and the total incoming
	 call edge count to the callee.  */
      gcov_type in_count = 0;
      struct cgraph_edge *in_edge;
      for (in_edge = id->src_node->callers; in_edge;
	   in_edge = in_edge->next_caller)
	in_count += in_edge->count;
      freqs_to_counts (id->src_node, count > in_count ? count : in_count);
    }

  if (ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count)
    count_scale
      = GCOV_COMPUTE_SCALE (count,
			    ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count);
  else
    count_scale = REG_BR_PROB_BASE;

  /* Register specific tree functions.  */
  gimple_register_cfg_hooks ();

  /* If we are inlining just a region of the function, make sure to connect
     the new entry to ENTRY_BLOCK_PTR_FOR_FN (cfun).  Since the new entry can
     be part of a loop, we must compute the frequency and probability of
     ENTRY_BLOCK_PTR_FOR_FN (cfun) based on the frequencies and
     probabilities of edges incoming from the nonduplicated region.  */
  if (new_entry)
    {
      edge e;
      edge_iterator ei;

      FOR_EACH_EDGE (e, ei, new_entry->preds)
	if (!e->src->aux)
	  {
	    incoming_frequency += EDGE_FREQUENCY (e);
	    incoming_count += e->count;
	  }
      incoming_count = apply_scale (incoming_count, count_scale);
      incoming_frequency
	= apply_scale ((gcov_type)incoming_frequency, frequency_scale);
      ENTRY_BLOCK_PTR_FOR_FN (cfun)->count = incoming_count;
      ENTRY_BLOCK_PTR_FOR_FN (cfun)->frequency = incoming_frequency;
    }

  /* Must have a CFG here at this point.  */
  gcc_assert (ENTRY_BLOCK_PTR_FOR_FN
	      (DECL_STRUCT_FUNCTION (callee_fndecl)));

  cfun_to_copy = id->src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);

  ENTRY_BLOCK_PTR_FOR_FN (cfun_to_copy)->aux = entry_block_map;
  EXIT_BLOCK_PTR_FOR_FN (cfun_to_copy)->aux = exit_block_map;
  entry_block_map->aux = ENTRY_BLOCK_PTR_FOR_FN (cfun_to_copy);
  exit_block_map->aux = EXIT_BLOCK_PTR_FOR_FN (cfun_to_copy);

  /* Duplicate any exception-handling regions.  */
  if (cfun->eh)
    id->eh_map = duplicate_eh_regions (cfun_to_copy, NULL, id->eh_lp_nr,
				       remap_decl_1, id);

  /* Use aux pointers to map the original blocks to copy.  */
  FOR_EACH_BB_FN (bb, cfun_to_copy)
    if (!id->blocks_to_copy || bitmap_bit_p (id->blocks_to_copy, bb->index))
      {
	basic_block new_bb = copy_bb (id, bb, frequency_scale, count_scale);
	bb->aux = new_bb;
	new_bb->aux = bb;
	new_bb->loop_father = entry_block_map->loop_father;
      }

  last = last_basic_block_for_fn (cfun);

  /* Now that we've duplicated the blocks, duplicate their edges.  */
  basic_block abnormal_goto_dest = NULL;
  if (id->call_stmt
      && stmt_can_make_abnormal_goto (id->call_stmt))
    {
      gimple_stmt_iterator gsi = gsi_for_stmt (id->call_stmt);

      bb = gimple_bb (id->call_stmt);
      gsi_next (&gsi);
      if (gsi_end_p (gsi))
	abnormal_goto_dest = get_abnormal_succ_dispatcher (bb);
    }
  FOR_ALL_BB_FN (bb, cfun_to_copy)
    if (!id->blocks_to_copy
	|| (bb->index > 0 && bitmap_bit_p (id->blocks_to_copy, bb->index)))
      need_debug_cleanup |= copy_edges_for_bb (bb, count_scale, exit_block_map,
					       abnormal_goto_dest);

  if (new_entry)
    {
      edge e = make_edge (entry_block_map, (basic_block)new_entry->aux, EDGE_FALLTHRU);
      e->probability = REG_BR_PROB_BASE;
      e->count = incoming_count;
    }

  /* Duplicate the loop tree, if available and wanted.  */
  if (loops_for_fn (src_cfun) != NULL
      && current_loops != NULL)
    {
      copy_loops (id, entry_block_map->loop_father,
		  get_loop (src_cfun, 0));
      /* Defer to cfgcleanup to update loop-father fields of basic-blocks.  */
      loops_state_set (LOOPS_NEED_FIXUP);
    }

  /* If the loop tree in the source function needed fixup, mark the
     destination loop tree for fixup, too.  */
  if (loops_for_fn (src_cfun)->state & LOOPS_NEED_FIXUP)
    loops_state_set (LOOPS_NEED_FIXUP);

  if (gimple_in_ssa_p (cfun))
    FOR_ALL_BB_FN (bb, cfun_to_copy)
      if (!id->blocks_to_copy
	  || (bb->index > 0 && bitmap_bit_p (id->blocks_to_copy, bb->index)))
	copy_phis_for_bb (bb, id);

  FOR_ALL_BB_FN (bb, cfun_to_copy)
    if (bb->aux)
      {
	if (need_debug_cleanup
	    && bb->index != ENTRY_BLOCK
	    && bb->index != EXIT_BLOCK)
	  maybe_move_debug_stmts_to_successors (id, (basic_block) bb->aux);
	/* Update call edge destinations.  This cannot be done before loop
	   info is updated, because we may split basic blocks.  */
	if (id->transform_call_graph_edges == CB_CGE_DUPLICATE
	    && bb->index != ENTRY_BLOCK
	    && bb->index != EXIT_BLOCK)
	  redirect_all_calls (id, (basic_block)bb->aux);
	((basic_block)bb->aux)->aux = NULL;
	bb->aux = NULL;
      }

  /* Zero out AUX fields of newly created blocks during EH edge
     insertion.  */
  for (; last < last_basic_block_for_fn (cfun); last++)
    {
      if (need_debug_cleanup)
	maybe_move_debug_stmts_to_successors (id,
					      BASIC_BLOCK_FOR_FN (cfun, last));
      BASIC_BLOCK_FOR_FN (cfun, last)->aux = NULL;
      /* Update call edge destinations.  This cannot be done before loop
	 info is updated, because we may split basic blocks.  */
      if (id->transform_call_graph_edges == CB_CGE_DUPLICATE)
	redirect_all_calls (id, BASIC_BLOCK_FOR_FN (cfun, last));
    }
  entry_block_map->aux = NULL;
  exit_block_map->aux = NULL;

  if (id->eh_map)
    {
      delete id->eh_map;
      id->eh_map = NULL;
    }
  if (id->dependence_map)
    {
      delete id->dependence_map;
      id->dependence_map = NULL;
    }

  return new_fndecl;
}
/* Copy the debug STMT using ID.  We deal with these statements in a
   special way: if any variable in their VALUE expression wasn't
   remapped yet, we won't remap it, because that would get decl uids
   out of sync, causing codegen differences between -g and -g0.  If
   this arises, we drop the VALUE expression altogether.  */

static void
copy_debug_stmt (gdebug *stmt, copy_body_data *id)
{
  tree t, *n;
  struct walk_stmt_info wi;

  if (gimple_block (stmt))
    {
      n = id->decl_map->get (gimple_block (stmt));
      gimple_set_block (stmt, n ? *n : id->block);
    }

  /* Remap all the operands in COPY.  */
  memset (&wi, 0, sizeof (wi));
  wi.info = id;

  processing_debug_stmt = 1;

  if (gimple_debug_source_bind_p (stmt))
    t = gimple_debug_source_bind_get_var (stmt);
  else
    t = gimple_debug_bind_get_var (stmt);

  if (TREE_CODE (t) == PARM_DECL && id->debug_map
      && (n = id->debug_map->get (t)))
    {
      gcc_assert (TREE_CODE (*n) == VAR_DECL);
      t = *n;
    }
  else if (TREE_CODE (t) == VAR_DECL
	   && !is_global_var (t)
	   && !id->decl_map->get (t))
    /* T is a non-localized variable.  */;
  else
    walk_tree (&t, remap_gimple_op_r, &wi, NULL);

  if (gimple_debug_bind_p (stmt))
    {
      gimple_debug_bind_set_var (stmt, t);

      if (gimple_debug_bind_has_value_p (stmt))
	walk_tree (gimple_debug_bind_get_value_ptr (stmt),
		   remap_gimple_op_r, &wi, NULL);

      /* Punt if any decl couldn't be remapped.  */
      if (processing_debug_stmt < 0)
	gimple_debug_bind_reset_value (stmt);
    }
  else if (gimple_debug_source_bind_p (stmt))
    {
      gimple_debug_source_bind_set_var (stmt, t);
      walk_tree (gimple_debug_source_bind_get_value_ptr (stmt),
		 remap_gimple_op_r, &wi, NULL);
      /* When inlining and the source bind refers to one of the optimized
	 away parameters, change the source bind into a normal debug bind
	 referring to the corresponding DEBUG_EXPR_DECL that should have
	 been bound before the call stmt.  */
      t = gimple_debug_source_bind_get_value (stmt);
      if (t != NULL_TREE
	  && TREE_CODE (t) == PARM_DECL
	  && id->call_stmt)
	{
	  vec<tree, va_gc> **debug_args = decl_debug_args_lookup (id->src_fn);
	  unsigned int i;

	  if (debug_args != NULL)
	    {
	      for (i = 0; i < vec_safe_length (*debug_args); i += 2)
		if ((**debug_args)[i] == DECL_ORIGIN (t)
		    && TREE_CODE ((**debug_args)[i + 1]) == DEBUG_EXPR_DECL)
		  {
		    t = (**debug_args)[i + 1];
		    stmt->subcode = GIMPLE_DEBUG_BIND;
		    gimple_debug_bind_set_value (stmt, t);
		    break;
		  }
	    }
	}
    }

  processing_debug_stmt = 0;

  update_stmt (stmt);
}
/* Process deferred debug stmts.  In order to give values better odds
   of being successfully remapped, we delay the processing of debug
   stmts until all other stmts that might require remapping are
   processed.  */

static void
copy_debug_stmts (copy_body_data *id)
{
  size_t i;
  gdebug *stmt;

  if (!id->debug_stmts.exists ())
    return;

  FOR_EACH_VEC_ELT (id->debug_stmts, i, stmt)
    copy_debug_stmt (stmt, id);

  id->debug_stmts.release ();
}
/* Make a copy of the body of SRC_FN so that it can be inserted inline in
   another function.  */

static tree
copy_tree_body (copy_body_data *id)
{
  tree fndecl = id->src_fn;
  tree body = DECL_SAVED_TREE (fndecl);

  walk_tree (&body, copy_tree_body_r, id, NULL);

  return body;
}
/* Make a copy of the body of FN so that it can be inserted inline in
   another function.  */

static tree
copy_body (copy_body_data *id, gcov_type count, int frequency_scale,
	   basic_block entry_block_map, basic_block exit_block_map,
	   basic_block new_entry)
{
  tree fndecl = id->src_fn;
  tree body;

  /* If this body has a CFG, walk CFG and copy.  */
  gcc_assert (ENTRY_BLOCK_PTR_FOR_FN (DECL_STRUCT_FUNCTION (fndecl)));
  body = copy_cfg_body (id, count, frequency_scale, entry_block_map,
			exit_block_map, new_entry);
  copy_debug_stmts (id);

  return body;
}
/* Return true if VALUE is an ADDR_EXPR of an automatic variable
   defined in function FN, or of a data member thereof.  */

static bool
self_inlining_addr_expr (tree value, tree fn)
{
  tree var;

  if (TREE_CODE (value) != ADDR_EXPR)
    return false;

  var = get_base_address (TREE_OPERAND (value, 0));

  return var && auto_var_in_fn_p (var, fn);
}
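
/* Illustrative example (hypothetical code, not from any testcase):

     static inline int f (int n)
     {
       int local = n;
       int *p = &local;
       return n ? f (n - 1) : *p;
     }

   While inlining f into itself, the value &local refers to an
   automatic variable of the function being inlined, so it must not be
   constant propagated into the inlined body; this predicate detects
   exactly that situation.  */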
/* Append to BB a debug annotation that binds VAR to VALUE, inheriting
   lexical block and line number information from base_stmt, if given,
   or from the last stmt of the block otherwise.  */

static gimple
insert_init_debug_bind (copy_body_data *id,
			basic_block bb, tree var, tree value,
			gimple base_stmt)
{
  gimple note;
  gimple_stmt_iterator gsi;
  tree tracked_var;

  if (!gimple_in_ssa_p (id->src_cfun))
    return NULL;

  if (!opt_for_fn (id->dst_fn, flag_var_tracking_assignments))
    return NULL;

  tracked_var = target_for_debug_bind (var);
  if (!tracked_var)
    return NULL;

  if (bb)
    {
      gsi = gsi_last_bb (bb);
      if (!base_stmt && !gsi_end_p (gsi))
	base_stmt = gsi_stmt (gsi);
    }

  note = gimple_build_debug_bind (tracked_var, unshare_expr (value), base_stmt);

  if (bb)
    {
      if (!gsi_end_p (gsi))
	gsi_insert_after (&gsi, note, GSI_SAME_STMT);
      else
	gsi_insert_before (&gsi, note, GSI_SAME_STMT);
    }

  return note;
}
static void
insert_init_stmt (copy_body_data *id, basic_block bb, gimple init_stmt)
{
  /* If VAR represents a zero-sized variable, it's possible that the
     assignment statement may result in no gimple statements.  */
  if (init_stmt)
    {
      gimple_stmt_iterator si = gsi_last_bb (bb);

      /* We can end up with init statements that store to a non-register
	 from a rhs with a conversion.  Handle that here by forcing the
	 rhs into a temporary.  gimple_regimplify_operands is not
	 prepared to do this for us.  */
      if (!is_gimple_debug (init_stmt)
	  && !is_gimple_reg (gimple_assign_lhs (init_stmt))
	  && is_gimple_reg_type (TREE_TYPE (gimple_assign_lhs (init_stmt)))
	  && gimple_assign_rhs_class (init_stmt) == GIMPLE_UNARY_RHS)
	{
	  tree rhs = build1 (gimple_assign_rhs_code (init_stmt),
			     gimple_expr_type (init_stmt),
			     gimple_assign_rhs1 (init_stmt));
	  rhs = force_gimple_operand_gsi (&si, rhs, true, NULL_TREE, false,
					  GSI_NEW_STMT);
	  gimple_assign_set_rhs_code (init_stmt, TREE_CODE (rhs));
	  gimple_assign_set_rhs1 (init_stmt, rhs);
	}
      gsi_insert_after (&si, init_stmt, GSI_NEW_STMT);
      gimple_regimplify_operands (init_stmt, &si);

      if (!is_gimple_debug (init_stmt))
	{
	  tree def = gimple_assign_lhs (init_stmt);
	  insert_init_debug_bind (id, bb, def, def, init_stmt);
	}
    }
}
/* Initialize parameter P with VALUE.  If needed, produce an init statement
   at the end of BB.  When BB is NULL, we return the init statement to be
   output later.  */

static gimple
setup_one_parameter (copy_body_data *id, tree p, tree value, tree fn,
		     basic_block bb, tree *vars)
{
  gimple init_stmt = NULL;
  tree var;
  tree rhs = value;
  tree def = (gimple_in_ssa_p (cfun)
	      ? ssa_default_def (id->src_cfun, p) : NULL);

  if (value
      && value != error_mark_node
      && !useless_type_conversion_p (TREE_TYPE (p), TREE_TYPE (value)))
    {
      /* If we can match up types by promotion/demotion do so.  */
      if (fold_convertible_p (TREE_TYPE (p), value))
	rhs = fold_convert (TREE_TYPE (p), value);
      else
	{
	  /* ???  For valid programs we should not end up here.
	     Still if we end up with truly mismatched types here, fall back
	     to using a VIEW_CONVERT_EXPR or a literal zero to not leak invalid
	     GIMPLE to the following passes.  */
	  if (!is_gimple_reg_type (TREE_TYPE (value))
	      || TYPE_SIZE (TREE_TYPE (p)) == TYPE_SIZE (TREE_TYPE (value)))
	    rhs = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (p), value);
	  else
	    rhs = build_zero_cst (TREE_TYPE (p));
	}
    }

  /* Make an equivalent VAR_DECL.  Note that we must NOT remap the type
     here since the type of this decl must be visible to the calling
     function.  */
  var = copy_decl_to_var (p, id);

  /* Declare this new variable.  */
  DECL_CHAIN (var) = *vars;
  *vars = var;

  /* Make gimplifier happy about this variable.  */
  DECL_SEEN_IN_BIND_EXPR_P (var) = 1;

  /* If the parameter is never assigned to, has no SSA_NAMEs created,
     we would not need to create a new variable here at all, if it
     weren't for debug info.  Still, we can just use the argument
     value.  */
  if (TREE_READONLY (p)
      && !TREE_ADDRESSABLE (p)
      && value && !TREE_SIDE_EFFECTS (value)
      && !def)
    {
      /* We may produce non-gimple trees by adding NOPs or introduce
	 invalid sharing when the operand is not really constant.
	 It is not a big deal to prohibit constant propagation here as
	 we will constant propagate in the DOM1 pass anyway.  */
      if (is_gimple_min_invariant (value)
	  && useless_type_conversion_p (TREE_TYPE (p),
					TREE_TYPE (value))
	  /* We have to be very careful about ADDR_EXPR.  Make sure
	     the base variable isn't a local variable of the inlined
	     function, e.g., when doing recursive inlining, direct or
	     mutually-recursive or whatever, which is why we don't
	     just test whether fn == current_function_decl.  */
	  && ! self_inlining_addr_expr (value, fn))
	{
	  insert_decl_map (id, p, value);
	  insert_debug_decl_map (id, p, var);
	  return insert_init_debug_bind (id, bb, var, value, NULL);
	}
    }

  /* Register the VAR_DECL as the equivalent for the PARM_DECL;
     that way, when the PARM_DECL is encountered, it will be
     automatically replaced by the VAR_DECL.  */
  insert_decl_map (id, p, var);

  /* Even if P was TREE_READONLY, the new VAR should not be.
     In the original code, we would have constructed a
     temporary, and then the function body would have never
     changed the value of P.  However, now, we will be
     constructing VAR directly.  The constructor body may
     change its value multiple times as it is being
     constructed.  Therefore, it must not be TREE_READONLY;
     the back-end assumes that a TREE_READONLY variable is
     assigned to only once.  */
  if (TYPE_NEEDS_CONSTRUCTING (TREE_TYPE (p)))
    TREE_READONLY (var) = 0;

  /* If there is no setup required and we are in SSA, take the easy route
     replacing all SSA names representing the function parameter by the
     SSA name passed to the function.

     We need to construct the map for the variable anyway as it might be
     used in different SSA names when the parameter is set in the function.

     Do replacement at -O0 for const arguments replaced by a constant.
     This is important for builtin_constant_p and other constructs requiring
     a constant argument to be visible in the inlined function body.  */
  if (gimple_in_ssa_p (cfun) && rhs && def && is_gimple_reg (p)
      && (optimize
	  || (TREE_READONLY (p)
	      && is_gimple_min_invariant (rhs)))
      && (TREE_CODE (rhs) == SSA_NAME
	  || is_gimple_min_invariant (rhs))
      && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (def))
    {
      insert_decl_map (id, def, rhs);
      return insert_init_debug_bind (id, bb, var, rhs, NULL);
    }

  /* If the value of argument is never used, don't care about initializing
     it.  */
  if (optimize && gimple_in_ssa_p (cfun) && !def && is_gimple_reg (p))
    {
      gcc_assert (!value || !TREE_SIDE_EFFECTS (value));
      return insert_init_debug_bind (id, bb, var, rhs, NULL);
    }

  /* Initialize this VAR_DECL from the equivalent argument.  Convert
     the argument to the proper type in case it was promoted.  */
  if (value)
    {
      if (rhs == error_mark_node)
	{
	  insert_decl_map (id, p, var);
	  return insert_init_debug_bind (id, bb, var, rhs, NULL);
	}

      STRIP_USELESS_TYPE_CONVERSION (rhs);

      /* If we are in SSA form properly remap the default definition
	 or assign to a dummy SSA name if the parameter is unused and
	 we are not optimizing.  */
      if (gimple_in_ssa_p (cfun) && is_gimple_reg (p))
	{
	  if (def)
	    {
	      def = remap_ssa_name (def, id);
	      init_stmt = gimple_build_assign (def, rhs);
	      SSA_NAME_IS_DEFAULT_DEF (def) = 0;
	      set_ssa_default_def (cfun, var, NULL);
	    }
	  else if (!optimize)
	    {
	      def = make_ssa_name (var);
	      init_stmt = gimple_build_assign (def, rhs);
	    }
	}
      else
	init_stmt = gimple_build_assign (var, rhs);

      if (bb && init_stmt)
	insert_init_stmt (id, bb, init_stmt);
    }
  return init_stmt;
}
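
/* A sketch of the constant-propagation fast path above (illustrative,
   hypothetical code):

     static inline int scale (const int k, int x) { return k * x; }

   For a call scale (4, y), the readonly, non-addressable parameter K
   maps straight to the constant 4 (via K itself, or via its SSA
   default definition), so no initialization statement is emitted and
   uses of K in the inlined body fold to 4; this is also what keeps
   __builtin_constant_p (k) true after inlining, even at -O0.  */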
/* Generate code to initialize the parameters of the function at the
   top of the stack in ID from the GIMPLE_CALL STMT.  */

static void
initialize_inlined_parameters (copy_body_data *id, gimple stmt,
			       tree fn, basic_block bb)
{
  tree parms;
  size_t i;
  tree p;
  tree vars = NULL_TREE;
  tree static_chain = gimple_call_chain (stmt);

  /* Figure out what the parameters are.  */
  parms = DECL_ARGUMENTS (fn);

  /* Loop through the parameter declarations, replacing each with an
     equivalent VAR_DECL, appropriately initialized.  */
  for (p = parms, i = 0; p; p = DECL_CHAIN (p), i++)
    {
      tree val;
      val = i < gimple_call_num_args (stmt) ? gimple_call_arg (stmt, i) : NULL;
      setup_one_parameter (id, p, val, fn, bb, &vars);
    }
  /* After remapping parameters remap their types.  This has to be done
     in a second loop over all parameters to appropriately remap
     variable sized arrays when the size is specified in a
     parameter following the array.  */
  for (p = parms, i = 0; p; p = DECL_CHAIN (p), i++)
    {
      tree *varp = id->decl_map->get (p);
      if (varp
	  && TREE_CODE (*varp) == VAR_DECL)
	{
	  tree def = (gimple_in_ssa_p (cfun) && is_gimple_reg (p)
		      ? ssa_default_def (id->src_cfun, p) : NULL);
	  tree var = *varp;
	  TREE_TYPE (var) = remap_type (TREE_TYPE (var), id);
	  /* Also remap the default definition if it was remapped
	     to the default definition of the parameter replacement
	     by the parameter setup.  */
	  if (def)
	    {
	      tree *defp = id->decl_map->get (def);
	      if (defp
		  && TREE_CODE (*defp) == SSA_NAME
		  && SSA_NAME_VAR (*defp) == var)
		TREE_TYPE (*defp) = TREE_TYPE (var);
	    }
	}
    }

  /* Initialize the static chain.  */
  p = DECL_STRUCT_FUNCTION (fn)->static_chain_decl;
  gcc_assert (fn != current_function_decl);
  if (p)
    {
      /* No static chain?  Seems like a bug in tree-nested.c.  */
      gcc_assert (static_chain);

      setup_one_parameter (id, p, static_chain, fn, bb, &vars);
    }

  declare_inline_vars (id->block, vars);
}
/* Declare a return variable to replace the RESULT_DECL for the
   function we are calling.  An appropriate DECL_STMT is returned.
   The USE_STMT is filled to contain a use of the declaration to
   indicate the return value of the function.

   RETURN_SLOT, if non-null, is the place where to store the result.  It
   is set only for CALL_EXPR_RETURN_SLOT_OPT.  MODIFY_DEST, if non-null,
   was the LHS of the MODIFY_EXPR to which this call is the RHS.

   RETURN_BOUNDS holds a destination for returned bounds.

   The return value is a (possibly null) value that holds the result
   as seen by the caller.  */

static tree
declare_return_variable (copy_body_data *id, tree return_slot, tree modify_dest,
			 tree return_bounds, basic_block entry_bb)
{
  tree callee = id->src_fn;
  tree result = DECL_RESULT (callee);
  tree callee_type = TREE_TYPE (result);
  tree caller_type;
  tree var, use;

  /* Handle type-mismatches in the function declaration return type
     vs. the call expression.  */
  if (modify_dest)
    caller_type = TREE_TYPE (modify_dest);
  else
    caller_type = TREE_TYPE (TREE_TYPE (callee));

  /* We don't need to do anything for functions that don't return anything.  */
  if (VOID_TYPE_P (callee_type))
    return NULL_TREE;

  /* If there was a return slot, then the return value is the
     dereferenced address of that object.  */
  if (return_slot)
    {
      /* The front end shouldn't have used both return_slot and
	 a modify expression.  */
      gcc_assert (!modify_dest);
      if (DECL_BY_REFERENCE (result))
	{
	  tree return_slot_addr = build_fold_addr_expr (return_slot);
	  STRIP_USELESS_TYPE_CONVERSION (return_slot_addr);

	  /* We are going to construct *&return_slot and we can't do that
	     for variables believed to be not addressable.

	     FIXME: This check possibly can match, because values returned
	     via return slot optimization are not believed to have their
	     address taken by alias analysis.  */
	  gcc_assert (TREE_CODE (return_slot) != SSA_NAME);
	  var = return_slot_addr;
	}
      else
	{
	  var = return_slot;
	  gcc_assert (TREE_CODE (var) != SSA_NAME);
	  if (TREE_ADDRESSABLE (result))
	    mark_addressable (var);
	}
      if ((TREE_CODE (TREE_TYPE (result)) == COMPLEX_TYPE
	   || TREE_CODE (TREE_TYPE (result)) == VECTOR_TYPE)
	  && !DECL_GIMPLE_REG_P (result)
	  && DECL_P (var))
	DECL_GIMPLE_REG_P (var) = 0;
      use = NULL;
      goto done;
    }

  /* All types requiring non-trivial constructors should have been handled.  */
  gcc_assert (!TREE_ADDRESSABLE (callee_type));

  /* Attempt to avoid creating a new temporary variable.  */
  if (modify_dest
      && TREE_CODE (modify_dest) != SSA_NAME)
    {
      bool use_it = false;

      /* We can't use MODIFY_DEST if there's type promotion involved.  */
      if (!useless_type_conversion_p (callee_type, caller_type))
	use_it = false;

      /* ??? If we're assigning to a variable sized type, then we must
	 reuse the destination variable, because we've no good way to
	 create variable sized temporaries at this point.  */
      else if (TREE_CODE (TYPE_SIZE_UNIT (caller_type)) != INTEGER_CST)
	use_it = true;

      /* If the callee cannot possibly modify MODIFY_DEST, then we can
	 reuse it as the result of the call directly.  Don't do this if
	 it would promote MODIFY_DEST to addressable.  */
      else if (TREE_ADDRESSABLE (result))
	use_it = false;
      else
	{
	  tree base_m = get_base_address (modify_dest);

	  /* If the base isn't a decl, then it's a pointer, and we don't
	     know where that's going to go.  */
	  if (!DECL_P (base_m))
	    use_it = false;
	  else if (is_global_var (base_m))
	    use_it = false;
	  else if ((TREE_CODE (TREE_TYPE (result)) == COMPLEX_TYPE
		    || TREE_CODE (TREE_TYPE (result)) == VECTOR_TYPE)
		   && !DECL_GIMPLE_REG_P (result)
		   && DECL_GIMPLE_REG_P (base_m))
	    use_it = false;
	  else if (!TREE_ADDRESSABLE (base_m))
	    use_it = true;
	}

      if (use_it)
	{
	  var = modify_dest;
	  use = NULL;
	  goto done;
	}
    }

  gcc_assert (TREE_CODE (TYPE_SIZE_UNIT (callee_type)) == INTEGER_CST);

  var = copy_result_decl_to_var (result, id);
  DECL_SEEN_IN_BIND_EXPR_P (var) = 1;

  /* Do not have the rest of GCC warn about this variable as it should
     not be visible to the user.  */
  TREE_NO_WARNING (var) = 1;

  declare_inline_vars (id->block, var);

  /* Build the use expr.  If the return type of the function was
     promoted, convert it back to the expected type.  */
  use = var;
  if (!useless_type_conversion_p (caller_type, TREE_TYPE (var)))
    {
      /* If we can match up types by promotion/demotion do so.  */
      if (fold_convertible_p (caller_type, var))
	use = fold_convert (caller_type, var);
      else
	{
	  /* ???  For valid programs we should not end up here.
	     Still if we end up with truly mismatched types here, fall back
	     to using a MEM_REF to not leak invalid GIMPLE to the following
	     passes.  */
	  /* Prevent var from being written into SSA form.  */
	  if (TREE_CODE (TREE_TYPE (var)) == VECTOR_TYPE
	      || TREE_CODE (TREE_TYPE (var)) == COMPLEX_TYPE)
	    DECL_GIMPLE_REG_P (var) = false;
	  else if (is_gimple_reg_type (TREE_TYPE (var)))
	    TREE_ADDRESSABLE (var) = true;
	  use = fold_build2 (MEM_REF, caller_type,
			     build_fold_addr_expr (var),
			     build_int_cst (ptr_type_node, 0));
	}
    }

  STRIP_USELESS_TYPE_CONVERSION (use);

  if (DECL_BY_REFERENCE (result))
    {
      TREE_ADDRESSABLE (var) = 1;
      var = build_fold_addr_expr (var);
    }

 done:
  /* Register the VAR_DECL as the equivalent for the RESULT_DECL; that
     way, when the RESULT_DECL is encountered, it will be
     automatically replaced by the VAR_DECL.

     When returning by reference, ensure that RESULT_DECL remaps to
     gimple_val.  */
  if (DECL_BY_REFERENCE (result)
      && !is_gimple_val (var))
    {
      tree temp = create_tmp_var (TREE_TYPE (result), "retvalptr");
      insert_decl_map (id, result, temp);
      /* When RESULT_DECL is in SSA form, we need to remap and initialize
	 its default_def SSA_NAME.  */
      if (gimple_in_ssa_p (id->src_cfun)
	  && is_gimple_reg (result))
	{
	  temp = make_ssa_name (temp);
	  insert_decl_map (id, ssa_default_def (id->src_cfun, result), temp);
	}
      insert_init_stmt (id, entry_bb, gimple_build_assign (temp, var));
    }
  else
    insert_decl_map (id, result, var);

  /* Remember this so we can ignore it in remap_decls.  */
  id->retvar = var;

  /* If returned bounds are used, then make a var for them.  */
  if (return_bounds)
    {
      tree bndtemp = create_tmp_var (pointer_bounds_type_node, "retbnd");
      DECL_SEEN_IN_BIND_EXPR_P (bndtemp) = 1;
      TREE_NO_WARNING (bndtemp) = 1;
      declare_inline_vars (id->block, bndtemp);

      id->retbnd = bndtemp;
      insert_init_stmt (id, entry_bb,
			gimple_build_assign (bndtemp,
					     chkp_get_zero_bounds_var ()));
    }

  return use;
}
/* Callback through walk_tree.  Determine if a DECL_INITIAL makes reference
   to a local label.  */

static tree
has_label_address_in_static_1 (tree *nodep, int *walk_subtrees, void *fnp)
{
  tree node = *nodep;
  tree fn = (tree) fnp;

  if (TREE_CODE (node) == LABEL_DECL && DECL_CONTEXT (node) == fn)
    return node;

  return NULL_TREE;
}
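
/* Illustrative example of what this callback detects (hypothetical):

     static int f (void)
     {
       static void *tbl[] = { &&lab };
       goto *tbl[0];
     lab:
       return 0;
     }

   DECL_INITIAL of the static TBL refers to the local label LAB, so
   any copy of F would leave TBL pointing into the original body.  */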
/* Determine if the function can be copied.  If so return NULL.  If
   not return a string describing the reason for failure.  */

const char *
copy_forbidden (struct function *fun, tree fndecl)
{
  const char *reason = fun->cannot_be_copied_reason;
  tree decl;
  unsigned ix;

  /* Only examine the function once.  */
  if (fun->cannot_be_copied_set)
    return reason;

  /* We cannot copy a function that receives a non-local goto
     because we cannot remap the destination label used in the
     function that is performing the non-local goto.  */
  /* ??? Actually, this should be possible, if we work at it.
     No doubt there's just a handful of places that simply
     assume it doesn't happen and don't substitute properly.  */
  if (fun->has_nonlocal_label)
    {
      reason = G_("function %q+F can never be copied "
		  "because it receives a non-local goto");
      goto fail;
    }

  FOR_EACH_LOCAL_DECL (fun, ix, decl)
    if (TREE_CODE (decl) == VAR_DECL
	&& TREE_STATIC (decl)
	&& !DECL_EXTERNAL (decl)
	&& DECL_INITIAL (decl)
	&& walk_tree_without_duplicates (&DECL_INITIAL (decl),
					 has_label_address_in_static_1,
					 fndecl))
      {
	reason = G_("function %q+F can never be copied because it saves "
		    "address of local label in a static variable");
	goto fail;
      }

 fail:
  fun->cannot_be_copied_reason = reason;
  fun->cannot_be_copied_set = true;
  return reason;
}
static const char *inline_forbidden_reason;

/* A callback for walk_gimple_seq to handle statements.  Returns non-null
   iff a function cannot be inlined.  Also sets the reason why.  */

static tree
inline_forbidden_p_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
			 struct walk_stmt_info *wip)
{
  tree fn = (tree) wip->info;
  tree t;
  gimple stmt = gsi_stmt (*gsi);

  switch (gimple_code (stmt))
    {
    case GIMPLE_CALL:
      /* Refuse to inline an alloca call unless the user explicitly forced
	 it, as this may change the program's memory overhead drastically
	 when the function using alloca is called in a loop.  In GCC present
	 in SPEC2000, inlining into schedule_block caused it to require 2GB
	 of RAM instead of 256MB.  Don't do so for alloca calls emitted for
	 VLA objects as those can't cause unbounded growth (they're always
	 wrapped inside stack_save/stack_restore regions).  */
      if (gimple_alloca_call_p (stmt)
	  && !gimple_call_alloca_for_var_p (as_a <gcall *> (stmt))
	  && !lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn)))
	{
	  inline_forbidden_reason
	    = G_("function %q+F can never be inlined because it uses "
		 "alloca (override using the always_inline attribute)");
	  *handled_ops_p = true;
	  return fn;
	}

      t = gimple_call_fndecl (stmt);
      if (t == NULL_TREE)
	break;

      /* We cannot inline functions that call setjmp.  */
      if (setjmp_call_p (t))
	{
	  inline_forbidden_reason
	    = G_("function %q+F can never be inlined because it uses setjmp");
	  *handled_ops_p = true;
	  return t;
	}

      if (DECL_BUILT_IN_CLASS (t) == BUILT_IN_NORMAL)
	switch (DECL_FUNCTION_CODE (t))
	  {
	    /* We cannot inline functions that take a variable number of
	       arguments.  */
	  case BUILT_IN_VA_START:
	  case BUILT_IN_NEXT_ARG:
	  case BUILT_IN_VA_END:
	    inline_forbidden_reason
	      = G_("function %q+F can never be inlined because it "
		   "uses variable argument lists");
	    *handled_ops_p = true;
	    return t;

	  case BUILT_IN_LONGJMP:
	    /* We can't inline functions that call __builtin_longjmp at
	       all.  The non-local goto machinery really requires the
	       destination be in a different function.  If we allow the
	       function calling __builtin_longjmp to be inlined into the
	       function calling __builtin_setjmp, Things will Go Awry.  */
	    inline_forbidden_reason
	      = G_("function %q+F can never be inlined because "
		   "it uses setjmp-longjmp exception handling");
	    *handled_ops_p = true;
	    return t;

	  case BUILT_IN_NONLOCAL_GOTO:
	    /* Similarly.  */
	    inline_forbidden_reason
	      = G_("function %q+F can never be inlined because "
		   "it uses non-local goto");
	    *handled_ops_p = true;
	    return t;

	  case BUILT_IN_RETURN:
	  case BUILT_IN_APPLY_ARGS:
	    /* If a __builtin_apply_args caller would be inlined,
	       it would be saving arguments of the function it has
	       been inlined into.  Similarly __builtin_return would
	       return from the function the inline has been inlined into.  */
	    inline_forbidden_reason
	      = G_("function %q+F can never be inlined because "
		   "it uses __builtin_return or __builtin_apply_args");
	    *handled_ops_p = true;
	    return t;

	  default:
	    break;
	  }
      break;

    case GIMPLE_GOTO:
      t = gimple_goto_dest (stmt);

      /* We will not inline a function which uses computed goto.  The
	 addresses of its local labels, which may be tucked into
	 global storage, are of course not constant across
	 instantiations, which causes unexpected behavior.  */
      if (TREE_CODE (t) != LABEL_DECL)
	{
	  inline_forbidden_reason
	    = G_("function %q+F can never be inlined "
		 "because it contains a computed goto");
	  *handled_ops_p = true;
	  return t;
	}
      break;

    default:
      break;
    }

  *handled_ops_p = false;
  return NULL_TREE;
}
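
/* Illustrative example for the alloca check above (hypothetical code;
   "consume" is a stand-in for arbitrary use of the buffer):

     static int push (unsigned n)
     {
       char *buf = __builtin_alloca (n);
       return consume (buf, n);
     }

   If push were inlined into a loop, every iteration would grow the
   caller's frame by N bytes with no release until the caller returns,
   which is exactly the unbounded growth the check guards against.  */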
/* Return true if FNDECL is a function that cannot be inlined into
   another one.  */

static bool
inline_forbidden_p (tree fndecl)
{
  struct function *fun = DECL_STRUCT_FUNCTION (fndecl);
  struct walk_stmt_info wi;
  basic_block bb;
  bool forbidden_p = false;

  /* First check for shared reasons not to copy the code.  */
  inline_forbidden_reason = copy_forbidden (fun, fndecl);
  if (inline_forbidden_reason != NULL)
    return true;

  /* Next, walk the statements of the function looking for
     constructs we can't handle, or that are non-optimal for inlining.  */
  hash_set<tree> visited_nodes;
  memset (&wi, 0, sizeof (wi));
  wi.info = (void *) fndecl;
  wi.pset = &visited_nodes;

  FOR_EACH_BB_FN (bb, fun)
    {
      gimple ret;
      gimple_seq seq = bb_seq (bb);
      ret = walk_gimple_seq (seq, inline_forbidden_p_stmt, NULL, &wi);
      forbidden_p = (ret != NULL);
      if (forbidden_p)
	break;
    }

  return forbidden_p;
}
/* Return false if the function FNDECL cannot be inlined on account of its
   attributes, true otherwise.  */

bool
function_attribute_inlinable_p (const_tree fndecl)
{
  if (targetm.attribute_table)
    {
      const_tree a;

      for (a = DECL_ATTRIBUTES (fndecl); a; a = TREE_CHAIN (a))
	{
	  const_tree name = TREE_PURPOSE (a);
	  int i;

	  for (i = 0; targetm.attribute_table[i].name != NULL; i++)
	    if (is_attribute_p (targetm.attribute_table[i].name, name))
	      return targetm.function_attribute_inlinable_p (fndecl);
	}
    }

  return true;
}
/* Returns nonzero if FN is a function that does not have any
   fundamental inline blocking properties.  */

bool
tree_inlinable_function_p (tree fn)
{
  bool inlinable = true;
  bool do_warning;
  tree always_inline;

  /* If we've already decided this function shouldn't be inlined,
     there's no need to check again.  */
  if (DECL_UNINLINABLE (fn))
    return false;

  /* We only warn for functions declared `inline' by the user.  */
  do_warning = (warn_inline
		&& DECL_DECLARED_INLINE_P (fn)
		&& !DECL_NO_INLINE_WARNING_P (fn)
		&& !DECL_IN_SYSTEM_HEADER (fn));

  always_inline = lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn));

  if (flag_no_inline
      && always_inline == NULL)
    {
      if (do_warning)
	warning (OPT_Winline, "function %q+F can never be inlined because it "
		 "is suppressed using -fno-inline", fn);
      inlinable = false;
    }
  else if (!function_attribute_inlinable_p (fn))
    {
      if (do_warning)
	warning (OPT_Winline, "function %q+F can never be inlined because it "
		 "uses attributes conflicting with inlining", fn);
      inlinable = false;
    }
  else if (inline_forbidden_p (fn))
    {
      /* See if we should warn about uninlinable functions.  Previously,
	 some of these warnings would be issued while trying to expand
	 the function inline, but that would cause multiple warnings
	 about functions that would for example call alloca.  But since
	 this is a property of the function, just one warning is enough.
	 As a bonus we can now give more details about the reason why a
	 function is not inlinable.  */
      if (always_inline)
	error (inline_forbidden_reason, fn);
      else if (do_warning)
	warning (OPT_Winline, inline_forbidden_reason, fn);

      inlinable = false;
    }

  /* Squirrel away the result so that we don't have to check again.  */
  DECL_UNINLINABLE (fn) = !inlinable;

  return inlinable;
}
/* Estimate the cost of a memory move of type TYPE.  Use machine dependent
   word size, take a possible memcpy call into account, and return the
   cost based on whether optimizing for size or speed according to SPEED_P.  */

int
estimate_move_cost (tree type, bool ARG_UNUSED (speed_p))
{
  HOST_WIDE_INT size;

  gcc_assert (!VOID_TYPE_P (type));

  if (TREE_CODE (type) == VECTOR_TYPE)
    {
      machine_mode inner = TYPE_MODE (TREE_TYPE (type));
      machine_mode simd
	= targetm.vectorize.preferred_simd_mode (inner);
      int simd_mode_size = GET_MODE_SIZE (simd);
      return ((GET_MODE_SIZE (TYPE_MODE (type)) + simd_mode_size - 1)
	      / simd_mode_size);
    }

  size = int_size_in_bytes (type);

  if (size < 0 || size > MOVE_MAX_PIECES * MOVE_RATIO (speed_p))
    /* Cost of a memcpy call, 3 arguments and the call.  */
    return 4;
  else
    return ((size + MOVE_MAX_PIECES - 1) / MOVE_MAX_PIECES);
}
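
/* Worked example (illustrative; assumes MOVE_MAX_PIECES == 8 and
   MOVE_RATIO (speed_p) == 4 on the target): a 24-byte struct costs
   (24 + 8 - 1) / 8 == 3, while a 40-byte struct exceeds the
   8 * 4 == 32 byte threshold and is charged 4, the assumed cost of a
   memcpy call (3 arguments plus the call itself).  */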
/* Returns cost of operation CODE, according to WEIGHTS  */

static int
estimate_operator_cost (enum tree_code code, eni_weights *weights,
			tree op1 ATTRIBUTE_UNUSED, tree op2)
{
  switch (code)
    {
    /* These are "free" conversions, or their presumed cost
       is folded into other operations.  */
    case RANGE_EXPR:
    CASE_CONVERT:
    case COMPLEX_EXPR:
    case PAREN_EXPR:
    case VIEW_CONVERT_EXPR:
      return 0;

    /* Assign cost of 1 to usual operations.
       ???  We may consider mapping RTL costs to this.  */
    case COND_EXPR:
    case VEC_COND_EXPR:
    case VEC_PERM_EXPR:

    case PLUS_EXPR:
    case POINTER_PLUS_EXPR:
    case MINUS_EXPR:
    case MULT_EXPR:
    case MULT_HIGHPART_EXPR:
    case FMA_EXPR:

    case ADDR_SPACE_CONVERT_EXPR:
    case FIXED_CONVERT_EXPR:
    case FIX_TRUNC_EXPR:

    case NEGATE_EXPR:
    case FLOAT_EXPR:
    case MIN_EXPR:
    case MAX_EXPR:
    case ABS_EXPR:

    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:

    case BIT_IOR_EXPR:
    case BIT_XOR_EXPR:
    case BIT_AND_EXPR:
    case BIT_NOT_EXPR:

    case TRUTH_ANDIF_EXPR:
    case TRUTH_ORIF_EXPR:
    case TRUTH_AND_EXPR:
    case TRUTH_OR_EXPR:
    case TRUTH_XOR_EXPR:
    case TRUTH_NOT_EXPR:

    case LT_EXPR:
    case LE_EXPR:
    case GT_EXPR:
    case GE_EXPR:
    case EQ_EXPR:
    case NE_EXPR:
    case ORDERED_EXPR:
    case UNORDERED_EXPR:

    case UNLT_EXPR:
    case UNLE_EXPR:
    case UNGT_EXPR:
    case UNGE_EXPR:
    case UNEQ_EXPR:
    case LTGT_EXPR:

    case CONJ_EXPR:

    case PREDECREMENT_EXPR:
    case PREINCREMENT_EXPR:
    case POSTDECREMENT_EXPR:
    case POSTINCREMENT_EXPR:

    case REALIGN_LOAD_EXPR:

    case REDUC_MAX_EXPR:
    case REDUC_MIN_EXPR:
    case REDUC_PLUS_EXPR:
    case WIDEN_SUM_EXPR:
    case WIDEN_MULT_EXPR:
    case DOT_PROD_EXPR:
    case SAD_EXPR:
    case WIDEN_MULT_PLUS_EXPR:
    case WIDEN_MULT_MINUS_EXPR:
    case WIDEN_LSHIFT_EXPR:

    case VEC_WIDEN_MULT_HI_EXPR:
    case VEC_WIDEN_MULT_LO_EXPR:
    case VEC_WIDEN_MULT_EVEN_EXPR:
    case VEC_WIDEN_MULT_ODD_EXPR:
    case VEC_UNPACK_HI_EXPR:
    case VEC_UNPACK_LO_EXPR:
    case VEC_UNPACK_FLOAT_HI_EXPR:
    case VEC_UNPACK_FLOAT_LO_EXPR:
    case VEC_PACK_TRUNC_EXPR:
    case VEC_PACK_SAT_EXPR:
    case VEC_PACK_FIX_TRUNC_EXPR:
    case VEC_WIDEN_LSHIFT_HI_EXPR:
    case VEC_WIDEN_LSHIFT_LO_EXPR:

      return 1;

    /* Few special cases of expensive operations.  This is useful
       to avoid inlining on functions having too many of these.  */
    case TRUNC_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case EXACT_DIV_EXPR:
    case TRUNC_MOD_EXPR:
    case CEIL_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case ROUND_MOD_EXPR:
    case RDIV_EXPR:
      if (TREE_CODE (op2) != INTEGER_CST)
	return weights->div_mod_cost;
      return 1;

    default:
      /* We expect a copy assignment with no operator.  */
      gcc_assert (get_gimple_rhs_class (code) == GIMPLE_SINGLE_RHS);
      return 0;
    }
}
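
/* Illustrative consequence of the weights above: in "a = b / c" with
   non-constant C, the division is charged weights->div_mod_cost,
   whereas "a = b / 16" falls through to the generic cost of 1.  This
   is what makes division-heavy functions look expensive to the
   inliner.  */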
/* Estimate number of instructions that will be created by expanding
   the statements in the statement sequence STMTS.
   WEIGHTS contains weights attributed to various constructs.  */

int
estimate_num_insns_seq (gimple_seq stmts, eni_weights *weights)
{
  int cost;
  gimple_stmt_iterator gsi;

  cost = 0;
  for (gsi = gsi_start (stmts); !gsi_end_p (gsi); gsi_next (&gsi))
    cost += estimate_num_insns (gsi_stmt (gsi), weights);

  return cost;
}
/* Estimate number of instructions that will be created by expanding STMT.
   WEIGHTS contains weights attributed to various constructs.  */

int
estimate_num_insns (gimple stmt, eni_weights *weights)
{
  unsigned cost, i;
  enum gimple_code code = gimple_code (stmt);
  tree lhs;
  tree rhs;

  switch (code)
    {
    case GIMPLE_ASSIGN:
      /* Try to estimate the cost of assignments.  We have two cases to
         deal with:
         1) Simple assignments to registers;
         2) Stores to things that must live in memory.  This includes
            "normal" stores to scalars, but also assignments of large
            structures, or constructors of big arrays;

         Let us look at the first two cases, assuming we have "a = b + C":
         <GIMPLE_ASSIGN <var_decl "a"> <plus_expr <var_decl "b"> <constant C>>>
         If "a" is a GIMPLE register, the assignment to it is free on almost
         any target, because "a" usually ends up in a real register.  Hence
         the only cost of this expression comes from the PLUS_EXPR, and we
         can ignore the GIMPLE_ASSIGN.
         If "a" is not a GIMPLE register, the assignment to "a" will most
         likely be a real store, so the cost of the GIMPLE_ASSIGN is the cost
         of moving something into "a", which we compute using the function
         estimate_move_cost.  */
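      /* Concrete example (added for exposition): for "tmp_1 = b_2 + 1"
         with tmp_1 an SSA register, the GIMPLE_ASSIGN itself is free and
         only the PLUS_EXPR is charged.  For a store "a.f = tmp_3", the
         RHS is a plain copy (operator cost 0) and the whole cost is the
         estimate_move_cost of TREE_TYPE (a.f).  */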
      if (gimple_clobber_p (stmt))
        return 0;	/* ={v} {CLOBBER} stmt expands to nothing.  */

      lhs = gimple_assign_lhs (stmt);
      rhs = gimple_assign_rhs1 (stmt);

      cost = 0;

      /* Account for the cost of moving to / from memory.  */
      if (gimple_store_p (stmt))
        cost += estimate_move_cost (TREE_TYPE (lhs), weights->time_based);
      if (gimple_assign_load_p (stmt))
        cost += estimate_move_cost (TREE_TYPE (rhs), weights->time_based);

      cost += estimate_operator_cost (gimple_assign_rhs_code (stmt), weights,
                                      gimple_assign_rhs1 (stmt),
                                      get_gimple_rhs_class (gimple_assign_rhs_code (stmt))
                                      == GIMPLE_BINARY_RHS
                                      ? gimple_assign_rhs2 (stmt) : NULL);
      break;
    case GIMPLE_COND:
      cost = 1 + estimate_operator_cost (gimple_cond_code (stmt), weights,
                                         gimple_op (stmt, 0),
                                         gimple_op (stmt, 1));
      break;

    case GIMPLE_SWITCH:
      {
        gswitch *switch_stmt = as_a <gswitch *> (stmt);
        /* Take into account the cost of the switch + guess 2 conditional
           jumps for each case label.

           TODO: once the switch expansion logic is sufficiently separated,
           we can do a better job on estimating the cost of the switch.  */
        if (weights->time_based)
          cost = floor_log2 (gimple_switch_num_labels (switch_stmt)) * 2;
        else
          cost = gimple_switch_num_labels (switch_stmt) * 2;
      }
      break;
    case GIMPLE_CALL:
      {
        tree decl;

        if (gimple_call_internal_p (stmt))
          return 0;
        else if ((decl = gimple_call_fndecl (stmt))
                 && DECL_BUILT_IN (decl))
          {
            /* Do not special case builtins where we see the body.
               This just confuses the inliner.  */
            struct cgraph_node *node;
            if (!(node = cgraph_node::get (decl))
                || node->definition)
              ;
            /* For builtins that are likely expanded to nothing or
               inlined do not account operand costs.  */
            else if (is_simple_builtin (decl))
              return 0;
            else if (is_inexpensive_builtin (decl))
              return weights->target_builtin_call_cost;
            else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
              {
                /* We canonicalize x * x to pow (x, 2.0) with -ffast-math, so
                   specialize the cheap expansion we do here.
                   ??? This asks for a more general solution.  */
                switch (DECL_FUNCTION_CODE (decl))
                  {
                  CASE_FLT_FN (BUILT_IN_POW):
                    if (TREE_CODE (gimple_call_arg (stmt, 1)) == REAL_CST
                        && REAL_VALUES_EQUAL
                             (TREE_REAL_CST (gimple_call_arg (stmt, 1)), dconst2))
                      return estimate_operator_cost
                               (MULT_EXPR, weights, gimple_call_arg (stmt, 0),
                                gimple_call_arg (stmt, 0));
                    break;

                  default:
                    break;
                  }
              }
          }

        cost = decl ? weights->call_cost : weights->indirect_call_cost;
        if (gimple_call_lhs (stmt))
          cost += estimate_move_cost (TREE_TYPE (gimple_call_lhs (stmt)),
                                      weights->time_based);
        for (i = 0; i < gimple_call_num_args (stmt); i++)
          {
            tree arg = gimple_call_arg (stmt, i);
            cost += estimate_move_cost (TREE_TYPE (arg),
                                        weights->time_based);
          }
        break;
      }

    case GIMPLE_RETURN:
      return weights->return_cost;
    case GIMPLE_PREDICT:
      return 0;

    case GIMPLE_ASM:
      {
        int count = asm_str_count (gimple_asm_string (as_a <gasm *> (stmt)));
        /* 1000 means infinity.  This avoids overflows later
           with very long asm statements.  */
        return count > 1000 ? 1000 : count;
      }

    case GIMPLE_RESX:
      /* This is either going to be an external function call with one
         argument, or two register copy statements plus a goto.  */
      return 2;

    case GIMPLE_EH_DISPATCH:
      /* ??? This is going to turn into a switch statement.  Ideally
         we'd have a look at the eh region and estimate the number of
         edges involved.  */
      return 10;

    case GIMPLE_BIND:
      return estimate_num_insns_seq (
               gimple_bind_body (as_a <gbind *> (stmt)),
               weights);

    case GIMPLE_EH_FILTER:
      return estimate_num_insns_seq (gimple_eh_filter_failure (stmt), weights);

    case GIMPLE_CATCH:
      return estimate_num_insns_seq (gimple_catch_handler (
                                       as_a <gcatch *> (stmt)),
                                     weights);

    case GIMPLE_TRY:
      return (estimate_num_insns_seq (gimple_try_eval (stmt), weights)
              + estimate_num_insns_seq (gimple_try_cleanup (stmt), weights));

    /* OMP directives are generally very expensive.  */

    case GIMPLE_OMP_RETURN:
    case GIMPLE_OMP_SECTIONS_SWITCH:
    case GIMPLE_OMP_ATOMIC_STORE:
    case GIMPLE_OMP_CONTINUE:
      /* ...except these, which are cheap.  */
      return 0;

    case GIMPLE_OMP_ATOMIC_LOAD:
      return weights->omp_cost;

    case GIMPLE_OMP_FOR:
      return (weights->omp_cost
              + estimate_num_insns_seq (gimple_omp_body (stmt), weights)
              + estimate_num_insns_seq (gimple_omp_for_pre_body (stmt), weights));

    case GIMPLE_OMP_PARALLEL:
    case GIMPLE_OMP_TASK:
    case GIMPLE_OMP_CRITICAL:
    case GIMPLE_OMP_MASTER:
    case GIMPLE_OMP_TASKGROUP:
    case GIMPLE_OMP_ORDERED:
    case GIMPLE_OMP_SECTION:
    case GIMPLE_OMP_SECTIONS:
    case GIMPLE_OMP_SINGLE:
    case GIMPLE_OMP_TARGET:
    case GIMPLE_OMP_TEAMS:
      return (weights->omp_cost
              + estimate_num_insns_seq (gimple_omp_body (stmt), weights));

    case GIMPLE_TRANSACTION:
      return (weights->tm_cost
              + estimate_num_insns_seq (gimple_transaction_body (
                                          as_a <gtransaction *> (stmt)),
                                        weights));

    default:
      gcc_unreachable ();
    }

  return cost;
}
/* Estimate number of instructions that will be created by expanding
   function FNDECL.  WEIGHTS contains weights attributed to various
   constructs.  */

int
estimate_num_insns_fn (tree fndecl, eni_weights *weights)
{
  struct function *my_function = DECL_STRUCT_FUNCTION (fndecl);
  gimple_stmt_iterator bsi;
  basic_block bb;
  int n = 0;

  gcc_assert (my_function && my_function->cfg);
  FOR_EACH_BB_FN (bb, my_function)
    {
      for (bsi = gsi_start_bb (bb); !gsi_end_p (bsi); gsi_next (&bsi))
        n += estimate_num_insns (gsi_stmt (bsi), weights);
    }

  return n;
}
/* Initializes weights used by estimate_num_insns.  */

void
init_inline_once (void)
{
  eni_size_weights.call_cost = 1;
  eni_size_weights.indirect_call_cost = 3;
  eni_size_weights.target_builtin_call_cost = 1;
  eni_size_weights.div_mod_cost = 1;
  eni_size_weights.omp_cost = 40;
  eni_size_weights.tm_cost = 10;
  eni_size_weights.time_based = false;
  eni_size_weights.return_cost = 1;

  /* Estimating time for a call is difficult, since we have no idea what
     the called function does.  In the current uses of eni_time_weights,
     underestimating the cost does less harm than overestimating it, so
     we choose a rather small value here.  */
  eni_time_weights.call_cost = 10;
  eni_time_weights.indirect_call_cost = 15;
  eni_time_weights.target_builtin_call_cost = 1;
  eni_time_weights.div_mod_cost = 10;
  eni_time_weights.omp_cost = 40;
  eni_time_weights.tm_cost = 40;
  eni_time_weights.time_based = true;
  eni_time_weights.return_cost = 2;
}
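/* Illustrative sketch (added, not part of the original source): the two
   weight sets give two views of the same statement.  A division by a
   variable, for instance, costs 1 unit under eni_size_weights but 10
   under eni_time_weights, so a size-conscious heuristic and a
   speed-conscious one can disagree about the same body.  Kept under
   #if 0 since it is exposition only.  */
#if 0
static void
weights_example (gimple stmt)
{
  /* Size view: a variable division counts as 1 unit.  */
  int size = estimate_num_insns (stmt, &eni_size_weights);
  /* Time view: the same division counts as 10 units.  */
  int time = estimate_num_insns (stmt, &eni_time_weights);
  if (dump_file)
    fprintf (dump_file, "size %d, time %d\n", size, time);
}
#endif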
/* Estimate the number of instructions in a gimple_seq.  */

int
count_insns_seq (gimple_seq seq, eni_weights *weights)
{
  gimple_stmt_iterator gsi;
  int n = 0;

  for (gsi = gsi_start (seq); !gsi_end_p (gsi); gsi_next (&gsi))
    n += estimate_num_insns (gsi_stmt (gsi), weights);

  return n;
}
/* Install new lexical TREE_BLOCK underneath 'current_block'.  */

static void
prepend_lexical_block (tree current_block, tree new_block)
{
  BLOCK_CHAIN (new_block) = BLOCK_SUBBLOCKS (current_block);
  BLOCK_SUBBLOCKS (current_block) = new_block;
  BLOCK_SUPERCONTEXT (new_block) = current_block;
}
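/* Added illustration: given CURRENT_BLOCK with subblocks B1 -> B2,
   prepend_lexical_block (CURRENT_BLOCK, NEW) rewires the block tree to

       CURRENT_BLOCK
         NEW -> B1 -> B2

   i.e. NEW becomes the first subblock and its BLOCK_CHAIN points at the
   old head of the subblock list.  */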
/* Add local variables from CALLEE to CALLER.  */

static inline void
add_local_variables (struct function *callee, struct function *caller,
                     copy_body_data *id)
{
  tree var;
  unsigned ix;

  FOR_EACH_LOCAL_DECL (callee, ix, var)
    if (!can_be_nonlocal (var, id))
      {
        tree new_var = remap_decl (var, id);

        /* Remap debug-expressions.  */
        if (TREE_CODE (new_var) == VAR_DECL
            && DECL_HAS_DEBUG_EXPR_P (var)
            && new_var != var)
          {
            tree tem = DECL_DEBUG_EXPR (var);
            bool old_regimplify = id->regimplify;
            id->remapping_type_depth++;
            walk_tree (&tem, copy_tree_body_r, id, NULL);
            id->remapping_type_depth--;
            id->regimplify = old_regimplify;
            SET_DECL_DEBUG_EXPR (new_var, tem);
            DECL_HAS_DEBUG_EXPR_P (new_var) = 1;
          }
        add_local_decl (caller, new_var);
      }
}
/* Add to BINDINGS a debug stmt resetting SRCVAR if inlining might
   have brought in or introduced any debug stmts for SRCVAR.  */

static void
reset_debug_binding (copy_body_data *id, tree srcvar, gimple_seq *bindings)
{
  tree *remappedvarp = id->decl_map->get (srcvar);

  if (!remappedvarp)
    return;

  if (TREE_CODE (*remappedvarp) != VAR_DECL)
    return;

  if (*remappedvarp == id->retvar || *remappedvarp == id->retbnd)
    return;

  tree tvar = target_for_debug_bind (*remappedvarp);
  if (!tvar)
    return;

  gdebug *stmt = gimple_build_debug_bind (tvar, NULL_TREE,
                                          id->call_stmt);
  gimple_seq_add_stmt (bindings, stmt);
}
/* For each inlined variable for which we may have debug bind stmts,
   add before GSI a final debug stmt resetting it, marking the end of
   its life, so that var-tracking knows it doesn't have to compute
   further locations for it.  */

static void
reset_debug_bindings (copy_body_data *id, gimple_stmt_iterator gsi)
{
  tree var;
  unsigned ix;
  gimple_seq bindings = NULL;

  if (!gimple_in_ssa_p (id->src_cfun))
    return;

  if (!opt_for_fn (id->dst_fn, flag_var_tracking_assignments))
    return;

  for (var = DECL_ARGUMENTS (id->src_fn);
       var; var = DECL_CHAIN (var))
    reset_debug_binding (id, var, &bindings);

  FOR_EACH_LOCAL_DECL (id->src_cfun, ix, var)
    reset_debug_binding (id, var, &bindings);

  gsi_insert_seq_before_without_update (&gsi, bindings, GSI_SAME_STMT);
}
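/* Added example: if the callee declared "int x" and x was remapped, the
   code above emits a "# DEBUG x => NULL" bind at the end of the inlined
   region, so var-tracking stops extending x's location ranges past the
   inlined body.  */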
/* If STMT is a GIMPLE_CALL, replace it with its inline expansion.  */

static bool
expand_call_inline (basic_block bb, gimple stmt, copy_body_data *id)
{
  tree use_retvar;
  tree fn;
  hash_map<tree, tree> *dst;
  hash_map<tree, tree> *st = NULL;
  tree return_slot;
  tree modify_dest;
  tree return_bounds = NULL;
  location_t saved_location;
  struct cgraph_edge *cg_edge;
  cgraph_inline_failed_t reason;
  basic_block return_block;
  edge e;
  gimple_stmt_iterator gsi, stmt_gsi;
  bool successfully_inlined = FALSE;
  bool purge_dead_abnormal_edges;
  gcall *call_stmt;
  unsigned int i;

  /* Set input_location here so we get the right instantiation context
     if we call instantiate_decl from inlinable_function_p.  */
  /* FIXME: instantiate_decl isn't called by inlinable_function_p.  */
  saved_location = input_location;
  input_location = gimple_location (stmt);

  /* From here on, we're only interested in CALL_EXPRs.  */
  call_stmt = dyn_cast <gcall *> (stmt);
  if (!call_stmt)
    goto egress;
  cg_edge = id->dst_node->get_edge (stmt);
  gcc_checking_assert (cg_edge);
  /* First, see if we can figure out what function is being called.
     If we cannot, then there is no hope of inlining the function.  */
  if (cg_edge->indirect_unknown_callee)
    goto egress;
  fn = cg_edge->callee->decl;
  gcc_checking_assert (fn);

  /* If FN is a declaration of a function in a nested scope that was
     globally declared inline, we don't set its DECL_INITIAL.
     However, we can't blindly follow DECL_ABSTRACT_ORIGIN because the
     C++ front-end uses it for cdtors to refer to their internal
     declarations, that are not real functions.  Fortunately those
     don't have trees to be saved, so we can tell by checking their
     gimple_body.  */
  if (!DECL_INITIAL (fn)
      && DECL_ABSTRACT_ORIGIN (fn)
      && gimple_has_body_p (DECL_ABSTRACT_ORIGIN (fn)))
    fn = DECL_ABSTRACT_ORIGIN (fn);

  /* Don't try to inline functions that are not well-suited to inlining.  */
  if (cg_edge->inline_failed)
    {
      reason = cg_edge->inline_failed;
      /* If this call was originally indirect, we do not want to emit any
         inlining related warnings or sorry messages because there are no
         guarantees regarding those.  */
      if (cg_edge->indirect_inlining_edge)
        goto egress;

      if (lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn))
          /* For extern inline functions that get redefined we always
             silently ignore the always_inline flag.  Better behavior
             would be to be able to keep both bodies and use the extern
             inline body for inlining, but we can't do that because
             frontends overwrite the body.  */
          && !cg_edge->callee->local.redefined_extern_inline
          /* During early inline pass, report only when optimization is
             not turned on.  */
          && (symtab->global_info_ready
              || !optimize
              || cgraph_inline_failed_type (reason) == CIF_FINAL_ERROR)
          /* PR 20090218-1_0.c.  Body can be provided by another module.  */
          && (reason != CIF_BODY_NOT_AVAILABLE || !flag_generate_lto))
        {
          error ("inlining failed in call to always_inline %q+F: %s", fn,
                 cgraph_inline_failed_string (reason));
          error ("called from here");
        }
      else if (warn_inline
               && DECL_DECLARED_INLINE_P (fn)
               && !DECL_NO_INLINE_WARNING_P (fn)
               && !DECL_IN_SYSTEM_HEADER (fn)
               && reason != CIF_UNSPECIFIED
               && !lookup_attribute ("noinline", DECL_ATTRIBUTES (fn))
               /* Do not warn about not inlined recursive calls.  */
               && !cg_edge->recursive_p ()
               /* Avoid warnings during early inline pass.  */
               && symtab->global_info_ready)
        {
          warning (OPT_Winline, "inlining failed in call to %q+F: %s",
                   fn, _(cgraph_inline_failed_string (reason)));
          warning (OPT_Winline, "called from here");
        }
      goto egress;
    }
  fn = cg_edge->callee->decl;
  cg_edge->callee->get_untransformed_body ();

#ifdef ENABLE_CHECKING
  if (cg_edge->callee->decl != id->dst_node->decl)
    cg_edge->callee->verify ();
#endif

  /* We will be inlining this callee.  */
  id->eh_lp_nr = lookup_stmt_eh_lp (stmt);
  id->assign_stmts.create (0);

  /* Update the caller's EH personality.  */
  if (DECL_FUNCTION_PERSONALITY (cg_edge->callee->decl))
    DECL_FUNCTION_PERSONALITY (cg_edge->caller->decl)
      = DECL_FUNCTION_PERSONALITY (cg_edge->callee->decl);

  /* Split the block holding the GIMPLE_CALL.  */
  e = split_block (bb, stmt);
  bb = e->src;
  return_block = e->dest;
  remove_edge (e);

  /* split_block splits after the statement; work around this by
     moving the call into the second block manually.  Not pretty,
     but seems easier than doing the CFG manipulation by hand
     when the GIMPLE_CALL is in the last statement of BB.  */
  stmt_gsi = gsi_last_bb (bb);
  gsi_remove (&stmt_gsi, false);

  /* If the GIMPLE_CALL was in the last statement of BB, it may have
     been the source of abnormal edges.  In this case, schedule
     the removal of dead abnormal edges.  */
  gsi = gsi_start_bb (return_block);
  if (gsi_end_p (gsi))
    {
      gsi_insert_after (&gsi, stmt, GSI_NEW_STMT);
      purge_dead_abnormal_edges = true;
    }
  else
    {
      gsi_insert_before (&gsi, stmt, GSI_NEW_STMT);
      purge_dead_abnormal_edges = false;
    }

  stmt_gsi = gsi_start_bb (return_block);
  /* Build a block containing code to initialize the arguments, the
     actual inline expansion of the body, and a label for the return
     statements within the function to jump to.  The type of the
     statement expression is the return type of the function call.
     ??? If the call does not have an associated block then we will
     remap all callee blocks to NULL, effectively dropping most of
     its debug information.  This should only happen for calls to
     artificial decls inserted by the compiler itself.  We need to
     either link the inlined blocks into the caller block tree or
     not refer to them in any way to not break GC for locations.  */
  if (gimple_block (stmt))
    {
      id->block = make_node (BLOCK);
      BLOCK_ABSTRACT_ORIGIN (id->block) = fn;
      BLOCK_SOURCE_LOCATION (id->block) = LOCATION_LOCUS (input_location);
      prepend_lexical_block (gimple_block (stmt), id->block);
    }

  /* Local declarations will be replaced by their equivalents in this
     map.  */
  st = id->decl_map;
  id->decl_map = new hash_map<tree, tree>;
  dst = id->debug_map;
  id->debug_map = NULL;

  /* Record the function we are about to inline.  */
  id->src_fn = fn;
  id->src_node = cg_edge->callee;
  id->src_cfun = DECL_STRUCT_FUNCTION (fn);
  id->call_stmt = stmt;

  /* If the src function contains an IFN_VA_ARG, then so will the dst
     function after inlining.  */
  if ((id->src_cfun->curr_properties & PROP_gimple_lva) == 0)
    {
      struct function *dst_cfun = DECL_STRUCT_FUNCTION (id->dst_fn);
      dst_cfun->curr_properties &= ~PROP_gimple_lva;
    }

  gcc_assert (!id->src_cfun->after_inlining);

  id->entry_bb = bb;
  if (lookup_attribute ("cold", DECL_ATTRIBUTES (fn)))
    {
      gimple_stmt_iterator si = gsi_last_bb (bb);
      gsi_insert_after (&si, gimple_build_predict (PRED_COLD_FUNCTION,
                                                   NOT_TAKEN),
                        GSI_NEW_STMT);
    }
  initialize_inlined_parameters (id, stmt, fn, bb);

  if (DECL_INITIAL (fn))
    {
      if (gimple_block (stmt))
        {
          tree *var;

          prepend_lexical_block (id->block,
                                 remap_blocks (DECL_INITIAL (fn), id));
          gcc_checking_assert (BLOCK_SUBBLOCKS (id->block)
                               && (BLOCK_CHAIN (BLOCK_SUBBLOCKS (id->block))
                                   == NULL_TREE));
          /* Move vars for PARM_DECLs from the DECL_INITIAL block to
             id->block; otherwise the DW_TAG_formal_parameter DIEs will
             not be children of the DW_TAG_inlined_subroutine DIE, but of
             a DW_TAG_lexical_block under it.  The parameters can then be
             evaluated in the debugger, but don't show in backtraces.  */
          for (var = &BLOCK_VARS (BLOCK_SUBBLOCKS (id->block)); *var; )
            if (TREE_CODE (DECL_ORIGIN (*var)) == PARM_DECL)
              {
                tree v = *var;
                *var = TREE_CHAIN (v);
                TREE_CHAIN (v) = BLOCK_VARS (id->block);
                BLOCK_VARS (id->block) = v;
              }
            else
              var = &TREE_CHAIN (*var);
        }
      else
        remap_blocks_to_null (DECL_INITIAL (fn), id);
    }

  /* Return statements in the function body will be replaced by jumps
     to the RET_LABEL.  */
  gcc_assert (DECL_INITIAL (fn));
  gcc_assert (TREE_CODE (DECL_INITIAL (fn)) == BLOCK);
  /* Find the LHS to which the result of this call is assigned.  */
  return_slot = NULL;
  if (gimple_call_lhs (stmt))
    {
      modify_dest = gimple_call_lhs (stmt);

      /* Remember where to copy returned bounds.  */
      if (gimple_call_with_bounds_p (stmt)
          && TREE_CODE (modify_dest) == SSA_NAME)
        {
          gcall *retbnd = chkp_retbnd_call_by_val (modify_dest);
          if (retbnd)
            {
              return_bounds = gimple_call_lhs (retbnd);
              /* If returned bounds are not used then just
                 remove unused call.  */
              if (!return_bounds)
                {
                  gimple_stmt_iterator iter = gsi_for_stmt (retbnd);
                  gsi_remove (&iter, true);
                }
            }
        }
      /* The function which we are inlining might not return a value,
         in which case we should issue a warning that the function
         does not return a value.  In that case the optimizers will
         see that the variable to which the value is assigned was not
         initialized.  We do not want to issue a warning about that
         uninitialized variable.  */
      if (DECL_P (modify_dest))
        TREE_NO_WARNING (modify_dest) = 1;

      if (gimple_call_return_slot_opt_p (call_stmt))
        {
          return_slot = modify_dest;
          modify_dest = NULL;
        }
    }
  else
    modify_dest = NULL;

  /* If we are inlining a call to the C++ operator new, we don't want
     to use type based alias analysis on the return value.  Otherwise
     we may get confused if the compiler sees that the inlined new
     function returns a pointer which was just deleted.  See bug
     33407.  */
  if (DECL_IS_OPERATOR_NEW (fn))
    {
      return_slot = NULL;
      modify_dest = NULL;
    }

  /* Declare the return variable for the function.  */
  use_retvar = declare_return_variable (id, return_slot, modify_dest,
                                        return_bounds, bb);

  /* Add local vars in this inlined callee to caller.  */
  add_local_variables (id->src_cfun, cfun, id);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Inlining ");
      print_generic_expr (dump_file, id->src_fn, 0);
      fprintf (dump_file, " to ");
      print_generic_expr (dump_file, id->dst_fn, 0);
      fprintf (dump_file, " with frequency %i\n", cg_edge->frequency);
    }
  /* This is it.  Duplicate the callee body.  Assume callee is
     pre-gimplified.  Note that we must not alter the caller
     function in any way before this point, as this CALL_EXPR may be
     a self-referential call; if we're calling ourselves, we need to
     duplicate our body before altering anything.  */
  copy_body (id, cg_edge->callee->count,
             GCOV_COMPUTE_SCALE (cg_edge->frequency, CGRAPH_FREQ_BASE),
             bb, return_block, NULL);

  reset_debug_bindings (id, stmt_gsi);

  /* Reset the escaped solution.  */
  if (cfun->gimple_df)
    pt_solution_reset (&cfun->gimple_df->escaped);

  /* Clean up.  */
  if (id->debug_map)
    {
      delete id->debug_map;
      id->debug_map = dst;
    }
  delete id->decl_map;
  id->decl_map = st;

  /* Unlink the call's virtual operands before replacing it.  */
  unlink_stmt_vdef (stmt);
  if (gimple_vdef (stmt)
      && TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
    release_ssa_name (gimple_vdef (stmt));
  /* If the inlined function returns a result that we care about,
     substitute the GIMPLE_CALL with an assignment of the return
     variable to the LHS of the call.  That is, if STMT was
     'a = foo (...)', substitute the call with 'a = USE_RETVAR'.  */
  if (use_retvar && gimple_call_lhs (stmt))
    {
      gimple old_stmt = stmt;
      stmt = gimple_build_assign (gimple_call_lhs (stmt), use_retvar);
      gsi_replace (&stmt_gsi, stmt, false);
      maybe_clean_or_replace_eh_stmt (old_stmt, stmt);

      /* Copy bounds if we copy structure with bounds.  */
      if (chkp_function_instrumented_p (id->dst_fn)
          && !BOUNDED_P (use_retvar)
          && chkp_type_has_pointer (TREE_TYPE (use_retvar)))
        id->assign_stmts.safe_push (stmt);
    }
  else
    {
      /* Handle the case of inlining a function with no return
         statement, which causes the return value to become undefined.  */
      if (gimple_call_lhs (stmt)
          && TREE_CODE (gimple_call_lhs (stmt)) == SSA_NAME)
        {
          tree name = gimple_call_lhs (stmt);
          tree var = SSA_NAME_VAR (name);
          tree def = ssa_default_def (cfun, var);

          if (def)
            {
              /* If the variable is used undefined, make this name
                 undefined via a move.  */
              stmt = gimple_build_assign (gimple_call_lhs (stmt), def);
              gsi_replace (&stmt_gsi, stmt, true);
            }
          else
            {
              /* Otherwise make this variable undefined.  */
              gsi_remove (&stmt_gsi, true);
              set_ssa_default_def (cfun, var, name);
              SSA_NAME_DEF_STMT (name) = gimple_build_nop ();
            }
        }
      else
        gsi_remove (&stmt_gsi, true);
    }

  /* Put returned bounds into the correct place if required.  */
  if (return_bounds)
    {
      gimple old_stmt = SSA_NAME_DEF_STMT (return_bounds);
      gimple new_stmt = gimple_build_assign (return_bounds, id->retbnd);
      gimple_stmt_iterator bnd_gsi = gsi_for_stmt (old_stmt);
      unlink_stmt_vdef (old_stmt);
      gsi_replace (&bnd_gsi, new_stmt, false);
      maybe_clean_or_replace_eh_stmt (old_stmt, new_stmt);
      cgraph_update_edges_for_call_stmt (old_stmt,
                                         gimple_call_fndecl (old_stmt),
                                         new_stmt);
    }

  if (purge_dead_abnormal_edges)
    {
      gimple_purge_dead_eh_edges (return_block);
      gimple_purge_dead_abnormal_call_edges (return_block);
    }

  /* If the value of the new expression is ignored, that's OK.  We
     don't warn about this for CALL_EXPRs, so we shouldn't warn about
     the equivalent inlined version either.  */
  if (is_gimple_assign (stmt))
    {
      gcc_assert (gimple_assign_single_p (stmt)
                  || CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt)));
      TREE_USED (gimple_assign_rhs1 (stmt)) = 1;
    }

  /* Copy bounds for all generated assigns that need it.  */
  for (i = 0; i < id->assign_stmts.length (); i++)
    chkp_copy_bounds_for_assign (id->assign_stmts[i], cg_edge);
  id->assign_stmts.release ();

  /* Output the inlining info for this abstract function, since it has been
     inlined.  If we don't do this now, we can lose the information about the
     variables in the function when the blocks get blown away as soon as we
     remove the cgraph node.  */
  if (gimple_block (stmt))
    (*debug_hooks->outlining_inline_function) (cg_edge->callee->decl);

  /* Update callgraph if needed.  */
  cg_edge->callee->remove ();

  id->block = NULL_TREE;
  successfully_inlined = TRUE;

 egress:
  input_location = saved_location;
  return successfully_inlined;
}
/* Expand call statements reachable from STMT_P.
   We can only have CALL_EXPRs as the "toplevel" tree code or nested
   in a MODIFY_EXPR.  */

static bool
gimple_expand_calls_inline (basic_block bb, copy_body_data *id)
{
  gimple_stmt_iterator gsi;
  bool inlined = false;

  for (gsi = gsi_last_bb (bb); !gsi_end_p (gsi);)
    {
      gimple stmt = gsi_stmt (gsi);
      gsi_prev (&gsi);

      if (is_gimple_call (stmt)
          && !gimple_call_internal_p (stmt))
        inlined |= expand_call_inline (bb, stmt, id);
    }

  return inlined;
}
/* Walk all basic blocks created after FIRST and try to fold every statement
   in the STATEMENTS pointer set.  */

static void
fold_marked_statements (int first, hash_set<gimple> *statements)
{
  for (; first < n_basic_blocks_for_fn (cfun); first++)
    if (BASIC_BLOCK_FOR_FN (cfun, first))
      {
        gimple_stmt_iterator gsi;

        for (gsi = gsi_start_bb (BASIC_BLOCK_FOR_FN (cfun, first));
             !gsi_end_p (gsi);
             gsi_next (&gsi))
          if (statements->contains (gsi_stmt (gsi)))
            {
              gimple old_stmt = gsi_stmt (gsi);
              tree old_decl
                = is_gimple_call (old_stmt) ? gimple_call_fndecl (old_stmt) : 0;

              if (old_decl && DECL_BUILT_IN (old_decl))
                {
                  /* Folding builtins can create multiple instructions,
                     we need to look at all of them.  */
                  gimple_stmt_iterator i2 = gsi;
                  gsi_prev (&i2);
                  if (fold_stmt (&gsi))
                    {
                      gimple new_stmt;
                      /* If a builtin at the end of a bb folded into nothing,
                         the following loop won't work.  */
                      if (gsi_end_p (gsi))
                        {
                          cgraph_update_edges_for_call_stmt (old_stmt,
                                                             old_decl, NULL);
                          break;
                        }
                      if (gsi_end_p (i2))
                        i2 = gsi_start_bb (BASIC_BLOCK_FOR_FN (cfun, first));
                      else
                        gsi_next (&i2);
                      while (1)
                        {
                          new_stmt = gsi_stmt (i2);
                          update_stmt (new_stmt);
                          cgraph_update_edges_for_call_stmt (old_stmt, old_decl,
                                                             new_stmt);

                          if (new_stmt == gsi_stmt (gsi))
                            {
                              /* It is okay to check only the very last of
                                 these statements.  If it is a throwing
                                 statement nothing will change; if it isn't,
                                 this can remove EH edges.  The only way this
                                 could be wrong is if some intermediate stmt
                                 could throw while the last one cannot; that
                                 would mean we'd have to split the block,
                                 which we can't do here, and we'd lose the
                                 EH edge anyway.  And since builtins probably
                                 never throw, this is all moot anyway.  */
                              if (maybe_clean_or_replace_eh_stmt (old_stmt,
                                                                  new_stmt))
                                gimple_purge_dead_eh_edges (
                                  BASIC_BLOCK_FOR_FN (cfun, first));
                              break;
                            }
                          gsi_next (&i2);
                        }
                    }
                }
              else if (fold_stmt (&gsi))
                {
                  /* Re-read the statement from GSI as fold_stmt() may
                     have changed it.  */
                  gimple new_stmt = gsi_stmt (gsi);
                  update_stmt (new_stmt);

                  if (is_gimple_call (old_stmt)
                      || is_gimple_call (new_stmt))
                    cgraph_update_edges_for_call_stmt (old_stmt, old_decl,
                                                       new_stmt);

                  if (maybe_clean_or_replace_eh_stmt (old_stmt, new_stmt))
                    gimple_purge_dead_eh_edges (BASIC_BLOCK_FOR_FN (cfun,
                                                                    first));
                }
            }
      }
}
/* Expand calls to inline functions in the body of FN.  */

unsigned int
optimize_inline_calls (tree fn)
{
  copy_body_data id;
  basic_block bb;
  int last = n_basic_blocks_for_fn (cfun);
  bool inlined_p = false;

  /* Clear out ID.  */
  memset (&id, 0, sizeof (id));

  id.src_node = id.dst_node = cgraph_node::get (fn);
  gcc_assert (id.dst_node->definition);
  id.dst_fn = fn;
  /* Or any functions that aren't finished yet.  */
  if (current_function_decl)
    id.dst_fn = current_function_decl;

  id.copy_decl = copy_decl_maybe_to_var;
  id.transform_call_graph_edges = CB_CGE_DUPLICATE;
  id.transform_new_cfg = false;
  id.transform_return_to_modify = true;
  id.transform_parameter = true;
  id.transform_lang_insert_block = NULL;
  id.statements_to_fold = new hash_set<gimple>;

  push_gimplify_context ();

  /* We make no attempts to keep dominance info up-to-date.  */
  free_dominance_info (CDI_DOMINATORS);
  free_dominance_info (CDI_POST_DOMINATORS);

  /* Register specific gimple functions.  */
  gimple_register_cfg_hooks ();

  /* Reach the trees by walking over the CFG, and note the
     enclosing basic-blocks in the call edges.  */
  /* We walk the blocks going forward, because inlined function bodies
     will split id->current_basic_block, and the new blocks will
     follow it; we'll trudge through them, processing their CALL_EXPRs
     along the way.  */
  FOR_EACH_BB_FN (bb, cfun)
    inlined_p |= gimple_expand_calls_inline (bb, &id);

  pop_gimplify_context (NULL);

#ifdef ENABLE_CHECKING
    {
      struct cgraph_edge *e;

      id.dst_node->verify ();

      /* Double check that we inlined everything we are supposed to inline.  */
      for (e = id.dst_node->callees; e; e = e->next_callee)
        gcc_assert (e->inline_failed);
    }
#endif

  /* Fold queued statements.  */
  fold_marked_statements (last, id.statements_to_fold);
  delete id.statements_to_fold;

  gcc_assert (!id.debug_stmts.exists ());

  /* If we didn't inline into the function there is nothing to do.  */
  if (!inlined_p)
    return 0;

  /* Renumber the lexical scoping (non-code) blocks consecutively.  */
  number_blocks (fn);

  delete_unreachable_blocks_update_callgraph (&id);
#ifdef ENABLE_CHECKING
  id.dst_node->verify ();
#endif

  /* It would be nice to check SSA/CFG/statement consistency here, but it is
     not possible yet - the IPA passes might make various functions not
     throw, and they don't care to proactively update local EH info.  This
     is done later in the fixup_cfg pass, which also executes the
     verification.  */
  return (TODO_update_ssa
          | TODO_cleanup_cfg
          | (gimple_in_ssa_p (cfun) ? TODO_remove_unused_locals : 0)
          | (gimple_in_ssa_p (cfun) ? TODO_update_address_taken : 0)
          | (profile_status_for_fn (cfun) != PROFILE_ABSENT
             ? TODO_rebuild_frequencies : 0));
}
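/* Illustrative sketch (added, not part of the original source): this is
   roughly how a gimple pass would drive the inliner for one function;
   the real driver lives in ipa-inline-transform.c.  Kept under #if 0
   since it is exposition only.  */
#if 0
static unsigned int
execute_inline_example (void)
{
  /* current_function_decl is the function the pass is working on; the
     returned TODO flags tell the pass manager what to clean up.  */
  return optimize_inline_calls (current_function_decl);
}
#endif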
/* Passed to walk_tree.  Copies the node pointed to, if appropriate.  */

tree
copy_tree_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
{
  enum tree_code code = TREE_CODE (*tp);
  enum tree_code_class cl = TREE_CODE_CLASS (code);

  /* We make copies of most nodes.  */
  if (IS_EXPR_CODE_CLASS (cl)
      || code == TREE_LIST
      || code == TREE_VEC
      || code == TYPE_DECL
      || code == OMP_CLAUSE)
    {
      /* Because the chain gets clobbered when we make a copy, we save it
         here.  */
      tree chain = NULL_TREE, new_tree;

      if (CODE_CONTAINS_STRUCT (code, TS_COMMON))
        chain = TREE_CHAIN (*tp);

      /* Copy the node.  */
      new_tree = copy_node (*tp);

      *tp = new_tree;

      /* Now, restore the chain, if appropriate.  That will cause
         walk_tree to walk into the chain as well.  */
      if (code == PARM_DECL
          || code == TREE_LIST
          || code == OMP_CLAUSE)
        TREE_CHAIN (*tp) = chain;

      /* For now, we don't update BLOCKs when we make copies.  So, we
         have to nullify all BIND_EXPRs.  */
      if (TREE_CODE (*tp) == BIND_EXPR)
        BIND_EXPR_BLOCK (*tp) = NULL_TREE;
    }
  else if (code == CONSTRUCTOR)
    {
      /* CONSTRUCTOR nodes need special handling because
         we need to duplicate the vector of elements.  */
      tree new_tree;

      new_tree = copy_node (*tp);
      CONSTRUCTOR_ELTS (new_tree) = vec_safe_copy (CONSTRUCTOR_ELTS (*tp));
      *tp = new_tree;
    }
  else if (code == STATEMENT_LIST)
    /* We used to just abort on STATEMENT_LIST, but we can run into them
       with statement-expressions (c++/40975).  */
    copy_statement_list (tp);
  else if (TREE_CODE_CLASS (code) == tcc_type)
    *walk_subtrees = 0;
  else if (TREE_CODE_CLASS (code) == tcc_declaration)
    *walk_subtrees = 0;
  else if (TREE_CODE_CLASS (code) == tcc_constant)
    *walk_subtrees = 0;
  return NULL_TREE;
}
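/* Usage sketch (added, not part of the original source): copy_tree_r is
   a walk_tree callback, so a deep copy of an expression is spelled as
   below; this is the pattern the body-remapping callbacks above build
   on.  Kept under #if 0 since it is exposition only.  */
#if 0
static tree
deep_copy_expr_example (tree expr)
{
  /* walk_tree copies each node in place via copy_tree_r and then
     recurses into the (fresh) operands.  */
  walk_tree (&expr, copy_tree_r, NULL, NULL);
  return expr;
}
#endif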
/* The SAVE_EXPR pointed to by TP is being copied.  If ST contains
   information indicating to what new SAVE_EXPR this one should be mapped,
   use that one.  Otherwise, create a new node and enter it in ST.  FN is
   the function into which the copy will be placed.  */

static void
remap_save_expr (tree *tp, hash_map<tree, tree> *st, int *walk_subtrees)
{
  tree *n;
  tree t;

  /* See if we already encountered this SAVE_EXPR.  */
  n = st->get (*tp);

  /* If we didn't already remap this SAVE_EXPR, do so now.  */
  if (!n)
    {
      t = copy_node (*tp);

      /* Remember this SAVE_EXPR.  */
      st->put (*tp, t);
      /* Make sure we don't remap an already-remapped SAVE_EXPR.  */
      st->put (t, t);
    }
  else
    {
      /* We've already walked into this SAVE_EXPR; don't do it again.  */
      *walk_subtrees = 0;
      t = *n;
    }

  /* Replace this SAVE_EXPR with the copy.  */
  *tp = t;
}
/* Called via walk_gimple_seq.  If *GSIP points to a GIMPLE_LABEL for a local
   label, copies the declaration and enters it in the splay_tree in DATA
   (which is really a 'copy_body_data *').  */

static tree
mark_local_labels_stmt (gimple_stmt_iterator *gsip,
                        bool *handled_ops_p ATTRIBUTE_UNUSED,
                        struct walk_stmt_info *wi)
{
  copy_body_data *id = (copy_body_data *) wi->info;
  glabel *stmt = dyn_cast <glabel *> (gsi_stmt (*gsip));

  if (stmt)
    {
      tree decl = gimple_label_label (stmt);

      /* Copy the decl and remember the copy.  */
      insert_decl_map (id, decl, id->copy_decl (decl, id));
    }

  return NULL_TREE;
}
/* Called via walk_gimple_seq by copy_gimple_seq_and_replace_local.
   Using the splay_tree pointed to by ST (which is really a `splay_tree'),
   remaps all local declarations to appropriate replacements in gimple
   operands.  */

static tree
replace_locals_op (tree *tp, int *walk_subtrees, void *data)
{
  struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
  copy_body_data *id = (copy_body_data *) wi->info;
  hash_map<tree, tree> *st = id->decl_map;
  tree *n;
  tree expr = *tp;

  /* Only a local declaration (variable or label).  */
  if ((TREE_CODE (expr) == VAR_DECL
       && !TREE_STATIC (expr))
      || TREE_CODE (expr) == LABEL_DECL)
    {
      /* Lookup the declaration.  */
      n = st->get (expr);

      /* If it's there, remap it.  */
      if (n)
        *tp = *n;
      *walk_subtrees = 0;
    }
  else if (TREE_CODE (expr) == STATEMENT_LIST
           || TREE_CODE (expr) == BIND_EXPR
           || TREE_CODE (expr) == SAVE_EXPR)
    gcc_unreachable ();
  else if (TREE_CODE (expr) == TARGET_EXPR)
    {
      /* Don't mess with a TARGET_EXPR that hasn't been expanded.
         It's OK for this to happen if it was part of a subtree that
         isn't immediately expanded, such as operand 2 of another
         TARGET_EXPR.  */
      if (!TREE_OPERAND (expr, 1))
        {
          TREE_OPERAND (expr, 1) = TREE_OPERAND (expr, 3);
          TREE_OPERAND (expr, 3) = NULL_TREE;
        }
    }

  /* Keep iterating.  */
  return NULL_TREE;
}
/* Called via walk_gimple_seq by copy_gimple_seq_and_replace_local.
   Using the splay_tree pointed to by ST (which is really a `splay_tree'),
   remaps all local declarations to appropriate replacements in gimple
   statements.  */

static tree
replace_locals_stmt (gimple_stmt_iterator *gsip,
                     bool *handled_ops_p ATTRIBUTE_UNUSED,
                     struct walk_stmt_info *wi)
{
  copy_body_data *id = (copy_body_data *) wi->info;
  gimple gs = gsi_stmt (*gsip);

  if (gbind *stmt = dyn_cast <gbind *> (gs))
    {
      tree block = gimple_bind_block (stmt);

      if (block)
        {
          remap_block (&block, id);
          gimple_bind_set_block (stmt, block);
        }

      /* This will remap a lot of the same decls again, but this should be
         harmless.  */
      if (gimple_bind_vars (stmt))
        gimple_bind_set_vars (stmt, remap_decls (gimple_bind_vars (stmt),
                                                 NULL, id));
    }

  /* Keep iterating.  */
  return NULL_TREE;
}
/* Copies everything in SEQ and replaces variables and labels local to
   current_function_decl.  */

gimple_seq
copy_gimple_seq_and_replace_locals (gimple_seq seq)
{
  copy_body_data id;
  struct walk_stmt_info wi;
  gimple_seq copy;

  /* There's nothing to do for NULL_TREE.  */
  if (seq == NULL)
    return seq;

  /* Set up ID.  */
  memset (&id, 0, sizeof (id));
  id.src_fn = current_function_decl;
  id.dst_fn = current_function_decl;
  id.decl_map = new hash_map<tree, tree>;
  id.debug_map = NULL;

  id.copy_decl = copy_decl_no_change;
  id.transform_call_graph_edges = CB_CGE_DUPLICATE;
  id.transform_new_cfg = false;
  id.transform_return_to_modify = false;
  id.transform_parameter = false;
  id.transform_lang_insert_block = NULL;

  /* Walk the tree once to find local labels.  */
  memset (&wi, 0, sizeof (wi));
  hash_set<tree> visited;
  wi.info = &id;
  wi.pset = &visited;
  walk_gimple_seq (seq, mark_local_labels_stmt, NULL, &wi);

  copy = gimple_seq_copy (seq);

  /* Walk the copy, remapping decls.  */
  memset (&wi, 0, sizeof (wi));
  wi.info = &id;
  walk_gimple_seq (copy, replace_locals_stmt, replace_locals_op, &wi);

  /* Clean up.  */
  delete id.decl_map;
  if (id.debug_map)
    delete id.debug_map;
  if (id.dependence_map)
    {
      delete id.dependence_map;
      id.dependence_map = NULL;
    }

  return copy;
}
/* Allow someone to determine if SEARCH is a child of TOP from gdb.  */

static tree
debug_find_tree_1 (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED, void *data)
{
  if (*tp == data)
    return (tree) data;
  else
    return NULL;
}

DEBUG_FUNCTION bool
debug_find_tree (tree top, tree search)
{
  return walk_tree_without_duplicates (&top, debug_find_tree_1, search) != 0;
}
/* Declare the variables created by the inliner.  Add all the variables in
   VARS to BIND_EXPR.  */

static void
declare_inline_vars (tree block, tree vars)
{
  tree t;
  for (t = vars; t; t = DECL_CHAIN (t))
    {
      DECL_SEEN_IN_BIND_EXPR_P (t) = 1;
      gcc_assert (!TREE_STATIC (t) && !TREE_ASM_WRITTEN (t));
      add_local_decl (cfun, t);
    }

  if (block)
    BLOCK_VARS (block) = chainon (BLOCK_VARS (block), vars);
}
/* Copy NODE (which must be a DECL).  The DECL originally was in the FROM_FN,
   but now it will be in the TO_FN.  PARM_TO_VAR means enable PARM_DECL to
   VAR_DECL translation.  */

static tree
copy_decl_for_dup_finish (copy_body_data *id, tree decl, tree copy)
{
  /* Don't generate debug information for the copy if we wouldn't have
     generated it for the original.  */
  DECL_ARTIFICIAL (copy) = DECL_ARTIFICIAL (decl);
  DECL_IGNORED_P (copy) = DECL_IGNORED_P (decl);

  /* Set the DECL_ABSTRACT_ORIGIN so the debugging routines know what
     declaration inspired this copy.  */
  DECL_ABSTRACT_ORIGIN (copy) = DECL_ORIGIN (decl);

  /* The new variable/label has no RTL, yet.  */
  if (CODE_CONTAINS_STRUCT (TREE_CODE (copy), TS_DECL_WRTL)
      && !TREE_STATIC (copy) && !DECL_EXTERNAL (copy))
    SET_DECL_RTL (copy, 0);

  /* These args would always appear unused, if not for this.  */
  TREE_USED (copy) = 1;

  /* Set the context for the new declaration.  */
  if (!DECL_CONTEXT (decl))
    /* Globals stay global.  */
    ;
  else if (DECL_CONTEXT (decl) != id->src_fn)
    /* Things that weren't in the scope of the function we're inlining
       from aren't in the scope we're inlining to, either.  */
    ;
  else if (TREE_STATIC (decl))
    /* Function-scoped static variables should stay in the original
       function.  */
    ;
  else
    /* Ordinary automatic local variables are now in the scope of the
       new function.  */
    DECL_CONTEXT (copy) = id->dst_fn;

  return copy;
}
static tree
copy_decl_to_var (tree decl, copy_body_data *id)
{
  tree copy, type;

  gcc_assert (TREE_CODE (decl) == PARM_DECL
              || TREE_CODE (decl) == RESULT_DECL);

  type = TREE_TYPE (decl);

  copy = build_decl (DECL_SOURCE_LOCATION (id->dst_fn),
                     VAR_DECL, DECL_NAME (decl), type);
  if (DECL_PT_UID_SET_P (decl))
    SET_DECL_PT_UID (copy, DECL_PT_UID (decl));
  TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (decl);
  TREE_READONLY (copy) = TREE_READONLY (decl);
  TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (decl);
  DECL_GIMPLE_REG_P (copy) = DECL_GIMPLE_REG_P (decl);

  return copy_decl_for_dup_finish (id, decl, copy);
}
/* Like copy_decl_to_var, but create a return slot object instead of a
   pointer variable for return by invisible reference.  */

static tree
copy_result_decl_to_var (tree decl, copy_body_data *id)
{
  tree copy, type;

  gcc_assert (TREE_CODE (decl) == PARM_DECL
              || TREE_CODE (decl) == RESULT_DECL);

  type = TREE_TYPE (decl);
  if (DECL_BY_REFERENCE (decl))
    type = TREE_TYPE (type);

  copy = build_decl (DECL_SOURCE_LOCATION (id->dst_fn),
                     VAR_DECL, DECL_NAME (decl), type);
  if (DECL_PT_UID_SET_P (decl))
    SET_DECL_PT_UID (copy, DECL_PT_UID (decl));
  TREE_READONLY (copy) = TREE_READONLY (decl);
  TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (decl);
  if (!DECL_BY_REFERENCE (decl))
    {
      TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (decl);
      DECL_GIMPLE_REG_P (copy) = DECL_GIMPLE_REG_P (decl);
    }

  return copy_decl_for_dup_finish (id, decl, copy);
}
tree
copy_decl_no_change (tree decl, copy_body_data *id)
{
  tree copy;

  copy = copy_node (decl);

  /* The COPY is not abstract; it will be generated in DST_FN.  */
  DECL_ABSTRACT_P (copy) = false;
  lang_hooks.dup_lang_specific_decl (copy);

  /* TREE_ADDRESSABLE isn't used to indicate that a label's address has
     been taken; it's for internal bookkeeping in expand_goto_internal.  */
  if (TREE_CODE (copy) == LABEL_DECL)
    {
      TREE_ADDRESSABLE (copy) = 0;
      LABEL_DECL_UID (copy) = -1;
    }

  return copy_decl_for_dup_finish (id, decl, copy);
}
static tree
copy_decl_maybe_to_var (tree decl, copy_body_data *id)
{
  if (TREE_CODE (decl) == PARM_DECL || TREE_CODE (decl) == RESULT_DECL)
    return copy_decl_to_var (decl, id);
  else
    return copy_decl_no_change (decl, id);
}
/* Return a copy of the function's argument tree.  */
static tree
copy_arguments_for_versioning (tree orig_parm, copy_body_data * id,
                               bitmap args_to_skip, tree *vars)
{
  tree arg, *parg;
  tree new_parm = NULL;
  int i = 0;

  parg = &new_parm;

  for (arg = orig_parm; arg; arg = DECL_CHAIN (arg), i++)
    if (!args_to_skip || !bitmap_bit_p (args_to_skip, i))
      {
        tree new_tree = remap_decl (arg, id);
        if (TREE_CODE (new_tree) != PARM_DECL)
          new_tree = id->copy_decl (arg, id);
        lang_hooks.dup_lang_specific_decl (new_tree);
        *parg = new_tree;
        parg = &DECL_CHAIN (new_tree);
      }
    else if (!id->decl_map->get (arg))
      {
        /* Make an equivalent VAR_DECL.  If the argument was used as a
           temporary variable later in the function, the uses will be
           replaced by the local variable.  */
        tree var = copy_decl_to_var (arg, id);
        insert_decl_map (id, arg, var);
        /* Declare this new variable.  */
        DECL_CHAIN (var) = *vars;
        *vars = var;
      }
  return new_parm;
}
/* Return a copy of the function's static chain.  */
static tree
copy_static_chain (tree static_chain, copy_body_data * id)
{
  tree *chain_copy, *pvar;

  chain_copy = &static_chain;
  for (pvar = chain_copy; *pvar; pvar = &DECL_CHAIN (*pvar))
    {
      tree new_tree = remap_decl (*pvar, id);
      lang_hooks.dup_lang_specific_decl (new_tree);
      DECL_CHAIN (new_tree) = DECL_CHAIN (*pvar);
      *pvar = new_tree;
    }
  return static_chain;
}
/* Return true if the function is allowed to be versioned.
   This is a guard for the versioning functionality.  */

bool
tree_versionable_function_p (tree fndecl)
{
  return (!lookup_attribute ("noclone", DECL_ATTRIBUTES (fndecl))
          && copy_forbidden (DECL_STRUCT_FUNCTION (fndecl), fndecl) == NULL);
}
/* Delete all unreachable basic blocks and update the callgraph.
   Doing so is somewhat nontrivial because we need to update all clones and
   remove inline functions that become unreachable.  */

static bool
delete_unreachable_blocks_update_callgraph (copy_body_data *id)
{
  bool changed = false;
  basic_block b, next_bb;

  find_unreachable_blocks ();

  /* Delete all unreachable basic blocks.  */

  for (b = ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb;
       b != EXIT_BLOCK_PTR_FOR_FN (cfun); b = next_bb)
    {
      next_bb = b->next_bb;

      if (!(b->flags & BB_REACHABLE))
        {
          gimple_stmt_iterator bsi;

          for (bsi = gsi_start_bb (b); !gsi_end_p (bsi); gsi_next (&bsi))
            {
              struct cgraph_edge *e;
              struct cgraph_node *node;

              id->dst_node->remove_stmt_references (gsi_stmt (bsi));

              if (gimple_code (gsi_stmt (bsi)) == GIMPLE_CALL
                  && (e = id->dst_node->get_edge (gsi_stmt (bsi))) != NULL)
                {
                  if (!e->inline_failed)
                    e->callee->remove_symbol_and_inline_clones (id->dst_node);
                  else
                    e->remove ();
                }
              if (id->transform_call_graph_edges == CB_CGE_MOVE_CLONES
                  && id->dst_node->clones)
                for (node = id->dst_node->clones; node != id->dst_node;)
                  {
                    node->remove_stmt_references (gsi_stmt (bsi));
                    if (gimple_code (gsi_stmt (bsi)) == GIMPLE_CALL
                        && (e = node->get_edge (gsi_stmt (bsi))) != NULL)
                      {
                        if (!e->inline_failed)
                          e->callee->remove_symbol_and_inline_clones (id->dst_node);
                        else
                          e->remove ();
                      }

                    if (node->clones)
                      node = node->clones;
                    else if (node->next_sibling_clone)
                      node = node->next_sibling_clone;
                    else
                      {
                        while (node != id->dst_node && !node->next_sibling_clone)
                          node = node->clone_of;
                        if (node != id->dst_node)
                          node = node->next_sibling_clone;
                      }
                  }
            }
          delete_basic_block (b);
          changed = true;
        }
    }

  return changed;
}
/* Update clone info after duplication.  */

static void
update_clone_info (copy_body_data * id)
{
  struct cgraph_node *node;
  if (!id->dst_node->clones)
    return;
  for (node = id->dst_node->clones; node != id->dst_node;)
    {
      /* First update replace maps to match the new body.  */
      if (node->clone.tree_map)
        {
          unsigned int i;
          for (i = 0; i < vec_safe_length (node->clone.tree_map); i++)
            {
              struct ipa_replace_map *replace_info;
              replace_info = (*node->clone.tree_map)[i];
              walk_tree (&replace_info->old_tree, copy_tree_body_r, id, NULL);
              walk_tree (&replace_info->new_tree, copy_tree_body_r, id, NULL);
            }
        }
      if (node->clones)
        node = node->clones;
      else if (node->next_sibling_clone)
        node = node->next_sibling_clone;
      else
        {
          while (node != id->dst_node && !node->next_sibling_clone)
            node = node->clone_of;
          if (node != id->dst_node)
            node = node->next_sibling_clone;
        }
    }
}
/* Create a copy of a function's tree.
   OLD_DECL and NEW_DECL are FUNCTION_DECL tree nodes
   of the original function and the new copied function
   respectively.  In case we want to replace a DECL
   tree with another tree while duplicating the function's
   body, TREE_MAP represents the mapping between these
   trees.  If UPDATE_CLONES is set, the call_stmt fields
   of edges of clones of the function will be updated.

   If non-NULL, ARGS_TO_SKIP determines which function parameters to remove.
   If SKIP_RETURN is true, the new version will return void.
   If non-NULL, BLOCKS_TO_COPY determines which basic blocks to copy.
   If non-NULL, NEW_ENTRY determines the new entry BB of the clone.  */

void
tree_function_versioning (tree old_decl, tree new_decl,
                          vec<ipa_replace_map *, va_gc> *tree_map,
                          bool update_clones, bitmap args_to_skip,
                          bool skip_return, bitmap blocks_to_copy,
                          basic_block new_entry)
{
  struct cgraph_node *old_version_node;
  struct cgraph_node *new_version_node;
  copy_body_data id;
  tree p;
  unsigned i;
  struct ipa_replace_map *replace_info;
  basic_block old_entry_block, bb;
  auto_vec<gimple, 10> init_stmts;
  tree vars = NULL_TREE;

  gcc_assert (TREE_CODE (old_decl) == FUNCTION_DECL
              && TREE_CODE (new_decl) == FUNCTION_DECL);
  DECL_POSSIBLY_INLINED (old_decl) = 1;

  old_version_node = cgraph_node::get (old_decl);
  gcc_checking_assert (old_version_node);
  new_version_node = cgraph_node::get (new_decl);
  gcc_checking_assert (new_version_node);
  /* Copy over debug args.  */
  if (DECL_HAS_DEBUG_ARGS_P (old_decl))
    {
      vec<tree, va_gc> **new_debug_args, **old_debug_args;
      gcc_checking_assert (decl_debug_args_lookup (new_decl) == NULL);
      DECL_HAS_DEBUG_ARGS_P (new_decl) = 0;
      old_debug_args = decl_debug_args_lookup (old_decl);
      if (old_debug_args)
        {
          new_debug_args = decl_debug_args_insert (new_decl);
          *new_debug_args = vec_safe_copy (*old_debug_args);
        }
    }

  /* Output the inlining info for this abstract function, since it has been
     inlined.  If we don't do this now, we can lose the information about the
     variables in the function when the blocks get blown away as soon as we
     remove the cgraph node.  */
  (*debug_hooks->outlining_inline_function) (old_decl);

  DECL_ARTIFICIAL (new_decl) = 1;
  DECL_ABSTRACT_ORIGIN (new_decl) = DECL_ORIGIN (old_decl);
  if (DECL_ORIGIN (old_decl) == old_decl)
    old_version_node->used_as_abstract_origin = true;
  DECL_FUNCTION_PERSONALITY (new_decl) = DECL_FUNCTION_PERSONALITY (old_decl);

  /* Prepare the data structures for the tree copy.  */
  memset (&id, 0, sizeof (id));

  /* Generate a new name for the new version.  */
  id.statements_to_fold = new hash_set<gimple>;

  id.decl_map = new hash_map<tree, tree>;
  id.debug_map = NULL;
  id.src_fn = old_decl;
  id.dst_fn = new_decl;
  id.src_node = old_version_node;
  id.dst_node = new_version_node;
  id.src_cfun = DECL_STRUCT_FUNCTION (old_decl);
  id.blocks_to_copy = blocks_to_copy;

  id.copy_decl = copy_decl_no_change;
  id.transform_call_graph_edges
    = update_clones ? CB_CGE_MOVE_CLONES : CB_CGE_MOVE;
  id.transform_new_cfg = true;
  id.transform_return_to_modify = false;
  id.transform_parameter = false;
  id.transform_lang_insert_block = NULL;

  old_entry_block = ENTRY_BLOCK_PTR_FOR_FN
    (DECL_STRUCT_FUNCTION (old_decl));
  DECL_RESULT (new_decl) = DECL_RESULT (old_decl);
  DECL_ARGUMENTS (new_decl) = DECL_ARGUMENTS (old_decl);
  initialize_cfun (new_decl, old_decl,
                   old_entry_block->count);
  if (DECL_STRUCT_FUNCTION (new_decl)->gimple_df)
    DECL_STRUCT_FUNCTION (new_decl)->gimple_df->ipa_pta
      = id.src_cfun->gimple_df->ipa_pta;
  /* Copy the function's static chain.  */
  p = DECL_STRUCT_FUNCTION (old_decl)->static_chain_decl;
  if (p)
    DECL_STRUCT_FUNCTION (new_decl)->static_chain_decl =
      copy_static_chain (DECL_STRUCT_FUNCTION (old_decl)->static_chain_decl,
                         &id);

  /* If there's a tree_map, prepare for substitution.  */
  if (tree_map)
    for (i = 0; i < tree_map->length (); i++)
      {
        gimple init;
        replace_info = (*tree_map)[i];
        if (replace_info->replace_p)
          {
            if (!replace_info->old_tree)
              {
                int i = replace_info->parm_num;
                tree parm;
                tree req_type;

                for (parm = DECL_ARGUMENTS (old_decl); i; parm = DECL_CHAIN (parm))
                  i --;
                replace_info->old_tree = parm;
                req_type = TREE_TYPE (parm);
                if (!useless_type_conversion_p (req_type, TREE_TYPE (replace_info->new_tree)))
                  {
                    if (fold_convertible_p (req_type, replace_info->new_tree))
                      replace_info->new_tree = fold_build1 (NOP_EXPR, req_type, replace_info->new_tree);
                    else if (TYPE_SIZE (req_type) == TYPE_SIZE (TREE_TYPE (replace_info->new_tree)))
                      replace_info->new_tree = fold_build1 (VIEW_CONVERT_EXPR, req_type, replace_info->new_tree);
                    else
                      {
                        if (dump_file)
                          {
                            fprintf (dump_file, " const ");
                            print_generic_expr (dump_file, replace_info->new_tree, 0);
                            fprintf (dump_file, " can't be converted to param ");
                            print_generic_expr (dump_file, parm, 0);
                            fprintf (dump_file, "\n");
                          }
                        replace_info->old_tree = NULL;
                      }
                  }
              }
            else
              gcc_assert (TREE_CODE (replace_info->old_tree) == PARM_DECL);
            if (replace_info->old_tree)
              {
                init = setup_one_parameter (&id, replace_info->old_tree,
                                            replace_info->new_tree, id.src_fn,
                                            NULL,
                                            &vars);
                if (init)
                  init_stmts.safe_push (init);
              }
          }
      }
  /* Copy the function's arguments.  */
  if (DECL_ARGUMENTS (old_decl) != NULL_TREE)
    DECL_ARGUMENTS (new_decl) =
      copy_arguments_for_versioning (DECL_ARGUMENTS (old_decl), &id,
                                     args_to_skip, &vars);

  DECL_INITIAL (new_decl) = remap_blocks (DECL_INITIAL (id.src_fn), &id);
  BLOCK_SUPERCONTEXT (DECL_INITIAL (new_decl)) = new_decl;

  declare_inline_vars (DECL_INITIAL (new_decl), vars);

  if (!vec_safe_is_empty (DECL_STRUCT_FUNCTION (old_decl)->local_decls))
    /* Add local vars.  */
    add_local_variables (DECL_STRUCT_FUNCTION (old_decl), cfun, &id);

  if (DECL_RESULT (old_decl) == NULL_TREE)
    ;
  else if (skip_return && !VOID_TYPE_P (TREE_TYPE (DECL_RESULT (old_decl))))
    {
      DECL_RESULT (new_decl)
        = build_decl (DECL_SOURCE_LOCATION (DECL_RESULT (old_decl)),
                      RESULT_DECL, NULL_TREE, void_type_node);
      DECL_CONTEXT (DECL_RESULT (new_decl)) = new_decl;
      cfun->returns_struct = 0;
      cfun->returns_pcc_struct = 0;
    }
  else
    {
      tree old_name;
      DECL_RESULT (new_decl) = remap_decl (DECL_RESULT (old_decl), &id);
      lang_hooks.dup_lang_specific_decl (DECL_RESULT (new_decl));
      if (gimple_in_ssa_p (id.src_cfun)
          && DECL_BY_REFERENCE (DECL_RESULT (old_decl))
          && (old_name = ssa_default_def (id.src_cfun, DECL_RESULT (old_decl))))
        {
          tree new_name = make_ssa_name (DECL_RESULT (new_decl));
          insert_decl_map (&id, old_name, new_name);
          SSA_NAME_DEF_STMT (new_name) = gimple_build_nop ();
          set_ssa_default_def (cfun, DECL_RESULT (new_decl), new_name);
        }
    }
  /* Set up the destination function's loop tree.  */
  if (loops_for_fn (DECL_STRUCT_FUNCTION (old_decl)) != NULL)
    {
      cfun->curr_properties &= ~PROP_loops;
      loop_optimizer_init (AVOID_CFG_MODIFICATIONS);
      cfun->curr_properties |= PROP_loops;
    }

  /* Copy the function's body.  */
  copy_body (&id, old_entry_block->count, REG_BR_PROB_BASE,
             ENTRY_BLOCK_PTR_FOR_FN (cfun), EXIT_BLOCK_PTR_FOR_FN (cfun),
             new_entry);

  /* Renumber the lexical scoping (non-code) blocks consecutively.  */
  number_blocks (new_decl);

  /* We want to create the BB unconditionally, so that the addition of
     debug stmts doesn't affect BB count, which may in the end cause
     codegen differences.  */
  bb = split_edge (single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
  while (init_stmts.length ())
    insert_init_stmt (&id, bb, init_stmts.pop ());
  update_clone_info (&id);

  /* Remap the nonlocal_goto_save_area, if any.  */
  if (cfun->nonlocal_goto_save_area)
    {
      struct walk_stmt_info wi;

      memset (&wi, 0, sizeof (wi));
      wi.info = &id;
      walk_tree (&cfun->nonlocal_goto_save_area, remap_gimple_op_r, &wi, NULL);
    }

  /* Clean up.  */
  delete id.decl_map;
  if (id.debug_map)
    delete id.debug_map;
  free_dominance_info (CDI_DOMINATORS);
  free_dominance_info (CDI_POST_DOMINATORS);

  fold_marked_statements (0, id.statements_to_fold);
  delete id.statements_to_fold;
  fold_cond_expr_cond ();
  delete_unreachable_blocks_update_callgraph (&id);
  if (id.dst_node->definition)
    cgraph_edge::rebuild_references ();
  if (loops_state_satisfies_p (LOOPS_NEED_FIXUP))
    {
      calculate_dominance_info (CDI_DOMINATORS);
      fix_loop_structure (NULL);
    }
  update_ssa (TODO_update_ssa);

  /* After partial cloning we need to rescale frequencies, so they are
     within proper range in the cloned function.  */
  if (new_entry)
    {
      struct cgraph_edge *e;
      rebuild_frequencies ();

      new_version_node->count = ENTRY_BLOCK_PTR_FOR_FN (cfun)->count;
      for (e = new_version_node->callees; e; e = e->next_callee)
        {
          basic_block bb = gimple_bb (e->call_stmt);
          e->frequency = compute_call_stmt_bb_frequency (current_function_decl,
                                                         bb);
          e->count = bb->count;
        }
      for (e = new_version_node->indirect_calls; e; e = e->next_callee)
        {
          basic_block bb = gimple_bb (e->call_stmt);
          e->frequency = compute_call_stmt_bb_frequency (current_function_decl,
                                                         bb);
          e->count = bb->count;
        }
    }

  free_dominance_info (CDI_DOMINATORS);
  free_dominance_info (CDI_POST_DOMINATORS);

  gcc_assert (!id.debug_stmts.exists ());
  pop_cfun ();
  return;
}
/* EXP is a CALL_EXPR present in a GENERIC expression tree.  Try to integrate
   the callee and return the inlined body on success.  */

tree
maybe_inline_call_in_expr (tree exp)
{
  tree fn = get_callee_fndecl (exp);

  /* We can only try to inline "const" functions.  */
  if (fn && TREE_READONLY (fn) && DECL_SAVED_TREE (fn))
    {
      call_expr_arg_iterator iter;
      copy_body_data id;
      tree param, arg, t;
      hash_map<tree, tree> decl_map;

      /* Remap the parameters.  */
      for (param = DECL_ARGUMENTS (fn), arg = first_call_expr_arg (exp, &iter);
           param;
           param = DECL_CHAIN (param), arg = next_call_expr_arg (&iter))
        decl_map.put (param, arg);

      memset (&id, 0, sizeof (id));
      id.src_fn = fn;
      id.dst_fn = current_function_decl;
      id.src_cfun = DECL_STRUCT_FUNCTION (fn);
      id.decl_map = &decl_map;

      id.copy_decl = copy_decl_no_change;
      id.transform_call_graph_edges = CB_CGE_DUPLICATE;
      id.transform_new_cfg = false;
      id.transform_return_to_modify = true;
      id.transform_parameter = true;
      id.transform_lang_insert_block = NULL;

      /* Make sure not to unshare trees behind the front-end's back
         since front-end specific mechanisms may rely on sharing.  */
      id.regimplify = false;
      id.do_not_unshare = true;

      /* We're not inside any EH region.  */
      id.eh_lp_nr = 0;

      t = copy_tree_body (&id);

      /* We can only return something suitable for use in a GENERIC
         expression tree.  */
      if (TREE_CODE (t) == MODIFY_EXPR)
        return TREE_OPERAND (t, 1);
    }

  return NULL_TREE;
}
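/* Added example: for

       __attribute__ ((const)) static int sq (int i) { return i * i; }

   a GENERIC tree "sq (3)" can be replaced by the remapped body "3 * 3"
   here, because the call has no side effects (TREE_READONLY) and the
   saved tree is available; the return was rewritten into a MODIFY_EXPR
   whose operand 1 is the value we hand back.  */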
/* Duplicate a type, fields and all.  */

tree
build_duplicate_type (tree type)
{
  struct copy_body_data id;

  memset (&id, 0, sizeof (id));
  id.src_fn = current_function_decl;
  id.dst_fn = current_function_decl;
  id.src_cfun = cfun;
  id.decl_map = new hash_map<tree, tree>;
  id.debug_map = NULL;
  id.copy_decl = copy_decl_no_change;

  type = remap_type_1 (type, &id);

  delete id.decl_map;
  if (id.debug_map)
    delete id.debug_map;

  TYPE_CANONICAL (type) = type;

  return type;
}
/* Unshare the entire DECL_SAVED_TREE of FN and return the remapped
   parameters and RESULT_DECL in PARMS and RESULT.  Used by C++ constexpr
   evaluation.  */

tree
copy_fn (tree fn, tree& parms, tree& result)
{
  copy_body_data id;
  tree param;
  hash_map<tree, tree> decl_map;

  tree *p = &parms;
  *p = NULL_TREE;

  memset (&id, 0, sizeof (id));

  id.src_fn = fn;
  id.dst_fn = current_function_decl;
  id.src_cfun = DECL_STRUCT_FUNCTION (fn);
  id.decl_map = &decl_map;

  id.copy_decl = copy_decl_no_change;
  id.transform_call_graph_edges = CB_CGE_DUPLICATE;
  id.transform_new_cfg = false;
  id.transform_return_to_modify = false;
  id.transform_parameter = true;
  id.transform_lang_insert_block = NULL;

  /* Make sure not to unshare trees behind the front-end's back
     since front-end specific mechanisms may rely on sharing.  */
  id.regimplify = false;
  id.do_not_unshare = true;

  /* We're not inside any EH region.  */
  id.eh_lp_nr = 0;

  /* Remap the parameters and result and return them to the caller.  */
  for (param = DECL_ARGUMENTS (fn);
       param;
       param = DECL_CHAIN (param))
    {
      *p = remap_decl (param, &id);
      p = &DECL_CHAIN (*p);
    }

  if (DECL_RESULT (fn))
    result = remap_decl (DECL_RESULT (fn), &id);
  else
    result = NULL_TREE;

  return copy_tree_body (&id);
}
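/* Added usage note: the C++ front end calls copy_fn from its constexpr
   evaluator, so each evaluation of a constexpr function works on a
   fresh, unshared copy of the body, with PARMS and RESULT giving the
   remapped declarations to bind the arguments and the return value
   to.  */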