/* Copyright (C) 2001-2017 Free Software Foundation, Inc.
   Contributed by Alexandre Oliva <aoliva@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "coretypes.h"
#include "tree-pass.h"
#include "tree-pretty-print.h"
#include "diagnostic-core.h"
#include "gimple-predict.h"
#include "fold-const.h"
#include "stor-layout.h"
#include "tree-inline.h"
#include "langhooks.h"
#include "tree-iterator.h"
#include "gimple-fold.h"
#include "gimple-iterator.h"
#include "gimplify-me.h"
#include "gimple-walk.h"
#include "tree-into-ssa.h"
#include "value-prof.h"
#include "tree-chkp.h"
#include "stringpool.h"
/* I'm not real happy about this, but we need to handle gimple and
   non-gimple trees.  */
/* Inlining, Cloning, Versioning, Parallelization

   Inlining: a function body is duplicated, but the PARM_DECLs are
   remapped into VAR_DECLs, and non-void RETURN_EXPRs become
   MODIFY_EXPRs that store to a dedicated returned-value variable.
   The duplicated eh_region info of the copy will later be appended
   to the info for the caller; the eh_region info in copied throwing
   statements and RESX statements are adjusted accordingly.

   Cloning: (only in C++) We have one body for a con/de/structor, and
   multiple function decls, each with a unique parameter list.
   Duplicate the body, using the given splay tree; some parameters
   will become constants (like 0 or 1).

   Versioning: a function body is duplicated and the result is a new
   function rather than into blocks of an existing function as with
   inlining.  Some parameters will become constants.

   Parallelization: a region of a function is duplicated resulting in
   a new function.  Variables may be replaced with complex expressions
   to enable shared variable semantics.

   All of these will simultaneously lookup any callgraph edges.  If
   we're going to inline the duplicated function body, and the given
   function has some cloned callgraph nodes (one for each place this
   function will be inlined) those callgraph edges will be duplicated.
   If we're cloning the body, those callgraph edges will be
   updated to point into the new body.  (Note that the original
   callgraph node and edge list will not be altered.)

   See the CALL_EXPR handling case in copy_tree_body_r ().  */
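
/* Illustrative sketch (editorial addition, not part of the original file):
   for a caller such as

       int callee (int i) { return i + 1; }
       int caller (void) { return callee (3); }

   inlining conceptually rewrites the duplicated body of callee so that its
   PARM_DECL becomes a local variable initialized from the actual argument
   and its RETURN_EXPR becomes an assignment to a dedicated return variable:

       int caller (void)
       {
         int i_copy = 3;        // remapped PARM_DECL -> VAR_DECL
         int retval;            // dedicated returned-value variable
         retval = i_copy + 1;   // RETURN_EXPR -> MODIFY_EXPR
         return retval;
       }

   The GIMPLE actually produced differs in detail; this only shows the shape
   of the remapping described above.  */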
/* To Do:

   o In order to make inlining-on-trees work, we pessimized
     function-local static constants.  In particular, they are now
     always output, even when not addressed.  Fix this by treating
     function-local static constants just like global static
     constants; the back-end already knows not to output them if they
     are not needed.

   o Provide heuristics to clamp inlining of recursive template
     calls?  */
/* Weights that estimate_num_insns uses to estimate the size of the
   produced code.  */

eni_weights eni_size_weights;

/* Weights that estimate_num_insns uses to estimate the time necessary
   to execute the produced code.  */

eni_weights eni_time_weights;
static tree declare_return_variable (copy_body_data *, tree, tree, tree,
				     basic_block);
static void remap_block (tree *, copy_body_data *);
static void copy_bind_expr (tree *, int *, copy_body_data *);
static void declare_inline_vars (tree, tree);
static void remap_save_expr (tree *, hash_map<tree, tree> *, int *);
static void prepend_lexical_block (tree current_block, tree new_block);
static tree copy_decl_to_var (tree, copy_body_data *);
static tree copy_result_decl_to_var (tree, copy_body_data *);
static tree copy_decl_maybe_to_var (tree, copy_body_data *);
static gimple_seq remap_gimple_stmt (gimple *, copy_body_data *);
static bool delete_unreachable_blocks_update_callgraph (copy_body_data *id);
static void insert_init_stmt (copy_body_data *, basic_block, gimple *);
/* Insert a tree->tree mapping for ID.  Although the name suggests that the
   trees should be variables, this map is used for more than that.  */

void
insert_decl_map (copy_body_data *id, tree key, tree value)
{
  id->decl_map->put (key, value);

  /* Always insert an identity map as well.  If we see this same new
     node again, we won't want to duplicate it a second time.  */
  if (key != value)
    id->decl_map->put (value, value);
}
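
/* Usage sketch (editorial addition; hypothetical caller code): a transform
   typically records a mapping before copying a body and then queries it
   while walking statements, e.g.

       insert_decl_map (id, old_parm, new_var);
       tree *slot = id->decl_map->get (old_parm);   // yields new_var
       tree *self = id->decl_map->get (new_var);    // identity entry

   The identity entry keeps an already-remapped node from being treated as
   something that still needs a copy when it is encountered again.  */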
/* Insert a tree->tree mapping for ID.  This is only used for
   variables.  */

static void
insert_debug_decl_map (copy_body_data *id, tree key, tree value)
{
  if (!gimple_in_ssa_p (id->src_cfun))
    return;

  if (!opt_for_fn (id->dst_fn, flag_var_tracking_assignments))
    return;

  if (!target_for_debug_bind (key))
    return;

  gcc_assert (TREE_CODE (key) == PARM_DECL);
  gcc_assert (VAR_P (value));

  if (!id->debug_map)
    id->debug_map = new hash_map<tree, tree>;

  id->debug_map->put (key, value);
}
/* If nonzero, we're remapping the contents of inlined debug
   statements.  If negative, an error has occurred, such as a
   reference to a variable that isn't available in the inlined
   context.  */
static int processing_debug_stmt = 0;
/* Construct new SSA name for old NAME.  ID is the inline context.  */

static tree
remap_ssa_name (tree name, copy_body_data *id)
{
  tree new_tree, var;
  tree *n;

  gcc_assert (TREE_CODE (name) == SSA_NAME);

  n = id->decl_map->get (name);
  if (n)
    return unshare_expr (*n);

  if (processing_debug_stmt)
    {
      if (SSA_NAME_IS_DEFAULT_DEF (name)
	  && TREE_CODE (SSA_NAME_VAR (name)) == PARM_DECL
	  && id->entry_bb == NULL
	  && single_succ_p (ENTRY_BLOCK_PTR_FOR_FN (cfun)))
	{
	  tree vexpr = make_node (DEBUG_EXPR_DECL);
	  gimple *def_temp;
	  gimple_stmt_iterator gsi;
	  tree val = SSA_NAME_VAR (name);

	  n = id->decl_map->get (val);
	  if (n != NULL)
	    val = *n;
	  if (TREE_CODE (val) != PARM_DECL)
	    {
	      processing_debug_stmt = -1;
	      return name;
	    }
	  def_temp = gimple_build_debug_source_bind (vexpr, val, NULL);
	  DECL_ARTIFICIAL (vexpr) = 1;
	  TREE_TYPE (vexpr) = TREE_TYPE (name);
	  SET_DECL_MODE (vexpr, DECL_MODE (SSA_NAME_VAR (name)));
	  gsi = gsi_after_labels (single_succ (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
	  gsi_insert_before (&gsi, def_temp, GSI_SAME_STMT);
	  return vexpr;
	}

      processing_debug_stmt = -1;
      return name;
    }

  /* Remap anonymous SSA names or SSA names of anonymous decls.  */
  var = SSA_NAME_VAR (name);
  if (!var
      || (!SSA_NAME_IS_DEFAULT_DEF (name)
	  && VAR_P (var)
	  && !VAR_DECL_IS_VIRTUAL_OPERAND (var)
	  && DECL_ARTIFICIAL (var)
	  && DECL_IGNORED_P (var)
	  && !DECL_NAME (var)))
    {
      struct ptr_info_def *pi;
      new_tree = make_ssa_name (remap_type (TREE_TYPE (name), id));
      if (!var && SSA_NAME_IDENTIFIER (name))
	SET_SSA_NAME_VAR_OR_IDENTIFIER (new_tree, SSA_NAME_IDENTIFIER (name));
      insert_decl_map (id, name, new_tree);
      SSA_NAME_OCCURS_IN_ABNORMAL_PHI (new_tree)
	= SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name);
      /* At least IPA points-to info can be directly transferred.  */
      if (id->src_cfun->gimple_df
	  && id->src_cfun->gimple_df->ipa_pta
	  && POINTER_TYPE_P (TREE_TYPE (name))
	  && (pi = SSA_NAME_PTR_INFO (name)))
	{
	  struct ptr_info_def *new_pi = get_ptr_info (new_tree);
	  new_pi->pt = pi->pt;
	}
      return new_tree;
    }

  /* Do not set DEF_STMT yet as statement is not copied yet.  We do that
     in copy_bb.  */
  new_tree = remap_decl (var, id);

  /* We might've substituted a constant or another SSA_NAME for
     the variable.

     Replace the SSA name representing RESULT_DECL by a variable during
     inlining: this saves us from the need to introduce a PHI node when
     the return value is only partly initialized.  */
  if ((VAR_P (new_tree) || TREE_CODE (new_tree) == PARM_DECL)
      && (!SSA_NAME_VAR (name)
	  || TREE_CODE (SSA_NAME_VAR (name)) != RESULT_DECL
	  || !id->transform_return_to_modify))
    {
      struct ptr_info_def *pi;
      new_tree = make_ssa_name (new_tree);
      insert_decl_map (id, name, new_tree);
      SSA_NAME_OCCURS_IN_ABNORMAL_PHI (new_tree)
	= SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name);
      /* At least IPA points-to info can be directly transferred.  */
      if (id->src_cfun->gimple_df
	  && id->src_cfun->gimple_df->ipa_pta
	  && POINTER_TYPE_P (TREE_TYPE (name))
	  && (pi = SSA_NAME_PTR_INFO (name)))
	{
	  struct ptr_info_def *new_pi = get_ptr_info (new_tree);
	  new_pi->pt = pi->pt;
	}
      if (SSA_NAME_IS_DEFAULT_DEF (name))
	{
	  /* By inlining a function that has an uninitialized variable, we
	     might extend its lifetime (the variable might get reused).
	     This causes an ICE when we end up extending the lifetime of an
	     SSA name across an abnormal edge, and it also increases
	     register pressure.

	     We simply initialize all uninitialized vars by 0 except
	     for the case where we are inlining into the very first BB.
	     We can avoid this for all BBs that are not inside strongly
	     connected regions of the CFG, but this is expensive to test.  */
	  if (id->entry_bb
	      && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name)
	      && (!SSA_NAME_VAR (name)
		  || TREE_CODE (SSA_NAME_VAR (name)) != PARM_DECL)
	      && (id->entry_bb != EDGE_SUCC (ENTRY_BLOCK_PTR_FOR_FN (cfun),
					     0)->dest
		  || EDGE_COUNT (id->entry_bb->preds) != 1))
	    {
	      gimple_stmt_iterator gsi = gsi_last_bb (id->entry_bb);
	      gimple *init_stmt;
	      tree zero = build_zero_cst (TREE_TYPE (new_tree));

	      init_stmt = gimple_build_assign (new_tree, zero);
	      gsi_insert_after (&gsi, init_stmt, GSI_NEW_STMT);
	      SSA_NAME_IS_DEFAULT_DEF (new_tree) = 0;
	    }
	  else
	    {
	      SSA_NAME_DEF_STMT (new_tree) = gimple_build_nop ();
	      set_ssa_default_def (cfun, SSA_NAME_VAR (new_tree), new_tree);
	    }
	}
    }
  else
    insert_decl_map (id, name, new_tree);
  return new_tree;
}
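
/* Illustrative example (editorial addition, simplified source rather than
   actual GIMPLE): inlining

       int callee (void) { int x; return x; }   // x is uninitialized

   can extend the lifetime of the SSA name created for the uninitialized
   'x' in the caller.  When such a name occurs in an abnormal PHI and we
   are not inlining into the caller's first basic block, the code above
   replaces its default definition with an explicit

       x_copy = 0;

   in the block being inlined into, trading a cheap initialization for a
   valid SSA form across abnormal edges.  */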
/* Remap DECL during the copying of the BLOCK tree for the function.  */

tree
remap_decl (tree decl, copy_body_data *id)
{
  tree *n;

  /* We only remap local variables in the current function.  */

  /* See if we have remapped this declaration.  */

  n = id->decl_map->get (decl);

  if (!n && processing_debug_stmt)
    {
      processing_debug_stmt = -1;
      return decl;
    }

  /* When remapping a type within copy_gimple_seq_and_replace_locals, all
     necessary DECLs have already been remapped and we do not want to
     duplicate a decl coming from outside of the sequence we are copying.  */
  if (!n
      && id->prevent_decl_creation_for_types
      && id->remapping_type_depth > 0
      && (VAR_P (decl) || TREE_CODE (decl) == PARM_DECL))
    return decl;

  /* If we didn't already have an equivalent for this declaration, create one
     now.  */
  if (!n)
    {
      /* Make a copy of the variable or label.  */
      tree t = id->copy_decl (decl, id);

      /* Remember it, so that if we encounter this local entity again
	 we can reuse this copy.  Do this early because remap_type may
	 need this decl for TYPE_STUB_DECL.  */
      insert_decl_map (id, decl, t);

      if (!DECL_P (t))
	return t;

      /* Remap types, if necessary.  */
      TREE_TYPE (t) = remap_type (TREE_TYPE (t), id);
      if (TREE_CODE (t) == TYPE_DECL)
	{
	  DECL_ORIGINAL_TYPE (t) = remap_type (DECL_ORIGINAL_TYPE (t), id);

	  /* Preserve the invariant that DECL_ORIGINAL_TYPE != TREE_TYPE,
	     which is enforced in gen_typedef_die when DECL_ABSTRACT_ORIGIN
	     is not set on the TYPE_DECL, for example in LTO mode.  */
	  if (DECL_ORIGINAL_TYPE (t) == TREE_TYPE (t))
	    {
	      tree x = build_variant_type_copy (TREE_TYPE (t));
	      TYPE_STUB_DECL (x) = TYPE_STUB_DECL (TREE_TYPE (t));
	      TYPE_NAME (x) = TYPE_NAME (TREE_TYPE (t));
	      DECL_ORIGINAL_TYPE (t) = x;
	    }
	}

      /* Remap sizes as necessary.  */
      walk_tree (&DECL_SIZE (t), copy_tree_body_r, id, NULL);
      walk_tree (&DECL_SIZE_UNIT (t), copy_tree_body_r, id, NULL);

      /* If fields, do likewise for offset and qualifier.  */
      if (TREE_CODE (t) == FIELD_DECL)
	{
	  walk_tree (&DECL_FIELD_OFFSET (t), copy_tree_body_r, id, NULL);
	  if (TREE_CODE (DECL_CONTEXT (t)) == QUAL_UNION_TYPE)
	    walk_tree (&DECL_QUALIFIER (t), copy_tree_body_r, id, NULL);
	}

      return t;
    }

  if (id->do_not_unshare)
    return *n;
  else
    return unshare_expr (*n);
}
static tree
remap_type_1 (tree type, copy_body_data *id)
{
  tree new_tree, t;

  /* We do need a copy.  build and register it now.  If this is a pointer or
     reference type, remap the designated type and make a new pointer or
     reference type.  */
  if (TREE_CODE (type) == POINTER_TYPE)
    {
      new_tree = build_pointer_type_for_mode (remap_type (TREE_TYPE (type), id),
					      TYPE_MODE (type),
					      TYPE_REF_CAN_ALIAS_ALL (type));
      if (TYPE_ATTRIBUTES (type) || TYPE_QUALS (type))
	new_tree = build_type_attribute_qual_variant (new_tree,
						      TYPE_ATTRIBUTES (type),
						      TYPE_QUALS (type));
      insert_decl_map (id, type, new_tree);
      return new_tree;
    }
  else if (TREE_CODE (type) == REFERENCE_TYPE)
    {
      new_tree = build_reference_type_for_mode (remap_type (TREE_TYPE (type), id),
						TYPE_MODE (type),
						TYPE_REF_CAN_ALIAS_ALL (type));
      if (TYPE_ATTRIBUTES (type) || TYPE_QUALS (type))
	new_tree = build_type_attribute_qual_variant (new_tree,
						      TYPE_ATTRIBUTES (type),
						      TYPE_QUALS (type));
      insert_decl_map (id, type, new_tree);
      return new_tree;
    }
  else
    new_tree = copy_node (type);

  insert_decl_map (id, type, new_tree);

  /* This is a new type, not a copy of an old type.  Need to reassociate
     variants.  We can handle everything except the main variant lazily.  */
  t = TYPE_MAIN_VARIANT (type);
  if (type != t)
    {
      t = remap_type (t, id);
      TYPE_MAIN_VARIANT (new_tree) = t;
      TYPE_NEXT_VARIANT (new_tree) = TYPE_NEXT_VARIANT (t);
      TYPE_NEXT_VARIANT (t) = new_tree;
    }
  else
    {
      TYPE_MAIN_VARIANT (new_tree) = new_tree;
      TYPE_NEXT_VARIANT (new_tree) = NULL;
    }

  if (TYPE_STUB_DECL (type))
    TYPE_STUB_DECL (new_tree) = remap_decl (TYPE_STUB_DECL (type), id);

  /* Lazily create pointer and reference types.  */
  TYPE_POINTER_TO (new_tree) = NULL;
  TYPE_REFERENCE_TO (new_tree) = NULL;
  /* Copy all types that may contain references to local variables; be sure to
     preserve sharing in between type and its main variant when possible.  */
  switch (TREE_CODE (new_tree))
    {
    case INTEGER_TYPE:
    case REAL_TYPE:
    case FIXED_POINT_TYPE:
    case ENUMERAL_TYPE:
    case BOOLEAN_TYPE:
      if (TYPE_MAIN_VARIANT (new_tree) != new_tree)
	{
	  gcc_checking_assert (TYPE_MIN_VALUE (type)
			       == TYPE_MIN_VALUE (TYPE_MAIN_VARIANT (type)));
	  gcc_checking_assert (TYPE_MAX_VALUE (type)
			       == TYPE_MAX_VALUE (TYPE_MAIN_VARIANT (type)));

	  TYPE_MIN_VALUE (new_tree)
	    = TYPE_MIN_VALUE (TYPE_MAIN_VARIANT (new_tree));
	  TYPE_MAX_VALUE (new_tree)
	    = TYPE_MAX_VALUE (TYPE_MAIN_VARIANT (new_tree));
	}
      else
	{
	  t = TYPE_MIN_VALUE (new_tree);
	  if (t && TREE_CODE (t) != INTEGER_CST)
	    walk_tree (&TYPE_MIN_VALUE (new_tree), copy_tree_body_r, id, NULL);

	  t = TYPE_MAX_VALUE (new_tree);
	  if (t && TREE_CODE (t) != INTEGER_CST)
	    walk_tree (&TYPE_MAX_VALUE (new_tree), copy_tree_body_r, id, NULL);
	}
      return new_tree;

    case FUNCTION_TYPE:
      if (TYPE_MAIN_VARIANT (new_tree) != new_tree
	  && TREE_TYPE (type) == TREE_TYPE (TYPE_MAIN_VARIANT (type)))
	TREE_TYPE (new_tree) = TREE_TYPE (TYPE_MAIN_VARIANT (new_tree));
      else
	TREE_TYPE (new_tree) = remap_type (TREE_TYPE (new_tree), id);
      if (TYPE_MAIN_VARIANT (new_tree) != new_tree
	  && TYPE_ARG_TYPES (type) == TYPE_ARG_TYPES (TYPE_MAIN_VARIANT (type)))
	TYPE_ARG_TYPES (new_tree) = TYPE_ARG_TYPES (TYPE_MAIN_VARIANT (new_tree));
      else
	walk_tree (&TYPE_ARG_TYPES (new_tree), copy_tree_body_r, id, NULL);
      return new_tree;

    case ARRAY_TYPE:
      if (TYPE_MAIN_VARIANT (new_tree) != new_tree
	  && TREE_TYPE (type) == TREE_TYPE (TYPE_MAIN_VARIANT (type)))
	TREE_TYPE (new_tree) = TREE_TYPE (TYPE_MAIN_VARIANT (new_tree));
      else
	TREE_TYPE (new_tree) = remap_type (TREE_TYPE (new_tree), id);

      if (TYPE_MAIN_VARIANT (new_tree) != new_tree)
	{
	  gcc_checking_assert (TYPE_DOMAIN (type)
			       == TYPE_DOMAIN (TYPE_MAIN_VARIANT (type)));
	  TYPE_DOMAIN (new_tree) = TYPE_DOMAIN (TYPE_MAIN_VARIANT (new_tree));
	}
      else
	TYPE_DOMAIN (new_tree) = remap_type (TYPE_DOMAIN (new_tree), id);
      break;

    case RECORD_TYPE:
    case UNION_TYPE:
    case QUAL_UNION_TYPE:
      if (TYPE_MAIN_VARIANT (type) != type
	  && TYPE_FIELDS (type) == TYPE_FIELDS (TYPE_MAIN_VARIANT (type)))
	TYPE_FIELDS (new_tree) = TYPE_FIELDS (TYPE_MAIN_VARIANT (new_tree));
      else
	{
	  tree f, nf = NULL;

	  for (f = TYPE_FIELDS (new_tree); f ; f = DECL_CHAIN (f))
	    {
	      t = remap_decl (f, id);
	      DECL_CONTEXT (t) = new_tree;
	      DECL_CHAIN (t) = nf;
	      nf = t;
	    }
	  TYPE_FIELDS (new_tree) = nreverse (nf);
	}
      break;
    default:
      /* Shouldn't have been thought variable sized.  */
      gcc_unreachable ();
    }

  /* All variants of type share the same size, so use the already remapped
     data.  */
  if (TYPE_MAIN_VARIANT (new_tree) != new_tree)
    {
      tree s = TYPE_SIZE (type);
      tree mvs = TYPE_SIZE (TYPE_MAIN_VARIANT (type));
      tree su = TYPE_SIZE_UNIT (type);
      tree mvsu = TYPE_SIZE_UNIT (TYPE_MAIN_VARIANT (type));
      gcc_checking_assert ((TREE_CODE (s) == PLACEHOLDER_EXPR
			    && (TREE_CODE (mvs) == PLACEHOLDER_EXPR))
			   || s == mvs);
      gcc_checking_assert ((TREE_CODE (su) == PLACEHOLDER_EXPR
			    && (TREE_CODE (mvsu) == PLACEHOLDER_EXPR))
			   || su == mvsu);
      TYPE_SIZE (new_tree) = TYPE_SIZE (TYPE_MAIN_VARIANT (new_tree));
      TYPE_SIZE_UNIT (new_tree) = TYPE_SIZE_UNIT (TYPE_MAIN_VARIANT (new_tree));
    }
  else
    {
      walk_tree (&TYPE_SIZE (new_tree), copy_tree_body_r, id, NULL);
      walk_tree (&TYPE_SIZE_UNIT (new_tree), copy_tree_body_r, id, NULL);
    }

  return new_tree;
}
tree
remap_type (tree type, copy_body_data *id)
{
  tree *node;
  tree tmp;

  if (type == NULL)
    return type;

  /* See if we have remapped this type.  */
  node = id->decl_map->get (type);
  if (node)
    return *node;

  /* The type only needs remapping if it's variably modified.  */
  if (! variably_modified_type_p (type, id->src_fn))
    {
      insert_decl_map (id, type, type);
      return type;
    }

  id->remapping_type_depth++;
  tmp = remap_type_1 (type, id);
  id->remapping_type_depth--;

  return tmp;
}
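
/* Illustrative example (editorial addition): only variably modified types
   need real work here.  For

       void callee (int n) { char buf[n]; ... }

   the type of 'buf' refers to the PARM_DECL 'n', so when the body is copied
   the array type must be rebuilt against the remapped 'n' (remap_type_1).
   A type such as 'char[16]' is returned unchanged, and an identity mapping
   is recorded so the variably_modified_type_p check is not repeated.  */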
/* Decide if DECL can be put into BLOCK_NONLOCAL_VARs.  */

static bool
can_be_nonlocal (tree decl, copy_body_data *id)
{
  /* We cannot duplicate function decls.  */
  if (TREE_CODE (decl) == FUNCTION_DECL)
    return true;

  /* Local static vars must be non-local or we get multiple declaration
     problems.  */
  if (VAR_P (decl) && !auto_var_in_fn_p (decl, id->src_fn))
    return true;

  return false;
}
static tree
remap_decls (tree decls, vec<tree, va_gc> **nonlocalized_list,
	     copy_body_data *id)
{
  tree old_var;
  tree new_decls = NULL_TREE;

  /* Remap its variables.  */
  for (old_var = decls; old_var; old_var = DECL_CHAIN (old_var))
    {
      tree new_var;

      if (can_be_nonlocal (old_var, id))
	{
	  /* We need to add this variable to the local decls as otherwise
	     nothing else will do so.  */
	  if (VAR_P (old_var) && ! DECL_EXTERNAL (old_var) && cfun)
	    add_local_decl (cfun, old_var);
	  if ((!optimize || debug_info_level > DINFO_LEVEL_TERSE)
	      && !DECL_IGNORED_P (old_var)
	      && nonlocalized_list)
	    vec_safe_push (*nonlocalized_list, old_var);
	  continue;
	}

      /* Remap the variable.  */
      new_var = remap_decl (old_var, id);

      /* If we didn't remap this variable, we can't mess with its
	 TREE_CHAIN.  If we remapped this variable to the return slot, it's
	 already declared somewhere else, so don't declare it here.  */

      if (new_var == id->retvar)
	;
      else if (!new_var)
	{
	  if ((!optimize || debug_info_level > DINFO_LEVEL_TERSE)
	      && !DECL_IGNORED_P (old_var)
	      && nonlocalized_list)
	    vec_safe_push (*nonlocalized_list, old_var);
	}
      else
	{
	  gcc_assert (DECL_P (new_var));
	  DECL_CHAIN (new_var) = new_decls;
	  new_decls = new_var;

	  /* Also copy value-expressions.  */
	  if (VAR_P (new_var) && DECL_HAS_VALUE_EXPR_P (new_var))
	    {
	      tree tem = DECL_VALUE_EXPR (new_var);
	      bool old_regimplify = id->regimplify;
	      id->remapping_type_depth++;
	      walk_tree (&tem, copy_tree_body_r, id, NULL);
	      id->remapping_type_depth--;
	      id->regimplify = old_regimplify;
	      SET_DECL_VALUE_EXPR (new_var, tem);
	    }
	}
    }

  return nreverse (new_decls);
}
/* Copy the BLOCK to contain remapped versions of the variables
   therein.  And hook the new block into the block-tree.  */

static void
remap_block (tree *block, copy_body_data *id)
{
  tree old_block;
  tree new_block;

  /* Make the new block.  */
  old_block = *block;
  new_block = make_node (BLOCK);
  TREE_USED (new_block) = TREE_USED (old_block);
  BLOCK_ABSTRACT_ORIGIN (new_block) = old_block;
  BLOCK_SOURCE_LOCATION (new_block) = BLOCK_SOURCE_LOCATION (old_block);
  BLOCK_NONLOCALIZED_VARS (new_block)
    = vec_safe_copy (BLOCK_NONLOCALIZED_VARS (old_block));
  *block = new_block;

  /* Remap its variables.  */
  BLOCK_VARS (new_block) = remap_decls (BLOCK_VARS (old_block),
					&BLOCK_NONLOCALIZED_VARS (new_block),
					id);

  if (id->transform_lang_insert_block)
    id->transform_lang_insert_block (new_block);

  /* Remember the remapped block.  */
  insert_decl_map (id, old_block, new_block);
}
/* Copy the whole block tree and root it in id->block.  */

static tree
remap_blocks (tree block, copy_body_data *id)
{
  tree t;
  tree new_tree = block;

  if (!block)
    return NULL;

  remap_block (&new_tree, id);
  gcc_assert (new_tree != block);
  for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
    prepend_lexical_block (new_tree, remap_blocks (t, id));
  /* Blocks are in arbitrary order, but make things slightly prettier and do
     not swap order when producing a copy.  */
  BLOCK_SUBBLOCKS (new_tree) = blocks_nreverse (BLOCK_SUBBLOCKS (new_tree));
  return new_tree;
}
/* Remap the block tree rooted at BLOCK to nothing.  */

static void
remap_blocks_to_null (tree block, copy_body_data *id)
{
  tree t;
  insert_decl_map (id, block, NULL_TREE);
  for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
    remap_blocks_to_null (t, id);
}
static void
copy_statement_list (tree *tp)
{
  tree_stmt_iterator oi, ni;
  tree new_tree;

  new_tree = alloc_stmt_list ();
  ni = tsi_start (new_tree);
  oi = tsi_start (*tp);
  TREE_TYPE (new_tree) = TREE_TYPE (*tp);
  *tp = new_tree;

  for (; !tsi_end_p (oi); tsi_next (&oi))
    {
      tree stmt = tsi_stmt (oi);
      if (TREE_CODE (stmt) == STATEMENT_LIST)
	/* This copy is not redundant; tsi_link_after will smash this
	   STATEMENT_LIST into the end of the one we're building, and we
	   don't want to do that with the original.  */
	copy_statement_list (&stmt);
      tsi_link_after (&ni, stmt, TSI_CONTINUE_LINKING);
    }
}
static void
copy_bind_expr (tree *tp, int *walk_subtrees, copy_body_data *id)
{
  tree block = BIND_EXPR_BLOCK (*tp);
  /* Copy (and replace) the statement.  */
  copy_tree_r (tp, walk_subtrees, NULL);
  if (block)
    {
      remap_block (&block, id);
      BIND_EXPR_BLOCK (*tp) = block;
    }

  if (BIND_EXPR_VARS (*tp))
    /* This will remap a lot of the same decls again, but this should be
       harmless.  */
    BIND_EXPR_VARS (*tp) = remap_decls (BIND_EXPR_VARS (*tp), NULL, id);
}
/* Create a new gimple_seq by remapping all the statements in BODY
   using the inlining information in ID.  */

static gimple_seq
remap_gimple_seq (gimple_seq body, copy_body_data *id)
{
  gimple_stmt_iterator si;
  gimple_seq new_body = NULL;

  for (si = gsi_start (body); !gsi_end_p (si); gsi_next (&si))
    {
      gimple_seq new_stmts = remap_gimple_stmt (gsi_stmt (si), id);
      gimple_seq_add_seq (&new_body, new_stmts);
    }

  return new_body;
}
/* Copy a GIMPLE_BIND statement STMT, remapping all the symbols in its
   block using the mapping information in ID.  */

static gimple *
copy_gimple_bind (gbind *stmt, copy_body_data *id)
{
  gimple *new_bind;
  tree new_block, new_vars;
  gimple_seq body, new_body;

  /* Copy the statement.  Note that we purposely don't use copy_stmt
     here because we need to remap statements as we copy.  */
  body = gimple_bind_body (stmt);
  new_body = remap_gimple_seq (body, id);

  new_block = gimple_bind_block (stmt);
  if (new_block)
    remap_block (&new_block, id);

  /* This will remap a lot of the same decls again, but this should be
     harmless.  */
  new_vars = gimple_bind_vars (stmt);
  if (new_vars)
    new_vars = remap_decls (new_vars, NULL, id);

  new_bind = gimple_build_bind (new_vars, new_body, new_block);

  return new_bind;
}
/* Return true if DECL is a parameter or an SSA_NAME for a parameter.  */

static bool
is_parm (tree decl)
{
  if (TREE_CODE (decl) == SSA_NAME)
    {
      decl = SSA_NAME_VAR (decl);
      if (!decl)
	return false;
    }

  return (TREE_CODE (decl) == PARM_DECL);
}
/* Remap the dependence CLIQUE from the source to the destination function
   as specified in ID.  */

static unsigned short
remap_dependence_clique (copy_body_data *id, unsigned short clique)
{
  if (clique == 0 || processing_debug_stmt)
    return 0;
  if (!id->dependence_map)
    id->dependence_map = new hash_map<dependence_hash, unsigned short>;
  bool existed;
  unsigned short &newc = id->dependence_map->get_or_insert (clique, &existed);
  if (!existed)
    newc = ++cfun->last_clique;
  return newc;
}
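
/* Illustrative example (editorial addition, assumed numbers): if the source
   function used dependence cliques 1 and 2 in its MEM_REFs and the
   destination function's last_clique was 4 before inlining, the first
   lookups insert the mappings 1 -> 5 and 2 -> 6; later occurrences of the
   same source clique reuse the same new number, so the "may alias within
   the same clique" relation between copied references is preserved.  */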
/* Remap the GIMPLE operand pointed to by *TP.  DATA is really a
   'struct walk_stmt_info *'.  DATA->INFO is a 'copy_body_data *'.
   WALK_SUBTREES is used to indicate to walk_gimple_op whether to keep
   recursing into the children nodes of *TP.  */

static tree
remap_gimple_op_r (tree *tp, int *walk_subtrees, void *data)
{
  struct walk_stmt_info *wi_p = (struct walk_stmt_info *) data;
  copy_body_data *id = (copy_body_data *) wi_p->info;
  tree fn = id->src_fn;

  /* For recursive invocations this is no longer the LHS itself.  */
  bool is_lhs = wi_p->is_lhs;
  wi_p->is_lhs = false;

  if (TREE_CODE (*tp) == SSA_NAME)
    {
      *tp = remap_ssa_name (*tp, id);
      *walk_subtrees = 0;
      if (is_lhs)
	SSA_NAME_DEF_STMT (*tp) = wi_p->stmt;
      return NULL;
    }
  else if (auto_var_in_fn_p (*tp, fn))
    {
      /* Local variables and labels need to be replaced by equivalent
	 variables.  We don't want to copy static variables; there's
	 only one of those, no matter how many times we inline the
	 containing function.  Similarly for globals from an outer
	 function.  */
      tree new_decl;

      /* Remap the declaration.  */
      new_decl = remap_decl (*tp, id);
      gcc_assert (new_decl);
      /* Replace this variable with the copy.  */
      STRIP_TYPE_NOPS (new_decl);
      /* ???  The C++ frontend uses void * pointer zero to initialize
	 any other type.  This confuses the middle-end type verification.
	 As cloned bodies do not go through gimplification again the fixup
	 there doesn't trigger.  */
      if (TREE_CODE (new_decl) == INTEGER_CST
	  && !useless_type_conversion_p (TREE_TYPE (*tp), TREE_TYPE (new_decl)))
	new_decl = fold_convert (TREE_TYPE (*tp), new_decl);
      *tp = new_decl;
      *walk_subtrees = 0;
    }
  else if (TREE_CODE (*tp) == STATEMENT_LIST)
    gcc_unreachable ();
  else if (TREE_CODE (*tp) == SAVE_EXPR)
    gcc_unreachable ();
  else if (TREE_CODE (*tp) == LABEL_DECL
	   && (!DECL_CONTEXT (*tp)
	       || decl_function_context (*tp) == id->src_fn))
    /* These may need to be remapped for EH handling.  */
    *tp = remap_decl (*tp, id);
  else if (TREE_CODE (*tp) == FIELD_DECL)
    {
      /* If the enclosing record type is variably_modified_type_p, the field
	 has already been remapped.  Otherwise, it need not be.  */
      tree *n = id->decl_map->get (*tp);
      if (n)
	*tp = *n;
      *walk_subtrees = 0;
    }
  else if (TYPE_P (*tp))
    /* Types may need remapping as well.  */
    *tp = remap_type (*tp, id);
  else if (CONSTANT_CLASS_P (*tp))
    {
      /* If this is a constant, we have to copy the node iff the type
	 will be remapped.  copy_tree_r will not copy a constant.  */
      tree new_type = remap_type (TREE_TYPE (*tp), id);

      if (new_type == TREE_TYPE (*tp))
	*walk_subtrees = 0;
      else if (TREE_CODE (*tp) == INTEGER_CST)
	*tp = wide_int_to_tree (new_type, wi::to_wide (*tp));
      else
	{
	  *tp = copy_node (*tp);
	  TREE_TYPE (*tp) = new_type;
	}
    }
  else
    {
      /* Otherwise, just copy the node.  Note that copy_tree_r already
	 knows not to copy VAR_DECLs, etc., so this is safe.  */

      if (TREE_CODE (*tp) == MEM_REF)
	{
	  /* We need to re-canonicalize MEM_REFs from inline substitutions
	     that can happen when a pointer argument is an ADDR_EXPR.
	     Recurse here manually to allow that.  */
	  tree ptr = TREE_OPERAND (*tp, 0);
	  tree type = remap_type (TREE_TYPE (*tp), id);
	  tree old = *tp;
	  walk_tree (&ptr, remap_gimple_op_r, data, NULL);
	  *tp = fold_build2 (MEM_REF, type, ptr, TREE_OPERAND (*tp, 1));
	  TREE_THIS_VOLATILE (*tp) = TREE_THIS_VOLATILE (old);
	  TREE_SIDE_EFFECTS (*tp) = TREE_SIDE_EFFECTS (old);
	  TREE_NO_WARNING (*tp) = TREE_NO_WARNING (old);
	  if (MR_DEPENDENCE_CLIQUE (old) != 0)
	    {
	      MR_DEPENDENCE_CLIQUE (*tp)
		= remap_dependence_clique (id, MR_DEPENDENCE_CLIQUE (old));
	      MR_DEPENDENCE_BASE (*tp) = MR_DEPENDENCE_BASE (old);
	    }
	  /* We cannot propagate the TREE_THIS_NOTRAP flag if we have
	     remapped a parameter as the property might be valid only
	     for the parameter itself.  */
	  if (TREE_THIS_NOTRAP (old)
	      && (!is_parm (TREE_OPERAND (old, 0))
		  || (!id->transform_parameter && is_parm (ptr))))
	    TREE_THIS_NOTRAP (*tp) = 1;
	  REF_REVERSE_STORAGE_ORDER (*tp) = REF_REVERSE_STORAGE_ORDER (old);
	  *walk_subtrees = 0;
	  return NULL;
	}
995 /* Here is the "usual case". Copy this tree node, and then
996 tweak some special cases. */
997 copy_tree_r (tp
, walk_subtrees
, NULL
);
999 if (TREE_CODE (*tp
) != OMP_CLAUSE
)
1000 TREE_TYPE (*tp
) = remap_type (TREE_TYPE (*tp
), id
);
1002 if (TREE_CODE (*tp
) == TARGET_EXPR
&& TREE_OPERAND (*tp
, 3))
1004 /* The copied TARGET_EXPR has never been expanded, even if the
1005 original node was expanded already. */
1006 TREE_OPERAND (*tp
, 1) = TREE_OPERAND (*tp
, 3);
1007 TREE_OPERAND (*tp
, 3) = NULL_TREE
;
1009 else if (TREE_CODE (*tp
) == ADDR_EXPR
)
1011 /* Variable substitution need not be simple. In particular,
1012 the MEM_REF substitution above. Make sure that
1013 TREE_CONSTANT and friends are up-to-date. */
1014 int invariant
= is_gimple_min_invariant (*tp
);
1015 walk_tree (&TREE_OPERAND (*tp
, 0), remap_gimple_op_r
, data
, NULL
);
1016 recompute_tree_invariant_for_addr_expr (*tp
);
1018 /* If this used to be invariant, but is not any longer,
1019 then regimplification is probably needed. */
1020 if (invariant
&& !is_gimple_min_invariant (*tp
))
1021 id
->regimplify
= true;
1027 /* Update the TREE_BLOCK for the cloned expr. */
1030 tree new_block
= id
->remapping_type_depth
== 0 ? id
->block
: NULL
;
1031 tree old_block
= TREE_BLOCK (*tp
);
1035 n
= id
->decl_map
->get (TREE_BLOCK (*tp
));
1039 TREE_SET_BLOCK (*tp
, new_block
);
1042 /* Keep iterating. */
/* Called from copy_body_id via walk_tree.  DATA is really a
   `copy_body_data *'.  */

tree
copy_tree_body_r (tree *tp, int *walk_subtrees, void *data)
{
  copy_body_data *id = (copy_body_data *) data;
  tree fn = id->src_fn;
  tree new_block;

  /* Begin by recognizing trees that we'll completely rewrite for the
     inlining context.  Our output for these trees is completely
     different from our input (e.g. RETURN_EXPR is deleted, and morphs
     into an edge).  Further down, we'll handle trees that get
     duplicated and/or tweaked.  */

  /* When requested, RETURN_EXPRs should be transformed to just the
     contained MODIFY_EXPR.  The branch semantics of the return will
     be handled elsewhere by manipulating the CFG rather than a statement.  */
  if (TREE_CODE (*tp) == RETURN_EXPR && id->transform_return_to_modify)
    {
      tree assignment = TREE_OPERAND (*tp, 0);

      /* If we're returning something, just turn that into an
	 assignment into the equivalent of the original RESULT_DECL.
	 If the "assignment" is just the result decl, the result
	 decl has already been set (e.g. a recent "foo (&result_decl,
	 ...)"); just toss the entire RETURN_EXPR.  */
      if (assignment && TREE_CODE (assignment) == MODIFY_EXPR)
	{
	  /* Replace the RETURN_EXPR with (a copy of) the
	     MODIFY_EXPR hanging underneath.  */
	  *tp = copy_node (assignment);
	}
      else /* Else the RETURN_EXPR returns no value.  */
	{
	  *tp = NULL;
	  return (tree) (void *) 1;
	}
    }
  else if (TREE_CODE (*tp) == SSA_NAME)
    {
      *tp = remap_ssa_name (*tp, id);
      *walk_subtrees = 0;
      return NULL;
    }

  /* Local variables and labels need to be replaced by equivalent
     variables.  We don't want to copy static variables; there's only
     one of those, no matter how many times we inline the containing
     function.  Similarly for globals from an outer function.  */
  else if (auto_var_in_fn_p (*tp, fn))
    {
      tree new_decl;

      /* Remap the declaration.  */
      new_decl = remap_decl (*tp, id);
      gcc_assert (new_decl);
      /* Replace this variable with the copy.  */
      STRIP_TYPE_NOPS (new_decl);
      *tp = new_decl;
      *walk_subtrees = 0;
    }
  else if (TREE_CODE (*tp) == STATEMENT_LIST)
    copy_statement_list (tp);
  else if (TREE_CODE (*tp) == SAVE_EXPR
	   || TREE_CODE (*tp) == TARGET_EXPR)
    remap_save_expr (tp, id->decl_map, walk_subtrees);
  else if (TREE_CODE (*tp) == LABEL_DECL
	   && (! DECL_CONTEXT (*tp)
	       || decl_function_context (*tp) == id->src_fn))
    /* These may need to be remapped for EH handling.  */
    *tp = remap_decl (*tp, id);
  else if (TREE_CODE (*tp) == BIND_EXPR)
    copy_bind_expr (tp, walk_subtrees, id);
  /* Types may need remapping as well.  */
  else if (TYPE_P (*tp))
    *tp = remap_type (*tp, id);

  /* If this is a constant, we have to copy the node iff the type will be
     remapped.  copy_tree_r will not copy a constant.  */
  else if (CONSTANT_CLASS_P (*tp))
    {
      tree new_type = remap_type (TREE_TYPE (*tp), id);

      if (new_type == TREE_TYPE (*tp))
	*walk_subtrees = 0;
      else if (TREE_CODE (*tp) == INTEGER_CST)
	*tp = wide_int_to_tree (new_type, wi::to_wide (*tp));
      else
	{
	  *tp = copy_node (*tp);
	  TREE_TYPE (*tp) = new_type;
	}
    }
  /* Otherwise, just copy the node.  Note that copy_tree_r already
     knows not to copy VAR_DECLs, etc., so this is safe.  */
  else
    {
      /* Here we handle trees that are not completely rewritten.
	 First we detect some inlining-induced bogosities for
	 discarding.  */
      if (TREE_CODE (*tp) == MODIFY_EXPR
	  && TREE_OPERAND (*tp, 0) == TREE_OPERAND (*tp, 1)
	  && (auto_var_in_fn_p (TREE_OPERAND (*tp, 0), fn)))
	{
	  /* Some assignments VAR = VAR; don't generate any rtl code
	     and thus don't count as variable modification.  Avoid
	     keeping bogosities like 0 = 0.  */
	  tree decl = TREE_OPERAND (*tp, 0), value;
	  tree *n;

	  n = id->decl_map->get (decl);
	  if (n)
	    {
	      value = *n;
	      STRIP_TYPE_NOPS (value);
	      if (TREE_CONSTANT (value) || TREE_READONLY (value))
		{
		  *tp = build_empty_stmt (EXPR_LOCATION (*tp));
		  return copy_tree_body_r (tp, walk_subtrees, data);
		}
	    }
	}
      else if (TREE_CODE (*tp) == INDIRECT_REF)
	{
	  /* Get rid of *& from inline substitutions that can happen when a
	     pointer argument is an ADDR_EXPR.  */
	  tree decl = TREE_OPERAND (*tp, 0);
	  tree *n = id->decl_map->get (decl);
	  if (n)
	    {
	      /* If we happen to get an ADDR_EXPR in n->value, strip
		 it manually here as we'll eventually get ADDR_EXPRs
		 which lie about their types pointed to.  In this case
		 build_fold_indirect_ref wouldn't strip the INDIRECT_REF,
		 but we absolutely rely on that.  As fold_indirect_ref
		 does other useful transformations, try that first, though.  */
	      tree type = TREE_TYPE (*tp);
	      tree ptr = id->do_not_unshare ? *n : unshare_expr (*n);
	      tree old = *tp;
	      *tp = gimple_fold_indirect_ref (ptr);
	      if (! *tp)
		{
		  type = remap_type (type, id);
		  if (TREE_CODE (ptr) == ADDR_EXPR)
		    {
		      *tp
			= fold_indirect_ref_1 (EXPR_LOCATION (ptr), type, ptr);
		      /* ???  We should either assert here or build
			 a VIEW_CONVERT_EXPR instead of blindly leaking
			 incompatible types to our IL.  */
		      if (! *tp)
			*tp = TREE_OPERAND (ptr, 0);
		    }
		  else
		    {
		      *tp = build1 (INDIRECT_REF, type, ptr);
		      TREE_THIS_VOLATILE (*tp) = TREE_THIS_VOLATILE (old);
		      TREE_SIDE_EFFECTS (*tp) = TREE_SIDE_EFFECTS (old);
		      TREE_READONLY (*tp) = TREE_READONLY (old);
		      /* We cannot propagate the TREE_THIS_NOTRAP flag if we
			 have remapped a parameter as the property might be
			 valid only for the parameter itself.  */
		      if (TREE_THIS_NOTRAP (old)
			  && (!is_parm (TREE_OPERAND (old, 0))
			      || (!id->transform_parameter && is_parm (ptr))))
			TREE_THIS_NOTRAP (*tp) = 1;
		    }
		}
	      *walk_subtrees = 0;
	      return NULL;
	    }
	}
      else if (TREE_CODE (*tp) == MEM_REF)
	{
	  /* We need to re-canonicalize MEM_REFs from inline substitutions
	     that can happen when a pointer argument is an ADDR_EXPR.
	     Recurse here manually to allow that.  */
	  tree ptr = TREE_OPERAND (*tp, 0);
	  tree type = remap_type (TREE_TYPE (*tp), id);
	  tree old = *tp;
	  walk_tree (&ptr, copy_tree_body_r, data, NULL);
	  *tp = fold_build2 (MEM_REF, type, ptr, TREE_OPERAND (*tp, 1));
	  TREE_THIS_VOLATILE (*tp) = TREE_THIS_VOLATILE (old);
	  TREE_SIDE_EFFECTS (*tp) = TREE_SIDE_EFFECTS (old);
	  TREE_NO_WARNING (*tp) = TREE_NO_WARNING (old);
	  if (MR_DEPENDENCE_CLIQUE (old) != 0)
	    {
	      MR_DEPENDENCE_CLIQUE (*tp)
		= remap_dependence_clique (id, MR_DEPENDENCE_CLIQUE (old));
	      MR_DEPENDENCE_BASE (*tp) = MR_DEPENDENCE_BASE (old);
	    }
	  /* We cannot propagate the TREE_THIS_NOTRAP flag if we have
	     remapped a parameter as the property might be valid only
	     for the parameter itself.  */
	  if (TREE_THIS_NOTRAP (old)
	      && (!is_parm (TREE_OPERAND (old, 0))
		  || (!id->transform_parameter && is_parm (ptr))))
	    TREE_THIS_NOTRAP (*tp) = 1;
	  REF_REVERSE_STORAGE_ORDER (*tp) = REF_REVERSE_STORAGE_ORDER (old);
	  *walk_subtrees = 0;
	  return NULL;
	}
1253 /* Here is the "usual case". Copy this tree node, and then
1254 tweak some special cases. */
1255 copy_tree_r (tp
, walk_subtrees
, NULL
);
1257 /* If EXPR has block defined, map it to newly constructed block.
1258 When inlining we want EXPRs without block appear in the block
1259 of function call if we are not remapping a type. */
1262 new_block
= id
->remapping_type_depth
== 0 ? id
->block
: NULL
;
1263 if (TREE_BLOCK (*tp
))
1266 n
= id
->decl_map
->get (TREE_BLOCK (*tp
));
1270 TREE_SET_BLOCK (*tp
, new_block
);
1273 if (TREE_CODE (*tp
) != OMP_CLAUSE
)
1274 TREE_TYPE (*tp
) = remap_type (TREE_TYPE (*tp
), id
);
1276 /* The copied TARGET_EXPR has never been expanded, even if the
1277 original node was expanded already. */
1278 if (TREE_CODE (*tp
) == TARGET_EXPR
&& TREE_OPERAND (*tp
, 3))
1280 TREE_OPERAND (*tp
, 1) = TREE_OPERAND (*tp
, 3);
1281 TREE_OPERAND (*tp
, 3) = NULL_TREE
;
1284 /* Variable substitution need not be simple. In particular, the
1285 INDIRECT_REF substitution above. Make sure that TREE_CONSTANT
1286 and friends are up-to-date. */
1287 else if (TREE_CODE (*tp
) == ADDR_EXPR
)
1289 int invariant
= is_gimple_min_invariant (*tp
);
1290 walk_tree (&TREE_OPERAND (*tp
, 0), copy_tree_body_r
, id
, NULL
);
1292 /* Handle the case where we substituted an INDIRECT_REF
1293 into the operand of the ADDR_EXPR. */
1294 if (TREE_CODE (TREE_OPERAND (*tp
, 0)) == INDIRECT_REF
)
1296 tree t
= TREE_OPERAND (TREE_OPERAND (*tp
, 0), 0);
1297 if (TREE_TYPE (t
) != TREE_TYPE (*tp
))
1298 t
= fold_convert (remap_type (TREE_TYPE (*tp
), id
), t
);
1302 recompute_tree_invariant_for_addr_expr (*tp
);
1304 /* If this used to be invariant, but is not any longer,
1305 then regimplification is probably needed. */
1306 if (invariant
&& !is_gimple_min_invariant (*tp
))
1307 id
->regimplify
= true;
1313 /* Keep iterating. */
/* Helper for remap_gimple_stmt.  Given an EH region number for the
   source function, map that to the duplicate EH region number in
   the destination function.  */

static int
remap_eh_region_nr (int old_nr, copy_body_data *id)
{
  eh_region old_r, new_r;

  old_r = get_eh_region_from_number_fn (id->src_cfun, old_nr);
  new_r = static_cast<eh_region> (*id->eh_map->get (old_r));

  return new_r->index;
}
/* Similar, but operate on INTEGER_CSTs.  */

static tree
remap_eh_region_tree_nr (tree old_t_nr, copy_body_data *id)
{
  int old_nr, new_nr;

  old_nr = tree_to_shwi (old_t_nr);
  new_nr = remap_eh_region_nr (old_nr, id);

  return build_int_cst (integer_type_node, new_nr);
}
/* Helper for copy_bb.  Remap statement STMT using the inlining
   information in ID.  Return the new statement copy.  */

static gimple_seq
remap_gimple_stmt (gimple *stmt, copy_body_data *id)
{
  gimple *copy = NULL;
  struct walk_stmt_info wi;
  bool skip_first = false;
  gimple_seq stmts = NULL;

  if (is_gimple_debug (stmt)
      && !opt_for_fn (id->dst_fn, flag_var_tracking_assignments))
    return stmts;

  /* Begin by recognizing trees that we'll completely rewrite for the
     inlining context.  Our output for these trees is completely
     different from our input (e.g. RETURN_EXPR is deleted, and morphs
     into an edge).  Further down, we'll handle trees that get
     duplicated and/or tweaked.  */

  /* When requested, GIMPLE_RETURNs should be transformed to just the
     contained GIMPLE_ASSIGN.  The branch semantics of the return will
     be handled elsewhere by manipulating the CFG rather than the
     statement.  */
  if (gimple_code (stmt) == GIMPLE_RETURN && id->transform_return_to_modify)
    {
      tree retval = gimple_return_retval (as_a <greturn *> (stmt));
      tree retbnd = gimple_return_retbnd (stmt);
      tree bndslot = id->retbnd;

      if (retbnd && bndslot)
	{
	  gimple *bndcopy = gimple_build_assign (bndslot, retbnd);
	  memset (&wi, 0, sizeof (wi));
	  wi.info = id;
	  walk_gimple_op (bndcopy, remap_gimple_op_r, &wi);
	  gimple_seq_add_stmt (&stmts, bndcopy);
	}

      /* If we're returning something, just turn that into an
	 assignment into the equivalent of the original RESULT_DECL.
	 If RETVAL is just the result decl, the result decl has
	 already been set (e.g. a recent "foo (&result_decl, ...)");
	 just toss the entire GIMPLE_RETURN.  */
      if (retval
	  && (TREE_CODE (retval) != RESULT_DECL
	      && (TREE_CODE (retval) != SSA_NAME
		  || ! SSA_NAME_VAR (retval)
		  || TREE_CODE (SSA_NAME_VAR (retval)) != RESULT_DECL)))
	{
	  copy = gimple_build_assign (id->do_not_unshare
				      ? id->retvar : unshare_expr (id->retvar),
				      retval);
	  /* id->retvar is already substituted.  Skip it on later remapping.  */
	  skip_first = true;

	  /* We need to copy bounds if we return a structure with pointers
	     into an instrumented function.  */
	  if (chkp_function_instrumented_p (id->dst_fn)
	      && !BOUNDED_P (id->retvar)
	      && chkp_type_has_pointer (TREE_TYPE (id->retvar)))
	    id->assign_stmts.safe_push (copy);
	}
      else
	return stmts;
    }
  else if (gimple_has_substatements (stmt))
    {
      gimple_seq s1, s2;

      /* When cloning bodies from the C++ front end, we will be handed bodies
	 in High GIMPLE form.  Handle here all the High GIMPLE statements that
	 have embedded statements.  */
      switch (gimple_code (stmt))
	{
	case GIMPLE_BIND:
	  copy = copy_gimple_bind (as_a <gbind *> (stmt), id);
	  break;

	case GIMPLE_CATCH:
	  {
	    gcatch *catch_stmt = as_a <gcatch *> (stmt);
	    s1 = remap_gimple_seq (gimple_catch_handler (catch_stmt), id);
	    copy = gimple_build_catch (gimple_catch_types (catch_stmt), s1);
	  }
	  break;

	case GIMPLE_EH_FILTER:
	  s1 = remap_gimple_seq (gimple_eh_filter_failure (stmt), id);
	  copy = gimple_build_eh_filter (gimple_eh_filter_types (stmt), s1);
	  break;

	case GIMPLE_TRY:
	  s1 = remap_gimple_seq (gimple_try_eval (stmt), id);
	  s2 = remap_gimple_seq (gimple_try_cleanup (stmt), id);
	  copy = gimple_build_try (s1, s2, gimple_try_kind (stmt));
	  break;

	case GIMPLE_WITH_CLEANUP_EXPR:
	  s1 = remap_gimple_seq (gimple_wce_cleanup (stmt), id);
	  copy = gimple_build_wce (s1);
	  break;

	case GIMPLE_OMP_PARALLEL:
	  {
	    gomp_parallel *omp_par_stmt = as_a <gomp_parallel *> (stmt);
	    s1 = remap_gimple_seq (gimple_omp_body (omp_par_stmt), id);
	    copy = gimple_build_omp_parallel
		     (s1,
		      gimple_omp_parallel_clauses (omp_par_stmt),
		      gimple_omp_parallel_child_fn (omp_par_stmt),
		      gimple_omp_parallel_data_arg (omp_par_stmt));
	  }
	  break;

	case GIMPLE_OMP_TASK:
	  s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
	  copy = gimple_build_omp_task
		   (s1,
		    gimple_omp_task_clauses (stmt),
		    gimple_omp_task_child_fn (stmt),
		    gimple_omp_task_data_arg (stmt),
		    gimple_omp_task_copy_fn (stmt),
		    gimple_omp_task_arg_size (stmt),
		    gimple_omp_task_arg_align (stmt));
	  break;

	case GIMPLE_OMP_FOR:
	  s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
	  s2 = remap_gimple_seq (gimple_omp_for_pre_body (stmt), id);
	  copy = gimple_build_omp_for (s1, gimple_omp_for_kind (stmt),
				       gimple_omp_for_clauses (stmt),
				       gimple_omp_for_collapse (stmt), s2);
	  {
	    size_t i;
	    for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
	      {
		gimple_omp_for_set_index (copy, i,
					  gimple_omp_for_index (stmt, i));
		gimple_omp_for_set_initial (copy, i,
					    gimple_omp_for_initial (stmt, i));
		gimple_omp_for_set_final (copy, i,
					  gimple_omp_for_final (stmt, i));
		gimple_omp_for_set_incr (copy, i,
					 gimple_omp_for_incr (stmt, i));
		gimple_omp_for_set_cond (copy, i,
					 gimple_omp_for_cond (stmt, i));
	      }
	  }
	  break;

	case GIMPLE_OMP_MASTER:
	  s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
	  copy = gimple_build_omp_master (s1);
	  break;

	case GIMPLE_OMP_TASKGROUP:
	  s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
	  copy = gimple_build_omp_taskgroup (s1);
	  break;

	case GIMPLE_OMP_ORDERED:
	  s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
	  copy = gimple_build_omp_ordered
		   (s1,
		    gimple_omp_ordered_clauses (as_a <gomp_ordered *> (stmt)));
	  break;

	case GIMPLE_OMP_SECTION:
	  s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
	  copy = gimple_build_omp_section (s1);
	  break;

	case GIMPLE_OMP_SECTIONS:
	  s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
	  copy = gimple_build_omp_sections
		   (s1, gimple_omp_sections_clauses (stmt));
	  break;

	case GIMPLE_OMP_SINGLE:
	  s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
	  copy = gimple_build_omp_single
		   (s1, gimple_omp_single_clauses (stmt));
	  break;

	case GIMPLE_OMP_TARGET:
	  s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
	  copy = gimple_build_omp_target
		   (s1, gimple_omp_target_kind (stmt),
		    gimple_omp_target_clauses (stmt));
	  break;

	case GIMPLE_OMP_TEAMS:
	  s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
	  copy = gimple_build_omp_teams
		   (s1, gimple_omp_teams_clauses (stmt));
	  break;

	case GIMPLE_OMP_CRITICAL:
	  s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
	  copy = gimple_build_omp_critical (s1,
					    gimple_omp_critical_name
					      (as_a <gomp_critical *> (stmt)),
					    gimple_omp_critical_clauses
					      (as_a <gomp_critical *> (stmt)));
	  break;

	case GIMPLE_TRANSACTION:
	  {
	    gtransaction *old_trans_stmt = as_a <gtransaction *> (stmt);
	    gtransaction *new_trans_stmt;
	    s1 = remap_gimple_seq (gimple_transaction_body (old_trans_stmt),
				   id);
	    copy = new_trans_stmt = gimple_build_transaction (s1);
	    gimple_transaction_set_subcode (new_trans_stmt,
	      gimple_transaction_subcode (old_trans_stmt));
	    gimple_transaction_set_label_norm (new_trans_stmt,
	      gimple_transaction_label_norm (old_trans_stmt));
	    gimple_transaction_set_label_uninst (new_trans_stmt,
	      gimple_transaction_label_uninst (old_trans_stmt));
	    gimple_transaction_set_label_over (new_trans_stmt,
	      gimple_transaction_label_over (old_trans_stmt));
	  }
	  break;

	default:
	  gcc_unreachable ();
	}
    }
  else
    {
      if (gimple_assign_copy_p (stmt)
	  && gimple_assign_lhs (stmt) == gimple_assign_rhs1 (stmt)
	  && auto_var_in_fn_p (gimple_assign_lhs (stmt), id->src_fn))
	{
	  /* Here we handle statements that are not completely rewritten.
	     First we detect some inlining-induced bogosities for
	     discarding.  */

	  /* Some assignments VAR = VAR; don't generate any rtl code
	     and thus don't count as variable modification.  Avoid
	     keeping bogosities like 0 = 0.  */
	  tree decl = gimple_assign_lhs (stmt), value;
	  tree *n;

	  n = id->decl_map->get (decl);
	  if (n)
	    {
	      value = *n;
	      STRIP_TYPE_NOPS (value);
	      if (TREE_CONSTANT (value) || TREE_READONLY (value))
		return NULL;
	    }
	}

      /* For *ptr_N ={v} {CLOBBER}, if ptr_N is SSA_NAME defined
	 in a block that we aren't copying during tree_function_versioning,
	 just drop the clobber stmt.  */
      if (id->blocks_to_copy && gimple_clobber_p (stmt))
	{
	  tree lhs = gimple_assign_lhs (stmt);
	  if (TREE_CODE (lhs) == MEM_REF
	      && TREE_CODE (TREE_OPERAND (lhs, 0)) == SSA_NAME)
	    {
	      gimple *def_stmt = SSA_NAME_DEF_STMT (TREE_OPERAND (lhs, 0));
	      if (gimple_bb (def_stmt)
		  && !bitmap_bit_p (id->blocks_to_copy,
				    gimple_bb (def_stmt)->index))
		return NULL;
	    }
	}

      if (gimple_debug_bind_p (stmt))
	{
	  gdebug *copy
	    = gimple_build_debug_bind (gimple_debug_bind_get_var (stmt),
				       gimple_debug_bind_get_value (stmt),
				       stmt);
	  id->debug_stmts.safe_push (copy);
	  gimple_seq_add_stmt (&stmts, copy);
	  return stmts;
	}
      if (gimple_debug_source_bind_p (stmt))
	{
	  gdebug *copy = gimple_build_debug_source_bind
			   (gimple_debug_source_bind_get_var (stmt),
			    gimple_debug_source_bind_get_value (stmt),
			    stmt);
	  id->debug_stmts.safe_push (copy);
	  gimple_seq_add_stmt (&stmts, copy);
	  return stmts;
	}
      /* Create a new deep copy of the statement.  */
      copy = gimple_copy (stmt);

      /* Clear flags that need revisiting.  */
      if (gcall *call_stmt = dyn_cast <gcall *> (copy))
	{
	  if (gimple_call_tail_p (call_stmt))
	    gimple_call_set_tail (call_stmt, false);
	  if (gimple_call_from_thunk_p (call_stmt))
	    gimple_call_set_from_thunk (call_stmt, false);
	  if (gimple_call_internal_p (call_stmt))
	    switch (gimple_call_internal_fn (call_stmt))
	      {
	      case IFN_GOMP_SIMD_LANE:
	      case IFN_GOMP_SIMD_VF:
	      case IFN_GOMP_SIMD_LAST_LANE:
	      case IFN_GOMP_SIMD_ORDERED_START:
	      case IFN_GOMP_SIMD_ORDERED_END:
		DECL_STRUCT_FUNCTION (id->dst_fn)->has_simduid_loops = true;
		break;
	      default:
		break;
	      }
	}

      /* Remap the region numbers for __builtin_eh_{pointer,filter},
	 RESX and EH_DISPATCH.  */
      if (id->eh_map)
	switch (gimple_code (copy))
	  {
	  case GIMPLE_CALL:
	    {
	      tree r, fndecl = gimple_call_fndecl (copy);
	      if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
		switch (DECL_FUNCTION_CODE (fndecl))
		  {
		  case BUILT_IN_EH_COPY_VALUES:
		    r = gimple_call_arg (copy, 1);
		    r = remap_eh_region_tree_nr (r, id);
		    gimple_call_set_arg (copy, 1, r);
		    /* FALLTHRU */

		  case BUILT_IN_EH_POINTER:
		  case BUILT_IN_EH_FILTER:
		    r = gimple_call_arg (copy, 0);
		    r = remap_eh_region_tree_nr (r, id);
		    gimple_call_set_arg (copy, 0, r);
		    break;

		  default:
		    break;
		  }

	      /* Reset alias info if we didn't apply measures to
		 keep it valid over inlining by setting DECL_PT_UID.  */
	      if (!id->src_cfun->gimple_df
		  || !id->src_cfun->gimple_df->ipa_pta)
		gimple_call_reset_alias_info (as_a <gcall *> (copy));
	    }
	    break;

	  case GIMPLE_RESX:
	    {
	      gresx *resx_stmt = as_a <gresx *> (copy);
	      int r = gimple_resx_region (resx_stmt);
	      r = remap_eh_region_nr (r, id);
	      gimple_resx_set_region (resx_stmt, r);
	    }
	    break;

	  case GIMPLE_EH_DISPATCH:
	    {
	      geh_dispatch *eh_dispatch = as_a <geh_dispatch *> (copy);
	      int r = gimple_eh_dispatch_region (eh_dispatch);
	      r = remap_eh_region_nr (r, id);
	      gimple_eh_dispatch_set_region (eh_dispatch, r);
	    }
	    break;

	  default:
	    break;
	  }
    }

  /* If STMT has a block defined, map it to the newly constructed
     block.  */
  if (gimple_block (copy))
    {
      tree *n;
      n = id->decl_map->get (gimple_block (copy));
      gcc_assert (n);
      gimple_set_block (copy, *n);
    }

  if (gimple_debug_bind_p (copy) || gimple_debug_source_bind_p (copy))
    {
      gimple_seq_add_stmt (&stmts, copy);
      return stmts;
    }

  /* Remap all the operands in COPY.  */
  memset (&wi, 0, sizeof (wi));
  wi.info = id;
  if (skip_first)
    walk_tree (gimple_op_ptr (copy, 1), remap_gimple_op_r, &wi, NULL);
  else
    walk_gimple_op (copy, remap_gimple_op_r, &wi);

  /* Clear the copied virtual operands.  We are not remapping them here
     but are going to recreate them from scratch.  */
  if (gimple_has_mem_ops (copy))
    {
      gimple_set_vdef (copy, NULL_TREE);
      gimple_set_vuse (copy, NULL_TREE);
    }

  gimple_seq_add_stmt (&stmts, copy);
  return stmts;
}
/* Copy basic block, scale profile accordingly.  Edges will be taken care of
   later.  */

static basic_block
copy_bb (copy_body_data *id, basic_block bb, int frequency_scale,
	 profile_count num, profile_count den)
{
  gimple_stmt_iterator gsi, copy_gsi, seq_gsi;
  basic_block copy_basic_block;
  tree decl;
  gcov_type freq;
  basic_block prev;
  bool scale = num.initialized_p ()
	       && (den > 0 || num == profile_count::zero ());

  /* Search for previous copied basic block.  */
  prev = bb->prev_bb;
  while (!prev->aux)
    prev = prev->prev_bb;

  /* create_basic_block() will append every new block to
     basic_block_info automatically.  */
  copy_basic_block = create_basic_block (NULL, (basic_block) prev->aux);
  copy_basic_block->count = bb->count.apply_scale (num, den);

  /* We are going to rebuild frequencies from scratch.  These values
     have just small importance to drive canonicalize_loop_headers.  */
  freq = apply_scale ((gcov_type)bb->frequency, frequency_scale);

  /* We recompute frequencies after inlining, so this is quite safe.  */
  if (freq > BB_FREQ_MAX)
    freq = BB_FREQ_MAX;
  copy_basic_block->frequency = freq;

  copy_gsi = gsi_start_bb (copy_basic_block);

  for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      gimple_seq stmts;
      gimple *stmt = gsi_stmt (gsi);
      gimple *orig_stmt = stmt;
      gimple_stmt_iterator stmts_gsi;
      bool stmt_added = false;

      id->regimplify = false;
      stmts = remap_gimple_stmt (stmt, id);

      if (gimple_seq_empty_p (stmts))
	continue;

      seq_gsi = copy_gsi;

      for (stmts_gsi = gsi_start (stmts);
	   !gsi_end_p (stmts_gsi); )
	{
	  stmt = gsi_stmt (stmts_gsi);

	  /* Advance iterator now before stmt is moved to seq_gsi.  */
	  gsi_next (&stmts_gsi);

	  if (gimple_nop_p (stmt))
	    continue;

	  gimple_duplicate_stmt_histograms (cfun, stmt, id->src_cfun,
					    orig_stmt);

	  /* With return slot optimization we can end up with
	     non-gimple (foo *)&this->m, fix that here.  */
	  if (is_gimple_assign (stmt)
	      && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt))
	      && !is_gimple_val (gimple_assign_rhs1 (stmt)))
	    {
	      tree new_rhs;
	      new_rhs = force_gimple_operand_gsi (&seq_gsi,
						  gimple_assign_rhs1 (stmt),
						  true, NULL, false,
						  GSI_CONTINUE_LINKING);
	      gimple_assign_set_rhs1 (stmt, new_rhs);
	      id->regimplify = false;
	    }

	  gsi_insert_after (&seq_gsi, stmt, GSI_NEW_STMT);

	  if (id->regimplify)
	    gimple_regimplify_operands (stmt, &seq_gsi);

	  stmt_added = true;
	}

      if (!stmt_added)
	continue;

      /* If copy_basic_block has been empty at the start of this iteration,
	 call gsi_start_bb again to get at the newly added statements.  */
      if (gsi_end_p (copy_gsi))
	copy_gsi = gsi_start_bb (copy_basic_block);
      else
	gsi_next (&copy_gsi);

      /* Process the new statement.  The call to gimple_regimplify_operands
	 possibly turned the statement into multiple statements, we
	 need to process all of them.  */
      do
	{
	  tree fn;
	  gcall *call_stmt;

	  stmt = gsi_stmt (copy_gsi);
	  call_stmt = dyn_cast <gcall *> (stmt);
	  if (call_stmt
	      && gimple_call_va_arg_pack_p (call_stmt)
	      && id->call_stmt
	      && ! gimple_call_va_arg_pack_p (id->call_stmt))
	    {
	      /* __builtin_va_arg_pack () should be replaced by
		 all arguments corresponding to ... in the caller.  */
	      tree p;
	      gcall *new_call;
	      vec<tree> argarray;
	      size_t nargs = gimple_call_num_args (id->call_stmt);
	      size_t n, i, nargs_to_copy;
	      bool remove_bounds = false;

	      for (p = DECL_ARGUMENTS (id->src_fn); p; p = DECL_CHAIN (p))
		nargs--;

	      /* Bounds should be removed from arg pack in case
		 we handle not instrumented call in instrumented
		 function.  */
	      nargs_to_copy = nargs;
	      if (gimple_call_with_bounds_p (id->call_stmt)
		  && !gimple_call_with_bounds_p (stmt))
		{
		  for (i = gimple_call_num_args (id->call_stmt) - nargs;
		       i < gimple_call_num_args (id->call_stmt);
		       i++)
		    if (POINTER_BOUNDS_P (gimple_call_arg (id->call_stmt, i)))
		      {
			remove_bounds = true;
			nargs_to_copy--;
		      }
		}

	      /* Create the new array of arguments.  */
	      n = nargs_to_copy + gimple_call_num_args (call_stmt);
	      argarray.create (n);
	      argarray.safe_grow_cleared (n);

	      /* Copy all the arguments before '...'  */
	      memcpy (argarray.address (),
		      gimple_call_arg_ptr (call_stmt, 0),
		      gimple_call_num_args (call_stmt) * sizeof (tree));

	      if (remove_bounds)
		{
		  /* Append the rest of arguments removing bounds.  */
		  unsigned cur = gimple_call_num_args (call_stmt);
		  i = gimple_call_num_args (id->call_stmt) - nargs;
		  for (i = gimple_call_num_args (id->call_stmt) - nargs;
		       i < gimple_call_num_args (id->call_stmt);
		       i++)
		    if (!POINTER_BOUNDS_P (gimple_call_arg (id->call_stmt, i)))
		      argarray[cur++] = gimple_call_arg (id->call_stmt, i);
		  gcc_assert (cur == n);
		}
	      else
		{
		  /* Append the arguments passed in '...'  */
		  memcpy (argarray.address () + gimple_call_num_args (call_stmt),
			  gimple_call_arg_ptr (id->call_stmt, 0)
			  + (gimple_call_num_args (id->call_stmt) - nargs),
			  nargs * sizeof (tree));
		}

	      new_call = gimple_build_call_vec (gimple_call_fn (call_stmt),
						argarray);

	      argarray.release ();

	      /* Copy all GIMPLE_CALL flags, location and block, except
		 GF_CALL_VA_ARG_PACK.  */
	      gimple_call_copy_flags (new_call, call_stmt);
	      gimple_call_set_va_arg_pack (new_call, false);
	      gimple_set_location (new_call, gimple_location (stmt));
	      gimple_set_block (new_call, gimple_block (stmt));
	      gimple_call_set_lhs (new_call, gimple_call_lhs (call_stmt));

	      gsi_replace (&copy_gsi, new_call, false);
	      stmt = new_call;
	    }
	  else if (call_stmt
		   && id->call_stmt
		   && (decl = gimple_call_fndecl (stmt))
		   && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
		   && DECL_FUNCTION_CODE (decl) == BUILT_IN_VA_ARG_PACK_LEN
		   && ! gimple_call_va_arg_pack_p (id->call_stmt))
	    {
	      /* __builtin_va_arg_pack_len () should be replaced by
		 the number of anonymous arguments.  */
	      size_t nargs = gimple_call_num_args (id->call_stmt), i;
	      tree count, p;
	      gimple *new_stmt;

	      for (p = DECL_ARGUMENTS (id->src_fn); p; p = DECL_CHAIN (p))
		nargs--;

	      /* For instrumented calls we should ignore bounds.  */
	      for (i = gimple_call_num_args (id->call_stmt) - nargs;
		   i < gimple_call_num_args (id->call_stmt);
		   i++)
		if (POINTER_BOUNDS_P (gimple_call_arg (id->call_stmt, i)))
		  nargs--;

	      count = build_int_cst (integer_type_node, nargs);
	      new_stmt = gimple_build_assign (gimple_call_lhs (stmt), count);
	      gsi_replace (&copy_gsi, new_stmt, false);
	      stmt = new_stmt;
	    }
	  else if (call_stmt
		   && id->call_stmt
		   && gimple_call_internal_p (stmt)
		   && gimple_call_internal_fn (stmt) == IFN_TSAN_FUNC_EXIT)
	    {
	      /* Drop TSAN_FUNC_EXIT () internal calls during inlining.  */
	      gsi_remove (&copy_gsi, false);
	      continue;
	    }
      /* Statements produced by inlining can be unfolded, especially
	 when we constant propagated some operands.  We can't fold
	 them right now for two reasons:
	 1) folding requires SSA_NAME_DEF_STMTs to be correct
	 2) we can't change function calls to builtins.
	 So we just mark the statement for later folding.  We mark
	 all new statements, instead of just the statements that have
	 changed by some nontrivial substitution, so that even statements
	 made foldable indirectly are updated.  If this turns out to be
	 expensive, copy_body can be told to watch for nontrivial
	 changes.  */
      if (id->statements_to_fold)
	id->statements_to_fold->add (stmt);
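      /* For instance, a copied statement whose operands were just constant
	 propagated may only become foldable later; fold_marked_statements
	 revisits everything recorded here once the SSA web is consistent.  */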
2003 /* We're duplicating a CALL_EXPR. Find any corresponding
2004 callgraph edges and update or duplicate them. */
2005 if (gcall
*call_stmt
= dyn_cast
<gcall
*> (stmt
))
2007 struct cgraph_edge
*edge
;
2009 switch (id
->transform_call_graph_edges
)
2011 case CB_CGE_DUPLICATE
:
2012 edge
= id
->src_node
->get_edge (orig_stmt
);
2015 int edge_freq
= edge
->frequency
;
2017 struct cgraph_edge
*old_edge
= edge
;
2018 edge
= edge
->clone (id
->dst_node
, call_stmt
,
2020 profile_count::one (),
2021 profile_count::one (),
2024 /* We could also just rescale the frequency, but
2025 doing so would introduce roundoff errors and make
2026 verifier unhappy. */
2027 new_freq
= compute_call_stmt_bb_frequency (id
->dst_node
->decl
,
2030 /* Speculative calls consist of two edges - direct and indirect.
2031 Duplicate the whole thing and distribute frequencies accordingly. */
2032 if (edge
->speculative
)
2034 struct cgraph_edge
*direct
, *indirect
;
2035 struct ipa_ref
*ref
;
2037 gcc_assert (!edge
->indirect_unknown_callee
);
2038 old_edge
->speculative_call_info (direct
, indirect
, ref
);
2039 indirect
= indirect
->clone (id
->dst_node
, call_stmt
,
2041 profile_count::one (),
2042 profile_count::one (),
2045 if (old_edge
->frequency
+ indirect
->frequency
)
2047 edge
->frequency
= MIN (RDIV ((gcov_type
)new_freq
* old_edge
->frequency
,
2048 (old_edge
->frequency
+ indirect
->frequency
)),
2050 indirect
->frequency
= MIN (RDIV ((gcov_type
)new_freq
* indirect
->frequency
,
2051 (old_edge
->frequency
+ indirect
->frequency
)),
2054 id
->dst_node
->clone_reference (ref
, stmt
);
2058 edge
->frequency
= new_freq
;
2060 && profile_status_for_fn (cfun
) != PROFILE_ABSENT
2061 && (edge_freq
> edge
->frequency
+ 10
2062 || edge_freq
< edge
->frequency
- 10))
2064 fprintf (dump_file
, "Edge frequency estimated by "
2065 "cgraph %i diverge from inliner's estimate %i\n",
2069 "Orig bb: %i, orig bb freq %i, new bb freq %i\n",
2072 copy_basic_block
->frequency
);
2078 case CB_CGE_MOVE_CLONES
:
2079 id
->dst_node
->set_call_stmt_including_clones (orig_stmt
,
2081 edge
= id
->dst_node
->get_edge (stmt
);
2085 edge
= id
->dst_node
->get_edge (orig_stmt
);
2087 edge
->set_call_stmt (call_stmt
);
2094 /* Constant propagation on argument done during inlining
2095 may create new direct call. Produce an edge for it. */
2097 || (edge
->indirect_inlining_edge
2098 && id
->transform_call_graph_edges
== CB_CGE_MOVE_CLONES
))
2099 && id
->dst_node
->definition
2100 && (fn
= gimple_call_fndecl (stmt
)) != NULL
)
2102 struct cgraph_node
*dest
= cgraph_node::get_create (fn
);
2104 /* We have missing edge in the callgraph. This can happen
2105 when previous inlining turned an indirect call into a
2106 direct call by constant propagating arguments or we are
2107 producing dead clone (for further cloning). In all
2108 other cases we hit a bug (incorrect node sharing is the
2109 most common reason for missing edges). */
2110 gcc_assert (!dest
->definition
2111 || dest
->address_taken
2112 || !id
->src_node
->definition
2113 || !id
->dst_node
->definition
);
2114 if (id
->transform_call_graph_edges
== CB_CGE_MOVE_CLONES
)
2115 id
->dst_node
->create_edge_including_clones
2116 (dest
, orig_stmt
, call_stmt
, bb
->count
,
2117 compute_call_stmt_bb_frequency (id
->dst_node
->decl
,
2119 CIF_ORIGINALLY_INDIRECT_CALL
);
2121 id
->dst_node
->create_edge (dest
, call_stmt
,
2123 compute_call_stmt_bb_frequency
2124 (id
->dst_node
->decl
,
2125 copy_basic_block
))->inline_failed
2126 = CIF_ORIGINALLY_INDIRECT_CALL
;
2129 fprintf (dump_file
, "Created new direct edge to %s\n",
2134 notice_special_calls (as_a
<gcall
*> (stmt
));
2137 maybe_duplicate_eh_stmt_fn (cfun
, stmt
, id
->src_cfun
, orig_stmt
,
2138 id
->eh_map
, id
->eh_lp_nr
);
	  gsi_next (&copy_gsi);
	}
      while (!gsi_end_p (copy_gsi));

  copy_gsi = gsi_last_bb (copy_basic_block);

  return copy_basic_block;
}
/* Inserting a Single Entry Multiple Exit region in SSA form into code in SSA
   form is quite easy, since the dominator relationship for the old basic
   blocks does not change.

   There is, however, an exception: inlining might change the dominator
   relation across EH edges from basic blocks within the inlined function
   to landing pads in the function we inline into.

   The function fills in PHI_RESULTs of such PHI nodes if they refer
   to gimple regs.  Otherwise, the function marks the PHI_RESULT of such
   PHI nodes for renaming.  For non-gimple regs, renaming is safe: the
   EH edges are abnormal and SSA_NAME_OCCURS_IN_ABNORMAL_PHI must be
   set, which means that there will be no overlapping live ranges
   for the underlying symbol.

   This might change in the future if we allow redirecting of EH edges and
   we might then want to change the way we build the CFG pre-inlining to
   include all the possible edges.  */
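/* Concretely: when a copied statement can throw and its EH edge now reaches a
   landing pad of the function we inline into, the PHI nodes in that landing
   pad need an argument for the new incoming edge; for SSA names of gimple
   registers the argument is copied from the pre-existing edge coming from
   RET_BB (see update_ssa_across_abnormal_edges below).  */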
static void
update_ssa_across_abnormal_edges (basic_block bb, basic_block ret_bb,
				  bool can_throw, bool nonlocal_goto)
{
  edge e;
  edge_iterator ei;

  FOR_EACH_EDGE (e, ei, bb->succs)
    if (!e->dest->aux
	|| ((basic_block)e->dest->aux)->index == ENTRY_BLOCK)
      {
	gphi *phi;
	gphi_iterator si;

	if (!nonlocal_goto)
	  gcc_assert (e->flags & EDGE_EH);

	if (!can_throw)
	  gcc_assert (!(e->flags & EDGE_EH));

	for (si = gsi_start_phis (e->dest); !gsi_end_p (si); gsi_next (&si))
	  {
	    edge re;

	    phi = si.phi ();

	    /* For abnormal goto/call edges the receiver can be the
	       ENTRY_BLOCK.  Do not assert this cannot happen.  */

	    gcc_assert ((e->flags & EDGE_EH)
			|| SSA_NAME_OCCURS_IN_ABNORMAL_PHI (PHI_RESULT (phi)));

	    re = find_edge (ret_bb, e->dest);
	    gcc_checking_assert (re);
	    gcc_assert ((re->flags & (EDGE_EH | EDGE_ABNORMAL))
			== (e->flags & (EDGE_EH | EDGE_ABNORMAL)));

	    SET_USE (PHI_ARG_DEF_PTR_FROM_EDGE (phi, e),
		     USE_FROM_PTR (PHI_ARG_DEF_PTR_FROM_EDGE (phi, re)));
	  }
      }
}
/* Copy edges from BB into its copy constructed earlier, scaling the profile
   accordingly.  Edges will be taken care of later.  Assumes the aux
   pointers point to the copies of each BB.  Return true if any debug stmts
   are left after a statement that must end the basic block.  */
2218 copy_edges_for_bb (basic_block bb
,
2219 basic_block ret_bb
, basic_block abnormal_goto_dest
)
2221 basic_block new_bb
= (basic_block
) bb
->aux
;
2224 gimple_stmt_iterator si
;
2226 bool need_debug_cleanup
= false;
  /* Use the indices from the original blocks to create edges for the
     new ones.  */
2230 FOR_EACH_EDGE (old_edge
, ei
, bb
->succs
)
2231 if (!(old_edge
->flags
& EDGE_EH
))
2235 flags
= old_edge
->flags
;
	/* Return edges do get a FALLTHRU flag when they get inlined.  */
2238 if (old_edge
->dest
->index
== EXIT_BLOCK
2239 && !(old_edge
->flags
& (EDGE_TRUE_VALUE
|EDGE_FALSE_VALUE
|EDGE_FAKE
))
2240 && old_edge
->dest
->aux
!= EXIT_BLOCK_PTR_FOR_FN (cfun
))
2241 flags
|= EDGE_FALLTHRU
;
2242 new_edge
= make_edge (new_bb
, (basic_block
) old_edge
->dest
->aux
, flags
);
2243 new_edge
->probability
= old_edge
->probability
;
2246 if (bb
->index
== ENTRY_BLOCK
|| bb
->index
== EXIT_BLOCK
)
2249 for (si
= gsi_start_bb (new_bb
); !gsi_end_p (si
);)
2252 bool can_throw
, nonlocal_goto
;
2254 copy_stmt
= gsi_stmt (si
);
2255 if (!is_gimple_debug (copy_stmt
))
2256 update_stmt (copy_stmt
);
2258 /* Do this before the possible split_block. */
2261 /* If this tree could throw an exception, there are two
2262 cases where we need to add abnormal edge(s): the
2263 tree wasn't in a region and there is a "current
2264 region" in the caller; or the original tree had
2265 EH edges. In both cases split the block after the tree,
2266 and add abnormal edge(s) as needed; we need both
2267 those from the callee and the caller.
2268 We check whether the copy can throw, because the const
2269 propagation can change an INDIRECT_REF which throws
2270 into a COMPONENT_REF which doesn't. If the copy
2271 can throw, the original could also throw. */
2272 can_throw
= stmt_can_throw_internal (copy_stmt
);
2274 = (stmt_can_make_abnormal_goto (copy_stmt
)
2275 && !computed_goto_p (copy_stmt
));
2277 if (can_throw
|| nonlocal_goto
)
2279 if (!gsi_end_p (si
))
2281 while (!gsi_end_p (si
) && is_gimple_debug (gsi_stmt (si
)))
2284 need_debug_cleanup
= true;
2286 if (!gsi_end_p (si
))
2287 /* Note that bb's predecessor edges aren't necessarily
2288 right at this point; split_block doesn't care. */
2290 edge e
= split_block (new_bb
, copy_stmt
);
2293 new_bb
->aux
= e
->src
->aux
;
2294 si
= gsi_start_bb (new_bb
);
2298 bool update_probs
= false;
2300 if (gimple_code (copy_stmt
) == GIMPLE_EH_DISPATCH
)
2302 make_eh_dispatch_edges (as_a
<geh_dispatch
*> (copy_stmt
));
2303 update_probs
= true;
2307 make_eh_edges (copy_stmt
);
2308 update_probs
= true;
2311 /* EH edges may not match old edges. Copy as much as possible. */
2316 basic_block copy_stmt_bb
= gimple_bb (copy_stmt
);
2318 FOR_EACH_EDGE (old_edge
, ei
, bb
->succs
)
2319 if ((old_edge
->flags
& EDGE_EH
)
2320 && (e
= find_edge (copy_stmt_bb
,
2321 (basic_block
) old_edge
->dest
->aux
))
2322 && (e
->flags
& EDGE_EH
))
2323 e
->probability
= old_edge
->probability
;
2325 FOR_EACH_EDGE (e
, ei
, copy_stmt_bb
->succs
)
2326 if ((e
->flags
& EDGE_EH
) && !e
->probability
.initialized_p ())
2327 e
->probability
= profile_probability::never ();
2331 /* If the call we inline cannot make abnormal goto do not add
2332 additional abnormal edges but only retain those already present
2333 in the original function body. */
2334 if (abnormal_goto_dest
== NULL
)
2335 nonlocal_goto
= false;
2338 basic_block copy_stmt_bb
= gimple_bb (copy_stmt
);
2340 if (get_abnormal_succ_dispatcher (copy_stmt_bb
))
2341 nonlocal_goto
= false;
2342 /* ABNORMAL_DISPATCHER (1) is for longjmp/setjmp or nonlocal gotos
2343 in OpenMP regions which aren't allowed to be left abnormally.
2344 So, no need to add abnormal edge in that case. */
2345 else if (is_gimple_call (copy_stmt
)
2346 && gimple_call_internal_p (copy_stmt
)
2347 && (gimple_call_internal_fn (copy_stmt
)
2348 == IFN_ABNORMAL_DISPATCHER
)
2349 && gimple_call_arg (copy_stmt
, 0) == boolean_true_node
)
2350 nonlocal_goto
= false;
2352 make_single_succ_edge (copy_stmt_bb
, abnormal_goto_dest
,
2356 if ((can_throw
|| nonlocal_goto
)
2357 && gimple_in_ssa_p (cfun
))
2358 update_ssa_across_abnormal_edges (gimple_bb (copy_stmt
), ret_bb
,
2359 can_throw
, nonlocal_goto
);
2361 return need_debug_cleanup
;
/* Copy the PHIs.  All blocks and edges are copied, some blocks
   were possibly split and new outgoing EH edges inserted.
   BB points to the block of the original function and AUX pointers
   link the original and newly copied blocks.  */
2370 copy_phis_for_bb (basic_block bb
, copy_body_data
*id
)
2372 basic_block
const new_bb
= (basic_block
) bb
->aux
;
2377 bool inserted
= false;
2379 for (si
= gsi_start_phis (bb
); !gsi_end_p (si
); gsi_next (&si
))
2385 res
= PHI_RESULT (phi
);
2387 if (!virtual_operand_p (res
))
2389 walk_tree (&new_res
, copy_tree_body_r
, id
, NULL
);
2390 if (EDGE_COUNT (new_bb
->preds
) == 0)
2392 /* Technically we'd want a SSA_DEFAULT_DEF here... */
2393 SSA_NAME_DEF_STMT (new_res
) = gimple_build_nop ();
2397 new_phi
= create_phi_node (new_res
, new_bb
);
2398 FOR_EACH_EDGE (new_edge
, ei
, new_bb
->preds
)
2400 edge old_edge
= find_edge ((basic_block
) new_edge
->src
->aux
,
	      /* When doing partial cloning, we allow PHIs on the entry
		 block as long as all the arguments are the same.
		 Find any input edge to see which argument to copy.  */
2411 FOR_EACH_EDGE (old_edge
, ei2
, bb
->preds
)
2412 if (!old_edge
->src
->aux
)
2415 arg
= PHI_ARG_DEF_FROM_EDGE (phi
, old_edge
);
2417 walk_tree (&new_arg
, copy_tree_body_r
, id
, NULL
);
2418 gcc_assert (new_arg
);
2419 /* With return slot optimization we can end up with
2420 non-gimple (foo *)&this->m, fix that here. */
2421 if (TREE_CODE (new_arg
) != SSA_NAME
2422 && TREE_CODE (new_arg
) != FUNCTION_DECL
2423 && !is_gimple_val (new_arg
))
2425 gimple_seq stmts
= NULL
;
2426 new_arg
= force_gimple_operand (new_arg
, &stmts
, true,
2428 gsi_insert_seq_on_edge (new_edge
, stmts
);
2431 locus
= gimple_phi_arg_location_from_edge (phi
, old_edge
);
2432 if (LOCATION_BLOCK (locus
))
2435 n
= id
->decl_map
->get (LOCATION_BLOCK (locus
));
2437 locus
= set_block (locus
, *n
);
2440 locus
= LOCATION_LOCUS (locus
);
2442 add_phi_arg (new_phi
, new_arg
, new_edge
, locus
);
2448 /* Commit the delayed edge insertions. */
2450 FOR_EACH_EDGE (new_edge
, ei
, new_bb
->preds
)
2451 gsi_commit_one_edge_insert (new_edge
, NULL
);
/* Wrapper for remap_decl so it can be used as a callback.  */

static tree
remap_decl_1 (tree decl, void *data)
{
  return remap_decl (decl, (copy_body_data *) data);
}
/* Build struct function and associated datastructures for the new clone
   NEW_FNDECL to be built.  CALLEE_FNDECL is the original.  This function
   changes cfun to the function of new_fndecl (and current_function_decl
   too).  */
2468 initialize_cfun (tree new_fndecl
, tree callee_fndecl
, profile_count count
)
2470 struct function
*src_cfun
= DECL_STRUCT_FUNCTION (callee_fndecl
);
2472 if (!DECL_ARGUMENTS (new_fndecl
))
2473 DECL_ARGUMENTS (new_fndecl
) = DECL_ARGUMENTS (callee_fndecl
);
2474 if (!DECL_RESULT (new_fndecl
))
2475 DECL_RESULT (new_fndecl
) = DECL_RESULT (callee_fndecl
);
2477 /* Register specific tree functions. */
2478 gimple_register_cfg_hooks ();
2480 /* Get clean struct function. */
2481 push_struct_function (new_fndecl
);
2483 /* We will rebuild these, so just sanity check that they are empty. */
2484 gcc_assert (VALUE_HISTOGRAMS (cfun
) == NULL
);
2485 gcc_assert (cfun
->local_decls
== NULL
);
2486 gcc_assert (cfun
->cfg
== NULL
);
2487 gcc_assert (cfun
->decl
== new_fndecl
);
2489 /* Copy items we preserve during cloning. */
2490 cfun
->static_chain_decl
= src_cfun
->static_chain_decl
;
2491 cfun
->nonlocal_goto_save_area
= src_cfun
->nonlocal_goto_save_area
;
2492 cfun
->function_end_locus
= src_cfun
->function_end_locus
;
2493 cfun
->curr_properties
= src_cfun
->curr_properties
;
2494 cfun
->last_verified
= src_cfun
->last_verified
;
2495 cfun
->va_list_gpr_size
= src_cfun
->va_list_gpr_size
;
2496 cfun
->va_list_fpr_size
= src_cfun
->va_list_fpr_size
;
2497 cfun
->has_nonlocal_label
= src_cfun
->has_nonlocal_label
;
2498 cfun
->stdarg
= src_cfun
->stdarg
;
2499 cfun
->after_inlining
= src_cfun
->after_inlining
;
2500 cfun
->can_throw_non_call_exceptions
2501 = src_cfun
->can_throw_non_call_exceptions
;
2502 cfun
->can_delete_dead_exceptions
= src_cfun
->can_delete_dead_exceptions
;
2503 cfun
->returns_struct
= src_cfun
->returns_struct
;
2504 cfun
->returns_pcc_struct
= src_cfun
->returns_pcc_struct
;
2506 init_empty_tree_cfg ();
2508 profile_status_for_fn (cfun
) = profile_status_for_fn (src_cfun
);
  /* FIXME: When all counts are known to be zero, scaling is also
     meaningful.  */
2512 if (ENTRY_BLOCK_PTR_FOR_FN (src_cfun
)->count
.initialized_p ()
2513 && count
.initialized_p ()
2514 && ENTRY_BLOCK_PTR_FOR_FN (src_cfun
)->count
.initialized_p ())
2516 ENTRY_BLOCK_PTR_FOR_FN (cfun
)->count
=
2517 ENTRY_BLOCK_PTR_FOR_FN (src_cfun
)->count
.apply_scale (count
,
2518 ENTRY_BLOCK_PTR_FOR_FN (src_cfun
)->count
);
2519 EXIT_BLOCK_PTR_FOR_FN (cfun
)->count
=
2520 EXIT_BLOCK_PTR_FOR_FN (src_cfun
)->count
.apply_scale (count
,
2521 ENTRY_BLOCK_PTR_FOR_FN (src_cfun
)->count
);
2523 ENTRY_BLOCK_PTR_FOR_FN (cfun
)->frequency
2524 = ENTRY_BLOCK_PTR_FOR_FN (src_cfun
)->frequency
;
2525 EXIT_BLOCK_PTR_FOR_FN (cfun
)->frequency
=
2526 EXIT_BLOCK_PTR_FOR_FN (src_cfun
)->frequency
;
2528 init_eh_for_function ();
2530 if (src_cfun
->gimple_df
)
2532 init_tree_ssa (cfun
);
2533 cfun
->gimple_df
->in_ssa_p
= src_cfun
->gimple_df
->in_ssa_p
;
2534 if (cfun
->gimple_df
->in_ssa_p
)
2535 init_ssa_operands (cfun
);
/* Helper function for copy_cfg_body.  Move debug stmts from the end
   of NEW_BB to the beginning of successor basic blocks when needed.  If the
   successor has multiple predecessors, reset them, otherwise keep
   their value.  */
2545 maybe_move_debug_stmts_to_successors (copy_body_data
*id
, basic_block new_bb
)
2549 gimple_stmt_iterator si
= gsi_last_nondebug_bb (new_bb
);
2552 || gsi_one_before_end_p (si
)
2553 || !(stmt_can_throw_internal (gsi_stmt (si
))
2554 || stmt_can_make_abnormal_goto (gsi_stmt (si
))))
2557 FOR_EACH_EDGE (e
, ei
, new_bb
->succs
)
2559 gimple_stmt_iterator ssi
= gsi_last_bb (new_bb
);
2560 gimple_stmt_iterator dsi
= gsi_after_labels (e
->dest
);
2561 while (is_gimple_debug (gsi_stmt (ssi
)))
2563 gimple
*stmt
= gsi_stmt (ssi
);
	  /* For the last edge move the debug stmts instead of copying
	     them.  */
2570 if (ei_one_before_end_p (ei
))
2574 if (!single_pred_p (e
->dest
) && gimple_debug_bind_p (stmt
))
2575 gimple_debug_bind_reset_value (stmt
);
2576 gsi_remove (&si
, false);
2577 gsi_insert_before (&dsi
, stmt
, GSI_SAME_STMT
);
2581 if (gimple_debug_bind_p (stmt
))
2583 var
= gimple_debug_bind_get_var (stmt
);
2584 if (single_pred_p (e
->dest
))
2586 value
= gimple_debug_bind_get_value (stmt
);
2587 value
= unshare_expr (value
);
2591 new_stmt
= gimple_build_debug_bind (var
, value
, stmt
);
2593 else if (gimple_debug_source_bind_p (stmt
))
2595 var
= gimple_debug_source_bind_get_var (stmt
);
2596 value
= gimple_debug_source_bind_get_value (stmt
);
2597 new_stmt
= gimple_build_debug_source_bind (var
, value
, stmt
);
2601 gsi_insert_before (&dsi
, new_stmt
, GSI_SAME_STMT
);
2602 id
->debug_stmts
.safe_push (new_stmt
);
2608 /* Make a copy of the sub-loops of SRC_PARENT and place them
2609 as siblings of DEST_PARENT. */
2612 copy_loops (copy_body_data
*id
,
2613 struct loop
*dest_parent
, struct loop
*src_parent
)
2615 struct loop
*src_loop
= src_parent
->inner
;
2618 if (!id
->blocks_to_copy
2619 || bitmap_bit_p (id
->blocks_to_copy
, src_loop
->header
->index
))
2621 struct loop
*dest_loop
= alloc_loop ();
2623 /* Assign the new loop its header and latch and associate
2624 those with the new loop. */
2625 dest_loop
->header
= (basic_block
)src_loop
->header
->aux
;
2626 dest_loop
->header
->loop_father
= dest_loop
;
2627 if (src_loop
->latch
!= NULL
)
2629 dest_loop
->latch
= (basic_block
)src_loop
->latch
->aux
;
2630 dest_loop
->latch
->loop_father
= dest_loop
;
2633 /* Copy loop meta-data. */
2634 copy_loop_info (src_loop
, dest_loop
);
2636 /* Finally place it into the loop array and the loop tree. */
2637 place_new_loop (cfun
, dest_loop
);
2638 flow_loop_tree_node_add (dest_parent
, dest_loop
);
2640 dest_loop
->safelen
= src_loop
->safelen
;
2641 dest_loop
->dont_vectorize
= src_loop
->dont_vectorize
;
2642 if (src_loop
->force_vectorize
)
2644 dest_loop
->force_vectorize
= true;
2645 cfun
->has_force_vectorize_loops
= true;
2647 if (src_loop
->simduid
)
2649 dest_loop
->simduid
= remap_decl (src_loop
->simduid
, id
);
2650 cfun
->has_simduid_loops
= true;
2654 copy_loops (id
, dest_loop
, src_loop
);
2656 src_loop
= src_loop
->next
;
/* Call cgraph_redirect_edge_call_stmt_to_callee on all calls in BB.  */

static void
redirect_all_calls (copy_body_data * id, basic_block bb)
{
  gimple_stmt_iterator si;
  gimple *last = last_stmt (bb);
  for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
    {
      gimple *stmt = gsi_stmt (si);
      if (is_gimple_call (stmt))
	{
	  struct cgraph_edge *edge = id->dst_node->get_edge (stmt);
	  if (edge)
	    {
	      edge->redirect_call_stmt_to_callee ();
	      if (stmt == last && id->call_stmt && maybe_clean_eh_stmt (stmt))
		gimple_purge_dead_eh_edges (bb);
	    }
	}
    }
}
/* Convert estimated frequencies into counts for NODE, scaling COUNT
   with each bb's frequency.  Used when NODE has a 0-weight entry
   but we are about to inline it into a non-zero count call bb.
   See the comments for handle_missing_profiles() in predict.c for
   when this can happen for COMDATs.  */

static void
freqs_to_counts (struct cgraph_node *node, profile_count count)
{
  basic_block bb;
  struct function *fn = DECL_STRUCT_FUNCTION (node->decl);

  FOR_ALL_BB_FN (bb, fn)
    bb->count = count.apply_scale (bb->frequency, BB_FREQ_MAX);
}
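/* For example, a block with frequency BB_FREQ_MAX / 2 ends up with roughly
   COUNT / 2 after the scaling above (a sketch of the intent, ignoring the
   exact rounding performed by apply_scale).  */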
2699 /* Make a copy of the body of FN so that it can be inserted inline in
2700 another function. Walks FN via CFG, returns new fndecl. */
2703 copy_cfg_body (copy_body_data
* id
, profile_count count
, int frequency_scale
,
2704 basic_block entry_block_map
, basic_block exit_block_map
,
2705 basic_block new_entry
)
2707 tree callee_fndecl
= id
->src_fn
;
2708 /* Original cfun for the callee, doesn't change. */
2709 struct function
*src_cfun
= DECL_STRUCT_FUNCTION (callee_fndecl
);
2710 struct function
*cfun_to_copy
;
2712 tree new_fndecl
= NULL
;
2713 bool need_debug_cleanup
= false;
2715 int incoming_frequency
= 0;
2716 profile_count incoming_count
= profile_count::zero ();
2717 profile_count num
= count
;
2718 profile_count den
= ENTRY_BLOCK_PTR_FOR_FN (src_cfun
)->count
;
2719 bool scale
= num
.initialized_p ()
2720 && (den
> 0 || num
== profile_count::zero ());
2722 /* This can happen for COMDAT routines that end up with 0 counts
2723 despite being called (see the comments for handle_missing_profiles()
2724 in predict.c as to why). Apply counts to the blocks in the callee
2725 before inlining, using the guessed edge frequencies, so that we don't
2726 end up with a 0-count inline body which can confuse downstream
2727 optimizations such as function splitting. */
2728 if (!(ENTRY_BLOCK_PTR_FOR_FN (src_cfun
)->count
> 0) && count
> 0)
2730 /* Apply the larger of the call bb count and the total incoming
2731 call edge count to the callee. */
2732 profile_count in_count
= profile_count::zero ();
2733 struct cgraph_edge
*in_edge
;
2734 for (in_edge
= id
->src_node
->callers
; in_edge
;
2735 in_edge
= in_edge
->next_caller
)
2736 if (in_edge
->count
.initialized_p ())
2737 in_count
+= in_edge
->count
;
2738 freqs_to_counts (id
->src_node
, count
> in_count
? count
: in_count
);
2741 /* Register specific tree functions. */
2742 gimple_register_cfg_hooks ();
2744 /* If we are inlining just region of the function, make sure to connect
2745 new entry to ENTRY_BLOCK_PTR_FOR_FN (cfun). Since new entry can be
2746 part of loop, we must compute frequency and probability of
2747 ENTRY_BLOCK_PTR_FOR_FN (cfun) based on the frequencies and
2748 probabilities of edges incoming from nonduplicated region. */
2754 FOR_EACH_EDGE (e
, ei
, new_entry
->preds
)
2756 incoming_frequency
+= EDGE_FREQUENCY (e
);
2758 incoming_count
= incoming_count
.apply_scale (num
, den
);
2760 incoming_count
= profile_count::uninitialized ();
2762 = apply_scale ((gcov_type
)incoming_frequency
, frequency_scale
);
2763 ENTRY_BLOCK_PTR_FOR_FN (cfun
)->count
= incoming_count
;
2764 ENTRY_BLOCK_PTR_FOR_FN (cfun
)->frequency
= incoming_frequency
;
2767 /* Must have a CFG here at this point. */
2768 gcc_assert (ENTRY_BLOCK_PTR_FOR_FN
2769 (DECL_STRUCT_FUNCTION (callee_fndecl
)));
2771 cfun_to_copy
= id
->src_cfun
= DECL_STRUCT_FUNCTION (callee_fndecl
);
2773 ENTRY_BLOCK_PTR_FOR_FN (cfun_to_copy
)->aux
= entry_block_map
;
2774 EXIT_BLOCK_PTR_FOR_FN (cfun_to_copy
)->aux
= exit_block_map
;
2775 entry_block_map
->aux
= ENTRY_BLOCK_PTR_FOR_FN (cfun_to_copy
);
2776 exit_block_map
->aux
= EXIT_BLOCK_PTR_FOR_FN (cfun_to_copy
);
2778 /* Duplicate any exception-handling regions. */
2780 id
->eh_map
= duplicate_eh_regions (cfun_to_copy
, NULL
, id
->eh_lp_nr
,
2783 /* Use aux pointers to map the original blocks to copy. */
2784 FOR_EACH_BB_FN (bb
, cfun_to_copy
)
2785 if (!id
->blocks_to_copy
|| bitmap_bit_p (id
->blocks_to_copy
, bb
->index
))
2787 basic_block new_bb
= copy_bb (id
, bb
, frequency_scale
, num
, den
);
2790 new_bb
->loop_father
= entry_block_map
->loop_father
;
2793 last
= last_basic_block_for_fn (cfun
);
2795 /* Now that we've duplicated the blocks, duplicate their edges. */
2796 basic_block abnormal_goto_dest
= NULL
;
2798 && stmt_can_make_abnormal_goto (id
->call_stmt
))
2800 gimple_stmt_iterator gsi
= gsi_for_stmt (id
->call_stmt
);
2802 bb
= gimple_bb (id
->call_stmt
);
2804 if (gsi_end_p (gsi
))
2805 abnormal_goto_dest
= get_abnormal_succ_dispatcher (bb
);
2807 FOR_ALL_BB_FN (bb
, cfun_to_copy
)
2808 if (!id
->blocks_to_copy
2809 || (bb
->index
> 0 && bitmap_bit_p (id
->blocks_to_copy
, bb
->index
)))
2810 need_debug_cleanup
|= copy_edges_for_bb (bb
, exit_block_map
,
2811 abnormal_goto_dest
);
2815 edge e
= make_edge (entry_block_map
, (basic_block
)new_entry
->aux
, EDGE_FALLTHRU
);
2816 e
->probability
= profile_probability::always ();
2819 /* Duplicate the loop tree, if available and wanted. */
2820 if (loops_for_fn (src_cfun
) != NULL
2821 && current_loops
!= NULL
)
2823 copy_loops (id
, entry_block_map
->loop_father
,
2824 get_loop (src_cfun
, 0));
2825 /* Defer to cfgcleanup to update loop-father fields of basic-blocks. */
2826 loops_state_set (LOOPS_NEED_FIXUP
);
2829 /* If the loop tree in the source function needed fixup, mark the
2830 destination loop tree for fixup, too. */
2831 if (loops_for_fn (src_cfun
)->state
& LOOPS_NEED_FIXUP
)
2832 loops_state_set (LOOPS_NEED_FIXUP
);
2834 if (gimple_in_ssa_p (cfun
))
2835 FOR_ALL_BB_FN (bb
, cfun_to_copy
)
2836 if (!id
->blocks_to_copy
2837 || (bb
->index
> 0 && bitmap_bit_p (id
->blocks_to_copy
, bb
->index
)))
2838 copy_phis_for_bb (bb
, id
);
2840 FOR_ALL_BB_FN (bb
, cfun_to_copy
)
2843 if (need_debug_cleanup
2844 && bb
->index
!= ENTRY_BLOCK
2845 && bb
->index
!= EXIT_BLOCK
)
2846 maybe_move_debug_stmts_to_successors (id
, (basic_block
) bb
->aux
);
      /* Update call edge destinations.  This cannot be done before loop
	 info is updated, because we may split basic blocks.  */
2849 if (id
->transform_call_graph_edges
== CB_CGE_DUPLICATE
2850 && bb
->index
!= ENTRY_BLOCK
2851 && bb
->index
!= EXIT_BLOCK
)
2852 redirect_all_calls (id
, (basic_block
)bb
->aux
);
2853 ((basic_block
)bb
->aux
)->aux
= NULL
;
2857 /* Zero out AUX fields of newly created block during EH edge
2859 for (; last
< last_basic_block_for_fn (cfun
); last
++)
2861 if (need_debug_cleanup
)
2862 maybe_move_debug_stmts_to_successors (id
,
2863 BASIC_BLOCK_FOR_FN (cfun
, last
));
2864 BASIC_BLOCK_FOR_FN (cfun
, last
)->aux
= NULL
;
      /* Update call edge destinations.  This cannot be done before loop
	 info is updated, because we may split basic blocks.  */
2867 if (id
->transform_call_graph_edges
== CB_CGE_DUPLICATE
)
2868 redirect_all_calls (id
, BASIC_BLOCK_FOR_FN (cfun
, last
));
2870 entry_block_map
->aux
= NULL
;
2871 exit_block_map
->aux
= NULL
;
2878 if (id
->dependence_map
)
2880 delete id
->dependence_map
;
2881 id
->dependence_map
= NULL
;
2887 /* Copy the debug STMT using ID. We deal with these statements in a
2888 special way: if any variable in their VALUE expression wasn't
2889 remapped yet, we won't remap it, because that would get decl uids
2890 out of sync, causing codegen differences between -g and -g0. If
2891 this arises, we drop the VALUE expression altogether. */
2894 copy_debug_stmt (gdebug
*stmt
, copy_body_data
*id
)
2897 struct walk_stmt_info wi
;
2899 if (gimple_block (stmt
))
2901 n
= id
->decl_map
->get (gimple_block (stmt
));
2902 gimple_set_block (stmt
, n
? *n
: id
->block
);
2905 /* Remap all the operands in COPY. */
2906 memset (&wi
, 0, sizeof (wi
));
2909 processing_debug_stmt
= 1;
2911 if (gimple_debug_source_bind_p (stmt
))
2912 t
= gimple_debug_source_bind_get_var (stmt
);
2914 t
= gimple_debug_bind_get_var (stmt
);
2916 if (TREE_CODE (t
) == PARM_DECL
&& id
->debug_map
2917 && (n
= id
->debug_map
->get (t
)))
2919 gcc_assert (VAR_P (*n
));
2922 else if (VAR_P (t
) && !is_global_var (t
) && !id
->decl_map
->get (t
))
2923 /* T is a non-localized variable. */;
2925 walk_tree (&t
, remap_gimple_op_r
, &wi
, NULL
);
2927 if (gimple_debug_bind_p (stmt
))
2929 gimple_debug_bind_set_var (stmt
, t
);
2931 if (gimple_debug_bind_has_value_p (stmt
))
2932 walk_tree (gimple_debug_bind_get_value_ptr (stmt
),
2933 remap_gimple_op_r
, &wi
, NULL
);
2935 /* Punt if any decl couldn't be remapped. */
2936 if (processing_debug_stmt
< 0)
2937 gimple_debug_bind_reset_value (stmt
);
2939 else if (gimple_debug_source_bind_p (stmt
))
2941 gimple_debug_source_bind_set_var (stmt
, t
);
2942 /* When inlining and source bind refers to one of the optimized
2943 away parameters, change the source bind into normal debug bind
2944 referring to the corresponding DEBUG_EXPR_DECL that should have
2945 been bound before the call stmt. */
2946 t
= gimple_debug_source_bind_get_value (stmt
);
2948 && TREE_CODE (t
) == PARM_DECL
2951 vec
<tree
, va_gc
> **debug_args
= decl_debug_args_lookup (id
->src_fn
);
2953 if (debug_args
!= NULL
)
2955 for (i
= 0; i
< vec_safe_length (*debug_args
); i
+= 2)
2956 if ((**debug_args
)[i
] == DECL_ORIGIN (t
)
2957 && TREE_CODE ((**debug_args
)[i
+ 1]) == DEBUG_EXPR_DECL
)
2959 t
= (**debug_args
)[i
+ 1];
2960 stmt
->subcode
= GIMPLE_DEBUG_BIND
;
2961 gimple_debug_bind_set_value (stmt
, t
);
2966 if (gimple_debug_source_bind_p (stmt
))
2967 walk_tree (gimple_debug_source_bind_get_value_ptr (stmt
),
2968 remap_gimple_op_r
, &wi
, NULL
);
2971 processing_debug_stmt
= 0;
/* Process deferred debug stmts.  In order to give values better odds
   of being successfully remapped, we delay the processing of debug
   stmts until all other stmts that might require remapping are
   processed.  */

static void
copy_debug_stmts (copy_body_data *id)
{
  size_t i;
  gdebug *stmt;

  if (!id->debug_stmts.exists ())
    return;

  FOR_EACH_VEC_ELT (id->debug_stmts, i, stmt)
    copy_debug_stmt (stmt, id);

  id->debug_stmts.release ();
}
/* Make a copy of the body of SRC_FN so that it can be inserted inline in
   another function.  */

static tree
copy_tree_body (copy_body_data *id)
{
  tree fndecl = id->src_fn;
  tree body = DECL_SAVED_TREE (fndecl);

  walk_tree (&body, copy_tree_body_r, id, NULL);

  return body;
}
/* Make a copy of the body of FN so that it can be inserted inline in
   another function.  */

static tree
copy_body (copy_body_data *id, profile_count count, int frequency_scale,
	   basic_block entry_block_map, basic_block exit_block_map,
	   basic_block new_entry)
{
  tree fndecl = id->src_fn;
  tree body;

  /* If this body has a CFG, walk CFG and copy.  */
  gcc_assert (ENTRY_BLOCK_PTR_FOR_FN (DECL_STRUCT_FUNCTION (fndecl)));
  body = copy_cfg_body (id, count, frequency_scale, entry_block_map,
			exit_block_map, new_entry);
  copy_debug_stmts (id);

  return body;
}
/* Return true if VALUE is an ADDR_EXPR of an automatic variable
   defined in function FN, or of a data member thereof.  */

static bool
self_inlining_addr_expr (tree value, tree fn)
{
  tree var;

  if (TREE_CODE (value) != ADDR_EXPR)
    return false;

  var = get_base_address (TREE_OPERAND (value, 0));

  return var && auto_var_in_fn_p (var, fn);
}
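/* E.g. for VALUE == &local or &local.field, where LOCAL is an automatic
   variable of FN, this returns true; for the address of a global, or of a
   local belonging to some other function, it returns false.  */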
3046 /* Append to BB a debug annotation that binds VAR to VALUE, inheriting
3047 lexical block and line number information from base_stmt, if given,
3048 or from the last stmt of the block otherwise. */
3051 insert_init_debug_bind (copy_body_data
*id
,
3052 basic_block bb
, tree var
, tree value
,
3056 gimple_stmt_iterator gsi
;
3059 if (!gimple_in_ssa_p (id
->src_cfun
))
3062 if (!opt_for_fn (id
->dst_fn
, flag_var_tracking_assignments
))
3065 tracked_var
= target_for_debug_bind (var
);
3071 gsi
= gsi_last_bb (bb
);
3072 if (!base_stmt
&& !gsi_end_p (gsi
))
3073 base_stmt
= gsi_stmt (gsi
);
3076 note
= gimple_build_debug_bind (tracked_var
, unshare_expr (value
), base_stmt
);
3080 if (!gsi_end_p (gsi
))
3081 gsi_insert_after (&gsi
, note
, GSI_SAME_STMT
);
3083 gsi_insert_before (&gsi
, note
, GSI_SAME_STMT
);
3090 insert_init_stmt (copy_body_data
*id
, basic_block bb
, gimple
*init_stmt
)
3092 /* If VAR represents a zero-sized variable, it's possible that the
3093 assignment statement may result in no gimple statements. */
3096 gimple_stmt_iterator si
= gsi_last_bb (bb
);
3098 /* We can end up with init statements that store to a non-register
3099 from a rhs with a conversion. Handle that here by forcing the
3100 rhs into a temporary. gimple_regimplify_operands is not
3101 prepared to do this for us. */
3102 if (!is_gimple_debug (init_stmt
)
3103 && !is_gimple_reg (gimple_assign_lhs (init_stmt
))
3104 && is_gimple_reg_type (TREE_TYPE (gimple_assign_lhs (init_stmt
)))
3105 && gimple_assign_rhs_class (init_stmt
) == GIMPLE_UNARY_RHS
)
3107 tree rhs
= build1 (gimple_assign_rhs_code (init_stmt
),
3108 gimple_expr_type (init_stmt
),
3109 gimple_assign_rhs1 (init_stmt
));
3110 rhs
= force_gimple_operand_gsi (&si
, rhs
, true, NULL_TREE
, false,
3112 gimple_assign_set_rhs_code (init_stmt
, TREE_CODE (rhs
));
3113 gimple_assign_set_rhs1 (init_stmt
, rhs
);
3115 gsi_insert_after (&si
, init_stmt
, GSI_NEW_STMT
);
3116 gimple_regimplify_operands (init_stmt
, &si
);
3118 if (!is_gimple_debug (init_stmt
))
3120 tree def
= gimple_assign_lhs (init_stmt
);
3121 insert_init_debug_bind (id
, bb
, def
, def
, init_stmt
);
/* Initialize parameter P with VALUE.  If needed, produce init statement
   at the end of BB.  When BB is NULL, we return the init statement to be
   output later.  */
3130 setup_one_parameter (copy_body_data
*id
, tree p
, tree value
, tree fn
,
3131 basic_block bb
, tree
*vars
)
3133 gimple
*init_stmt
= NULL
;
3136 tree def
= (gimple_in_ssa_p (cfun
)
3137 ? ssa_default_def (id
->src_cfun
, p
) : NULL
);
3140 && value
!= error_mark_node
3141 && !useless_type_conversion_p (TREE_TYPE (p
), TREE_TYPE (value
)))
3143 /* If we can match up types by promotion/demotion do so. */
3144 if (fold_convertible_p (TREE_TYPE (p
), value
))
3145 rhs
= fold_convert (TREE_TYPE (p
), value
);
3148 /* ??? For valid programs we should not end up here.
3149 Still if we end up with truly mismatched types here, fall back
3150 to using a VIEW_CONVERT_EXPR or a literal zero to not leak invalid
3151 GIMPLE to the following passes. */
3152 if (!is_gimple_reg_type (TREE_TYPE (value
))
3153 || TYPE_SIZE (TREE_TYPE (p
)) == TYPE_SIZE (TREE_TYPE (value
)))
3154 rhs
= fold_build1 (VIEW_CONVERT_EXPR
, TREE_TYPE (p
), value
);
3156 rhs
= build_zero_cst (TREE_TYPE (p
));
  /* Make an equivalent VAR_DECL.  Note that we must NOT remap the type
     here since the type of this decl must be visible to the calling
     function.  */
3163 var
= copy_decl_to_var (p
, id
);
3165 /* Declare this new variable. */
3166 DECL_CHAIN (var
) = *vars
;
3169 /* Make gimplifier happy about this variable. */
3170 DECL_SEEN_IN_BIND_EXPR_P (var
) = 1;
  /* If the parameter is never assigned to, has no SSA_NAMEs created,
     we would not need to create a new variable here at all, if it
     weren't for debug info.  Still, we can just use the argument
     value.  */
3176 if (TREE_READONLY (p
)
3177 && !TREE_ADDRESSABLE (p
)
3178 && value
&& !TREE_SIDE_EFFECTS (value
)
      /* We may produce non-gimple trees by adding NOPs or introduce
	 invalid sharing when the operand is not really constant.
	 It is not a big deal to prohibit constant propagation here as
	 we will constant propagate in the DOM1 pass anyway.  */
3185 if (is_gimple_min_invariant (value
)
3186 && useless_type_conversion_p (TREE_TYPE (p
),
3188 /* We have to be very careful about ADDR_EXPR. Make sure
3189 the base variable isn't a local variable of the inlined
3190 function, e.g., when doing recursive inlining, direct or
3191 mutually-recursive or whatever, which is why we don't
3192 just test whether fn == current_function_decl. */
3193 && ! self_inlining_addr_expr (value
, fn
))
3195 insert_decl_map (id
, p
, value
);
3196 insert_debug_decl_map (id
, p
, var
);
3197 return insert_init_debug_bind (id
, bb
, var
, value
, NULL
);
3201 /* Register the VAR_DECL as the equivalent for the PARM_DECL;
3202 that way, when the PARM_DECL is encountered, it will be
3203 automatically replaced by the VAR_DECL. */
3204 insert_decl_map (id
, p
, var
);
3206 /* Even if P was TREE_READONLY, the new VAR should not be.
3207 In the original code, we would have constructed a
3208 temporary, and then the function body would have never
3209 changed the value of P. However, now, we will be
3210 constructing VAR directly. The constructor body may
3211 change its value multiple times as it is being
3212 constructed. Therefore, it must not be TREE_READONLY;
3213 the back-end assumes that TREE_READONLY variable is
3214 assigned to only once. */
3215 if (TYPE_NEEDS_CONSTRUCTING (TREE_TYPE (p
)))
3216 TREE_READONLY (var
) = 0;
  /* If there is no setup required and we are in SSA, take the easy route
     replacing all SSA names representing the function parameter by the
     SSA name passed to the function.

     We need to construct a map for the variable anyway as it might be used
     in different SSA names when the parameter is set in the function.

     Do replacement at -O0 for const arguments replaced by a constant.
     This is important for builtin_constant_p and other constructs requiring
     a constant argument to be visible in the inlined function body.  */
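  /* E.g. if parameter p receives the SSA name a_5 from the caller, all uses
     of p's default definition in the inlined body simply become uses of a_5;
     no copy statement needs to be emitted.  */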
3228 if (gimple_in_ssa_p (cfun
) && rhs
&& def
&& is_gimple_reg (p
)
3230 || (TREE_READONLY (p
)
3231 && is_gimple_min_invariant (rhs
)))
3232 && (TREE_CODE (rhs
) == SSA_NAME
3233 || is_gimple_min_invariant (rhs
))
3234 && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (def
))
3236 insert_decl_map (id
, def
, rhs
);
3237 return insert_init_debug_bind (id
, bb
, var
, rhs
, NULL
);
  /* If the value of the argument is never used, don't care about
     initializing it.  */
3242 if (optimize
&& gimple_in_ssa_p (cfun
) && !def
&& is_gimple_reg (p
))
3244 gcc_assert (!value
|| !TREE_SIDE_EFFECTS (value
));
3245 return insert_init_debug_bind (id
, bb
, var
, rhs
, NULL
);
3248 /* Initialize this VAR_DECL from the equivalent argument. Convert
3249 the argument to the proper type in case it was promoted. */
3252 if (rhs
== error_mark_node
)
3254 insert_decl_map (id
, p
, var
);
3255 return insert_init_debug_bind (id
, bb
, var
, rhs
, NULL
);
3258 STRIP_USELESS_TYPE_CONVERSION (rhs
);
3260 /* If we are in SSA form properly remap the default definition
3261 or assign to a dummy SSA name if the parameter is unused and
3262 we are not optimizing. */
3263 if (gimple_in_ssa_p (cfun
) && is_gimple_reg (p
))
3267 def
= remap_ssa_name (def
, id
);
3268 init_stmt
= gimple_build_assign (def
, rhs
);
3269 SSA_NAME_IS_DEFAULT_DEF (def
) = 0;
3270 set_ssa_default_def (cfun
, var
, NULL
);
3274 def
= make_ssa_name (var
);
3275 init_stmt
= gimple_build_assign (def
, rhs
);
3279 init_stmt
= gimple_build_assign (var
, rhs
);
3281 if (bb
&& init_stmt
)
3282 insert_init_stmt (id
, bb
, init_stmt
);
3287 /* Generate code to initialize the parameters of the function at the
3288 top of the stack in ID from the GIMPLE_CALL STMT. */
3291 initialize_inlined_parameters (copy_body_data
*id
, gimple
*stmt
,
3292 tree fn
, basic_block bb
)
3297 tree vars
= NULL_TREE
;
3298 tree static_chain
= gimple_call_chain (stmt
);
3300 /* Figure out what the parameters are. */
3301 parms
= DECL_ARGUMENTS (fn
);
3303 /* Loop through the parameter declarations, replacing each with an
3304 equivalent VAR_DECL, appropriately initialized. */
3305 for (p
= parms
, i
= 0; p
; p
= DECL_CHAIN (p
), i
++)
3308 val
= i
< gimple_call_num_args (stmt
) ? gimple_call_arg (stmt
, i
) : NULL
;
3309 setup_one_parameter (id
, p
, val
, fn
, bb
, &vars
);
3311 /* After remapping parameters remap their types. This has to be done
3312 in a second loop over all parameters to appropriately remap
3313 variable sized arrays when the size is specified in a
3314 parameter following the array. */
3315 for (p
= parms
, i
= 0; p
; p
= DECL_CHAIN (p
), i
++)
3317 tree
*varp
= id
->decl_map
->get (p
);
3318 if (varp
&& VAR_P (*varp
))
3320 tree def
= (gimple_in_ssa_p (cfun
) && is_gimple_reg (p
)
3321 ? ssa_default_def (id
->src_cfun
, p
) : NULL
);
3323 TREE_TYPE (var
) = remap_type (TREE_TYPE (var
), id
);
3324 /* Also remap the default definition if it was remapped
3325 to the default definition of the parameter replacement
3326 by the parameter setup. */
3329 tree
*defp
= id
->decl_map
->get (def
);
3331 && TREE_CODE (*defp
) == SSA_NAME
3332 && SSA_NAME_VAR (*defp
) == var
)
3333 TREE_TYPE (*defp
) = TREE_TYPE (var
);
3338 /* Initialize the static chain. */
3339 p
= DECL_STRUCT_FUNCTION (fn
)->static_chain_decl
;
3340 gcc_assert (fn
!= current_function_decl
);
3343 /* No static chain? Seems like a bug in tree-nested.c. */
3344 gcc_assert (static_chain
);
3346 setup_one_parameter (id
, p
, static_chain
, fn
, bb
, &vars
);
3349 declare_inline_vars (id
->block
, vars
);
/* Declare a return variable to replace the RESULT_DECL for the
   function we are calling.  An appropriate DECL_STMT is returned.
   The USE_STMT is filled to contain a use of the declaration to
   indicate the return value of the function.

   RETURN_SLOT, if non-null, is the place where to store the result.  It
   is set only for CALL_EXPR_RETURN_SLOT_OPT.  MODIFY_DEST, if non-null,
   was the LHS of the MODIFY_EXPR to which this call is the RHS.

   RETURN_BOUNDS holds a destination for the returned bounds.

   The return value is a (possibly null) value that holds the result
   as seen by the caller.  */
3368 declare_return_variable (copy_body_data
*id
, tree return_slot
, tree modify_dest
,
3369 tree return_bounds
, basic_block entry_bb
)
3371 tree callee
= id
->src_fn
;
3372 tree result
= DECL_RESULT (callee
);
3373 tree callee_type
= TREE_TYPE (result
);
3377 /* Handle type-mismatches in the function declaration return type
3378 vs. the call expression. */
3380 caller_type
= TREE_TYPE (modify_dest
);
3382 caller_type
= TREE_TYPE (TREE_TYPE (callee
));
3384 /* We don't need to do anything for functions that don't return anything. */
3385 if (VOID_TYPE_P (callee_type
))
3388 /* If there was a return slot, then the return value is the
3389 dereferenced address of that object. */
3392 /* The front end shouldn't have used both return_slot and
3393 a modify expression. */
3394 gcc_assert (!modify_dest
);
3395 if (DECL_BY_REFERENCE (result
))
3397 tree return_slot_addr
= build_fold_addr_expr (return_slot
);
3398 STRIP_USELESS_TYPE_CONVERSION (return_slot_addr
);
3400 /* We are going to construct *&return_slot and we can't do that
3401 for variables believed to be not addressable.
3403 FIXME: This check possibly can match, because values returned
3404 via return slot optimization are not believed to have address
3405 taken by alias analysis. */
3406 gcc_assert (TREE_CODE (return_slot
) != SSA_NAME
);
3407 var
= return_slot_addr
;
3412 gcc_assert (TREE_CODE (var
) != SSA_NAME
);
3413 if (TREE_ADDRESSABLE (result
))
3414 mark_addressable (var
);
3416 if ((TREE_CODE (TREE_TYPE (result
)) == COMPLEX_TYPE
3417 || TREE_CODE (TREE_TYPE (result
)) == VECTOR_TYPE
)
3418 && !DECL_GIMPLE_REG_P (result
)
3420 DECL_GIMPLE_REG_P (var
) = 0;
3425 /* All types requiring non-trivial constructors should have been handled. */
3426 gcc_assert (!TREE_ADDRESSABLE (callee_type
));
3428 /* Attempt to avoid creating a new temporary variable. */
3430 && TREE_CODE (modify_dest
) != SSA_NAME
)
3432 bool use_it
= false;
3434 /* We can't use MODIFY_DEST if there's type promotion involved. */
3435 if (!useless_type_conversion_p (callee_type
, caller_type
))
3438 /* ??? If we're assigning to a variable sized type, then we must
3439 reuse the destination variable, because we've no good way to
3440 create variable sized temporaries at this point. */
3441 else if (TREE_CODE (TYPE_SIZE_UNIT (caller_type
)) != INTEGER_CST
)
3444 /* If the callee cannot possibly modify MODIFY_DEST, then we can
3445 reuse it as the result of the call directly. Don't do this if
3446 it would promote MODIFY_DEST to addressable. */
3447 else if (TREE_ADDRESSABLE (result
))
3451 tree base_m
= get_base_address (modify_dest
);
3453 /* If the base isn't a decl, then it's a pointer, and we don't
3454 know where that's going to go. */
3455 if (!DECL_P (base_m
))
3457 else if (is_global_var (base_m
))
3459 else if ((TREE_CODE (TREE_TYPE (result
)) == COMPLEX_TYPE
3460 || TREE_CODE (TREE_TYPE (result
)) == VECTOR_TYPE
)
3461 && !DECL_GIMPLE_REG_P (result
)
3462 && DECL_GIMPLE_REG_P (base_m
))
3464 else if (!TREE_ADDRESSABLE (base_m
))
3476 gcc_assert (TREE_CODE (TYPE_SIZE_UNIT (callee_type
)) == INTEGER_CST
);
3478 var
= copy_result_decl_to_var (result
, id
);
3479 DECL_SEEN_IN_BIND_EXPR_P (var
) = 1;
3481 /* Do not have the rest of GCC warn about this variable as it should
3482 not be visible to the user. */
3483 TREE_NO_WARNING (var
) = 1;
3485 declare_inline_vars (id
->block
, var
);
3487 /* Build the use expr. If the return type of the function was
3488 promoted, convert it back to the expected type. */
3490 if (!useless_type_conversion_p (caller_type
, TREE_TYPE (var
)))
3492 /* If we can match up types by promotion/demotion do so. */
3493 if (fold_convertible_p (caller_type
, var
))
3494 use
= fold_convert (caller_type
, var
);
	  /* ??? For valid programs we should not end up here.
	     Still if we end up with truly mismatched types here, fall back
	     to using a MEM_REF to not leak invalid GIMPLE to the following
	     passes.  */
3501 /* Prevent var from being written into SSA form. */
3502 if (TREE_CODE (TREE_TYPE (var
)) == VECTOR_TYPE
3503 || TREE_CODE (TREE_TYPE (var
)) == COMPLEX_TYPE
)
3504 DECL_GIMPLE_REG_P (var
) = false;
3505 else if (is_gimple_reg_type (TREE_TYPE (var
)))
3506 TREE_ADDRESSABLE (var
) = true;
3507 use
= fold_build2 (MEM_REF
, caller_type
,
3508 build_fold_addr_expr (var
),
3509 build_int_cst (ptr_type_node
, 0));
3513 STRIP_USELESS_TYPE_CONVERSION (use
);
3515 if (DECL_BY_REFERENCE (result
))
3517 TREE_ADDRESSABLE (var
) = 1;
3518 var
= build_fold_addr_expr (var
);
3522 /* Register the VAR_DECL as the equivalent for the RESULT_DECL; that
3523 way, when the RESULT_DECL is encountered, it will be
3524 automatically replaced by the VAR_DECL.
3526 When returning by reference, ensure that RESULT_DECL remaps to
3528 if (DECL_BY_REFERENCE (result
)
3529 && !is_gimple_val (var
))
3531 tree temp
= create_tmp_var (TREE_TYPE (result
), "retvalptr");
3532 insert_decl_map (id
, result
, temp
);
3533 /* When RESULT_DECL is in SSA form, we need to remap and initialize
3534 it's default_def SSA_NAME. */
3535 if (gimple_in_ssa_p (id
->src_cfun
)
3536 && is_gimple_reg (result
))
3538 temp
= make_ssa_name (temp
);
3539 insert_decl_map (id
, ssa_default_def (id
->src_cfun
, result
), temp
);
3541 insert_init_stmt (id
, entry_bb
, gimple_build_assign (temp
, var
));
3544 insert_decl_map (id
, result
, var
);
3546 /* Remember this so we can ignore it in remap_decls. */
3549 /* If returned bounds are used, then make var for them. */
3552 tree bndtemp
= create_tmp_var (pointer_bounds_type_node
, "retbnd");
3553 DECL_SEEN_IN_BIND_EXPR_P (bndtemp
) = 1;
3554 TREE_NO_WARNING (bndtemp
) = 1;
3555 declare_inline_vars (id
->block
, bndtemp
);
3557 id
->retbnd
= bndtemp
;
3558 insert_init_stmt (id
, entry_bb
,
3559 gimple_build_assign (bndtemp
, chkp_get_zero_bounds_var ()));
/* Determine if the function can be copied.  If so return NULL.  If
   not return a string describing the reason for failure.  */
3569 copy_forbidden (struct function
*fun
)
3571 const char *reason
= fun
->cannot_be_copied_reason
;
3573 /* Only examine the function once. */
3574 if (fun
->cannot_be_copied_set
)
3577 /* We cannot copy a function that receives a non-local goto
3578 because we cannot remap the destination label used in the
3579 function that is performing the non-local goto. */
3580 /* ??? Actually, this should be possible, if we work at it.
3581 No doubt there's just a handful of places that simply
3582 assume it doesn't happen and don't substitute properly. */
3583 if (fun
->has_nonlocal_label
)
3585 reason
= G_("function %q+F can never be copied "
3586 "because it receives a non-local goto");
3590 if (fun
->has_forced_label_in_static
)
3592 reason
= G_("function %q+F can never be copied because it saves "
3593 "address of local label in a static variable");
3598 fun
->cannot_be_copied_reason
= reason
;
3599 fun
->cannot_be_copied_set
= true;
3604 static const char *inline_forbidden_reason
;
/* A callback for walk_gimple_seq to handle statements.  Returns non-null
   iff a function cannot be inlined.  Also sets the reason why.  */
3610 inline_forbidden_p_stmt (gimple_stmt_iterator
*gsi
, bool *handled_ops_p
,
3611 struct walk_stmt_info
*wip
)
3613 tree fn
= (tree
) wip
->info
;
3615 gimple
*stmt
= gsi_stmt (*gsi
);
3617 switch (gimple_code (stmt
))
      /* Refuse to inline an alloca call unless the user explicitly forced
	 so, as this may change the program's memory overhead drastically
	 when the function using alloca is called in a loop.  In GCC present
	 in SPEC2000, inlining into schedule_block caused it to require 2GB
	 of RAM instead of 256MB.  Don't do so for alloca calls emitted for
	 VLA objects as those can't cause unbounded growth (they're always
	 wrapped inside stack_save/stack_restore regions).  */
3627 if (gimple_maybe_alloca_call_p (stmt
)
3628 && !gimple_call_alloca_for_var_p (as_a
<gcall
*> (stmt
))
3629 && !lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn
)))
3631 inline_forbidden_reason
3632 = G_("function %q+F can never be inlined because it uses "
3633 "alloca (override using the always_inline attribute)");
3634 *handled_ops_p
= true;
3638 t
= gimple_call_fndecl (stmt
);
3642 /* We cannot inline functions that call setjmp. */
3643 if (setjmp_call_p (t
))
3645 inline_forbidden_reason
3646 = G_("function %q+F can never be inlined because it uses setjmp");
3647 *handled_ops_p
= true;
3651 if (DECL_BUILT_IN_CLASS (t
) == BUILT_IN_NORMAL
)
3652 switch (DECL_FUNCTION_CODE (t
))
3654 /* We cannot inline functions that take a variable number of
3656 case BUILT_IN_VA_START
:
3657 case BUILT_IN_NEXT_ARG
:
3658 case BUILT_IN_VA_END
:
3659 inline_forbidden_reason
3660 = G_("function %q+F can never be inlined because it "
3661 "uses variable argument lists");
3662 *handled_ops_p
= true;
3665 case BUILT_IN_LONGJMP
:
3666 /* We can't inline functions that call __builtin_longjmp at
3667 all. The non-local goto machinery really requires the
3668 destination be in a different function. If we allow the
3669 function calling __builtin_longjmp to be inlined into the
3670 function calling __builtin_setjmp, Things will Go Awry. */
3671 inline_forbidden_reason
3672 = G_("function %q+F can never be inlined because "
3673 "it uses setjmp-longjmp exception handling");
3674 *handled_ops_p
= true;
3677 case BUILT_IN_NONLOCAL_GOTO
:
3679 inline_forbidden_reason
3680 = G_("function %q+F can never be inlined because "
3681 "it uses non-local goto");
3682 *handled_ops_p
= true;
3685 case BUILT_IN_RETURN
:
3686 case BUILT_IN_APPLY_ARGS
:
3687 /* If a __builtin_apply_args caller would be inlined,
3688 it would be saving arguments of the function it has
3689 been inlined into. Similarly __builtin_return would
3690 return from the function the inline has been inlined into. */
3691 inline_forbidden_reason
3692 = G_("function %q+F can never be inlined because "
3693 "it uses __builtin_return or __builtin_apply_args");
3694 *handled_ops_p
= true;
3703 t
= gimple_goto_dest (stmt
);
3705 /* We will not inline a function which uses computed goto. The
3706 addresses of its local labels, which may be tucked into
3707 global storage, are of course not constant across
3708 instantiations, which causes unexpected behavior. */
3709 if (TREE_CODE (t
) != LABEL_DECL
)
3711 inline_forbidden_reason
3712 = G_("function %q+F can never be inlined "
3713 "because it contains a computed goto");
3714 *handled_ops_p
= true;
3723 *handled_ops_p
= false;
/* Return true if FNDECL is a function that cannot be inlined into
   another one.  */

static bool
inline_forbidden_p (tree fndecl)
{
3733 struct function
*fun
= DECL_STRUCT_FUNCTION (fndecl
);
3734 struct walk_stmt_info wi
;
3736 bool forbidden_p
= false;
3738 /* First check for shared reasons not to copy the code. */
3739 inline_forbidden_reason
= copy_forbidden (fun
);
3740 if (inline_forbidden_reason
!= NULL
)
  /* Next, walk the statements of the function looking for
     constructs we can't handle, or are non-optimal for inlining.  */
3745 hash_set
<tree
> visited_nodes
;
3746 memset (&wi
, 0, sizeof (wi
));
3747 wi
.info
= (void *) fndecl
;
3748 wi
.pset
= &visited_nodes
;
3750 FOR_EACH_BB_FN (bb
, fun
)
3753 gimple_seq seq
= bb_seq (bb
);
3754 ret
= walk_gimple_seq (seq
, inline_forbidden_p_stmt
, NULL
, &wi
);
3755 forbidden_p
= (ret
!= NULL
);
3763 /* Return false if the function FNDECL cannot be inlined on account of its
3764 attributes, true otherwise. */
3766 function_attribute_inlinable_p (const_tree fndecl
)
3768 if (targetm
.attribute_table
)
3772 for (a
= DECL_ATTRIBUTES (fndecl
); a
; a
= TREE_CHAIN (a
))
3774 const_tree name
= TREE_PURPOSE (a
);
3777 for (i
= 0; targetm
.attribute_table
[i
].name
!= NULL
; i
++)
3778 if (is_attribute_p (targetm
.attribute_table
[i
].name
, name
))
3779 return targetm
.function_attribute_inlinable_p (fndecl
);
3786 /* Returns nonzero if FN is a function that does not have any
3787 fundamental inline blocking properties. */
3790 tree_inlinable_function_p (tree fn
)
3792 bool inlinable
= true;
3796 /* If we've already decided this function shouldn't be inlined,
3797 there's no need to check again. */
3798 if (DECL_UNINLINABLE (fn
))
3801 /* We only warn for functions declared `inline' by the user. */
3802 do_warning
= (warn_inline
3803 && DECL_DECLARED_INLINE_P (fn
)
3804 && !DECL_NO_INLINE_WARNING_P (fn
)
3805 && !DECL_IN_SYSTEM_HEADER (fn
));
3807 always_inline
= lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn
));
3810 && always_inline
== NULL
)
3813 warning (OPT_Winline
, "function %q+F can never be inlined because it "
3814 "is suppressed using -fno-inline", fn
);
3818 else if (!function_attribute_inlinable_p (fn
))
3821 warning (OPT_Winline
, "function %q+F can never be inlined because it "
3822 "uses attributes conflicting with inlining", fn
);
3826 else if (inline_forbidden_p (fn
))
      /* See if we should warn about uninlinable functions.  Previously,
	 some of these warnings would be issued while trying to expand
	 the function inline, but that would cause multiple warnings
	 about functions that would for example call alloca.  But since
	 this is a property of the function, just one warning is enough.
	 As a bonus we can now give more details about the reason why a
	 function is not inlinable.  */
3836 error (inline_forbidden_reason
, fn
);
3837 else if (do_warning
)
3838 warning (OPT_Winline
, inline_forbidden_reason
, fn
);
3843 /* Squirrel away the result so that we don't have to check again. */
3844 DECL_UNINLINABLE (fn
) = !inlinable
;
/* Estimate the cost of a memory move of type TYPE.  Use machine-dependent
   word size, take a possible memcpy call into account, and return the
   cost based on whether we are optimizing for size or speed according to
   SPEED_P.  */
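/* Rough illustration (hypothetical numbers): a 64-byte aggregate with
   MOVE_MAX_PIECES == 16 and a large enough MOVE_RATIO costs
   (64 + 16 - 1) / 16 = 4 move units below; blocks bigger than
   MOVE_MAX_PIECES * MOVE_RATIO fall back to the fixed memcpy-call cost.  */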
3854 estimate_move_cost (tree type
, bool ARG_UNUSED (speed_p
))
3858 gcc_assert (!VOID_TYPE_P (type
));
3860 if (TREE_CODE (type
) == VECTOR_TYPE
)
3862 scalar_mode inner
= SCALAR_TYPE_MODE (TREE_TYPE (type
));
3864 = targetm
.vectorize
.preferred_simd_mode (inner
);
3865 int simd_mode_size
= GET_MODE_SIZE (simd
);
3866 return ((GET_MODE_SIZE (TYPE_MODE (type
)) + simd_mode_size
- 1)
3870 size
= int_size_in_bytes (type
);
3872 if (size
< 0 || size
> MOVE_MAX_PIECES
* MOVE_RATIO (speed_p
))
3873 /* Cost of a memcpy call, 3 arguments and the call. */
3876 return ((size
+ MOVE_MAX_PIECES
- 1) / MOVE_MAX_PIECES
);
/* Returns cost of operation CODE, according to WEIGHTS.  */

static int
estimate_operator_cost (enum tree_code code, eni_weights *weights,
			tree op1 ATTRIBUTE_UNUSED, tree op2)
{
  switch (code)
    {
    /* These are "free" conversions, or their presumed cost
       is folded into other operations.  */
    case VIEW_CONVERT_EXPR:
      return 0;

    /* Assign cost of 1 to usual operations.
       ??? We may consider mapping RTL costs to this.  */
    case POINTER_PLUS_EXPR:
    case MULT_HIGHPART_EXPR:

    case ADDR_SPACE_CONVERT_EXPR:
    case FIXED_CONVERT_EXPR:
    case FIX_TRUNC_EXPR:

    case TRUTH_ANDIF_EXPR:
    case TRUTH_ORIF_EXPR:
    case TRUTH_AND_EXPR:
    case TRUTH_XOR_EXPR:
    case TRUTH_NOT_EXPR:

    case UNORDERED_EXPR:

    case PREDECREMENT_EXPR:
    case PREINCREMENT_EXPR:
    case POSTDECREMENT_EXPR:
    case POSTINCREMENT_EXPR:

    case REALIGN_LOAD_EXPR:

    case REDUC_MAX_EXPR:
    case REDUC_MIN_EXPR:
    case REDUC_PLUS_EXPR:
    case WIDEN_SUM_EXPR:
    case WIDEN_MULT_EXPR:

    case WIDEN_MULT_PLUS_EXPR:
    case WIDEN_MULT_MINUS_EXPR:
    case WIDEN_LSHIFT_EXPR:

    case VEC_WIDEN_MULT_HI_EXPR:
    case VEC_WIDEN_MULT_LO_EXPR:
    case VEC_WIDEN_MULT_EVEN_EXPR:
    case VEC_WIDEN_MULT_ODD_EXPR:
    case VEC_UNPACK_HI_EXPR:
    case VEC_UNPACK_LO_EXPR:
    case VEC_UNPACK_FLOAT_HI_EXPR:
    case VEC_UNPACK_FLOAT_LO_EXPR:
    case VEC_PACK_TRUNC_EXPR:
    case VEC_PACK_SAT_EXPR:
    case VEC_PACK_FIX_TRUNC_EXPR:
    case VEC_WIDEN_LSHIFT_HI_EXPR:
    case VEC_WIDEN_LSHIFT_LO_EXPR:
      return 1;

    /* A few special cases of expensive operations.  This is useful
       to avoid inlining on functions having too many of these.  */
    case TRUNC_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case EXACT_DIV_EXPR:
    case TRUNC_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case ROUND_MOD_EXPR:
      if (TREE_CODE (op2) != INTEGER_CST)
	return weights->div_mod_cost;
      return 1;

    /* Bit-field insertion needs several shift and mask operations.  */
    case BIT_INSERT_EXPR:
      return 3;

    default:
      /* We expect a copy assignment with no operator.  */
      gcc_assert (get_gimple_rhs_class (code) == GIMPLE_SINGLE_RHS);
      return 0;
    }
}
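/* Example of how these per-operator costs feed the inliner's estimates:
   an integer division "x / y" with a non-constant divisor is charged
   WEIGHTS->div_mod_cost (1 for size, 10 for time with the defaults set
   in init_inline_once below), whereas "x / 8" is charged the usual
   cost of 1 like any other arithmetic operation.  */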
/* Estimate number of instructions that will be created by expanding
   the statements in the statement sequence STMTS.
   WEIGHTS contains weights attributed to various constructs.  */

int
estimate_num_insns_seq (gimple_seq stmts, eni_weights *weights)
{
  int cost = 0;
  gimple_stmt_iterator gsi;

  for (gsi = gsi_start (stmts); !gsi_end_p (gsi); gsi_next (&gsi))
    cost += estimate_num_insns (gsi_stmt (gsi), weights);

  return cost;
}
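/* Usage sketch (hypothetical caller, shown for illustration only):
   summing a whole sequence with the size weights gives the figure a
   caller would compare against its size limits, e.g.

     int size = estimate_num_insns_seq (gimple_body (fndecl),
					&eni_size_weights);

   gimple_body is just one possible source of a sequence; real callers
   pass whatever sequence they are measuring.  */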
/* Estimate number of instructions that will be created by expanding STMT.
   WEIGHTS contains weights attributed to various constructs.  */

int
estimate_num_insns (gimple *stmt, eni_weights *weights)
{
  unsigned cost, i;
  enum gimple_code code = gimple_code (stmt);
  tree lhs;
  tree rhs;

  switch (code)
    {
    case GIMPLE_ASSIGN:
      /* Try to estimate the cost of assignments.  We have two cases to
	 deal with:
	 1) Simple assignments to registers;
	 2) Stores to things that must live in memory.  This includes
	    "normal" stores to scalars, but also assignments of large
	    structures, or constructors of big arrays;

	 Let us look at the first two cases, assuming we have "a = b + C":
	 <GIMPLE_ASSIGN <var_decl "a">
			<plus_expr <var_decl "b"> <constant C>>
	 If "a" is a GIMPLE register, the assignment to it is free on almost
	 any target, because "a" usually ends up in a real register.  Hence
	 the only cost of this expression comes from the PLUS_EXPR, and we
	 can ignore the GIMPLE_ASSIGN.
	 If "a" is not a GIMPLE register, the assignment to "a" will most
	 likely be a real store, so the cost of the GIMPLE_ASSIGN is the cost
	 of moving something into "a", which we compute using the function
	 estimate_move_cost.  */
      if (gimple_clobber_p (stmt))
	return 0;	/* ={v} {CLOBBER} stmt expands to nothing.  */

      lhs = gimple_assign_lhs (stmt);
      rhs = gimple_assign_rhs1 (stmt);

      cost = 0;

      /* Account for the cost of moving to / from memory.  */
      if (gimple_store_p (stmt))
	cost += estimate_move_cost (TREE_TYPE (lhs), weights->time_based);
      if (gimple_assign_load_p (stmt))
	cost += estimate_move_cost (TREE_TYPE (rhs), weights->time_based);

      cost += estimate_operator_cost (gimple_assign_rhs_code (stmt), weights,
				      gimple_assign_rhs1 (stmt),
				      get_gimple_rhs_class (gimple_assign_rhs_code (stmt))
				      == GIMPLE_BINARY_RHS
				      ? gimple_assign_rhs2 (stmt) : NULL);
      break;

    case GIMPLE_COND:
      cost = 1 + estimate_operator_cost (gimple_cond_code (stmt), weights,
					 gimple_op (stmt, 0),
					 gimple_op (stmt, 1));
      break;

    case GIMPLE_SWITCH:
      {
	gswitch *switch_stmt = as_a <gswitch *> (stmt);
	/* Take into account cost of the switch + guess 2 conditional jumps
	   for each case label.

	   TODO: once the switch expansion logic is sufficiently separated,
	   we can do a better job of estimating the cost of the switch.  */
	if (weights->time_based)
	  cost = floor_log2 (gimple_switch_num_labels (switch_stmt)) * 2;
	else
	  cost = gimple_switch_num_labels (switch_stmt) * 2;
      }
      break;

    case GIMPLE_CALL:
      {
	tree decl;

	if (gimple_call_internal_p (stmt))
	  return 0;
	else if ((decl = gimple_call_fndecl (stmt))
		 && DECL_BUILT_IN (decl))
	  {
	    /* Do not special case builtins where we see the body.
	       This just confuses the inliner.  */
	    struct cgraph_node *node;
	    if (!(node = cgraph_node::get (decl))
		|| node->definition)
	      ;
	    /* For builtins that are likely expanded to nothing or
	       inlined do not account operand costs.  */
	    else if (is_simple_builtin (decl))
	      return 0;
	    else if (is_inexpensive_builtin (decl))
	      return weights->target_builtin_call_cost;
	    else if (gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
	      {
		/* We canonicalize x * x to pow (x, 2.0) with -ffast-math, so
		   specialize the cheap expansion we do here.
		   ??? This asks for a more general solution.  */
		switch (DECL_FUNCTION_CODE (decl))
		  {
		  case BUILT_IN_POW:
		  case BUILT_IN_POWF:
		  case BUILT_IN_POWL:
		    if (TREE_CODE (gimple_call_arg (stmt, 1)) == REAL_CST
			&& (real_equal
			    (&TREE_REAL_CST (gimple_call_arg (stmt, 1)),
			     &dconst2)))
		      return estimate_operator_cost
			  (MULT_EXPR, weights, gimple_call_arg (stmt, 0),
			   gimple_call_arg (stmt, 0));
		    break;

		  default:
		    break;
		  }
	      }
	  }

	cost = decl ? weights->call_cost : weights->indirect_call_cost;
	if (gimple_call_lhs (stmt))
	  cost += estimate_move_cost (TREE_TYPE (gimple_call_lhs (stmt)),
				      weights->time_based);
	for (i = 0; i < gimple_call_num_args (stmt); i++)
	  {
	    tree arg = gimple_call_arg (stmt, i);
	    cost += estimate_move_cost (TREE_TYPE (arg),
					weights->time_based);
	  }
	break;
      }

    case GIMPLE_RETURN:
      return weights->return_cost;

    case GIMPLE_PREDICT:
      return 0;

    case GIMPLE_ASM:
      {
	int count = asm_str_count (gimple_asm_string (as_a <gasm *> (stmt)));
	/* 1000 means infinity.  This avoids overflows later
	   with very long asm statements.  */
	if (count > 1000)
	  count = 1000;
	return count;
      }

    case GIMPLE_RESX:
      /* This is either going to be an external function call with one
	 argument, or two register copy statements plus a goto.  */
      return 2;

    case GIMPLE_EH_DISPATCH:
      /* ??? This is going to turn into a switch statement.  Ideally
	 we'd have a look at the eh region and estimate the number of
	 edges involved.  */
      return 10;

    case GIMPLE_BIND:
      return estimate_num_insns_seq (
	       gimple_bind_body (as_a <gbind *> (stmt)),
	       weights);

    case GIMPLE_EH_FILTER:
      return estimate_num_insns_seq (gimple_eh_filter_failure (stmt), weights);

    case GIMPLE_CATCH:
      return estimate_num_insns_seq (gimple_catch_handler (
				       as_a <gcatch *> (stmt)),
				     weights);

    case GIMPLE_TRY:
      return (estimate_num_insns_seq (gimple_try_eval (stmt), weights)
	      + estimate_num_insns_seq (gimple_try_cleanup (stmt), weights));

    /* OMP directives are generally very expensive.  */

    case GIMPLE_OMP_RETURN:
    case GIMPLE_OMP_SECTIONS_SWITCH:
    case GIMPLE_OMP_ATOMIC_STORE:
    case GIMPLE_OMP_CONTINUE:
      /* ...except these, which are cheap.  */
      return 0;

    case GIMPLE_OMP_ATOMIC_LOAD:
      return weights->omp_cost;

    case GIMPLE_OMP_FOR:
      return (weights->omp_cost
	      + estimate_num_insns_seq (gimple_omp_body (stmt), weights)
	      + estimate_num_insns_seq (gimple_omp_for_pre_body (stmt), weights));

    case GIMPLE_OMP_PARALLEL:
    case GIMPLE_OMP_TASK:
    case GIMPLE_OMP_CRITICAL:
    case GIMPLE_OMP_MASTER:
    case GIMPLE_OMP_TASKGROUP:
    case GIMPLE_OMP_ORDERED:
    case GIMPLE_OMP_SECTION:
    case GIMPLE_OMP_SECTIONS:
    case GIMPLE_OMP_SINGLE:
    case GIMPLE_OMP_TARGET:
    case GIMPLE_OMP_TEAMS:
      return (weights->omp_cost
	      + estimate_num_insns_seq (gimple_omp_body (stmt), weights));

    case GIMPLE_TRANSACTION:
      return (weights->tm_cost
	      + estimate_num_insns_seq (gimple_transaction_body (
					  as_a <gtransaction *> (stmt)),
					weights));

    default:
      gcc_unreachable ();
    }

  return cost;
}
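/* Worked example: for "a = b + c" where "a" is a GIMPLE register and
   "c" is a load from memory, the GIMPLE_ASSIGN is charged the PLUS_EXPR
   cost (1) plus estimate_move_cost of c's type for the load side; the
   store side is free because "a" never hits memory.  If "a" were instead
   a field of a global struct, the store would add another move cost.  */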
/* Estimate number of instructions that will be created by expanding
   function FNDECL.  WEIGHTS contains weights attributed to various
   constructs.  */

int
estimate_num_insns_fn (tree fndecl, eni_weights *weights)
{
  struct function *my_function = DECL_STRUCT_FUNCTION (fndecl);
  gimple_stmt_iterator bsi;
  basic_block bb;
  int n = 0;

  gcc_assert (my_function && my_function->cfg);
  FOR_EACH_BB_FN (bb, my_function)
    {
      for (bsi = gsi_start_bb (bb); !gsi_end_p (bsi); gsi_next (&bsi))
	n += estimate_num_insns (gsi_stmt (bsi), weights);
    }

  return n;
}
/* Initializes weights used by estimate_num_insns.  */

void
init_inline_once (void)
{
  eni_size_weights.call_cost = 1;
  eni_size_weights.indirect_call_cost = 3;
  eni_size_weights.target_builtin_call_cost = 1;
  eni_size_weights.div_mod_cost = 1;
  eni_size_weights.omp_cost = 40;
  eni_size_weights.tm_cost = 10;
  eni_size_weights.time_based = false;
  eni_size_weights.return_cost = 1;

  /* Estimating time for call is difficult, since we have no idea what the
     called function does.  In the current uses of eni_time_weights,
     underestimating the cost does less harm than overestimating it, so
     we choose a rather small value here.  */
  eni_time_weights.call_cost = 10;
  eni_time_weights.indirect_call_cost = 15;
  eni_time_weights.target_builtin_call_cost = 1;
  eni_time_weights.div_mod_cost = 10;
  eni_time_weights.omp_cost = 40;
  eni_time_weights.tm_cost = 40;
  eni_time_weights.time_based = true;
  eni_time_weights.return_cost = 2;
}
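/* Summary of the default weights set above, for quick reference:

			     size   time
     call_cost		       1     10
     indirect_call_cost	       3     15
     target_builtin_call_cost  1      1
     div_mod_cost	       1     10
     omp_cost		      40     40
     tm_cost		      10     40
     return_cost	       1      2  */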
/* Install new lexical TREE_BLOCK underneath 'current_block'.  */

static void
prepend_lexical_block (tree current_block, tree new_block)
{
  BLOCK_CHAIN (new_block) = BLOCK_SUBBLOCKS (current_block);
  BLOCK_SUBBLOCKS (current_block) = new_block;
  BLOCK_SUPERCONTEXT (new_block) = current_block;
}
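/* Illustration: if CURRENT_BLOCK has sub-blocks B1 -> B2, prepending NEW
   yields NEW -> B1 -> B2, with NEW's BLOCK_SUPERCONTEXT pointing back at
   CURRENT_BLOCK.  The new block becomes the first child, which is what
   the inliner relies on when it later checks that id->block has exactly
   one sub-block.  */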
/* Add local variables from CALLEE to CALLER.  */

static inline void
add_local_variables (struct function *callee, struct function *caller,
		     copy_body_data *id)
{
  tree var;
  unsigned ix;

  FOR_EACH_LOCAL_DECL (callee, ix, var)
    if (!can_be_nonlocal (var, id))
      {
	tree new_var = remap_decl (var, id);

	/* Remap debug-expressions.  */
	if (VAR_P (new_var)
	    && DECL_HAS_DEBUG_EXPR_P (var)
	    && new_var != var)
	  {
	    tree tem = DECL_DEBUG_EXPR (var);
	    bool old_regimplify = id->regimplify;
	    id->remapping_type_depth++;
	    walk_tree (&tem, copy_tree_body_r, id, NULL);
	    id->remapping_type_depth--;
	    id->regimplify = old_regimplify;
	    SET_DECL_DEBUG_EXPR (new_var, tem);
	    DECL_HAS_DEBUG_EXPR_P (new_var) = 1;
	  }
	add_local_decl (caller, new_var);
      }
}
/* Add to BINDINGS a debug stmt resetting SRCVAR if inlining might
   have brought in or introduced any debug stmts for SRCVAR.  */

static inline void
reset_debug_binding (copy_body_data *id, tree srcvar, gimple_seq *bindings)
{
  tree *remappedvarp = id->decl_map->get (srcvar);

  if (!remappedvarp)
    return;

  if (!VAR_P (*remappedvarp))
    return;

  if (*remappedvarp == id->retvar || *remappedvarp == id->retbnd)
    return;

  tree tvar = target_for_debug_bind (*remappedvarp);
  if (!tvar)
    return;

  gdebug *stmt = gimple_build_debug_bind (tvar, NULL_TREE,
					  id->call_stmt);
  gimple_seq_add_stmt (bindings, stmt);
}
/* For each inlined variable for which we may have debug bind stmts,
   add before GSI a final debug stmt resetting it, marking the end of
   its life, so that var-tracking knows it doesn't have to compute
   further locations for it.  */

static void
reset_debug_bindings (copy_body_data *id, gimple_stmt_iterator gsi)
{
  tree var;
  unsigned ix;
  gimple_seq bindings = NULL;

  if (!gimple_in_ssa_p (id->src_cfun))
    return;

  if (!opt_for_fn (id->dst_fn, flag_var_tracking_assignments))
    return;

  for (var = DECL_ARGUMENTS (id->src_fn);
       var; var = DECL_CHAIN (var))
    reset_debug_binding (id, var, &bindings);

  FOR_EACH_LOCAL_DECL (id->src_cfun, ix, var)
    reset_debug_binding (id, var, &bindings);

  gsi_insert_seq_before_without_update (&gsi, bindings, GSI_SAME_STMT);
}
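/* With -fvar-tracking-assignments the effect in a dump is a run of
   resets right before the code following the inlined body, e.g.

     # DEBUG x => NULL
     # DEBUG tmp => NULL

   (names illustrative), telling var-tracking that locations for the
   inlined copies of these variables end here.  */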
/* If STMT is a GIMPLE_CALL, replace it with its inline expansion.  */

static bool
expand_call_inline (basic_block bb, gimple *stmt, copy_body_data *id)
{
  tree use_retvar;
  tree fn;
  hash_map<tree, tree> *dst;
  hash_map<tree, tree> *st = NULL;
  tree return_slot;
  tree modify_dest;
  tree return_bounds = NULL;
  struct cgraph_edge *cg_edge;
  cgraph_inline_failed_t reason;
  basic_block return_block;
  edge e;
  gimple_stmt_iterator gsi, stmt_gsi;
  bool successfully_inlined = false;
  bool purge_dead_abnormal_edges;
  gcall *call_stmt;
  unsigned int i;
  unsigned int prop_mask, src_properties;
  struct function *dst_cfun;
  tree simduid;
  use_operand_p use;
  gimple *simtenter_stmt = NULL;
  vec<tree> *simtvars_save;

  /* The gimplifier uses input_location in too many places, such as
     internal_get_tmp_var ().  */
  location_t saved_location = input_location;
  input_location = gimple_location (stmt);
  /* From here on, we're only interested in CALL_EXPRs.  */
  call_stmt = dyn_cast <gcall *> (stmt);
  if (!call_stmt)
    goto egress;

  cg_edge = id->dst_node->get_edge (stmt);
  gcc_checking_assert (cg_edge);
  /* First, see if we can figure out what function is being called.
     If we cannot, then there is no hope of inlining the function.  */
  if (cg_edge->indirect_unknown_callee)
    goto egress;
  fn = cg_edge->callee->decl;
  gcc_checking_assert (fn);

  /* If FN is a declaration of a function in a nested scope that was
     globally declared inline, we don't set its DECL_INITIAL.
     However, we can't blindly follow DECL_ABSTRACT_ORIGIN because the
     C++ front-end uses it for cdtors to refer to their internal
     declarations, that are not real functions.  Fortunately those
     don't have trees to be saved, so we can tell by checking their
     gimple_body.  */
  if (!DECL_INITIAL (fn)
      && DECL_ABSTRACT_ORIGIN (fn)
      && gimple_has_body_p (DECL_ABSTRACT_ORIGIN (fn)))
    fn = DECL_ABSTRACT_ORIGIN (fn);

  /* Don't try to inline functions that are not well-suited to inlining.  */
  if (cg_edge->inline_failed)
    {
      reason = cg_edge->inline_failed;
      /* If this call was originally indirect, we do not want to emit any
	 inlining related warnings or sorry messages because there are no
	 guarantees regarding those.  */
      if (cg_edge->indirect_inlining_edge)
	goto egress;

      if (lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn))
	  /* For extern inline functions that get redefined we always
	     silently ignored the always_inline flag.  Better behavior
	     would be to keep both bodies and use the extern inline body
	     for inlining, but we can't do that because frontends
	     overwrite the body.  */
	  && !cg_edge->callee->local.redefined_extern_inline
	  /* During early inline pass, report only when optimization is
	     not turned on.  */
	  && (symtab->global_info_ready
	      || cgraph_inline_failed_type (reason) == CIF_FINAL_ERROR)
	  /* PR 20090218-1_0.c.  Body can be provided by another module.  */
	  && (reason != CIF_BODY_NOT_AVAILABLE || !flag_generate_lto))
	{
	  error ("inlining failed in call to always_inline %q+F: %s", fn,
		 cgraph_inline_failed_string (reason));
	  if (gimple_location (stmt) != UNKNOWN_LOCATION)
	    inform (gimple_location (stmt), "called from here");
	  else if (DECL_SOURCE_LOCATION (cfun->decl) != UNKNOWN_LOCATION)
	    inform (DECL_SOURCE_LOCATION (cfun->decl),
		    "called from this function");
	}
      else if (warn_inline
	       && DECL_DECLARED_INLINE_P (fn)
	       && !DECL_NO_INLINE_WARNING_P (fn)
	       && !DECL_IN_SYSTEM_HEADER (fn)
	       && reason != CIF_UNSPECIFIED
	       && !lookup_attribute ("noinline", DECL_ATTRIBUTES (fn))
	       /* Do not warn about not inlined recursive calls.  */
	       && !cg_edge->recursive_p ()
	       /* Avoid warnings during early inline pass.  */
	       && symtab->global_info_ready)
	{
	  if (warning (OPT_Winline, "inlining failed in call to %q+F: %s",
		       fn, _(cgraph_inline_failed_string (reason))))
	    {
	      if (gimple_location (stmt) != UNKNOWN_LOCATION)
		inform (gimple_location (stmt), "called from here");
	      else if (DECL_SOURCE_LOCATION (cfun->decl) != UNKNOWN_LOCATION)
		inform (DECL_SOURCE_LOCATION (cfun->decl),
			"called from this function");
	    }
	}
      goto egress;
    }
  id->src_node = cg_edge->callee;

  /* If callee is thunk, all we need is to adjust the THIS pointer
     and redirect to function being thunked.  */
  if (id->src_node->thunk.thunk_p)
    {
      cgraph_edge *edge;
      tree virtual_offset = NULL;
      int freq = cg_edge->frequency;
      profile_count count = cg_edge->count;
      tree op;
      gimple_stmt_iterator iter = gsi_for_stmt (stmt);

      cg_edge->remove ();
      edge = id->src_node->callees->clone (id->dst_node, call_stmt,
					   gimple_uid (stmt),
					   profile_count::one (),
					   profile_count::one (),
					   CGRAPH_FREQ_BASE,
					   true);
      edge->frequency = freq;
      edge->count = count;
      if (id->src_node->thunk.virtual_offset_p)
	virtual_offset = size_int (id->src_node->thunk.virtual_value);
      op = create_tmp_reg_fn (cfun, TREE_TYPE (gimple_call_arg (stmt, 0)),
			      NULL);
      gsi_insert_before (&iter, gimple_build_assign (op,
						     gimple_call_arg (stmt, 0)),
			 GSI_NEW_STMT);
      gcc_assert (id->src_node->thunk.this_adjusting);
      op = thunk_adjust (&iter, op, 1, id->src_node->thunk.fixed_offset,
			 virtual_offset);

      gimple_call_set_arg (stmt, 0, op);
      gimple_call_set_fndecl (stmt, edge->callee->decl);
      update_stmt (stmt);
      id->src_node->remove ();
      expand_call_inline (bb, stmt, id);
      maybe_remove_unused_call_args (cfun, stmt);
      return true;
    }
  fn = cg_edge->callee->decl;
  cg_edge->callee->get_untransformed_body ();

  if (flag_checking && cg_edge->callee->decl != id->dst_node->decl)
    cg_edge->callee->verify ();

  /* We will be inlining this callee.  */
  id->eh_lp_nr = lookup_stmt_eh_lp (stmt);
  id->assign_stmts.create (0);

  /* Update the caller's EH personality.  */
  if (DECL_FUNCTION_PERSONALITY (cg_edge->callee->decl))
    DECL_FUNCTION_PERSONALITY (cg_edge->caller->decl)
      = DECL_FUNCTION_PERSONALITY (cg_edge->callee->decl);

  /* Split the block before the GIMPLE_CALL.  */
  stmt_gsi = gsi_for_stmt (stmt);
  gsi_prev (&stmt_gsi);
  e = split_block (bb, gsi_end_p (stmt_gsi) ? NULL : gsi_stmt (stmt_gsi));
  bb = e->src;
  return_block = e->dest;
  remove_edge (e);

  /* If the GIMPLE_CALL was in the last statement of BB, it may have
     been the source of abnormal edges.  In this case, schedule
     the removal of dead abnormal edges.  */
  gsi = gsi_start_bb (return_block);
  gsi_next (&gsi);
  purge_dead_abnormal_edges = gsi_end_p (gsi);

  stmt_gsi = gsi_start_bb (return_block);

  /* Build a block containing code to initialize the arguments, the
     actual inline expansion of the body, and a label for the return
     statements within the function to jump to.  The type of the
     statement expression is the return type of the function call.
     ???  If the call does not have an associated block then we will
     remap all callee blocks to NULL, effectively dropping most of
     its debug information.  This should only happen for calls to
     artificial decls inserted by the compiler itself.  We need to
     either link the inlined blocks into the caller block tree or
     not refer to them in any way to not break GC for locations.  */
  if (gimple_block (stmt))
    {
      id->block = make_node (BLOCK);
      BLOCK_ABSTRACT_ORIGIN (id->block) = fn;
      BLOCK_SOURCE_LOCATION (id->block)
	= LOCATION_LOCUS (gimple_location (stmt));
      prepend_lexical_block (gimple_block (stmt), id->block);
    }

  /* Local declarations will be replaced by their equivalents in this map.  */
  st = id->decl_map;
  id->decl_map = new hash_map<tree, tree>;
  dst = id->debug_map;
  id->debug_map = NULL;

  /* Record the function we are about to inline.  */
  id->src_fn = fn;
  id->src_cfun = DECL_STRUCT_FUNCTION (fn);
  id->call_stmt = call_stmt;

  /* When inlining into an OpenMP SIMD-on-SIMT loop, arrange for new automatic
     variables to be added to IFN_GOMP_SIMT_ENTER argument list.  */
  dst_cfun = DECL_STRUCT_FUNCTION (id->dst_fn);
  simtvars_save = id->dst_simt_vars;
  if (!(dst_cfun->curr_properties & PROP_gimple_lomp_dev)
      && (simduid = bb->loop_father->simduid) != NULL_TREE
      && (simduid = ssa_default_def (dst_cfun, simduid)) != NULL_TREE
      && single_imm_use (simduid, &use, &simtenter_stmt)
      && is_gimple_call (simtenter_stmt)
      && gimple_call_internal_p (simtenter_stmt, IFN_GOMP_SIMT_ENTER))
    vec_alloc (id->dst_simt_vars, 0);
  else
    id->dst_simt_vars = NULL;

  if (profile_status_for_fn (id->src_cfun) == PROFILE_ABSENT)
    profile_status_for_fn (dst_cfun) = PROFILE_ABSENT;

  /* If the src function contains an IFN_VA_ARG, then so will the dst
     function after inlining.  Likewise for IFN_GOMP_USE_SIMT.  */
  prop_mask = PROP_gimple_lva | PROP_gimple_lomp_dev;
  src_properties = id->src_cfun->curr_properties & prop_mask;
  if (src_properties != prop_mask)
    dst_cfun->curr_properties &= src_properties | ~prop_mask;

  gcc_assert (!id->src_cfun->after_inlining);

  id->entry_bb = bb;
  if (lookup_attribute ("cold", DECL_ATTRIBUTES (fn)))
    {
      gimple_stmt_iterator si = gsi_last_bb (bb);
      gsi_insert_after (&si, gimple_build_predict (PRED_COLD_FUNCTION,
						   NOT_TAKEN),
			GSI_NEW_STMT);
    }
  initialize_inlined_parameters (id, stmt, fn, bb);

  if (DECL_INITIAL (fn))
    {
      if (gimple_block (stmt))
	{
	  tree *var;

	  prepend_lexical_block (id->block,
				 remap_blocks (DECL_INITIAL (fn), id));
	  gcc_checking_assert (BLOCK_SUBBLOCKS (id->block)
			       && (BLOCK_CHAIN (BLOCK_SUBBLOCKS (id->block))
				   == NULL_TREE));
	  /* Move vars for PARM_DECLs from DECL_INITIAL block to id->block,
	     otherwise for DWARF DW_TAG_formal_parameter will not be children
	     of DW_TAG_inlined_subroutine, but of a DW_TAG_lexical_block
	     under it.  The parameters can then be evaluated in the debugger,
	     but don't show in backtraces.  */
	  for (var = &BLOCK_VARS (BLOCK_SUBBLOCKS (id->block)); *var; )
	    if (TREE_CODE (DECL_ORIGIN (*var)) == PARM_DECL)
	      {
		tree v = *var;
		*var = TREE_CHAIN (v);
		TREE_CHAIN (v) = BLOCK_VARS (id->block);
		BLOCK_VARS (id->block) = v;
	      }
	    else
	      var = &TREE_CHAIN (*var);
	}
      else
	remap_blocks_to_null (DECL_INITIAL (fn), id);
    }
  /* Return statements in the function body will be replaced by jumps
     to the RET_LABEL.  */
  gcc_assert (DECL_INITIAL (fn));
  gcc_assert (TREE_CODE (DECL_INITIAL (fn)) == BLOCK);

  /* Find the LHS to which the result of this call is assigned.  */
  return_slot = NULL;
  if (gimple_call_lhs (stmt))
    {
      modify_dest = gimple_call_lhs (stmt);

      /* Remember where to copy returned bounds.  */
      if (gimple_call_with_bounds_p (stmt)
	  && TREE_CODE (modify_dest) == SSA_NAME)
	{
	  gcall *retbnd = chkp_retbnd_call_by_val (modify_dest);
	  if (retbnd)
	    {
	      return_bounds = gimple_call_lhs (retbnd);
	      /* If returned bounds are not used then just
		 remove unused call.  */
	      if (!return_bounds)
		{
		  gimple_stmt_iterator iter = gsi_for_stmt (retbnd);
		  gsi_remove (&iter, true);
		}
	    }
	}

      /* The function which we are inlining might not return a value,
	 in which case we should issue a warning that the function
	 does not return a value.  In that case the optimizers will
	 see that the variable to which the value is assigned was not
	 initialized.  We do not want to issue a warning about that
	 uninitialized variable.  */
      if (DECL_P (modify_dest))
	TREE_NO_WARNING (modify_dest) = 1;

      if (gimple_call_return_slot_opt_p (call_stmt))
	{
	  return_slot = modify_dest;
	  modify_dest = NULL;
	}
    }
  else
    modify_dest = NULL;

  /* If we are inlining a call to the C++ operator new, we don't want
     to use type based alias analysis on the return value.  Otherwise
     we may get confused if the compiler sees that the inlined new
     function returns a pointer which was just deleted.  See bug  */
  if (DECL_IS_OPERATOR_NEW (fn))
    {
      return_slot = NULL;
      modify_dest = NULL;
    }

  /* Declare the return variable for the function.  */
  use_retvar = declare_return_variable (id, return_slot, modify_dest,
					return_bounds, bb);

  /* Add local vars in this inlined callee to caller.  */
  add_local_variables (id->src_cfun, cfun, id);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Inlining ");
      print_generic_expr (dump_file, id->src_fn);
      fprintf (dump_file, " to ");
      print_generic_expr (dump_file, id->dst_fn);
      fprintf (dump_file, " with frequency %i\n", cg_edge->frequency);
    }

  /* This is it.  Duplicate the callee body.  Assume callee is
     pre-gimplified.  Note that we must not alter the caller
     function in any way before this point, as this CALL_EXPR may be
     a self-referential call; if we're calling ourselves, we need to
     duplicate our body before altering anything.  */
  copy_body (id, cg_edge->callee->count,
	     GCOV_COMPUTE_SCALE (cg_edge->frequency, CGRAPH_FREQ_BASE),
	     bb, return_block, NULL);
  reset_debug_bindings (id, stmt_gsi);

  if (flag_stack_reuse != SR_NONE)
    for (tree p = DECL_ARGUMENTS (id->src_fn); p; p = DECL_CHAIN (p))
      if (!TREE_THIS_VOLATILE (p))
	{
	  tree *varp = id->decl_map->get (p);
	  if (varp && VAR_P (*varp) && !is_gimple_reg (*varp))
	    {
	      tree clobber = build_constructor (TREE_TYPE (*varp), NULL);
	      gimple *clobber_stmt;
	      TREE_THIS_VOLATILE (clobber) = 1;
	      clobber_stmt = gimple_build_assign (*varp, clobber);
	      gimple_set_location (clobber_stmt, gimple_location (stmt));
	      gsi_insert_before (&stmt_gsi, clobber_stmt, GSI_SAME_STMT);
	    }
	}

  /* Reset the escaped solution.  */
  if (cfun->gimple_df)
    pt_solution_reset (&cfun->gimple_df->escaped);

  /* Add new automatic variables to IFN_GOMP_SIMT_ENTER arguments.  */
  if (id->dst_simt_vars && id->dst_simt_vars->length () > 0)
    {
      size_t nargs = gimple_call_num_args (simtenter_stmt);
      vec<tree> *vars = id->dst_simt_vars;
      auto_vec<tree> newargs (nargs + vars->length ());
      for (size_t i = 0; i < nargs; i++)
	newargs.quick_push (gimple_call_arg (simtenter_stmt, i));
      for (tree *pvar = vars->begin (); pvar != vars->end (); pvar++)
	{
	  tree ptrtype = build_pointer_type (TREE_TYPE (*pvar));
	  newargs.quick_push (build1 (ADDR_EXPR, ptrtype, *pvar));
	}
      gcall *g = gimple_build_call_internal_vec (IFN_GOMP_SIMT_ENTER, newargs);
      gimple_call_set_lhs (g, gimple_call_lhs (simtenter_stmt));
      gimple_stmt_iterator gsi = gsi_for_stmt (simtenter_stmt);
      gsi_replace (&gsi, g, false);
    }
  vec_free (id->dst_simt_vars);
  id->dst_simt_vars = simtvars_save;

  /* Clean up.  */
  if (id->debug_map)
    {
      delete id->debug_map;
      id->debug_map = dst;
    }
  delete id->decl_map;
  id->decl_map = st;

  /* Unlink the call's virtual operands before replacing it.  */
  unlink_stmt_vdef (stmt);
  if (gimple_vdef (stmt)
      && TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
    release_ssa_name (gimple_vdef (stmt));
  /* If the inlined function returns a result that we care about,
     substitute the GIMPLE_CALL with an assignment of the return
     variable to the LHS of the call.  That is, if STMT was
     'a = foo (...)', substitute the call with 'a = USE_RETVAR'.  */
  if (use_retvar && gimple_call_lhs (stmt))
    {
      gimple *old_stmt = stmt;
      stmt = gimple_build_assign (gimple_call_lhs (stmt), use_retvar);
      gsi_replace (&stmt_gsi, stmt, false);
      maybe_clean_or_replace_eh_stmt (old_stmt, stmt);
      /* Append a clobber for id->retvar if easily possible.  */
      if (flag_stack_reuse != SR_NONE
	  && id->retvar
	  && VAR_P (id->retvar)
	  && id->retvar != return_slot
	  && id->retvar != modify_dest
	  && !TREE_THIS_VOLATILE (id->retvar)
	  && !is_gimple_reg (id->retvar)
	  && !stmt_ends_bb_p (stmt))
	{
	  tree clobber = build_constructor (TREE_TYPE (id->retvar), NULL);
	  gimple *clobber_stmt;
	  TREE_THIS_VOLATILE (clobber) = 1;
	  clobber_stmt = gimple_build_assign (id->retvar, clobber);
	  gimple_set_location (clobber_stmt, gimple_location (old_stmt));
	  gsi_insert_after (&stmt_gsi, clobber_stmt, GSI_SAME_STMT);
	}

      /* Copy bounds if we copy structure with bounds.  */
      if (chkp_function_instrumented_p (id->dst_fn)
	  && !BOUNDED_P (use_retvar)
	  && chkp_type_has_pointer (TREE_TYPE (use_retvar)))
	id->assign_stmts.safe_push (stmt);
    }
  else
    {
      /* Handle the case of inlining a function with no return
	 statement, which causes the return value to become undefined.  */
      if (gimple_call_lhs (stmt)
	  && TREE_CODE (gimple_call_lhs (stmt)) == SSA_NAME)
	{
	  tree name = gimple_call_lhs (stmt);
	  tree var = SSA_NAME_VAR (name);
	  tree def = var ? ssa_default_def (cfun, var) : NULL;

	  if (def)
	    {
	      /* If the variable is used undefined, make this name
		 undefined via a move.  */
	      stmt = gimple_build_assign (gimple_call_lhs (stmt), def);
	      gsi_replace (&stmt_gsi, stmt, true);
	    }
	  else
	    {
	      if (!var)
		{
		  var = create_tmp_reg_fn (cfun, TREE_TYPE (name), NULL);
		  SET_SSA_NAME_VAR_OR_IDENTIFIER (name, var);
		}
	      /* Otherwise make this variable undefined.  */
	      gsi_remove (&stmt_gsi, true);
	      set_ssa_default_def (cfun, var, name);
	      SSA_NAME_DEF_STMT (name) = gimple_build_nop ();
	    }
	}
      /* Replace with a clobber for id->retvar.  */
      else if (flag_stack_reuse != SR_NONE
	       && id->retvar
	       && VAR_P (id->retvar)
	       && id->retvar != return_slot
	       && id->retvar != modify_dest
	       && !TREE_THIS_VOLATILE (id->retvar)
	       && !is_gimple_reg (id->retvar))
	{
	  tree clobber = build_constructor (TREE_TYPE (id->retvar), NULL);
	  gimple *clobber_stmt;
	  TREE_THIS_VOLATILE (clobber) = 1;
	  clobber_stmt = gimple_build_assign (id->retvar, clobber);
	  gimple_set_location (clobber_stmt, gimple_location (stmt));
	  gsi_replace (&stmt_gsi, clobber_stmt, false);
	  maybe_clean_or_replace_eh_stmt (stmt, clobber_stmt);
	}
      else
	gsi_remove (&stmt_gsi, true);
    }
  /* Put returned bounds into the correct place if required.  */
  if (return_bounds)
    {
      gimple *old_stmt = SSA_NAME_DEF_STMT (return_bounds);
      gimple *new_stmt = gimple_build_assign (return_bounds, id->retbnd);
      gimple_stmt_iterator bnd_gsi = gsi_for_stmt (old_stmt);
      unlink_stmt_vdef (old_stmt);
      gsi_replace (&bnd_gsi, new_stmt, false);
      maybe_clean_or_replace_eh_stmt (old_stmt, new_stmt);
      cgraph_update_edges_for_call_stmt (old_stmt,
					 gimple_call_fndecl (old_stmt),
					 new_stmt);
    }

  if (purge_dead_abnormal_edges)
    {
      gimple_purge_dead_eh_edges (return_block);
      gimple_purge_dead_abnormal_call_edges (return_block);
    }

  /* If the value of the new expression is ignored, that's OK.  We
     don't warn about this for CALL_EXPRs, so we shouldn't warn about
     the equivalent inlined version either.  */
  if (is_gimple_assign (stmt))
    {
      gcc_assert (gimple_assign_single_p (stmt)
		  || CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt)));
      TREE_USED (gimple_assign_rhs1 (stmt)) = 1;
    }

  /* Copy bounds for all generated assigns that need it.  */
  for (i = 0; i < id->assign_stmts.length (); i++)
    chkp_copy_bounds_for_assign (id->assign_stmts[i], cg_edge);
  id->assign_stmts.release ();

  /* Output the inlining info for this abstract function, since it has been
     inlined.  If we don't do this now, we can lose the information about the
     variables in the function when the blocks get blown away as soon as we
     remove the cgraph node.  */
  if (gimple_block (stmt))
    (*debug_hooks->outlining_inline_function) (cg_edge->callee->decl);

  /* Update callgraph if needed.  */
  cg_edge->callee->remove ();

  id->block = NULL_TREE;
  id->retvar = NULL_TREE;
  id->retbnd = NULL_TREE;
  successfully_inlined = true;

 egress:
  input_location = saved_location;
  return successfully_inlined;
}
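/* The net effect on the caller, in GIMPLE terms (identifiers are
   illustrative):

     a = foo (x);        becomes        copy_of_parm = x;
                                        ... copy of foo's body ...
                                        a = return_var;

   with foo's return statements turned into assignments to the return
   variable followed by jumps back to the block that held the call.  */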
/* Expand call statements reachable from STMT_P.
   We can only have CALL_EXPRs as the "toplevel" tree code or nested
   in a MODIFY_EXPR.  */

static bool
gimple_expand_calls_inline (basic_block bb, copy_body_data *id)
{
  gimple_stmt_iterator gsi;
  bool inlined = false;

  for (gsi = gsi_last_bb (bb); !gsi_end_p (gsi);)
    {
      gimple *stmt = gsi_stmt (gsi);
      gsi_prev (&gsi);

      if (is_gimple_call (stmt)
	  && !gimple_call_internal_p (stmt))
	inlined |= expand_call_inline (bb, stmt, id);
    }

  return inlined;
}
/* Walk all basic blocks created after FIRST and try to fold every statement
   in the STATEMENTS pointer set.  */

static void
fold_marked_statements (int first, hash_set<gimple *> *statements)
{
  for (; first < n_basic_blocks_for_fn (cfun); first++)
    if (BASIC_BLOCK_FOR_FN (cfun, first))
      {
	gimple_stmt_iterator gsi;

	for (gsi = gsi_start_bb (BASIC_BLOCK_FOR_FN (cfun, first));
	     !gsi_end_p (gsi);
	     gsi_next (&gsi))
	  if (statements->contains (gsi_stmt (gsi)))
	    {
	      gimple *old_stmt = gsi_stmt (gsi);
	      tree old_decl
		= is_gimple_call (old_stmt) ? gimple_call_fndecl (old_stmt) : 0;

	      if (old_decl && DECL_BUILT_IN (old_decl))
		{
		  /* Folding builtins can create multiple instructions,
		     we need to look at all of them.  */
		  gimple_stmt_iterator i2 = gsi;
		  gsi_prev (&i2);
		  if (fold_stmt (&gsi))
		    {
		      gimple *new_stmt;
		      /* If a builtin at the end of a bb folded into nothing,
			 the following loop won't work.  */
		      if (gsi_end_p (gsi))
			{
			  cgraph_update_edges_for_call_stmt (old_stmt,
							     old_decl, NULL);
			  break;
			}
		      if (gsi_end_p (i2))
			i2 = gsi_start_bb (BASIC_BLOCK_FOR_FN (cfun, first));
		      else
			gsi_next (&i2);
		      while (1)
			{
			  new_stmt = gsi_stmt (i2);
			  update_stmt (new_stmt);
			  cgraph_update_edges_for_call_stmt (old_stmt, old_decl,
							     new_stmt);

			  if (new_stmt == gsi_stmt (gsi))
			    {
			      /* It is okay to check only for the very last
				 of these statements.  If it is a throwing
				 statement nothing will change.  If it isn't,
				 this can remove EH edges.  If that were
				 incorrect, it would be because some
				 intermediate statement throws while the last
				 one does not; that would mean splitting the
				 block, which we cannot do here, and we would
				 lose the information anyway.  And as builtins
				 probably never throw, this is all moot.  */
			      if (maybe_clean_or_replace_eh_stmt (old_stmt,
								  new_stmt))
				gimple_purge_dead_eh_edges (
				  BASIC_BLOCK_FOR_FN (cfun, first));
			      break;
			    }
			  gsi_next (&i2);
			}
		    }
		}
	      else if (fold_stmt (&gsi))
		{
		  /* Re-read the statement from GSI as fold_stmt() may
		     have changed it.  */
		  gimple *new_stmt = gsi_stmt (gsi);
		  update_stmt (new_stmt);

		  if (is_gimple_call (old_stmt)
		      || is_gimple_call (new_stmt))
		    cgraph_update_edges_for_call_stmt (old_stmt, old_decl,
						       new_stmt);

		  if (maybe_clean_or_replace_eh_stmt (old_stmt, new_stmt))
		    gimple_purge_dead_eh_edges (BASIC_BLOCK_FOR_FN (cfun,
								     first));
		}
	    }
      }
}
/* Expand calls to inline functions in the body of FN.  */

unsigned int
optimize_inline_calls (tree fn)
{
  copy_body_data id;
  basic_block bb;
  int last = n_basic_blocks_for_fn (cfun);
  bool inlined_p = false;

  /* Clear out ID.  */
  memset (&id, 0, sizeof (id));

  id.src_node = id.dst_node = cgraph_node::get (fn);
  gcc_assert (id.dst_node->definition);
  id.dst_fn = fn;
  /* Or any functions that aren't finished yet.  */
  if (current_function_decl)
    id.dst_fn = current_function_decl;

  id.copy_decl = copy_decl_maybe_to_var;
  id.transform_call_graph_edges = CB_CGE_DUPLICATE;
  id.transform_new_cfg = false;
  id.transform_return_to_modify = true;
  id.transform_parameter = true;
  id.transform_lang_insert_block = NULL;
  id.statements_to_fold = new hash_set<gimple *>;

  push_gimplify_context ();

  /* We make no attempts to keep dominance info up-to-date.  */
  free_dominance_info (CDI_DOMINATORS);
  free_dominance_info (CDI_POST_DOMINATORS);

  /* Register specific gimple functions.  */
  gimple_register_cfg_hooks ();

  /* Reach the trees by walking over the CFG, and note the
     enclosing basic-blocks in the call edges.  */
  /* We walk the blocks going forward, because inlined function bodies
     will split id->current_basic_block, and the new blocks will
     follow it; we'll trudge through them, processing their CALL_EXPRs
     along the way.  */
  FOR_EACH_BB_FN (bb, cfun)
    inlined_p |= gimple_expand_calls_inline (bb, &id);

  pop_gimplify_context (NULL);

  if (flag_checking)
    {
      struct cgraph_edge *e;

      id.dst_node->verify ();

      /* Double check that we inlined everything we are supposed to inline.  */
      for (e = id.dst_node->callees; e; e = e->next_callee)
	gcc_assert (e->inline_failed);
    }

  /* Fold queued statements.  */
  fold_marked_statements (last, id.statements_to_fold);
  delete id.statements_to_fold;

  gcc_assert (!id.debug_stmts.exists ());

  /* If we didn't inline into the function there is nothing to do.  */
  if (!inlined_p)
    return 0;

  /* Renumber the lexical scoping (non-code) blocks consecutively.  */
  number_blocks (fn);

  delete_unreachable_blocks_update_callgraph (&id);
  if (flag_checking)
    id.dst_node->verify ();

  /* It would be nice to check SSA/CFG/statement consistency here, but it is
     not possible yet - the IPA passes might make various functions not
     throw, and they don't care to proactively update local EH info.  This is
     done later in the fixup_cfg pass, which also executes the verification.  */
  return (TODO_update_ssa
	  | TODO_cleanup_cfg
	  | (gimple_in_ssa_p (cfun) ? TODO_remove_unused_locals : 0)
	  | (gimple_in_ssa_p (cfun) ? TODO_update_address_taken : 0)
	  | (profile_status_for_fn (cfun) != PROFILE_ABSENT
	     ? TODO_rebuild_frequencies : 0));
}
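/* Usage sketch (a hypothetical caller, not the actual GCC pass driver):
   a GIMPLE pass could forward the TODO flags returned above directly
   from its execute hook,

     virtual unsigned int execute (function *)
       { return optimize_inline_calls (current_function_decl); }

   so that the requested SSA update and CFG cleanup run right after
   inlining.  */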
/* Passed to walk_tree.  Copies the node pointed to, if appropriate.  */

tree
copy_tree_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
{
  enum tree_code code = TREE_CODE (*tp);
  enum tree_code_class cl = TREE_CODE_CLASS (code);

  /* We make copies of most nodes.  */
  if (IS_EXPR_CODE_CLASS (cl)
      || code == TREE_LIST
      || code == TYPE_DECL
      || code == OMP_CLAUSE)
    {
      /* Because the chain gets clobbered when we make a copy, we save it
	 here.  */
      tree chain = NULL_TREE, new_tree;

      if (CODE_CONTAINS_STRUCT (code, TS_COMMON))
	chain = TREE_CHAIN (*tp);

      /* Copy the node.  */
      new_tree = copy_node (*tp);

      *tp = new_tree;

      /* Now, restore the chain, if appropriate.  That will cause
	 walk_tree to walk into the chain as well.  */
      if (code == PARM_DECL
	  || code == TREE_LIST
	  || code == OMP_CLAUSE)
	TREE_CHAIN (*tp) = chain;

      /* For now, we don't update BLOCKs when we make copies.  So, we
	 have to nullify all BIND_EXPRs.  */
      if (TREE_CODE (*tp) == BIND_EXPR)
	BIND_EXPR_BLOCK (*tp) = NULL_TREE;
    }
  else if (code == CONSTRUCTOR)
    {
      /* CONSTRUCTOR nodes need special handling because
	 we need to duplicate the vector of elements.  */
      tree new_tree;

      new_tree = copy_node (*tp);
      CONSTRUCTOR_ELTS (new_tree) = vec_safe_copy (CONSTRUCTOR_ELTS (*tp));
      *tp = new_tree;
    }
  else if (code == STATEMENT_LIST)
    /* We used to just abort on STATEMENT_LIST, but we can run into them
       with statement-expressions (c++/40975).  */
    copy_statement_list (tp);
  else if (TREE_CODE_CLASS (code) == tcc_type)
    *walk_subtrees = 0;
  else if (TREE_CODE_CLASS (code) == tcc_declaration)
    *walk_subtrees = 0;
  else if (TREE_CODE_CLASS (code) == tcc_constant)
    *walk_subtrees = 0;
  return NULL_TREE;
}
/* The SAVE_EXPR pointed to by TP is being copied.  If ST contains
   information indicating to what new SAVE_EXPR this one should be mapped,
   use that one.  Otherwise, create a new node and enter it in ST.  FN is
   the function into which the copy will be placed.  */

static void
remap_save_expr (tree *tp, hash_map<tree, tree> *st, int *walk_subtrees)
{
  tree *n;
  tree t;

  /* See if we already encountered this SAVE_EXPR.  */
  n = st->get (*tp);

  /* If we didn't already remap this SAVE_EXPR, do so now.  */
  if (!n)
    {
      t = copy_node (*tp);

      /* Remember this SAVE_EXPR.  */
      st->put (*tp, t);
      /* Make sure we don't remap an already-remapped SAVE_EXPR.  */
      st->put (t, t);
    }
  else
    {
      /* We've already walked into this SAVE_EXPR; don't do it again.  */
      *walk_subtrees = 0;
      t = *n;
    }

  /* Replace this SAVE_EXPR with the copy.  */
  *tp = t;
}
/* Called via walk_gimple_seq.  If *GSIP points to a GIMPLE_LABEL for a local
   label, copies the declaration and enters it in the decl map in DATA
   (which is really a 'copy_body_data *').  */

static tree
mark_local_labels_stmt (gimple_stmt_iterator *gsip,
			bool *handled_ops_p ATTRIBUTE_UNUSED,
			struct walk_stmt_info *wi)
{
  copy_body_data *id = (copy_body_data *) wi->info;
  glabel *stmt = dyn_cast <glabel *> (gsi_stmt (*gsip));

  if (stmt)
    {
      tree decl = gimple_label_label (stmt);

      /* Copy the decl and remember the copy.  */
      insert_decl_map (id, decl, id->copy_decl (decl, id));
    }

  return NULL_TREE;
}
static gimple_seq duplicate_remap_omp_clause_seq (gimple_seq seq,
						  struct walk_stmt_info *wi);
/* Called via walk_gimple_seq by copy_gimple_seq_and_replace_locals.
   Using the decl map pointed to by ID->decl_map, remaps all local
   declarations to appropriate replacements in gimple operands.  */

static tree
replace_locals_op (tree *tp, int *walk_subtrees, void *data)
{
  struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
  copy_body_data *id = (copy_body_data *) wi->info;
  hash_map<tree, tree> *st = id->decl_map;
  tree *n;
  tree expr = *tp;

  /* For recursive invocations this is no longer the LHS itself.  */
  bool is_lhs = wi->is_lhs;
  wi->is_lhs = false;

  if (TREE_CODE (expr) == SSA_NAME)
    {
      *tp = remap_ssa_name (*tp, id);
      *walk_subtrees = 0;
      if (is_lhs)
	SSA_NAME_DEF_STMT (*tp) = gsi_stmt (wi->gsi);
    }
  /* Only a local declaration (variable or label).  */
  else if ((VAR_P (expr) && !TREE_STATIC (expr))
	   || TREE_CODE (expr) == LABEL_DECL)
    {
      /* Lookup the declaration.  */
      n = st->get (expr);

      /* If it's there, remap it.  */
      if (n)
	*tp = *n;
      *walk_subtrees = 0;
    }
  else if (TREE_CODE (expr) == STATEMENT_LIST
	   || TREE_CODE (expr) == BIND_EXPR
	   || TREE_CODE (expr) == SAVE_EXPR)
    gcc_unreachable ();
  else if (TREE_CODE (expr) == TARGET_EXPR)
    {
      /* Don't mess with a TARGET_EXPR that hasn't been expanded.
	 It's OK for this to happen if it was part of a subtree that
	 isn't immediately expanded, such as operand 2 of another
	 TARGET_EXPR.  */
      if (!TREE_OPERAND (expr, 1))
	{
	  TREE_OPERAND (expr, 1) = TREE_OPERAND (expr, 3);
	  TREE_OPERAND (expr, 3) = NULL_TREE;
	}
    }
  else if (TREE_CODE (expr) == OMP_CLAUSE)
    {
      /* Before the omplower pass completes, some OMP clauses can contain
	 sequences that are neither copied by gimple_seq_copy nor walked by
	 walk_gimple_seq.  To make copy_gimple_seq_and_replace_locals work even
	 in those situations, we have to copy and process them explicitly.  */

      if (OMP_CLAUSE_CODE (expr) == OMP_CLAUSE_LASTPRIVATE)
	{
	  gimple_seq seq = OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (expr);
	  seq = duplicate_remap_omp_clause_seq (seq, wi);
	  OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (expr) = seq;
	}
      else if (OMP_CLAUSE_CODE (expr) == OMP_CLAUSE_LINEAR)
	{
	  gimple_seq seq = OMP_CLAUSE_LINEAR_GIMPLE_SEQ (expr);
	  seq = duplicate_remap_omp_clause_seq (seq, wi);
	  OMP_CLAUSE_LINEAR_GIMPLE_SEQ (expr) = seq;
	}
      else if (OMP_CLAUSE_CODE (expr) == OMP_CLAUSE_REDUCTION)
	{
	  gimple_seq seq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (expr);
	  seq = duplicate_remap_omp_clause_seq (seq, wi);
	  OMP_CLAUSE_REDUCTION_GIMPLE_INIT (expr) = seq;
	  seq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (expr);
	  seq = duplicate_remap_omp_clause_seq (seq, wi);
	  OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (expr) = seq;
	}
    }

  /* Keep iterating.  */
  return NULL_TREE;
}
/* Called via walk_gimple_seq by copy_gimple_seq_and_replace_locals.
   Using the decl map pointed to by ID->decl_map, remaps all local
   declarations to appropriate replacements in gimple statements.  */

static tree
replace_locals_stmt (gimple_stmt_iterator *gsip,
		     bool *handled_ops_p ATTRIBUTE_UNUSED,
		     struct walk_stmt_info *wi)
{
  copy_body_data *id = (copy_body_data *) wi->info;
  gimple *gs = gsi_stmt (*gsip);

  if (gbind *stmt = dyn_cast <gbind *> (gs))
    {
      tree block = gimple_bind_block (stmt);

      if (block)
	{
	  remap_block (&block, id);
	  gimple_bind_set_block (stmt, block);
	}

      /* This will remap a lot of the same decls again, but this should be
	 harmless.  */
      if (gimple_bind_vars (stmt))
	{
	  tree old_var, decls = gimple_bind_vars (stmt);

	  for (old_var = decls; old_var; old_var = DECL_CHAIN (old_var))
	    if (!can_be_nonlocal (old_var, id)
		&& ! variably_modified_type_p (TREE_TYPE (old_var), id->src_fn))
	      remap_decl (old_var, id);

	  gcc_checking_assert (!id->prevent_decl_creation_for_types);
	  id->prevent_decl_creation_for_types = true;
	  gimple_bind_set_vars (stmt, remap_decls (decls, NULL, id));
	  id->prevent_decl_creation_for_types = false;
	}
    }

  /* Keep iterating.  */
  return NULL_TREE;
}
/* Create a copy of SEQ and remap all decls in it.  */

static gimple_seq
duplicate_remap_omp_clause_seq (gimple_seq seq, struct walk_stmt_info *wi)
{
  if (!seq)
    return NULL;

  /* If there are any labels in OMP sequences, they can be only referred to in
     the sequence itself and therefore we can do both here.  */
  walk_gimple_seq (seq, mark_local_labels_stmt, NULL, wi);
  gimple_seq copy = gimple_seq_copy (seq);
  walk_gimple_seq (copy, replace_locals_stmt, replace_locals_op, wi);
  return copy;
}
/* Copies everything in SEQ and replaces variables and labels local to
   current_function_decl.  */

gimple_seq
copy_gimple_seq_and_replace_locals (gimple_seq seq)
{
  copy_body_data id;
  struct walk_stmt_info wi;
  gimple_seq copy;

  /* There's nothing to do for NULL_TREE.  */
  if (seq == NULL)
    return seq;

  /* Set up ID.  */
  memset (&id, 0, sizeof (id));
  id.src_fn = current_function_decl;
  id.dst_fn = current_function_decl;
  id.src_cfun = cfun;
  id.decl_map = new hash_map<tree, tree>;
  id.debug_map = NULL;

  id.copy_decl = copy_decl_no_change;
  id.transform_call_graph_edges = CB_CGE_DUPLICATE;
  id.transform_new_cfg = false;
  id.transform_return_to_modify = false;
  id.transform_parameter = false;
  id.transform_lang_insert_block = NULL;

  /* Walk the tree once to find local labels.  */
  memset (&wi, 0, sizeof (wi));
  hash_set<tree> visited;
  wi.info = &id;
  wi.pset = &visited;
  walk_gimple_seq (seq, mark_local_labels_stmt, NULL, &wi);

  copy = gimple_seq_copy (seq);

  /* Walk the copy, remapping decls.  */
  memset (&wi, 0, sizeof (wi));
  wi.info = &id;
  walk_gimple_seq (copy, replace_locals_stmt, replace_locals_op, &wi);

  /* Clean up.  */
  delete id.decl_map;
  if (id.debug_map)
    delete id.debug_map;
  if (id.dependence_map)
    {
      delete id.dependence_map;
      id.dependence_map = NULL;
    }

  return copy;
}
/* Allow someone to determine if SEARCH is a child of TOP from gdb.  */

DEBUG_FUNCTION tree
debug_find_tree_1 (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED, void *data)
{
  if (*tp == data)
    return (tree) data;
  else
    return NULL;
}

DEBUG_FUNCTION bool
debug_find_tree (tree top, tree search)
{
  return walk_tree_without_duplicates (&top, debug_find_tree_1, search) != 0;
}
/* Declare the variables created by the inliner.  Add all the variables in
   VARS to BIND_EXPR.  */

static void
declare_inline_vars (tree block, tree vars)
{
  tree t;
  for (t = vars; t; t = DECL_CHAIN (t))
    {
      DECL_SEEN_IN_BIND_EXPR_P (t) = 1;
      gcc_assert (!TREE_STATIC (t) && !TREE_ASM_WRITTEN (t));
      add_local_decl (cfun, t);
    }

  if (block)
    BLOCK_VARS (block) = chainon (BLOCK_VARS (block), vars);
}
/* Copy NODE (which must be a DECL).  The DECL originally was in the FROM_FN,
   but now it will be in the TO_FN.  PARM_TO_VAR means enable PARM_DECL to
   VAR_DECL translation.  */

static tree
copy_decl_for_dup_finish (copy_body_data *id, tree decl, tree copy)
{
  /* Don't generate debug information for the copy if we wouldn't have
     generated it for the original decl either.  */
  DECL_ARTIFICIAL (copy) = DECL_ARTIFICIAL (decl);
  DECL_IGNORED_P (copy) = DECL_IGNORED_P (decl);

  /* Set the DECL_ABSTRACT_ORIGIN so the debugging routines know what
     declaration inspired this copy.  */
  DECL_ABSTRACT_ORIGIN (copy) = DECL_ORIGIN (decl);

  /* The new variable/label has no RTL, yet.  */
  if (CODE_CONTAINS_STRUCT (TREE_CODE (copy), TS_DECL_WRTL)
      && !TREE_STATIC (copy) && !DECL_EXTERNAL (copy))
    SET_DECL_RTL (copy, 0);

  /* These args would always appear unused, if not for this.  */
  TREE_USED (copy) = 1;

  /* Set the context for the new declaration.  */
  if (!DECL_CONTEXT (decl))
    /* Globals stay global.  */
    ;
  else if (DECL_CONTEXT (decl) != id->src_fn)
    /* Things that weren't in the scope of the function we're inlining
       from aren't in the scope we're inlining to, either.  */
    ;
  else if (TREE_STATIC (decl))
    /* Function-scoped static variables should stay in the original
       function.  */
    ;
  else
    {
      /* Ordinary automatic local variables are now in the scope of the
	 new function.  */
      DECL_CONTEXT (copy) = id->dst_fn;
      if (VAR_P (copy) && id->dst_simt_vars && !is_gimple_reg (copy))
	{
	  if (!lookup_attribute ("omp simt private", DECL_ATTRIBUTES (copy)))
	    DECL_ATTRIBUTES (copy)
	      = tree_cons (get_identifier ("omp simt private"), NULL,
			   DECL_ATTRIBUTES (copy));
	  id->dst_simt_vars->safe_push (copy);
	}
    }

  return copy;
}
static tree
copy_decl_to_var (tree decl, copy_body_data *id)
{
  tree copy, type;

  gcc_assert (TREE_CODE (decl) == PARM_DECL
	      || TREE_CODE (decl) == RESULT_DECL);

  type = TREE_TYPE (decl);

  copy = build_decl (DECL_SOURCE_LOCATION (id->dst_fn),
		     VAR_DECL, DECL_NAME (decl), type);
  if (DECL_PT_UID_SET_P (decl))
    SET_DECL_PT_UID (copy, DECL_PT_UID (decl));
  TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (decl);
  TREE_READONLY (copy) = TREE_READONLY (decl);
  TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (decl);
  DECL_GIMPLE_REG_P (copy) = DECL_GIMPLE_REG_P (decl);

  return copy_decl_for_dup_finish (id, decl, copy);
}
/* Like copy_decl_to_var, but create a return slot object instead of a
   pointer variable for return by invisible reference.  */

static tree
copy_result_decl_to_var (tree decl, copy_body_data *id)
{
  tree copy, type;

  gcc_assert (TREE_CODE (decl) == PARM_DECL
	      || TREE_CODE (decl) == RESULT_DECL);

  type = TREE_TYPE (decl);
  if (DECL_BY_REFERENCE (decl))
    type = TREE_TYPE (type);

  copy = build_decl (DECL_SOURCE_LOCATION (id->dst_fn),
		     VAR_DECL, DECL_NAME (decl), type);
  if (DECL_PT_UID_SET_P (decl))
    SET_DECL_PT_UID (copy, DECL_PT_UID (decl));
  TREE_READONLY (copy) = TREE_READONLY (decl);
  TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (decl);
  if (!DECL_BY_REFERENCE (decl))
    {
      TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (decl);
      DECL_GIMPLE_REG_P (copy) = DECL_GIMPLE_REG_P (decl);
    }

  return copy_decl_for_dup_finish (id, decl, copy);
}
tree
copy_decl_no_change (tree decl, copy_body_data *id)
{
  tree copy;

  copy = copy_node (decl);

  /* The COPY is not abstract; it will be generated in DST_FN.  */
  DECL_ABSTRACT_P (copy) = false;
  lang_hooks.dup_lang_specific_decl (copy);

  /* TREE_ADDRESSABLE isn't used to indicate that a label's address has
     been taken; it's for internal bookkeeping in expand_goto_internal.  */
  if (TREE_CODE (copy) == LABEL_DECL)
    {
      TREE_ADDRESSABLE (copy) = 0;
      LABEL_DECL_UID (copy) = -1;
    }

  return copy_decl_for_dup_finish (id, decl, copy);
}
static tree
copy_decl_maybe_to_var (tree decl, copy_body_data *id)
{
  if (TREE_CODE (decl) == PARM_DECL || TREE_CODE (decl) == RESULT_DECL)
    return copy_decl_to_var (decl, id);
  else
    return copy_decl_no_change (decl, id);
}
/* Return a copy of the function's argument tree.  */

static tree
copy_arguments_for_versioning (tree orig_parm, copy_body_data * id,
			       bitmap args_to_skip, tree *vars)
{
  tree arg, *parg;
  tree new_parm = NULL;
  int i = 0;

  parg = &new_parm;

  for (arg = orig_parm; arg; arg = DECL_CHAIN (arg), i++)
    if (!args_to_skip || !bitmap_bit_p (args_to_skip, i))
      {
	tree new_tree = remap_decl (arg, id);
	if (TREE_CODE (new_tree) != PARM_DECL)
	  new_tree = id->copy_decl (arg, id);
	lang_hooks.dup_lang_specific_decl (new_tree);
	*parg = new_tree;
	parg = &DECL_CHAIN (new_tree);
      }
    else if (!id->decl_map->get (arg))
      {
	/* Make an equivalent VAR_DECL.  If the argument was used
	   as a temporary variable later in the function, the uses will be
	   replaced by the local variable.  */
	tree var = copy_decl_to_var (arg, id);
	insert_decl_map (id, arg, var);
	/* Declare this new variable.  */
	DECL_CHAIN (var) = *vars;
	*vars = var;
      }
  return new_parm;
}
/* Return a copy of the function's static chain.  */

static tree
copy_static_chain (tree static_chain, copy_body_data * id)
{
  tree *chain_copy, *pvar;

  chain_copy = &static_chain;
  for (pvar = chain_copy; *pvar; pvar = &DECL_CHAIN (*pvar))
    {
      tree new_tree = remap_decl (*pvar, id);
      lang_hooks.dup_lang_specific_decl (new_tree);
      DECL_CHAIN (new_tree) = DECL_CHAIN (*pvar);
      *pvar = new_tree;
    }
  return static_chain;
}
/* Return true if the function is allowed to be versioned.
   This is a guard for the versioning functionality.  */

bool
tree_versionable_function_p (tree fndecl)
{
  return (!lookup_attribute ("noclone", DECL_ATTRIBUTES (fndecl))
	  && copy_forbidden (DECL_STRUCT_FUNCTION (fndecl)) == NULL);
}
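/* For example, a function carrying __attribute__((noclone)) is rejected
   here, as is anything copy_forbidden objects to (for instance a body
   containing non-local labels), so IPA passes are expected to check this
   predicate before creating a new version of a function.  */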
/* Delete all unreachable basic blocks and update the callgraph.
   Doing so is somewhat nontrivial because we need to update all clones and
   remove inline functions that become unreachable.  */

static bool
delete_unreachable_blocks_update_callgraph (copy_body_data *id)
{
  bool changed = false;
  basic_block b, next_bb;

  find_unreachable_blocks ();

  /* Delete all unreachable basic blocks.  */

  for (b = ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb;
       b != EXIT_BLOCK_PTR_FOR_FN (cfun); b = next_bb)
    {
      next_bb = b->next_bb;

      if (!(b->flags & BB_REACHABLE))
	{
	  gimple_stmt_iterator bsi;

	  for (bsi = gsi_start_bb (b); !gsi_end_p (bsi); gsi_next (&bsi))
	    {
	      struct cgraph_edge *e;
	      struct cgraph_node *node;

	      id->dst_node->remove_stmt_references (gsi_stmt (bsi));

	      if (gimple_code (gsi_stmt (bsi)) == GIMPLE_CALL
		  && (e = id->dst_node->get_edge (gsi_stmt (bsi))) != NULL)
		{
		  if (!e->inline_failed)
		    e->callee->remove_symbol_and_inline_clones (id->dst_node);
		  else
		    e->remove ();
		}
	      if (id->transform_call_graph_edges == CB_CGE_MOVE_CLONES
		  && id->dst_node->clones)
		for (node = id->dst_node->clones; node != id->dst_node;)
		  {
		    node->remove_stmt_references (gsi_stmt (bsi));
		    if (gimple_code (gsi_stmt (bsi)) == GIMPLE_CALL
			&& (e = node->get_edge (gsi_stmt (bsi))) != NULL)
		      {
			if (!e->inline_failed)
			  e->callee->remove_symbol_and_inline_clones (id->dst_node);
			else
			  e->remove ();
		      }

		    if (node->clones)
		      node = node->clones;
		    else if (node->next_sibling_clone)
		      node = node->next_sibling_clone;
		    else
		      {
			while (node != id->dst_node
			       && !node->next_sibling_clone)
			  node = node->clone_of;
			if (node != id->dst_node)
			  node = node->next_sibling_clone;
		      }
		  }
	    }
	  delete_basic_block (b);
	  changed = true;
	}
    }

  return changed;
}
/* Update clone info after duplication.  */

static void
update_clone_info (copy_body_data * id)
{
  struct cgraph_node *node;
  if (!id->dst_node->clones)
    return;
  for (node = id->dst_node->clones; node != id->dst_node;)
    {
      /* First update replace maps to match the new body.  */
      if (node->clone.tree_map)
	{
	  unsigned int i;
	  for (i = 0; i < vec_safe_length (node->clone.tree_map); i++)
	    {
	      struct ipa_replace_map *replace_info;
	      replace_info = (*node->clone.tree_map)[i];
	      walk_tree (&replace_info->old_tree, copy_tree_body_r, id, NULL);
	      walk_tree (&replace_info->new_tree, copy_tree_body_r, id, NULL);
	    }
	}
      if (node->clones)
	node = node->clones;
      else if (node->next_sibling_clone)
	node = node->next_sibling_clone;
      else
	{
	  while (node != id->dst_node && !node->next_sibling_clone)
	    node = node->clone_of;
	  if (node != id->dst_node)
	    node = node->next_sibling_clone;
	}
    }
}
5850 /* Create a copy of a function's tree.
5851 OLD_DECL and NEW_DECL are FUNCTION_DECL tree nodes
5852 of the original function and the new copied function
5853 respectively. In case we want to replace a DECL
5854 tree with another tree while duplicating the function's
5855 body, TREE_MAP represents the mapping between these
5856 trees. If UPDATE_CLONES is set, the call_stmt fields
5857 of edges of clones of the function will be updated.
5859 If non-NULL ARGS_TO_SKIP determine function parameters to remove
5861 If SKIP_RETURN is true, the new version will return void.
5862 If non-NULL BLOCK_TO_COPY determine what basic blocks to copy.
5863 If non_NULL NEW_ENTRY determine new entry BB of the clone.
5866 tree_function_versioning (tree old_decl
, tree new_decl
,
5867 vec
<ipa_replace_map
*, va_gc
> *tree_map
,
5868 bool update_clones
, bitmap args_to_skip
,
5869 bool skip_return
, bitmap blocks_to_copy
,
5870 basic_block new_entry
)
5872 struct cgraph_node
*old_version_node
;
5873 struct cgraph_node
*new_version_node
;
5877 struct ipa_replace_map
*replace_info
;
5878 basic_block old_entry_block
, bb
;
5879 auto_vec
<gimple
*, 10> init_stmts
;
5880 tree vars
= NULL_TREE
;
5881 bitmap debug_args_to_skip
= args_to_skip
;
5883 gcc_assert (TREE_CODE (old_decl
) == FUNCTION_DECL
5884 && TREE_CODE (new_decl
) == FUNCTION_DECL
);
5885 DECL_POSSIBLY_INLINED (old_decl
) = 1;
5887 old_version_node
= cgraph_node::get (old_decl
);
5888 gcc_checking_assert (old_version_node
);
5889 new_version_node
= cgraph_node::get (new_decl
);
5890 gcc_checking_assert (new_version_node
);
5892 /* Copy over debug args. */
5893 if (DECL_HAS_DEBUG_ARGS_P (old_decl
))
5895 vec
<tree
, va_gc
> **new_debug_args
, **old_debug_args
;
5896 gcc_checking_assert (decl_debug_args_lookup (new_decl
) == NULL
);
5897 DECL_HAS_DEBUG_ARGS_P (new_decl
) = 0;
5898 old_debug_args
= decl_debug_args_lookup (old_decl
);
5901 new_debug_args
= decl_debug_args_insert (new_decl
);
5902 *new_debug_args
= vec_safe_copy (*old_debug_args
);
5906 /* Output the inlining info for this abstract function, since it has been
5907 inlined. If we don't do this now, we can lose the information about the
5908 variables in the function when the blocks get blown away as soon as we
5909 remove the cgraph node. */
5910 (*debug_hooks
->outlining_inline_function
) (old_decl
);
5912 DECL_ARTIFICIAL (new_decl
) = 1;
5913 DECL_ABSTRACT_ORIGIN (new_decl
) = DECL_ORIGIN (old_decl
);
5914 if (DECL_ORIGIN (old_decl
) == old_decl
)
5915 old_version_node
->used_as_abstract_origin
= true;
5916 DECL_FUNCTION_PERSONALITY (new_decl
) = DECL_FUNCTION_PERSONALITY (old_decl
);

  /* Prepare the data structures for the tree copy.  */
  memset (&id, 0, sizeof (id));

  /* Generate a new name for the new version.  */
  id.statements_to_fold = new hash_set<gimple *>;

  id.decl_map = new hash_map<tree, tree>;
  id.debug_map = NULL;
  id.src_fn = old_decl;
  id.dst_fn = new_decl;
  id.src_node = old_version_node;
  id.dst_node = new_version_node;
  id.src_cfun = DECL_STRUCT_FUNCTION (old_decl);
  id.blocks_to_copy = blocks_to_copy;

  id.copy_decl = copy_decl_no_change;
  id.transform_call_graph_edges
    = update_clones ? CB_CGE_MOVE_CLONES : CB_CGE_MOVE;
  id.transform_new_cfg = true;
  id.transform_return_to_modify = false;
  id.transform_parameter = false;
  id.transform_lang_insert_block = NULL;

  old_entry_block = ENTRY_BLOCK_PTR_FOR_FN
		     (DECL_STRUCT_FUNCTION (old_decl));
  DECL_RESULT (new_decl) = DECL_RESULT (old_decl);
  DECL_ARGUMENTS (new_decl) = DECL_ARGUMENTS (old_decl);
  initialize_cfun (new_decl, old_decl,
		   old_entry_block->count);
  if (DECL_STRUCT_FUNCTION (new_decl)->gimple_df)
    DECL_STRUCT_FUNCTION (new_decl)->gimple_df->ipa_pta
      = id.src_cfun->gimple_df->ipa_pta;

  /* Copy the function's static chain.  */
  p = DECL_STRUCT_FUNCTION (old_decl)->static_chain_decl;
  if (p)
    DECL_STRUCT_FUNCTION (new_decl)->static_chain_decl
      = copy_static_chain (p, &id);

  /* If there's a tree_map, prepare for substitution.  */
  if (tree_map)
    for (i = 0; i < tree_map->length (); i++)
      {
	gimple *init;
	replace_info = (*tree_map)[i];
	if (replace_info->replace_p)
	  {
	    int parm_num = -1;
	    if (!replace_info->old_tree)
	      {
		int p = replace_info->parm_num;
		tree parm;
		tree req_type, new_type;

		for (parm = DECL_ARGUMENTS (old_decl); p;
		     parm = DECL_CHAIN (parm))
		  p--;
		replace_info->old_tree = parm;
		parm_num = replace_info->parm_num;
		req_type = TREE_TYPE (parm);
		new_type = TREE_TYPE (replace_info->new_tree);
		if (!useless_type_conversion_p (req_type, new_type))
		  {
		    if (fold_convertible_p (req_type, replace_info->new_tree))
		      replace_info->new_tree
			= fold_build1 (NOP_EXPR, req_type,
				       replace_info->new_tree);
		    else if (TYPE_SIZE (req_type) == TYPE_SIZE (new_type))
		      replace_info->new_tree
			= fold_build1 (VIEW_CONVERT_EXPR, req_type,
				       replace_info->new_tree);
		    else
		      {
			if (dump_file)
			  {
			    fprintf (dump_file, "    const ");
			    print_generic_expr (dump_file,
						replace_info->new_tree);
			    fprintf (dump_file,
				     "  can't be converted to param ");
			    print_generic_expr (dump_file, parm);
			    fprintf (dump_file, "\n");
			  }
			replace_info->old_tree = NULL;
		      }
		  }
	      }
	    else
	      gcc_assert (TREE_CODE (replace_info->old_tree) == PARM_DECL);
	    if (replace_info->old_tree)
	      {
		init = setup_one_parameter (&id, replace_info->old_tree,
					    replace_info->new_tree, id.src_fn,
					    NULL,
					    &vars);
		if (init)
		  init_stmts.safe_push (init);
		if (MAY_HAVE_DEBUG_STMTS && args_to_skip)
		  {
		    if (parm_num == -1)
		      {
			tree parm;
			int p;
			for (parm = DECL_ARGUMENTS (old_decl), p = 0; parm;
			     parm = DECL_CHAIN (parm), p++)
			  if (parm == replace_info->old_tree)
			    {
			      parm_num = p;
			      break;
			    }
		      }
		    if (parm_num != -1)
		      {
			if (debug_args_to_skip == args_to_skip)
			  {
			    debug_args_to_skip = BITMAP_ALLOC (NULL);
			    bitmap_copy (debug_args_to_skip, args_to_skip);
			  }
			bitmap_clear_bit (debug_args_to_skip, parm_num);
		      }
		  }
	      }
	  }
      }

  /* Copy the function's arguments.  */
  if (DECL_ARGUMENTS (old_decl) != NULL_TREE)
    DECL_ARGUMENTS (new_decl)
      = copy_arguments_for_versioning (DECL_ARGUMENTS (old_decl), &id,
				       args_to_skip, &vars);

  DECL_INITIAL (new_decl) = remap_blocks (DECL_INITIAL (id.src_fn), &id);
  BLOCK_SUPERCONTEXT (DECL_INITIAL (new_decl)) = new_decl;

  declare_inline_vars (DECL_INITIAL (new_decl), vars);

  if (!vec_safe_is_empty (DECL_STRUCT_FUNCTION (old_decl)->local_decls))
    /* Add local vars.  */
    add_local_variables (DECL_STRUCT_FUNCTION (old_decl), cfun, &id);

  if (DECL_RESULT (old_decl) == NULL_TREE)
    ;
  else if (skip_return && !VOID_TYPE_P (TREE_TYPE (DECL_RESULT (old_decl))))
    {
      DECL_RESULT (new_decl)
	= build_decl (DECL_SOURCE_LOCATION (DECL_RESULT (old_decl)),
		      RESULT_DECL, NULL_TREE, void_type_node);
      DECL_CONTEXT (DECL_RESULT (new_decl)) = new_decl;
      cfun->returns_struct = 0;
      cfun->returns_pcc_struct = 0;
    }
  else
    {
      tree old_name;
      DECL_RESULT (new_decl) = remap_decl (DECL_RESULT (old_decl), &id);
      lang_hooks.dup_lang_specific_decl (DECL_RESULT (new_decl));
      if (gimple_in_ssa_p (id.src_cfun)
	  && DECL_BY_REFERENCE (DECL_RESULT (old_decl))
	  && (old_name = ssa_default_def (id.src_cfun,
					  DECL_RESULT (old_decl))))
	{
	  tree new_name = make_ssa_name (DECL_RESULT (new_decl));
	  insert_decl_map (&id, old_name, new_name);
	  SSA_NAME_DEF_STMT (new_name) = gimple_build_nop ();
	  set_ssa_default_def (cfun, DECL_RESULT (new_decl), new_name);
	}
    }

  /* Set up the destination function's loop tree.  */
  if (loops_for_fn (DECL_STRUCT_FUNCTION (old_decl)) != NULL)
    {
      cfun->curr_properties &= ~PROP_loops;
      loop_optimizer_init (AVOID_CFG_MODIFICATIONS);
      cfun->curr_properties |= PROP_loops;
    }

  /* Copy the function's body.  */
  copy_body (&id, old_entry_block->count, REG_BR_PROB_BASE,
	     ENTRY_BLOCK_PTR_FOR_FN (cfun), EXIT_BLOCK_PTR_FOR_FN (cfun),
	     new_entry);

  /* Renumber the lexical scoping (non-code) blocks consecutively.  */
  number_blocks (new_decl);

  /* We want to create the BB unconditionally, so that the addition of
     debug stmts doesn't affect BB count, which may in the end cause
     codegen differences.  */
  bb = split_edge (single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
  while (init_stmts.length ())
    insert_init_stmt (&id, bb, init_stmts.pop ());
  update_clone_info (&id);

  /* Remap the nonlocal_goto_save_area, if any.  */
  if (cfun->nonlocal_goto_save_area)
    {
      struct walk_stmt_info wi;

      memset (&wi, 0, sizeof (wi));
      wi.info = &id;
      walk_tree (&cfun->nonlocal_goto_save_area, remap_gimple_op_r, &wi, NULL);
    }

  /* Clean up.  */
  delete id.decl_map;
  if (id.debug_map)
    delete id.debug_map;
  free_dominance_info (CDI_DOMINATORS);
  free_dominance_info (CDI_POST_DOMINATORS);

  fold_marked_statements (0, id.statements_to_fold);
  delete id.statements_to_fold;
  delete_unreachable_blocks_update_callgraph (&id);
  if (id.dst_node->definition)
    cgraph_edge::rebuild_references ();
  if (loops_state_satisfies_p (LOOPS_NEED_FIXUP))
    {
      calculate_dominance_info (CDI_DOMINATORS);
      fix_loop_structure (NULL);
    }
  update_ssa (TODO_update_ssa);

  /* After partial cloning we need to rescale frequencies, so they are
     within proper range in the cloned function.  */
  if (new_entry)
    {
      struct cgraph_edge *e;
      rebuild_frequencies ();

      new_version_node->count = ENTRY_BLOCK_PTR_FOR_FN (cfun)->count;
      for (e = new_version_node->callees; e; e = e->next_callee)
	{
	  basic_block bb = gimple_bb (e->call_stmt);
	  e->frequency
	    = compute_call_stmt_bb_frequency (current_function_decl, bb);
	  e->count = bb->count;
	}
      for (e = new_version_node->indirect_calls; e; e = e->next_callee)
	{
	  basic_block bb = gimple_bb (e->call_stmt);
	  e->frequency
	    = compute_call_stmt_bb_frequency (current_function_decl, bb);
	  e->count = bb->count;
	}
    }

  if (debug_args_to_skip && MAY_HAVE_DEBUG_STMTS)
    {
      tree parm;
      vec<tree, va_gc> **debug_args = NULL;
      unsigned int len = 0;
      for (parm = DECL_ARGUMENTS (old_decl), i = 0;
	   parm; parm = DECL_CHAIN (parm), i++)
	if (bitmap_bit_p (debug_args_to_skip, i) && is_gimple_reg (parm))
	  {
	    tree ddecl;

	    if (debug_args == NULL)
	      {
		debug_args = decl_debug_args_insert (new_decl);
		len = vec_safe_length (*debug_args);
	      }
	    ddecl = make_node (DEBUG_EXPR_DECL);
	    DECL_ARTIFICIAL (ddecl) = 1;
	    TREE_TYPE (ddecl) = TREE_TYPE (parm);
	    SET_DECL_MODE (ddecl, DECL_MODE (parm));
	    vec_safe_push (*debug_args, DECL_ORIGIN (parm));
	    vec_safe_push (*debug_args, ddecl);
	  }
      if (debug_args != NULL)
	{
	  /* On the callee side, add
	       DEBUG D#Y s=> parm
	       DEBUG var => D#Y
	     stmts to the first bb where var is a VAR_DECL created for the
	     optimized away parameter in DECL_INITIAL block.  This hints
	     in the debug info that var (whose DECL_ORIGIN is the parm
	     PARM_DECL) is optimized away, but could be looked up at the
	     call site as value of D#X there.  */
	  tree var = vars, vexpr;
	  gimple_stmt_iterator cgsi
	    = gsi_after_labels (single_succ (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
	  gimple *def_temp;

	  i = vec_safe_length (*debug_args);
	  do
	    {
	      i -= 2;
	      while (var != NULL_TREE
		     && DECL_ABSTRACT_ORIGIN (var) != (**debug_args)[i])
		var = TREE_CHAIN (var);
	      if (var == NULL_TREE)
		break;
	      vexpr = make_node (DEBUG_EXPR_DECL);
	      parm = (**debug_args)[i];
	      DECL_ARTIFICIAL (vexpr) = 1;
	      TREE_TYPE (vexpr) = TREE_TYPE (parm);
	      SET_DECL_MODE (vexpr, DECL_MODE (parm));
	      def_temp = gimple_build_debug_bind (var, vexpr, NULL);
	      gsi_insert_before (&cgsi, def_temp, GSI_NEW_STMT);
	      def_temp = gimple_build_debug_source_bind (vexpr, parm, NULL);
	      gsi_insert_before (&cgsi, def_temp, GSI_NEW_STMT);
	    }
	  while (i > len);
	}
    }

  if (debug_args_to_skip && debug_args_to_skip != args_to_skip)
    BITMAP_FREE (debug_args_to_skip);
  free_dominance_info (CDI_DOMINATORS);
  free_dominance_info (CDI_POST_DOMINATORS);

  gcc_assert (!id.debug_stmts.exists ());
  pop_cfun ();
  return;
}
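
/* Editor's note: a minimal usage sketch, not part of GCC itself.  A caller
   (for example the clone-materialization code in cgraphclones.c) that has
   already built NEW_DECL as a FUNCTION_DECL copy of OLD_DECL and collected
   a vector of ipa_replace_map entries could drive the versioning machinery
   roughly as follows; the names old_decl, new_decl and replacements are
   assumptions for illustration:

     tree_function_versioning (old_decl, new_decl, replacements,
			       false, NULL, false, NULL, NULL);

   Passing false for update_clones and skip_return, and NULL for
   args_to_skip, blocks_to_copy and new_entry copies the whole body, keeps
   every parameter and preserves the return value; supplying bitmaps and a
   new entry block instead produces a partial clone as documented above.  */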

/* EXP is a CALL_EXPR present in a GENERIC expression tree.  Try to integrate
   the callee and return the inlined body on success.  */

tree
maybe_inline_call_in_expr (tree exp)
{
  tree fn = get_callee_fndecl (exp);

  /* We can only try to inline "const" functions.  */
  if (fn && TREE_READONLY (fn) && DECL_SAVED_TREE (fn))
    {
      call_expr_arg_iterator iter;
      copy_body_data id;
      tree param, arg, t;
      hash_map<tree, tree> decl_map;

      /* Remap the parameters.  */
      for (param = DECL_ARGUMENTS (fn),
	     arg = first_call_expr_arg (exp, &iter);
	   param;
	   param = DECL_CHAIN (param), arg = next_call_expr_arg (&iter))
	decl_map.put (param, arg);

      memset (&id, 0, sizeof (id));
      id.src_fn = fn;
      id.dst_fn = current_function_decl;
      id.src_cfun = DECL_STRUCT_FUNCTION (fn);
      id.decl_map = &decl_map;

      id.copy_decl = copy_decl_no_change;
      id.transform_call_graph_edges = CB_CGE_DUPLICATE;
      id.transform_new_cfg = false;
      id.transform_return_to_modify = true;
      id.transform_parameter = true;
      id.transform_lang_insert_block = NULL;

      /* Make sure not to unshare trees behind the front-end's back
	 since front-end specific mechanisms may rely on sharing.  */
      id.regimplify = false;
      id.do_not_unshare = true;

      /* We're not inside any EH region.  */
      id.eh_lp_nr = 0;

      t = copy_tree_body (&id);

      /* We can only return something suitable for use in a GENERIC
	 expression tree.  */
      if (TREE_CODE (t) == MODIFY_EXPR)
	return TREE_OPERAND (t, 1);
    }

  return NULL_TREE;
}
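
/* Editor's note: an illustrative sketch of the calling convention above,
   with hypothetical variable names; this is not GCC code.  Given some
   CALL_EXPR node in a GENERIC tree, a folder might try:

     tree inlined = maybe_inline_call_in_expr (call);
     if (inlined)
       call = inlined;

   A NULL_TREE result simply means the call could not be integrated (the
   callee is not "const", has no saved tree, or the copied body did not
   reduce to a single MODIFY_EXPR), so the caller keeps the CALL_EXPR.  */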

/* Duplicate a type, fields and all.  */

tree
build_duplicate_type (tree type)
{
  struct copy_body_data id;

  memset (&id, 0, sizeof (id));
  id.src_fn = current_function_decl;
  id.dst_fn = current_function_decl;
  id.src_cfun = cfun;
  id.decl_map = new hash_map<tree, tree>;
  id.debug_map = NULL;
  id.copy_decl = copy_decl_no_change;

  type = remap_type_1 (type, &id);

  delete id.decl_map;
  if (id.debug_map)
    delete id.debug_map;

  TYPE_CANONICAL (type) = type;

  return type;
}
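
/* Editor's note: an illustrative sketch only, with an assumed variable
   name.  A caller that needs a structurally identical but distinct type
   node, so that fields or attributes can be adjusted without touching the
   original, can simply do:

     tree copy = build_duplicate_type (orig_type);

   Because the copy receives itself as TYPE_CANONICAL, canonical-type
   comparison treats it as a different type from the original.  */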

/* Unshare the entire DECL_SAVED_TREE of FN and return the remapped
   parameters and RESULT_DECL in PARMS and RESULT.  Used by C++ constexpr
   evaluation.  */

tree
copy_fn (tree fn, tree& parms, tree& result)
{
  copy_body_data id;
  tree param;
  hash_map<tree, tree> decl_map;

  tree *p = &parms;
  *p = NULL_TREE;

  memset (&id, 0, sizeof (id));
  id.src_fn = fn;
  id.dst_fn = current_function_decl;
  id.src_cfun = DECL_STRUCT_FUNCTION (fn);
  id.decl_map = &decl_map;

  id.copy_decl = copy_decl_no_change;
  id.transform_call_graph_edges = CB_CGE_DUPLICATE;
  id.transform_new_cfg = false;
  id.transform_return_to_modify = false;
  id.transform_parameter = true;
  id.transform_lang_insert_block = NULL;

  /* Make sure not to unshare trees behind the front-end's back
     since front-end specific mechanisms may rely on sharing.  */
  id.regimplify = false;
  id.do_not_unshare = true;

  /* We're not inside any EH region.  */
  id.eh_lp_nr = 0;

  /* Remap the parameters and result and return them to the caller.  */
  for (param = DECL_ARGUMENTS (fn);
       param;
       param = DECL_CHAIN (param))
    {
      *p = remap_decl (param, &id);
      p = &DECL_CHAIN (*p);
    }

  if (DECL_RESULT (fn))
    result = remap_decl (DECL_RESULT (fn), &id);
  else
    result = NULL_TREE;

  return copy_tree_body (&id);
}
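
/* Editor's note: a minimal sketch of how a front end might use copy_fn,
   with hypothetical variable names rather than actual constexpr-evaluator
   code.  The caller gets an unshared copy of the body plus the remapped
   parameter chain and result decl:

     tree parms, result;
     tree body = copy_fn (fndecl, parms, result);

   Here body is a fresh copy of DECL_SAVED_TREE (fndecl), parms is the
   remapped argument chain and result is the remapped RESULT_DECL (or
   NULL_TREE if the function has none), so the caller can substitute
   argument values and evaluate body without clobbering the original
   trees.  */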