/* Tree inlining.
   Copyright (C) 2001-2023 Free Software Foundation, Inc.
   Contributed by Alexandre Oliva <aoliva@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "coretypes.h"
#include "tree-pass.h"
#include "tree-pretty-print.h"
#include "diagnostic-core.h"
#include "gimple-predict.h"
#include "fold-const.h"
#include "stor-layout.h"
#include "tree-inline.h"
#include "langhooks.h"
#include "tree-iterator.h"
#include "gimple-iterator.h"
#include "gimple-fold.h"
#include "gimplify-me.h"
#include "gimple-walk.h"
#include "tree-into-ssa.h"
#include "value-prof.h"
#include "stringpool.h"
#include "tree-cfgcleanup.h"
#include "tree-ssa-live.h"
#include "alloc-pool.h"
#include "symbol-summary.h"
#include "symtab-thunks.h"
#include "symtab-clones.h"
/* I'm not real happy about this, but we need to handle gimple and
   non-gimple trees.  */
/* Inlining, Cloning, Versioning, Parallelization

   Inlining: a function body is duplicated, but the PARM_DECLs are
   remapped into VAR_DECLs, and non-void RETURN_EXPRs become
   MODIFY_EXPRs that store to a dedicated returned-value variable.
   The duplicated eh_region info of the copy will later be appended
   to the info for the caller; the eh_region info in copied throwing
   statements and RESX statements is adjusted accordingly.

   Cloning: (only in C++) We have one body for a con/de/structor, and
   multiple function decls, each with a unique parameter list.
   Duplicate the body, using the given splay tree; some parameters
   will become constants (like 0 or 1).

   Versioning: a function body is duplicated and the result is a new
   function rather than being inlined into blocks of an existing
   function as with inlining.  Some parameters will become constants.

   Parallelization: a region of a function is duplicated, resulting in
   a new function.  Variables may be replaced with complex expressions
   to enable shared variable semantics.

   All of these will simultaneously look up any callgraph edges.  If
   we're going to inline the duplicated function body, and the given
   function has some cloned callgraph nodes (one for each place this
   function will be inlined) those callgraph edges will be duplicated.
   If we're cloning the body, those callgraph edges will be
   updated to point into the new body.  (Note that the original
   callgraph node and edge list will not be altered.)

   See the CALL_EXPR handling case in copy_tree_body_r ().  */
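
/* Illustrative example (not from the original sources): for the
   inlining transform above, a callee

       int f (int x) { return x + 1; }

   inlined at a call "y = f (3);" has its PARM_DECL X remapped to a
   local variable and its RETURN_EXPR rewritten into an assignment to
   the returned-value variable, roughly:

       x.1 = 3;
       retval.2 = x.1 + 1;
       y = retval.2;

   The names x.1 and retval.2 are invented for this sketch.  */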
/* To do:

   o In order to make inlining-on-trees work, we pessimized
     function-local static constants.  In particular, they are now
     always output, even when not addressed.  Fix this by treating
     function-local static constants just like global static
     constants; the back-end already knows not to output them if they
     are not needed.

   o Provide heuristics to clamp inlining of recursive template
     calls?  */
/* Weights that estimate_num_insns uses to estimate the size of the
   produced code.  */

eni_weights eni_size_weights;

/* Weights that estimate_num_insns uses to estimate the time necessary
   to execute the produced code.  */

eni_weights eni_time_weights;
/* Prototypes.  */

static tree declare_return_variable (copy_body_data *, tree, tree,
				     basic_block);
static void remap_block (tree *, copy_body_data *);
static void copy_bind_expr (tree *, int *, copy_body_data *);
static void declare_inline_vars (tree, tree);
static void remap_save_expr (tree *, hash_map<tree, tree> *, int *);
static void prepend_lexical_block (tree current_block, tree new_block);
static tree copy_result_decl_to_var (tree, copy_body_data *);
static tree copy_decl_maybe_to_var (tree, copy_body_data *);
static gimple_seq remap_gimple_stmt (gimple *, copy_body_data *);
static void insert_init_stmt (copy_body_data *, basic_block, gimple *);
/* Insert a tree->tree mapping for ID.  Although the name suggests
   that the trees should be variables, it is used for more than that.  */

void
insert_decl_map (copy_body_data *id, tree key, tree value)
{
  id->decl_map->put (key, value);

  /* Always insert an identity map as well.  If we see this same new
     node again, we won't want to duplicate it a second time.  */
  if (key != value && value)
    id->decl_map->put (value, value);
}
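
/* Illustrative sketch (not from the original sources): after
   insert_decl_map (id, old_var, new_var), the map holds both

       old_var -> new_var
       new_var -> new_var

   so a later walk that reaches NEW_VAR maps it to itself rather than
   copying it a second time.  */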
/* If nonzero, we're remapping the contents of inlined debug
   statements.  If negative, an error has occurred, such as a
   reference to a variable that isn't available in the inlined
   context.  */
static int processing_debug_stmt = 0;
/* Construct new SSA name for old NAME.  ID is the inline context.  */

static tree
remap_ssa_name (tree name, copy_body_data *id)
{
  tree new_tree, var;
  tree *n;

  gcc_assert (TREE_CODE (name) == SSA_NAME);

  n = id->decl_map->get (name);
  if (n)
    {
      /* When we perform edge redirection as part of CFG copy, IPA-SRA can
	 remove an unused LHS from a call statement.  Such LHS can however
	 still appear in debug statements, but their value is lost in this
	 function and we do not want to map them.  */
      if (id->killed_new_ssa_names
	  && id->killed_new_ssa_names->contains (*n))
	{
	  gcc_assert (processing_debug_stmt);
	  processing_debug_stmt = -1;
	  return name;
	}

      return unshare_expr (*n);
    }

  if (processing_debug_stmt)
    {
      if (SSA_NAME_IS_DEFAULT_DEF (name)
	  && TREE_CODE (SSA_NAME_VAR (name)) == PARM_DECL
	  && id->entry_bb == NULL
	  && single_succ_p (ENTRY_BLOCK_PTR_FOR_FN (cfun)))
	{
	  gimple *def_temp;
	  gimple_stmt_iterator gsi;
	  tree val = SSA_NAME_VAR (name);

	  n = id->decl_map->get (val);
	  if (n != NULL)
	    val = *n;
	  if (TREE_CODE (val) != PARM_DECL
	      && !(VAR_P (val) && DECL_ABSTRACT_ORIGIN (val)))
	    {
	      processing_debug_stmt = -1;
	      return name;
	    }
	  n = id->decl_map->get (val);
	  if (n && TREE_CODE (*n) == DEBUG_EXPR_DECL)
	    return *n;
	  tree vexpr = build_debug_expr_decl (TREE_TYPE (name));
	  /* FIXME: Is setting the mode really necessary?  */
	  SET_DECL_MODE (vexpr, DECL_MODE (SSA_NAME_VAR (name)));
	  def_temp = gimple_build_debug_source_bind (vexpr, val, NULL);
	  gsi = gsi_after_labels (single_succ (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
	  gsi_insert_before (&gsi, def_temp, GSI_SAME_STMT);
	  insert_decl_map (id, val, vexpr);
	  return vexpr;
	}

      processing_debug_stmt = -1;
      return name;
    }

  /* Remap anonymous SSA names or SSA names of anonymous decls.  */
  var = SSA_NAME_VAR (name);
  if (!var
      || (!SSA_NAME_IS_DEFAULT_DEF (name)
	  && VAR_P (var)
	  && !VAR_DECL_IS_VIRTUAL_OPERAND (var)
	  && DECL_ARTIFICIAL (var)
	  && DECL_IGNORED_P (var)
	  && !DECL_NAME (var)))
    {
      struct ptr_info_def *pi;
      new_tree = make_ssa_name (remap_type (TREE_TYPE (name), id));
      if (!var && SSA_NAME_IDENTIFIER (name))
	SET_SSA_NAME_VAR_OR_IDENTIFIER (new_tree, SSA_NAME_IDENTIFIER (name));
      insert_decl_map (id, name, new_tree);
      SSA_NAME_OCCURS_IN_ABNORMAL_PHI (new_tree)
	= SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name);
      /* At least IPA points-to info can be directly transferred.  */
      if (id->src_cfun->gimple_df
	  && id->src_cfun->gimple_df->ipa_pta
	  && POINTER_TYPE_P (TREE_TYPE (name))
	  && (pi = SSA_NAME_PTR_INFO (name)))
	{
	  struct ptr_info_def *new_pi = get_ptr_info (new_tree);
	  new_pi->pt = pi->pt;
	}
      /* So can range-info.  */
      if (!POINTER_TYPE_P (TREE_TYPE (name))
	  && SSA_NAME_RANGE_INFO (name))
	duplicate_ssa_name_range_info (new_tree, name);
      return new_tree;
    }

  /* Do not set DEF_STMT yet as statement is not copied yet.  We do that
     in copy_bb.  */
  new_tree = remap_decl (var, id);

  /* We might've substituted constant or another SSA_NAME for
     the variable.

     Replace the SSA name representing RESULT_DECL by a variable during
     inlining: this saves us from the need to introduce a PHI node when
     the return value is only partly initialized.  */
  if ((VAR_P (new_tree) || TREE_CODE (new_tree) == PARM_DECL)
      && (!SSA_NAME_VAR (name)
	  || TREE_CODE (SSA_NAME_VAR (name)) != RESULT_DECL
	  || !id->transform_return_to_modify))
    {
      struct ptr_info_def *pi;
      new_tree = make_ssa_name (new_tree);
      insert_decl_map (id, name, new_tree);
      SSA_NAME_OCCURS_IN_ABNORMAL_PHI (new_tree)
	= SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name);
      /* At least IPA points-to info can be directly transferred.  */
      if (id->src_cfun->gimple_df
	  && id->src_cfun->gimple_df->ipa_pta
	  && POINTER_TYPE_P (TREE_TYPE (name))
	  && (pi = SSA_NAME_PTR_INFO (name)))
	{
	  struct ptr_info_def *new_pi = get_ptr_info (new_tree);
	  new_pi->pt = pi->pt;
	}
      /* So can range-info.  */
      if (!POINTER_TYPE_P (TREE_TYPE (name))
	  && SSA_NAME_RANGE_INFO (name))
	duplicate_ssa_name_range_info (new_tree, name);
      if (SSA_NAME_IS_DEFAULT_DEF (name))
	{
	  /* By inlining a function that has an uninitialized variable,
	     we might extend its lifetime (the variable might get reused).
	     This causes an ICE when we end up extending the lifetime of
	     an SSA name across an abnormal edge, and also increases
	     register pressure.

	     We simply initialize all uninitialized vars to 0, except
	     when we are inlining into the very first BB.  We can avoid
	     this for all BBs that are not inside strongly connected
	     regions of the CFG, but this is expensive to test.  */
	  if (id->entry_bb
	      && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name)
	      && (!SSA_NAME_VAR (name)
		  || TREE_CODE (SSA_NAME_VAR (name)) != PARM_DECL)
	      && (id->entry_bb != EDGE_SUCC (ENTRY_BLOCK_PTR_FOR_FN (cfun),
					     0)->dest
		  || EDGE_COUNT (id->entry_bb->preds) != 1))
	    {
	      gimple_stmt_iterator gsi = gsi_last_bb (id->entry_bb);
	      gimple *init_stmt;
	      tree zero = build_zero_cst (TREE_TYPE (new_tree));

	      init_stmt = gimple_build_assign (new_tree, zero);
	      gsi_insert_after (&gsi, init_stmt, GSI_NEW_STMT);
	      SSA_NAME_IS_DEFAULT_DEF (new_tree) = 0;
	    }
	  else
	    {
	      SSA_NAME_DEF_STMT (new_tree) = gimple_build_nop ();
	      set_ssa_default_def (cfun, SSA_NAME_VAR (new_tree), new_tree);
	    }
	}
    }
  else
    insert_decl_map (id, name, new_tree);
  return new_tree;
}
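
/* Illustrative sketch (not from the original sources) of the
   zero-initialization above: for an uninitialized default definition
   u_1(D) that occurs in an abnormal PHI, the copy gets an explicit

       u_5 = 0;

   appended to ID->entry_bb, so the remapped name has a real defining
   statement instead of a default definition whose lifetime could be
   extended across an abnormal edge.  The name u_5 is invented for
   this sketch.  */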
/* Remap DECL during the copying of the BLOCK tree for the function.  */

tree
remap_decl (tree decl, copy_body_data *id)
{
  tree *n;

  /* We only remap local variables in the current function.  */

  /* See if we have remapped this declaration.  */

  n = id->decl_map->get (decl);

  if (!n && processing_debug_stmt)
    {
      processing_debug_stmt = -1;
      return decl;
    }

  /* When remapping a type within copy_gimple_seq_and_replace_locals, all
     necessary DECLs have already been remapped and we do not want to duplicate
     a decl coming from outside of the sequence we are copying.  */
  if (!n
      && id->prevent_decl_creation_for_types
      && id->remapping_type_depth > 0
      && (VAR_P (decl) || TREE_CODE (decl) == PARM_DECL))
    return decl;

  /* If we didn't already have an equivalent for this declaration, create one
     now.  */
  if (!n)
    {
      /* Make a copy of the variable or label.  */
      tree t = id->copy_decl (decl, id);

      /* Remember it, so that if we encounter this local entity again
	 we can reuse this copy.  Do this early because remap_type may
	 need this decl for TYPE_STUB_DECL.  */
      insert_decl_map (id, decl, t);

      if (!DECL_P (t))
	return t;

      /* Remap types, if necessary.  */
      TREE_TYPE (t) = remap_type (TREE_TYPE (t), id);
      if (TREE_CODE (t) == TYPE_DECL)
	{
	  DECL_ORIGINAL_TYPE (t) = remap_type (DECL_ORIGINAL_TYPE (t), id);

	  /* Preserve the invariant that DECL_ORIGINAL_TYPE != TREE_TYPE,
	     which is enforced in gen_typedef_die when DECL_ABSTRACT_ORIGIN
	     is not set on the TYPE_DECL, for example in LTO mode.  */
	  if (DECL_ORIGINAL_TYPE (t) == TREE_TYPE (t))
	    {
	      tree x = build_variant_type_copy (TREE_TYPE (t));
	      TYPE_STUB_DECL (x) = TYPE_STUB_DECL (TREE_TYPE (t));
	      TYPE_NAME (x) = TYPE_NAME (TREE_TYPE (t));
	      DECL_ORIGINAL_TYPE (t) = x;
	    }
	}

      /* Remap sizes as necessary.  */
      walk_tree (&DECL_SIZE (t), copy_tree_body_r, id, NULL);
      walk_tree (&DECL_SIZE_UNIT (t), copy_tree_body_r, id, NULL);

      /* If fields, do likewise for offset and qualifier.  */
      if (TREE_CODE (t) == FIELD_DECL)
	{
	  walk_tree (&DECL_FIELD_OFFSET (t), copy_tree_body_r, id, NULL);
	  if (TREE_CODE (DECL_CONTEXT (t)) == QUAL_UNION_TYPE)
	    walk_tree (&DECL_QUALIFIER (t), copy_tree_body_r, id, NULL);
	}

      return t;
    }

  if (id->do_not_unshare)
    return *n;
  else
    return unshare_expr (*n);
}
static tree
remap_type_1 (tree type, copy_body_data *id)
{
  tree new_tree, t;

  /* We do need a copy.  build and register it now.  If this is a pointer or
     reference type, remap the designated type and make a new pointer or
     reference type.  */
  if (TREE_CODE (type) == POINTER_TYPE)
    {
      new_tree = build_pointer_type_for_mode (remap_type (TREE_TYPE (type), id),
					      TYPE_MODE (type),
					      TYPE_REF_CAN_ALIAS_ALL (type));
      if (TYPE_ATTRIBUTES (type) || TYPE_QUALS (type))
	new_tree = build_type_attribute_qual_variant (new_tree,
						      TYPE_ATTRIBUTES (type),
						      TYPE_QUALS (type));
      insert_decl_map (id, type, new_tree);
      return new_tree;
    }
  else if (TREE_CODE (type) == REFERENCE_TYPE)
    {
      new_tree = build_reference_type_for_mode (remap_type (TREE_TYPE (type), id),
						TYPE_MODE (type),
						TYPE_REF_CAN_ALIAS_ALL (type));
      if (TYPE_ATTRIBUTES (type) || TYPE_QUALS (type))
	new_tree = build_type_attribute_qual_variant (new_tree,
						      TYPE_ATTRIBUTES (type),
						      TYPE_QUALS (type));
      insert_decl_map (id, type, new_tree);
      return new_tree;
    }
  else
    new_tree = copy_node (type);

  insert_decl_map (id, type, new_tree);

  /* This is a new type, not a copy of an old type.  Need to reassociate
     variants.  We can handle everything except the main variant lazily.  */
  t = TYPE_MAIN_VARIANT (type);
  if (type != t)
    {
      t = remap_type (t, id);
      TYPE_MAIN_VARIANT (new_tree) = t;
      TYPE_NEXT_VARIANT (new_tree) = TYPE_NEXT_VARIANT (t);
      TYPE_NEXT_VARIANT (t) = new_tree;
    }
  else
    {
      TYPE_MAIN_VARIANT (new_tree) = new_tree;
      TYPE_NEXT_VARIANT (new_tree) = NULL;
    }

  if (TYPE_STUB_DECL (type))
    TYPE_STUB_DECL (new_tree) = remap_decl (TYPE_STUB_DECL (type), id);

  /* Lazily create pointer and reference types.  */
  TYPE_POINTER_TO (new_tree) = NULL;
  TYPE_REFERENCE_TO (new_tree) = NULL;

  /* Copy all types that may contain references to local variables; be sure to
     preserve sharing in between type and its main variant when possible.  */
  switch (TREE_CODE (new_tree))
    {
    case INTEGER_TYPE:
    case REAL_TYPE:
    case FIXED_POINT_TYPE:
    case ENUMERAL_TYPE:
    case BOOLEAN_TYPE:
      if (TYPE_MAIN_VARIANT (new_tree) != new_tree)
	{
	  gcc_checking_assert (TYPE_MIN_VALUE (type)
			       == TYPE_MIN_VALUE (TYPE_MAIN_VARIANT (type)));
	  gcc_checking_assert (TYPE_MAX_VALUE (type)
			       == TYPE_MAX_VALUE (TYPE_MAIN_VARIANT (type)));

	  TYPE_MIN_VALUE (new_tree)
	    = TYPE_MIN_VALUE (TYPE_MAIN_VARIANT (new_tree));
	  TYPE_MAX_VALUE (new_tree)
	    = TYPE_MAX_VALUE (TYPE_MAIN_VARIANT (new_tree));
	}
      else
	{
	  t = TYPE_MIN_VALUE (new_tree);
	  if (t && TREE_CODE (t) != INTEGER_CST)
	    walk_tree (&TYPE_MIN_VALUE (new_tree), copy_tree_body_r, id, NULL);

	  t = TYPE_MAX_VALUE (new_tree);
	  if (t && TREE_CODE (t) != INTEGER_CST)
	    walk_tree (&TYPE_MAX_VALUE (new_tree), copy_tree_body_r, id, NULL);
	}
      return new_tree;

    case FUNCTION_TYPE:
      if (TYPE_MAIN_VARIANT (new_tree) != new_tree
	  && TREE_TYPE (type) == TREE_TYPE (TYPE_MAIN_VARIANT (type)))
	TREE_TYPE (new_tree) = TREE_TYPE (TYPE_MAIN_VARIANT (new_tree));
      else
	TREE_TYPE (new_tree) = remap_type (TREE_TYPE (new_tree), id);
      if (TYPE_MAIN_VARIANT (new_tree) != new_tree
	  && TYPE_ARG_TYPES (type) == TYPE_ARG_TYPES (TYPE_MAIN_VARIANT (type)))
	TYPE_ARG_TYPES (new_tree) = TYPE_ARG_TYPES (TYPE_MAIN_VARIANT (new_tree));
      else
	walk_tree (&TYPE_ARG_TYPES (new_tree), copy_tree_body_r, id, NULL);
      return new_tree;

    case ARRAY_TYPE:
      if (TYPE_MAIN_VARIANT (new_tree) != new_tree
	  && TREE_TYPE (type) == TREE_TYPE (TYPE_MAIN_VARIANT (type)))
	TREE_TYPE (new_tree) = TREE_TYPE (TYPE_MAIN_VARIANT (new_tree));
      else
	TREE_TYPE (new_tree) = remap_type (TREE_TYPE (new_tree), id);

      if (TYPE_MAIN_VARIANT (new_tree) != new_tree)
	{
	  gcc_checking_assert (TYPE_DOMAIN (type)
			       == TYPE_DOMAIN (TYPE_MAIN_VARIANT (type)));
	  TYPE_DOMAIN (new_tree) = TYPE_DOMAIN (TYPE_MAIN_VARIANT (new_tree));
	}
      else
	{
	  TYPE_DOMAIN (new_tree) = remap_type (TYPE_DOMAIN (new_tree), id);
	  /* For array bounds where we have decided not to copy over the bounds
	     variable which isn't used in OpenMP/OpenACC region, change them to
	     an uninitialized VAR_DECL temporary.  */
	  if (id->adjust_array_error_bounds
	      && TYPE_DOMAIN (new_tree)
	      && TYPE_MAX_VALUE (TYPE_DOMAIN (new_tree)) == error_mark_node
	      && TYPE_MAX_VALUE (TYPE_DOMAIN (type)) != error_mark_node)
	    {
	      tree v = create_tmp_var (TREE_TYPE (TYPE_DOMAIN (new_tree)));
	      DECL_ATTRIBUTES (v)
		= tree_cons (get_identifier ("omp dummy var"), NULL_TREE,
			     DECL_ATTRIBUTES (v));
	      TYPE_MAX_VALUE (TYPE_DOMAIN (new_tree)) = v;
	    }
	}
      break;

    case RECORD_TYPE:
    case UNION_TYPE:
    case QUAL_UNION_TYPE:
      if (TYPE_MAIN_VARIANT (type) != type
	  && TYPE_FIELDS (type) == TYPE_FIELDS (TYPE_MAIN_VARIANT (type)))
	TYPE_FIELDS (new_tree) = TYPE_FIELDS (TYPE_MAIN_VARIANT (new_tree));
      else
	{
	  tree f, nf = NULL;

	  for (f = TYPE_FIELDS (new_tree); f ; f = DECL_CHAIN (f))
	    {
	      t = remap_decl (f, id);
	      DECL_CONTEXT (t) = new_tree;
	      DECL_CHAIN (t) = nf;
	      nf = t;
	    }
	  TYPE_FIELDS (new_tree) = nreverse (nf);
	}
      break;

    case OFFSET_TYPE:
    default:
      /* Shouldn't have been thought variable sized.  */
      gcc_unreachable ();
    }

  /* All variants of type share the same size, so use the already
     remapped data.  */
  if (TYPE_MAIN_VARIANT (new_tree) != new_tree)
    {
      tree s = TYPE_SIZE (type);
      tree mvs = TYPE_SIZE (TYPE_MAIN_VARIANT (type));
      tree su = TYPE_SIZE_UNIT (type);
      tree mvsu = TYPE_SIZE_UNIT (TYPE_MAIN_VARIANT (type));
      gcc_checking_assert ((TREE_CODE (s) == PLACEHOLDER_EXPR
			    && (TREE_CODE (mvs) == PLACEHOLDER_EXPR))
			   || s == mvs);
      gcc_checking_assert ((TREE_CODE (su) == PLACEHOLDER_EXPR
			    && (TREE_CODE (mvsu) == PLACEHOLDER_EXPR))
			   || su == mvsu);
      TYPE_SIZE (new_tree) = TYPE_SIZE (TYPE_MAIN_VARIANT (new_tree));
      TYPE_SIZE_UNIT (new_tree) = TYPE_SIZE_UNIT (TYPE_MAIN_VARIANT (new_tree));
    }
  else
    {
      walk_tree (&TYPE_SIZE (new_tree), copy_tree_body_r, id, NULL);
      walk_tree (&TYPE_SIZE_UNIT (new_tree), copy_tree_body_r, id, NULL);
    }

  return new_tree;
}
/* Helper function for remap_type_2, called through walk_tree.  */

static tree
remap_type_3 (tree *tp, int *walk_subtrees, void *data)
{
  copy_body_data *id = (copy_body_data *) data;

  if (TYPE_P (*tp))
    *walk_subtrees = 0;
  else if (DECL_P (*tp) && remap_decl (*tp, id) != *tp)
    return *tp;

  return NULL_TREE;
}
/* Return true if TYPE needs to be remapped because remap_decl on any
   needed embedded decl returns something other than that decl.  */

static bool
remap_type_2 (tree type, copy_body_data *id)
{
  tree t;

#define RETURN_TRUE_IF_VAR(T) \
  do								\
    {								\
      tree _t = (T);						\
      if (_t)							\
	{							\
	  if (DECL_P (_t) && remap_decl (_t, id) != _t)		\
	    return true;					\
	  if (!TYPE_SIZES_GIMPLIFIED (type)			\
	      && walk_tree (&_t, remap_type_3, id, NULL))	\
	    return true;					\
	}							\
    }								\
  while (0)

  switch (TREE_CODE (type))
    {
    case POINTER_TYPE:
    case REFERENCE_TYPE:
    case FUNCTION_TYPE:
    case METHOD_TYPE:
      return remap_type_2 (TREE_TYPE (type), id);

    case INTEGER_TYPE:
    case REAL_TYPE:
    case FIXED_POINT_TYPE:
    case ENUMERAL_TYPE:
    case BOOLEAN_TYPE:
      RETURN_TRUE_IF_VAR (TYPE_MIN_VALUE (type));
      RETURN_TRUE_IF_VAR (TYPE_MAX_VALUE (type));
      break;

    case ARRAY_TYPE:
      if (remap_type_2 (TREE_TYPE (type), id)
	  || (TYPE_DOMAIN (type) && remap_type_2 (TYPE_DOMAIN (type), id)))
	return true;
      break;

    case RECORD_TYPE:
    case UNION_TYPE:
    case QUAL_UNION_TYPE:
      for (t = TYPE_FIELDS (type); t; t = DECL_CHAIN (t))
	if (TREE_CODE (t) == FIELD_DECL)
	  {
	    RETURN_TRUE_IF_VAR (DECL_FIELD_OFFSET (t));
	    RETURN_TRUE_IF_VAR (DECL_SIZE (t));
	    RETURN_TRUE_IF_VAR (DECL_SIZE_UNIT (t));
	    if (TREE_CODE (type) == QUAL_UNION_TYPE)
	      RETURN_TRUE_IF_VAR (DECL_QUALIFIER (t));
	  }
      break;

    default:
      break;
    }

  RETURN_TRUE_IF_VAR (TYPE_SIZE (type));
  RETURN_TRUE_IF_VAR (TYPE_SIZE_UNIT (type));
  return false;
#undef RETURN_TRUE_IF_VAR
}
tree
remap_type (tree type, copy_body_data *id)
{
  tree *node;
  tree tmp;

  if (type == NULL)
    return type;

  /* See if we have remapped this type.  */
  node = id->decl_map->get (type);
  if (node)
    return *node;

  /* The type only needs remapping if it's variably modified.  */
  if (! variably_modified_type_p (type, id->src_fn)
      /* Don't remap if copy_decl method doesn't always return a new
	 decl and for all embedded decls returns the passed in decl.  */
      || (id->dont_remap_vla_if_no_change && !remap_type_2 (type, id)))
    {
      insert_decl_map (id, type, type);
      return type;
    }

  id->remapping_type_depth++;
  tmp = remap_type_1 (type, id);
  id->remapping_type_depth--;

  return tmp;
}
722 can_be_nonlocal (tree decl
, copy_body_data
*id
)
724 /* We cannot duplicate function decls. */
725 if (TREE_CODE (decl
) == FUNCTION_DECL
)
728 /* Local static vars must be non-local or we get multiple declaration
730 if (VAR_P (decl
) && !auto_var_in_fn_p (decl
, id
->src_fn
))
static tree
remap_decls (tree decls, vec<tree, va_gc> **nonlocalized_list,
	     copy_body_data *id)
{
  tree old_var;
  tree new_decls = NULL_TREE;

  /* Remap its variables.  */
  for (old_var = decls; old_var; old_var = DECL_CHAIN (old_var))
    {
      tree new_var;

      if (can_be_nonlocal (old_var, id))
	{
	  /* We need to add this variable to the local decls as otherwise
	     nothing else will do so.  */
	  if (VAR_P (old_var) && ! DECL_EXTERNAL (old_var) && cfun)
	    add_local_decl (cfun, old_var);
	  if ((!optimize || debug_info_level > DINFO_LEVEL_TERSE)
	      && !DECL_IGNORED_P (old_var)
	      && nonlocalized_list)
	    vec_safe_push (*nonlocalized_list, old_var);
	  continue;
	}

      /* Remap the variable.  */
      new_var = remap_decl (old_var, id);

      /* If we didn't remap this variable, we can't mess with its
	 TREE_CHAIN.  If we remapped this variable to the return slot, it's
	 already declared somewhere else, so don't declare it here.  */

      if (new_var == id->retvar)
	;
      else if (!new_var)
	{
	  if ((!optimize || debug_info_level > DINFO_LEVEL_TERSE)
	      && !DECL_IGNORED_P (old_var)
	      && nonlocalized_list)
	    vec_safe_push (*nonlocalized_list, old_var);
	}
      else
	{
	  gcc_assert (DECL_P (new_var));
	  DECL_CHAIN (new_var) = new_decls;
	  new_decls = new_var;

	  /* Also copy value-expressions.  */
	  if (VAR_P (new_var) && DECL_HAS_VALUE_EXPR_P (new_var))
	    {
	      tree tem = DECL_VALUE_EXPR (new_var);
	      bool old_regimplify = id->regimplify;
	      id->remapping_type_depth++;
	      walk_tree (&tem, copy_tree_body_r, id, NULL);
	      id->remapping_type_depth--;
	      id->regimplify = old_regimplify;
	      SET_DECL_VALUE_EXPR (new_var, tem);
	    }
	}
    }

  return nreverse (new_decls);
}
/* Copy the BLOCK to contain remapped versions of the variables
   therein.  And hook the new block into the block-tree.  */

static void
remap_block (tree *block, copy_body_data *id)
{
  tree old_block;
  tree new_block;

  /* Make the new block.  */
  old_block = *block;
  new_block = make_node (BLOCK);
  TREE_USED (new_block) = TREE_USED (old_block);
  BLOCK_ABSTRACT_ORIGIN (new_block) = BLOCK_ORIGIN (old_block);
  BLOCK_SOURCE_LOCATION (new_block) = BLOCK_SOURCE_LOCATION (old_block);
  BLOCK_NONLOCALIZED_VARS (new_block)
    = vec_safe_copy (BLOCK_NONLOCALIZED_VARS (old_block));
  *block = new_block;

  /* Remap its variables.  */
  BLOCK_VARS (new_block) = remap_decls (BLOCK_VARS (old_block),
					&BLOCK_NONLOCALIZED_VARS (new_block),
					id);

  /* Remember the remapped block.  */
  insert_decl_map (id, old_block, new_block);
}
/* Copy the whole block tree and root it in id->block.  */

static tree
remap_blocks (tree block, copy_body_data *id)
{
  tree t;
  tree new_tree = block;

  if (!block)
    return NULL;

  remap_block (&new_tree, id);
  gcc_assert (new_tree != block);
  for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
    prepend_lexical_block (new_tree, remap_blocks (t, id));
  /* Blocks are in arbitrary order, but make things slightly prettier and do
     not swap order when producing a copy.  */
  BLOCK_SUBBLOCKS (new_tree) = blocks_nreverse (BLOCK_SUBBLOCKS (new_tree));
  return new_tree;
}
/* Remap the block tree rooted at BLOCK to nothing.  */

static void
remap_blocks_to_null (tree block, copy_body_data *id)
{
  tree t;
  insert_decl_map (id, block, NULL_TREE);
  for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
    remap_blocks_to_null (t, id);
}
/* Remap the location info pointed to by LOCUS.  */

static location_t
remap_location (location_t locus, copy_body_data *id)
{
  if (LOCATION_BLOCK (locus))
    {
      tree *n = id->decl_map->get (LOCATION_BLOCK (locus));
      gcc_assert (n);
      if (*n)
	return set_block (locus, *n);
      locus = LOCATION_LOCUS (locus);
    }

  if (locus != UNKNOWN_LOCATION && id->block)
    return set_block (locus, id->block);

  return locus;
}
static void
copy_statement_list (tree *tp)
{
  tree_stmt_iterator oi, ni;
  tree new_tree;

  new_tree = alloc_stmt_list ();
  ni = tsi_start (new_tree);
  oi = tsi_start (*tp);
  TREE_TYPE (new_tree) = TREE_TYPE (*tp);
  *tp = new_tree;

  for (; !tsi_end_p (oi); tsi_next (&oi))
    {
      tree stmt = tsi_stmt (oi);
      if (TREE_CODE (stmt) == STATEMENT_LIST)
	/* This copy is not redundant; tsi_link_after will smash this
	   STATEMENT_LIST into the end of the one we're building, and we
	   don't want to do that with the original.  */
	copy_statement_list (&stmt);
      tsi_link_after (&ni, stmt, TSI_CONTINUE_LINKING);
    }
}
static void
copy_bind_expr (tree *tp, int *walk_subtrees, copy_body_data *id)
{
  tree block = BIND_EXPR_BLOCK (*tp);
  /* Copy (and replace) the statement.  */
  copy_tree_r (tp, walk_subtrees, NULL);
  if (block)
    {
      remap_block (&block, id);
      BIND_EXPR_BLOCK (*tp) = block;
    }

  if (BIND_EXPR_VARS (*tp))
    /* This will remap a lot of the same decls again, but this should be
       harmless.  */
    BIND_EXPR_VARS (*tp) = remap_decls (BIND_EXPR_VARS (*tp), NULL, id);
}
/* Create a new gimple_seq by remapping all the statements in BODY
   using the inlining information in ID.  */

static gimple_seq
remap_gimple_seq (gimple_seq body, copy_body_data *id)
{
  gimple_stmt_iterator si;
  gimple_seq new_body = NULL;

  for (si = gsi_start (body); !gsi_end_p (si); gsi_next (&si))
    {
      gimple_seq new_stmts = remap_gimple_stmt (gsi_stmt (si), id);
      gimple_seq_add_seq (&new_body, new_stmts);
    }

  return new_body;
}
/* Copy a GIMPLE_BIND statement STMT, remapping all the symbols in its
   block using the mapping information in ID.  */

static gimple *
copy_gimple_bind (gbind *stmt, copy_body_data *id)
{
  gimple *new_bind;
  tree new_block, new_vars;
  gimple_seq body, new_body;

  /* Copy the statement.  Note that we purposely don't use copy_stmt
     here because we need to remap statements as we copy.  */
  body = gimple_bind_body (stmt);
  new_body = remap_gimple_seq (body, id);

  new_block = gimple_bind_block (stmt);
  if (new_block)
    remap_block (&new_block, id);

  /* This will remap a lot of the same decls again, but this should be
     harmless.  */
  new_vars = gimple_bind_vars (stmt);
  if (new_vars)
    new_vars = remap_decls (new_vars, NULL, id);

  new_bind = gimple_build_bind (new_vars, new_body, new_block);

  return new_bind;
}
/* Return true if DECL is a parameter or a SSA_NAME for a parameter.  */

static bool
is_parm (tree decl)
{
  if (TREE_CODE (decl) == SSA_NAME)
    {
      decl = SSA_NAME_VAR (decl);
      if (!decl)
	return false;
    }

  return (TREE_CODE (decl) == PARM_DECL);
}
/* Remap the dependence CLIQUE from the source to the destination function
   as specified in ID.  */

static unsigned short
remap_dependence_clique (copy_body_data *id, unsigned short clique)
{
  if (clique == 0 || processing_debug_stmt)
    return 0;
  if (!id->dependence_map)
    id->dependence_map = new hash_map<dependence_hash, unsigned short>;
  bool existed;
  unsigned short &newc = id->dependence_map->get_or_insert (clique, &existed);
  if (!existed)
    {
      /* Clique 1 is reserved for local ones set by PTA.  */
      if (cfun->last_clique == 0)
	cfun->last_clique = 1;
      newc = ++cfun->last_clique;
    }
  return newc;
}
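
/* Illustrative sketch (not from the original sources): if the source
   function used cliques 2 and 3 and the destination's last_clique is
   4, the first remap request for 2 allocates 5 and the first for 3
   allocates 6; later requests hit the dependence_map cache, so all
   references to one source clique land in one destination clique.  */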
/* Remap the GIMPLE operand pointed to by *TP.  DATA is really a
   'struct walk_stmt_info *'.  DATA->INFO is a 'copy_body_data *'.
   WALK_SUBTREES is used to indicate to walk_gimple_op whether to keep
   recursing into the children nodes of *TP.  */

static tree
remap_gimple_op_r (tree *tp, int *walk_subtrees, void *data)
{
  struct walk_stmt_info *wi_p = (struct walk_stmt_info *) data;
  copy_body_data *id = (copy_body_data *) wi_p->info;
  tree fn = id->src_fn;

  /* For recursive invocations this is no longer the LHS itself.  */
  bool is_lhs = wi_p->is_lhs;
  wi_p->is_lhs = false;

  if (TREE_CODE (*tp) == SSA_NAME)
    {
      *tp = remap_ssa_name (*tp, id);
      *walk_subtrees = 0;
      if (is_lhs)
	SSA_NAME_DEF_STMT (*tp) = wi_p->stmt;
    }
  else if (auto_var_in_fn_p (*tp, fn))
    {
      /* Local variables and labels need to be replaced by equivalent
	 variables.  We don't want to copy static variables; there's
	 only one of those, no matter how many times we inline the
	 containing function.  Similarly for globals from an outer
	 function.  */
      tree new_decl;

      /* Remap the declaration.  */
      new_decl = remap_decl (*tp, id);
      gcc_assert (new_decl);
      /* Replace this variable with the copy.  */
      STRIP_TYPE_NOPS (new_decl);
      /* ???  The C++ frontend uses void * pointer zero to initialize
	 any other type.  This confuses the middle-end type verification.
	 As cloned bodies do not go through gimplification again the fixup
	 there doesn't trigger.  */
      if (TREE_CODE (new_decl) == INTEGER_CST
	  && !useless_type_conversion_p (TREE_TYPE (*tp), TREE_TYPE (new_decl)))
	new_decl = fold_convert (TREE_TYPE (*tp), new_decl);
      *tp = new_decl;
      *walk_subtrees = 0;
    }
  else if (TREE_CODE (*tp) == STATEMENT_LIST)
    gcc_unreachable ();
  else if (TREE_CODE (*tp) == SAVE_EXPR)
    gcc_unreachable ();
  else if (TREE_CODE (*tp) == LABEL_DECL
	   && (!DECL_CONTEXT (*tp)
	       || decl_function_context (*tp) == id->src_fn))
    /* These may need to be remapped for EH handling.  */
    *tp = remap_decl (*tp, id);
  else if (TREE_CODE (*tp) == FIELD_DECL)
    {
      /* If the enclosing record type is variably_modified_type_p, the field
	 has already been remapped.  Otherwise, it need not be.  */
      tree *n = id->decl_map->get (*tp);
      if (n)
	*tp = *n;
      *walk_subtrees = 0;
    }
  else if (TYPE_P (*tp))
    /* Types may need remapping as well.  */
    *tp = remap_type (*tp, id);
  else if (CONSTANT_CLASS_P (*tp))
    {
      /* If this is a constant, we have to copy the node iff the type
	 will be remapped.  copy_tree_r will not copy a constant.  */
      tree new_type = remap_type (TREE_TYPE (*tp), id);

      if (new_type == TREE_TYPE (*tp))
	*walk_subtrees = 0;

      else if (TREE_CODE (*tp) == INTEGER_CST)
	*tp = wide_int_to_tree (new_type, wi::to_wide (*tp));
      else
	{
	  *tp = copy_node (*tp);
	  TREE_TYPE (*tp) = new_type;
	}
    }
  else
    {
      /* Otherwise, just copy the node.  Note that copy_tree_r already
	 knows not to copy VAR_DECLs, etc., so this is safe.  */

      if (TREE_CODE (*tp) == MEM_REF && !id->do_not_fold)
	{
	  /* We need to re-canonicalize MEM_REFs from inline substitutions
	     that can happen when a pointer argument is an ADDR_EXPR.
	     Recurse here manually to allow that.  */
	  tree ptr = TREE_OPERAND (*tp, 0);
	  tree type = remap_type (TREE_TYPE (*tp), id);
	  tree old = *tp;
	  walk_tree (&ptr, remap_gimple_op_r, data, NULL);
	  *tp = fold_build2 (MEM_REF, type, ptr, TREE_OPERAND (*tp, 1));
	  TREE_THIS_VOLATILE (*tp) = TREE_THIS_VOLATILE (old);
	  TREE_SIDE_EFFECTS (*tp) = TREE_SIDE_EFFECTS (old);
	  copy_warning (*tp, old);
	  if (MR_DEPENDENCE_CLIQUE (old) != 0)
	    {
	      MR_DEPENDENCE_CLIQUE (*tp)
		= remap_dependence_clique (id, MR_DEPENDENCE_CLIQUE (old));
	      MR_DEPENDENCE_BASE (*tp) = MR_DEPENDENCE_BASE (old);
	    }
	  /* We cannot propagate the TREE_THIS_NOTRAP flag if we have
	     remapped a parameter as the property might be valid only
	     for the parameter itself.  */
	  if (TREE_THIS_NOTRAP (old)
	      && (!is_parm (TREE_OPERAND (old, 0))
		  || (!id->transform_parameter && is_parm (ptr))))
	    TREE_THIS_NOTRAP (*tp) = 1;
	  REF_REVERSE_STORAGE_ORDER (*tp) = REF_REVERSE_STORAGE_ORDER (old);
	  *walk_subtrees = 0;
	  return NULL;
	}

      /* Here is the "usual case".  Copy this tree node, and then
	 tweak some special cases.  */
      copy_tree_r (tp, walk_subtrees, NULL);

      if (TREE_CODE (*tp) != OMP_CLAUSE)
	TREE_TYPE (*tp) = remap_type (TREE_TYPE (*tp), id);

      if (TREE_CODE (*tp) == TARGET_EXPR && TREE_OPERAND (*tp, 3))
	{
	  /* The copied TARGET_EXPR has never been expanded, even if the
	     original node was expanded already.  */
	  TREE_OPERAND (*tp, 1) = TREE_OPERAND (*tp, 3);
	  TREE_OPERAND (*tp, 3) = NULL_TREE;
	}
      else if (TREE_CODE (*tp) == ADDR_EXPR)
	{
	  /* Variable substitution need not be simple.  In particular,
	     the MEM_REF substitution above.  Make sure that
	     TREE_CONSTANT and friends are up-to-date.  */
	  int invariant = is_gimple_min_invariant (*tp);
	  walk_tree (&TREE_OPERAND (*tp, 0), remap_gimple_op_r, data, NULL);
	  recompute_tree_invariant_for_addr_expr (*tp);

	  /* If this used to be invariant, but is not any longer,
	     then regimplification is probably needed.  */
	  if (invariant && !is_gimple_min_invariant (*tp))
	    id->regimplify = true;

	  *walk_subtrees = 0;
	}
    }

  /* Update the TREE_BLOCK for the cloned expr.  */
  if (EXPR_P (*tp))
    {
      tree new_block = id->remapping_type_depth == 0 ? id->block : NULL;
      tree old_block = TREE_BLOCK (*tp);
      if (old_block)
	{
	  tree *n;
	  n = id->decl_map->get (TREE_BLOCK (*tp));
	  if (n)
	    new_block = *n;
	}
      TREE_SET_BLOCK (*tp, new_block);
    }

  /* Keep iterating.  */
  return NULL;
}
/* Called from copy_body_id via walk_tree.  DATA is really a
   `copy_body_data *'.  */

tree
copy_tree_body_r (tree *tp, int *walk_subtrees, void *data)
{
  copy_body_data *id = (copy_body_data *) data;
  tree fn = id->src_fn;
  tree new_block;

  /* Begin by recognizing trees that we'll completely rewrite for the
     inlining context.  Our output for these trees is completely
     different from our input (e.g. RETURN_EXPR is deleted, and morphs
     into an edge).  Further down, we'll handle trees that get
     duplicated and/or tweaked.  */

  /* When requested, RETURN_EXPRs should be transformed to just the
     contained MODIFY_EXPR.  The branch semantics of the return will
     be handled elsewhere by manipulating the CFG rather than a statement.  */
  if (TREE_CODE (*tp) == RETURN_EXPR && id->transform_return_to_modify)
    {
      tree assignment = TREE_OPERAND (*tp, 0);

      /* If we're returning something, just turn that into an
	 assignment into the equivalent of the original RESULT_DECL.
	 If the "assignment" is just the result decl, the result
	 decl has already been set (e.g. a recent "foo (&result_decl,
	 ...)"); just toss the entire RETURN_EXPR.  */
      if (assignment && TREE_CODE (assignment) == MODIFY_EXPR)
	{
	  /* Replace the RETURN_EXPR with (a copy of) the
	     MODIFY_EXPR hanging underneath.  */
	  *tp = copy_node (assignment);
	}
      else /* Else the RETURN_EXPR returns no value.  */
	{
	  *tp = NULL;
	  return (tree) (void *) 1;
	}
    }
  else if (TREE_CODE (*tp) == SSA_NAME)
    {
      *tp = remap_ssa_name (*tp, id);
      *walk_subtrees = 0;
      return NULL;
    }

  /* Local variables and labels need to be replaced by equivalent
     variables.  We don't want to copy static variables; there's only
     one of those, no matter how many times we inline the containing
     function.  Similarly for globals from an outer function.  */
  else if (auto_var_in_fn_p (*tp, fn))
    {
      tree new_decl;

      /* Remap the declaration.  */
      new_decl = remap_decl (*tp, id);
      gcc_assert (new_decl);
      /* Replace this variable with the copy.  */
      STRIP_TYPE_NOPS (new_decl);
      *tp = new_decl;
      *walk_subtrees = 0;
    }
  else if (TREE_CODE (*tp) == STATEMENT_LIST)
    copy_statement_list (tp);
  else if (TREE_CODE (*tp) == SAVE_EXPR
	   || TREE_CODE (*tp) == TARGET_EXPR)
    remap_save_expr (tp, id->decl_map, walk_subtrees);
  else if (TREE_CODE (*tp) == LABEL_DECL
	   && (! DECL_CONTEXT (*tp)
	       || decl_function_context (*tp) == id->src_fn))
    /* These may need to be remapped for EH handling.  */
    *tp = remap_decl (*tp, id);
  else if (TREE_CODE (*tp) == BIND_EXPR)
    copy_bind_expr (tp, walk_subtrees, id);
  /* Types may need remapping as well.  */
  else if (TYPE_P (*tp))
    *tp = remap_type (*tp, id);

  /* If this is a constant, we have to copy the node iff the type will be
     remapped.  copy_tree_r will not copy a constant.  */
  else if (CONSTANT_CLASS_P (*tp))
    {
      tree new_type = remap_type (TREE_TYPE (*tp), id);

      if (new_type == TREE_TYPE (*tp))
	*walk_subtrees = 0;

      else if (TREE_CODE (*tp) == INTEGER_CST)
	*tp = wide_int_to_tree (new_type, wi::to_wide (*tp));
      else
	{
	  *tp = copy_node (*tp);
	  TREE_TYPE (*tp) = new_type;
	}
    }

  /* Otherwise, just copy the node.  Note that copy_tree_r already
     knows not to copy VAR_DECLs, etc., so this is safe.  */
  else
    {
      /* Here we handle trees that are not completely rewritten.
	 First we detect some inlining-induced bogosities for
	 discarding.  */
      if (TREE_CODE (*tp) == MODIFY_EXPR
	  && TREE_OPERAND (*tp, 0) == TREE_OPERAND (*tp, 1)
	  && (auto_var_in_fn_p (TREE_OPERAND (*tp, 0), fn)))
	{
	  /* Some assignments VAR = VAR; don't generate any rtl code
	     and thus don't count as variable modification.  Avoid
	     keeping bogosities like 0 = 0.  */
	  tree decl = TREE_OPERAND (*tp, 0), value;
	  tree *n;

	  n = id->decl_map->get (decl);
	  if (n)
	    {
	      value = *n;
	      STRIP_TYPE_NOPS (value);
	      if (TREE_CONSTANT (value) || TREE_READONLY (value))
		{
		  *tp = build_empty_stmt (EXPR_LOCATION (*tp));
		  return copy_tree_body_r (tp, walk_subtrees, data);
		}
	    }
	}
      else if (TREE_CODE (*tp) == INDIRECT_REF)
	{
	  /* Get rid of *& from inline substitutions that can happen when a
	     pointer argument is an ADDR_EXPR.  */
	  tree decl = TREE_OPERAND (*tp, 0);
	  tree *n = id->decl_map->get (decl);
	  if (n)
	    {
	      /* If we happen to get an ADDR_EXPR in n->value, strip
		 it manually here as we'll eventually get ADDR_EXPRs
		 which lie about their types pointed to.  In this case
		 build_fold_indirect_ref wouldn't strip the INDIRECT_REF,
		 but we absolutely rely on that.  As fold_indirect_ref
		 does other useful transformations, try that first, though.  */
	      tree type = TREE_TYPE (*tp);
	      tree ptr = id->do_not_unshare ? *n : unshare_expr (*n);
	      tree old = *tp;
	      *tp = id->do_not_fold ? NULL : gimple_fold_indirect_ref (ptr);
	      if (! *tp)
		{
		  type = remap_type (type, id);
		  if (TREE_CODE (ptr) == ADDR_EXPR && !id->do_not_fold)
		    {
		      *tp
			= fold_indirect_ref_1 (EXPR_LOCATION (ptr), type, ptr);
		      /* ???  We should either assert here or build
			 a VIEW_CONVERT_EXPR instead of blindly leaking
			 incompatible types to our IL.  */
		      if (! *tp)
			*tp = TREE_OPERAND (ptr, 0);
		    }
		  else
		    {
		      *tp = build1 (INDIRECT_REF, type, ptr);
		      TREE_THIS_VOLATILE (*tp) = TREE_THIS_VOLATILE (old);
		      TREE_SIDE_EFFECTS (*tp) = TREE_SIDE_EFFECTS (old);
		      TREE_READONLY (*tp) = TREE_READONLY (old);
		      /* We cannot propagate the TREE_THIS_NOTRAP flag if we
			 have remapped a parameter as the property might be
			 valid only for the parameter itself.  */
		      if (TREE_THIS_NOTRAP (old)
			  && (!is_parm (TREE_OPERAND (old, 0))
			      || (!id->transform_parameter && is_parm (ptr))))
			TREE_THIS_NOTRAP (*tp) = 1;
		    }
		}
	      *walk_subtrees = 0;
	      return NULL;
	    }
	}
      else if (TREE_CODE (*tp) == MEM_REF && !id->do_not_fold)
	{
	  /* We need to re-canonicalize MEM_REFs from inline substitutions
	     that can happen when a pointer argument is an ADDR_EXPR.
	     Recurse here manually to allow that.  */
	  tree ptr = TREE_OPERAND (*tp, 0);
	  tree type = remap_type (TREE_TYPE (*tp), id);
	  tree old = *tp;
	  walk_tree (&ptr, copy_tree_body_r, data, NULL);
	  *tp = fold_build2 (MEM_REF, type, ptr, TREE_OPERAND (*tp, 1));
	  TREE_THIS_VOLATILE (*tp) = TREE_THIS_VOLATILE (old);
	  TREE_SIDE_EFFECTS (*tp) = TREE_SIDE_EFFECTS (old);
	  copy_warning (*tp, old);
	  if (MR_DEPENDENCE_CLIQUE (old) != 0)
	    {
	      MR_DEPENDENCE_CLIQUE (*tp)
		= remap_dependence_clique (id, MR_DEPENDENCE_CLIQUE (old));
	      MR_DEPENDENCE_BASE (*tp) = MR_DEPENDENCE_BASE (old);
	    }
	  /* We cannot propagate the TREE_THIS_NOTRAP flag if we have
	     remapped a parameter as the property might be valid only
	     for the parameter itself.  */
	  if (TREE_THIS_NOTRAP (old)
	      && (!is_parm (TREE_OPERAND (old, 0))
		  || (!id->transform_parameter && is_parm (ptr))))
	    TREE_THIS_NOTRAP (*tp) = 1;
	  REF_REVERSE_STORAGE_ORDER (*tp) = REF_REVERSE_STORAGE_ORDER (old);
	  *walk_subtrees = 0;
	  return NULL;
	}

      /* Here is the "usual case".  Copy this tree node, and then
	 tweak some special cases.  */
      copy_tree_r (tp, walk_subtrees, NULL);

      /* If EXPR has block defined, map it to newly constructed block.
	 When inlining we want EXPRs without block to appear in the block
	 of the function call if we are not remapping a type.  */
      if (EXPR_P (*tp))
	{
	  new_block = id->remapping_type_depth == 0 ? id->block : NULL;
	  if (TREE_BLOCK (*tp))
	    {
	      tree *n;
	      n = id->decl_map->get (TREE_BLOCK (*tp));
	      if (n)
		new_block = *n;
	    }
	  TREE_SET_BLOCK (*tp, new_block);
	}

      if (TREE_CODE (*tp) != OMP_CLAUSE)
	TREE_TYPE (*tp) = remap_type (TREE_TYPE (*tp), id);

      /* The copied TARGET_EXPR has never been expanded, even if the
	 original node was expanded already.  */
      if (TREE_CODE (*tp) == TARGET_EXPR && TREE_OPERAND (*tp, 3))
	{
	  TREE_OPERAND (*tp, 1) = TREE_OPERAND (*tp, 3);
	  TREE_OPERAND (*tp, 3) = NULL_TREE;
	}

      /* Variable substitution need not be simple.  In particular, the
	 INDIRECT_REF substitution above.  Make sure that TREE_CONSTANT
	 and friends are up-to-date.  */
      else if (TREE_CODE (*tp) == ADDR_EXPR)
	{
	  int invariant = is_gimple_min_invariant (*tp);
	  walk_tree (&TREE_OPERAND (*tp, 0), copy_tree_body_r, id, NULL);

	  /* Handle the case where we substituted an INDIRECT_REF
	     into the operand of the ADDR_EXPR.  */
	  if (TREE_CODE (TREE_OPERAND (*tp, 0)) == INDIRECT_REF
	      && !id->do_not_fold)
	    {
	      tree t = TREE_OPERAND (TREE_OPERAND (*tp, 0), 0);
	      if (TREE_TYPE (t) != TREE_TYPE (*tp))
		t = fold_convert (remap_type (TREE_TYPE (*tp), id), t);
	      *tp = t;
	    }
	  else
	    recompute_tree_invariant_for_addr_expr (*tp);

	  /* If this used to be invariant, but is not any longer,
	     then regimplification is probably needed.  */
	  if (invariant && !is_gimple_min_invariant (*tp))
	    id->regimplify = true;

	  *walk_subtrees = 0;
	}
      else if (TREE_CODE (*tp) == OMP_CLAUSE
	       && (OMP_CLAUSE_CODE (*tp) == OMP_CLAUSE_AFFINITY
		   || OMP_CLAUSE_CODE (*tp) == OMP_CLAUSE_DEPEND))
	{
	  tree t = OMP_CLAUSE_DECL (*tp);
	  if (t
	      && TREE_CODE (t) == TREE_LIST
	      && TREE_PURPOSE (t)
	      && TREE_CODE (TREE_PURPOSE (t)) == TREE_VEC)
	    {
	      *walk_subtrees = 0;
	      OMP_CLAUSE_DECL (*tp) = copy_node (t);
	      t = OMP_CLAUSE_DECL (*tp);
	      TREE_PURPOSE (t) = copy_node (TREE_PURPOSE (t));
	      for (int i = 0; i <= 4; i++)
		walk_tree (&TREE_VEC_ELT (TREE_PURPOSE (t), i),
			   copy_tree_body_r, id, NULL);
	      if (TREE_VEC_ELT (TREE_PURPOSE (t), 5))
		remap_block (&TREE_VEC_ELT (TREE_PURPOSE (t), 5), id);
	      walk_tree (&TREE_VALUE (t), copy_tree_body_r, id, NULL);
	    }
	}
    }

  /* Keep iterating.  */
  return NULL;
}
/* Helper for remap_gimple_stmt.  Given an EH region number for the
   source function, map that to the duplicate EH region number in
   the destination function.  */

static int
remap_eh_region_nr (int old_nr, copy_body_data *id)
{
  eh_region old_r, new_r;

  old_r = get_eh_region_from_number_fn (id->src_cfun, old_nr);
  new_r = static_cast<eh_region> (*id->eh_map->get (old_r));

  return new_r->index;
}
1496 remap_eh_region_tree_nr (tree old_t_nr
, copy_body_data
*id
)
1500 old_nr
= tree_to_shwi (old_t_nr
);
1501 new_nr
= remap_eh_region_nr (old_nr
, id
);
1503 return build_int_cst (integer_type_node
, new_nr
);
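
/* Illustrative sketch (not from the original sources): if EH region 1
   of the source function was duplicated as region 7 of the
   destination, a copied call __builtin_eh_pointer (1) is rewritten by
   the helpers above into __builtin_eh_pointer (7).  */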
1506 /* Helper for copy_bb. Remap statement STMT using the inlining
1507 information in ID. Return the new statement copy. */
1510 remap_gimple_stmt (gimple
*stmt
, copy_body_data
*id
)
1512 gimple
*copy
= NULL
;
1513 struct walk_stmt_info wi
;
1514 bool skip_first
= false;
1515 gimple_seq stmts
= NULL
;
1517 if (is_gimple_debug (stmt
)
1518 && (gimple_debug_nonbind_marker_p (stmt
)
1519 ? !DECL_STRUCT_FUNCTION (id
->dst_fn
)->debug_nonbind_markers
1520 : !opt_for_fn (id
->dst_fn
, flag_var_tracking_assignments
)))
1523 if (!is_gimple_debug (stmt
)
1524 && id
->param_body_adjs
1525 && id
->param_body_adjs
->m_dead_stmts
.contains (stmt
))
1527 tree
*dval
= id
->param_body_adjs
->m_dead_stmt_debug_equiv
.get (stmt
);
1531 gcc_assert (is_gimple_assign (stmt
));
1532 tree lhs
= gimple_assign_lhs (stmt
);
1533 tree
*dvar
= id
->param_body_adjs
->m_dead_ssa_debug_equiv
.get (lhs
);
1534 gdebug
*bind
= gimple_build_debug_bind (*dvar
, *dval
, stmt
);
1535 if (id
->reset_location
)
1536 gimple_set_location (bind
, input_location
);
1537 id
->debug_stmts
.safe_push (bind
);
1538 gimple_seq_add_stmt_without_update (&stmts
, bind
);
1542 /* Begin by recognizing trees that we'll completely rewrite for the
1543 inlining context. Our output for these trees is completely
1544 different from our input (e.g. RETURN_EXPR is deleted and morphs
1545 into an edge). Further down, we'll handle trees that get
1546 duplicated and/or tweaked. */
1548 /* When requested, GIMPLE_RETURN should be transformed to just the
1549 contained GIMPLE_ASSIGN. The branch semantics of the return will
1550 be handled elsewhere by manipulating the CFG rather than the
1552 if (gimple_code (stmt
) == GIMPLE_RETURN
&& id
->transform_return_to_modify
)
1554 tree retval
= gimple_return_retval (as_a
<greturn
*> (stmt
));
1556 /* If we're returning something, just turn that into an
1557 assignment to the equivalent of the original RESULT_DECL.
1558 If RETVAL is just the result decl, the result decl has
1559 already been set (e.g. a recent "foo (&result_decl, ...)");
1560 just toss the entire GIMPLE_RETURN. Likewise for when the
1561 call doesn't want the return value. */
1563 && (TREE_CODE (retval
) != RESULT_DECL
1565 || gimple_call_lhs (id
->call_stmt
) != NULL_TREE
)
1566 && (TREE_CODE (retval
) != SSA_NAME
1567 || ! SSA_NAME_VAR (retval
)
1568 || TREE_CODE (SSA_NAME_VAR (retval
)) != RESULT_DECL
)))
1570 copy
= gimple_build_assign (id
->do_not_unshare
1571 ? id
->retvar
: unshare_expr (id
->retvar
),
1573 /* id->retvar is already substituted. Skip it on later remapping. */
1579 else if (gimple_has_substatements (stmt
))
1583 /* When cloning bodies from the C++ front end, we will be handed bodies
1584 in High GIMPLE form. Handle here all the High GIMPLE statements that
1585 have embedded statements. */
1586 switch (gimple_code (stmt
))
1589 copy
= copy_gimple_bind (as_a
<gbind
*> (stmt
), id
);
1594 gcatch
*catch_stmt
= as_a
<gcatch
*> (stmt
);
1595 s1
= remap_gimple_seq (gimple_catch_handler (catch_stmt
), id
);
1596 copy
= gimple_build_catch (gimple_catch_types (catch_stmt
), s1
);
1600 case GIMPLE_EH_FILTER
:
1601 s1
= remap_gimple_seq (gimple_eh_filter_failure (stmt
), id
);
1602 copy
= gimple_build_eh_filter (gimple_eh_filter_types (stmt
), s1
);
1606 s1
= remap_gimple_seq (gimple_try_eval (stmt
), id
);
1607 s2
= remap_gimple_seq (gimple_try_cleanup (stmt
), id
);
1608 copy
= gimple_build_try (s1
, s2
, gimple_try_kind (stmt
));
1611 case GIMPLE_WITH_CLEANUP_EXPR
:
1612 s1
= remap_gimple_seq (gimple_wce_cleanup (stmt
), id
);
1613 copy
= gimple_build_wce (s1
);
1616 case GIMPLE_OMP_PARALLEL
:
1618 gomp_parallel
*omp_par_stmt
= as_a
<gomp_parallel
*> (stmt
);
1619 s1
= remap_gimple_seq (gimple_omp_body (omp_par_stmt
), id
);
1620 copy
= gimple_build_omp_parallel
1622 gimple_omp_parallel_clauses (omp_par_stmt
),
1623 gimple_omp_parallel_child_fn (omp_par_stmt
),
1624 gimple_omp_parallel_data_arg (omp_par_stmt
));
1628 case GIMPLE_OMP_TASK
:
1629 s1
= remap_gimple_seq (gimple_omp_body (stmt
), id
);
1630 copy
= gimple_build_omp_task
1632 gimple_omp_task_clauses (stmt
),
1633 gimple_omp_task_child_fn (stmt
),
1634 gimple_omp_task_data_arg (stmt
),
1635 gimple_omp_task_copy_fn (stmt
),
1636 gimple_omp_task_arg_size (stmt
),
1637 gimple_omp_task_arg_align (stmt
));
1640 case GIMPLE_OMP_FOR
:
1641 s1
= remap_gimple_seq (gimple_omp_body (stmt
), id
);
1642 s2
= remap_gimple_seq (gimple_omp_for_pre_body (stmt
), id
);
1643 copy
= gimple_build_omp_for (s1
, gimple_omp_for_kind (stmt
),
1644 gimple_omp_for_clauses (stmt
),
1645 gimple_omp_for_collapse (stmt
), s2
);
1648 for (i
= 0; i
< gimple_omp_for_collapse (stmt
); i
++)
1650 gimple_omp_for_set_index (copy
, i
,
1651 gimple_omp_for_index (stmt
, i
));
1652 gimple_omp_for_set_initial (copy
, i
,
1653 gimple_omp_for_initial (stmt
, i
));
1654 gimple_omp_for_set_final (copy
, i
,
1655 gimple_omp_for_final (stmt
, i
));
1656 gimple_omp_for_set_incr (copy
, i
,
1657 gimple_omp_for_incr (stmt
, i
));
1658 gimple_omp_for_set_cond (copy
, i
,
1659 gimple_omp_for_cond (stmt
, i
));
1664 case GIMPLE_OMP_MASTER
:
1665 s1
= remap_gimple_seq (gimple_omp_body (stmt
), id
);
1666 copy
= gimple_build_omp_master (s1
);
1669 case GIMPLE_OMP_MASKED
:
1670 s1
= remap_gimple_seq (gimple_omp_body (stmt
), id
);
1671 copy
= gimple_build_omp_masked
1672 (s1
, gimple_omp_masked_clauses (stmt
));
1675 case GIMPLE_OMP_SCOPE
:
1676 s1
= remap_gimple_seq (gimple_omp_body (stmt
), id
);
1677 copy
= gimple_build_omp_scope
1678 (s1
, gimple_omp_scope_clauses (stmt
));
1681 case GIMPLE_OMP_TASKGROUP
:
1682 s1
= remap_gimple_seq (gimple_omp_body (stmt
), id
);
1683 copy
= gimple_build_omp_taskgroup
1684 (s1
, gimple_omp_taskgroup_clauses (stmt
));
1687 case GIMPLE_OMP_ORDERED
:
1688 s1
= remap_gimple_seq (gimple_omp_body (stmt
), id
);
1689 copy
= gimple_build_omp_ordered
1691 gimple_omp_ordered_clauses (as_a
<gomp_ordered
*> (stmt
)));
1694 case GIMPLE_OMP_SCAN
:
1695 s1
= remap_gimple_seq (gimple_omp_body (stmt
), id
);
1696 copy
= gimple_build_omp_scan
1697 (s1
, gimple_omp_scan_clauses (as_a
<gomp_scan
*> (stmt
)));
1700 case GIMPLE_OMP_SECTION
:
1701 s1
= remap_gimple_seq (gimple_omp_body (stmt
), id
);
1702 copy
= gimple_build_omp_section (s1
);
1705 case GIMPLE_OMP_SECTIONS
:
1706 s1
= remap_gimple_seq (gimple_omp_body (stmt
), id
);
1707 copy
= gimple_build_omp_sections
1708 (s1
, gimple_omp_sections_clauses (stmt
));
1711 case GIMPLE_OMP_SINGLE
:
1712 s1
= remap_gimple_seq (gimple_omp_body (stmt
), id
);
1713 copy
= gimple_build_omp_single
1714 (s1
, gimple_omp_single_clauses (stmt
));
1717 case GIMPLE_OMP_TARGET
:
1718 s1
= remap_gimple_seq (gimple_omp_body (stmt
), id
);
1719 copy
= gimple_build_omp_target
1720 (s1
, gimple_omp_target_kind (stmt
),
1721 gimple_omp_target_clauses (stmt
));
1724 case GIMPLE_OMP_TEAMS
:
1725 s1
= remap_gimple_seq (gimple_omp_body (stmt
), id
);
1726 copy
= gimple_build_omp_teams
1727 (s1
, gimple_omp_teams_clauses (stmt
));
1730 case GIMPLE_OMP_CRITICAL
:
1731 s1
= remap_gimple_seq (gimple_omp_body (stmt
), id
);
1732 copy
= gimple_build_omp_critical (s1
,
1733 gimple_omp_critical_name
1734 (as_a
<gomp_critical
*> (stmt
)),
1735 gimple_omp_critical_clauses
1736 (as_a
<gomp_critical
*> (stmt
)));
1740 s1
= remap_gimple_seq (gimple_assume_body (stmt
), id
);
1741 copy
= gimple_build_assume (gimple_assume_guard (stmt
), s1
);
1744 case GIMPLE_TRANSACTION
:
1746 gtransaction
*old_trans_stmt
= as_a
<gtransaction
*> (stmt
);
1747 gtransaction
*new_trans_stmt
;
1748 s1
= remap_gimple_seq (gimple_transaction_body (old_trans_stmt
),
1750 copy
= new_trans_stmt
= gimple_build_transaction (s1
);
1751 gimple_transaction_set_subcode (new_trans_stmt
,
1752 gimple_transaction_subcode (old_trans_stmt
));
1753 gimple_transaction_set_label_norm (new_trans_stmt
,
1754 gimple_transaction_label_norm (old_trans_stmt
));
1755 gimple_transaction_set_label_uninst (new_trans_stmt
,
1756 gimple_transaction_label_uninst (old_trans_stmt
));
1757 gimple_transaction_set_label_over (new_trans_stmt
,
1758 gimple_transaction_label_over (old_trans_stmt
));
1768 if (gimple_assign_single_p (stmt
)
1769 && gimple_assign_lhs (stmt
) == gimple_assign_rhs1 (stmt
)
1770 && auto_var_in_fn_p (gimple_assign_lhs (stmt
), id
->src_fn
))
1772 /* Here we handle statements that are not completely rewritten.
1773 First we detect some inlining-induced bogosities for
1776 /* Some assignments VAR = VAR; don't generate any rtl code
1777 and thus don't count as variable modification. Avoid
1778 keeping bogosities like 0 = 0. */
1779 tree decl
= gimple_assign_lhs (stmt
), value
;
1782 n
= id
->decl_map
->get (decl
);
1786 STRIP_TYPE_NOPS (value
);
1787 if (TREE_CONSTANT (value
) || TREE_READONLY (value
))
1792 /* For *ptr_N ={v} {CLOBBER}, if ptr_N is SSA_NAME defined
1793 in a block that we aren't copying during tree_function_versioning,
1794 just drop the clobber stmt. */
1795 if (id
->blocks_to_copy
&& gimple_clobber_p (stmt
))
1797 tree lhs
= gimple_assign_lhs (stmt
);
1798 if (TREE_CODE (lhs
) == MEM_REF
1799 && TREE_CODE (TREE_OPERAND (lhs
, 0)) == SSA_NAME
)
1801 gimple
*def_stmt
= SSA_NAME_DEF_STMT (TREE_OPERAND (lhs
, 0));
1802 if (gimple_bb (def_stmt
)
1803 && !bitmap_bit_p (id
->blocks_to_copy
,
1804 gimple_bb (def_stmt
)->index
))
1809 /* We do not allow CLOBBERs of handled components. In case
1810 returned value is stored via such handled component, remove
1811 the clobber so stmt verifier is happy. */
1812 if (gimple_clobber_p (stmt
)
1813 && TREE_CODE (gimple_assign_lhs (stmt
)) == RESULT_DECL
)
1815 tree remapped
= remap_decl (gimple_assign_lhs (stmt
), id
);
1816 if (!DECL_P (remapped
)
1817 && TREE_CODE (remapped
) != MEM_REF
)
1821 if (gimple_debug_bind_p (stmt
))
1823 tree var
= gimple_debug_bind_get_var (stmt
);
1824 tree value
= gimple_debug_bind_get_value (stmt
);
1825 if (id
->param_body_adjs
1826 && id
->param_body_adjs
->m_dead_stmts
.contains (stmt
))
1828 value
= unshare_expr_without_location (value
);
1829 id
->param_body_adjs
->remap_with_debug_expressions (&value
);
1832 gdebug
*copy
= gimple_build_debug_bind (var
, value
, stmt
);
1833 if (id
->reset_location
)
1834 gimple_set_location (copy
, input_location
);
1835 id
->debug_stmts
.safe_push (copy
);
1836 gimple_seq_add_stmt_without_update (&stmts
, copy
);
1839 if (gimple_debug_source_bind_p (stmt
))
1841 gdebug
*copy
= gimple_build_debug_source_bind
1842 (gimple_debug_source_bind_get_var (stmt
),
1843 gimple_debug_source_bind_get_value (stmt
),
1845 if (id
->reset_location
)
1846 gimple_set_location (copy
, input_location
);
1847 id
->debug_stmts
.safe_push (copy
);
	  gimple_seq_add_stmt_without_update (&stmts, copy);
	  return stmts;
	}

      if (gimple_debug_nonbind_marker_p (stmt))
	{
	  /* If the inlined function has too many debug markers,
	     don't copy them.  */
	  if (id->src_cfun->debug_marker_count
	      > param_max_debug_marker_count
	      || id->reset_location)
	    return stmts;

	  gdebug *copy = as_a <gdebug *> (gimple_copy (stmt));
	  id->debug_stmts.safe_push (copy);
	  gimple_seq_add_stmt_without_update (&stmts, copy);
	  return stmts;
	}

      /* Create a new deep copy of the statement.  */
      copy = gimple_copy (stmt);

      /* Clear flags that need revisiting.  */
      if (gcall *call_stmt = dyn_cast <gcall *> (copy))
	{
	  if (gimple_call_tail_p (call_stmt))
	    gimple_call_set_tail (call_stmt, false);
	  if (gimple_call_from_thunk_p (call_stmt))
	    gimple_call_set_from_thunk (call_stmt, false);
	  if (gimple_call_internal_p (call_stmt))
	    switch (gimple_call_internal_fn (call_stmt))
	      {
	      case IFN_GOMP_SIMD_LANE:
	      case IFN_GOMP_SIMD_VF:
	      case IFN_GOMP_SIMD_LAST_LANE:
	      case IFN_GOMP_SIMD_ORDERED_START:
	      case IFN_GOMP_SIMD_ORDERED_END:
		DECL_STRUCT_FUNCTION (id->dst_fn)->has_simduid_loops = true;
		break;
	      default:
		break;
	      }
	}

      /* Remap the region numbers for __builtin_eh_{pointer,filter},
	 RESX and EH_DISPATCH.  */
      if (id->eh_map)
	switch (gimple_code (copy))
	  {
	  case GIMPLE_CALL:
	    {
	      tree r, fndecl = gimple_call_fndecl (copy);
	      if (fndecl && fndecl_built_in_p (fndecl, BUILT_IN_NORMAL))
		switch (DECL_FUNCTION_CODE (fndecl))
		  {
		  case BUILT_IN_EH_COPY_VALUES:
		    r = gimple_call_arg (copy, 1);
		    r = remap_eh_region_tree_nr (r, id);
		    gimple_call_set_arg (copy, 1, r);
		    /* FALLTHRU */

		  case BUILT_IN_EH_POINTER:
		  case BUILT_IN_EH_FILTER:
		    r = gimple_call_arg (copy, 0);
		    r = remap_eh_region_tree_nr (r, id);
		    gimple_call_set_arg (copy, 0, r);
		    break;

		  default:
		    break;
		  }

	      /* Reset alias info if we didn't apply measures to
		 keep it valid over inlining by setting DECL_PT_UID.  */
	      if (!id->src_cfun->gimple_df
		  || !id->src_cfun->gimple_df->ipa_pta)
		gimple_call_reset_alias_info (as_a <gcall *> (copy));
	    }
	    break;

	  case GIMPLE_RESX:
	    {
	      gresx *resx_stmt = as_a <gresx *> (copy);
	      int r = gimple_resx_region (resx_stmt);
	      r = remap_eh_region_nr (r, id);
	      gimple_resx_set_region (resx_stmt, r);
	    }
	    break;

	  case GIMPLE_EH_DISPATCH:
	    {
	      geh_dispatch *eh_dispatch = as_a <geh_dispatch *> (copy);
	      int r = gimple_eh_dispatch_region (eh_dispatch);
	      r = remap_eh_region_nr (r, id);
	      gimple_eh_dispatch_set_region (eh_dispatch, r);
	    }
	    break;

	  default:
	    break;
	  }
    }

  /* If STMT has a block defined, map it to the newly constructed block.  */
  if (tree block = gimple_block (copy))
    {
      tree *n;
      n = id->decl_map->get (block);
      gcc_assert (n);
      gimple_set_block (copy, *n);
    }
  if (id->param_body_adjs)
    {
      gimple_seq extra_stmts = NULL;
      id->param_body_adjs->modify_gimple_stmt (&copy, &extra_stmts, stmt);
      if (!gimple_seq_empty_p (extra_stmts))
	{
	  memset (&wi, 0, sizeof (wi));
	  wi.info = id;
	  for (gimple_stmt_iterator egsi = gsi_start (extra_stmts);
	       !gsi_end_p (egsi);
	       gsi_next (&egsi))
	    walk_gimple_op (gsi_stmt (egsi), remap_gimple_op_r, &wi);
	  gimple_seq_add_seq_without_update (&stmts, extra_stmts);
	}
    }

  if (id->reset_location)
    gimple_set_location (copy, input_location);

  /* Debug statements ought to be rebuilt and not copied.  */
  gcc_checking_assert (!is_gimple_debug (copy));

  /* Remap all the operands in COPY.  */
  memset (&wi, 0, sizeof (wi));
  wi.info = id;
  if (skip_first)
    walk_tree (gimple_op_ptr (copy, 1), remap_gimple_op_r, &wi, NULL);
  else
    walk_gimple_op (copy, remap_gimple_op_r, &wi);

  /* Clear the copied virtual operands.  We are not remapping them here
     but are going to recreate them from scratch.  */
  if (gimple_has_mem_ops (copy))
    {
      gimple_set_vdef (copy, NULL_TREE);
      gimple_set_vuse (copy, NULL_TREE);
    }

  if (cfun->can_throw_non_call_exceptions)
    {
      /* When inlining a function which does not have non-call exceptions
	 enabled into a function that has (which only happens with
	 always-inline) we have to fixup stmts that cannot throw.  */
      if (gcond *cond = dyn_cast <gcond *> (copy))
	if (gimple_could_trap_p (cond))
	  {
	    gassign *cmp
	      = gimple_build_assign (make_ssa_name (boolean_type_node),
				     gimple_cond_code (cond),
				     gimple_cond_lhs (cond),
				     gimple_cond_rhs (cond));
	    gimple_seq_add_stmt_without_update (&stmts, cmp);
	    gimple_cond_set_code (cond, NE_EXPR);
	    gimple_cond_set_lhs (cond, gimple_assign_lhs (cmp));
	    gimple_cond_set_rhs (cond, boolean_false_node);
	  }
    }

  gimple_seq_add_stmt_without_update (&stmts, copy);
  return stmts;
}
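
/* Illustrative sketch of the fixup above (not part of the original
   source): with -fnon-call-exceptions enabled in the caller, a
   possibly-trapping condition such as a floating-point comparison

       if (a_1 < b_2) goto A; else goto B;

   is rewritten into

       tmp_3 = a_1 < b_2;
       if (tmp_3 != 0) goto A; else goto B;

   so the trapping comparison sits in an ordinary assignment that can
   end a block and receive its own EH edge.  */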
/* Copy basic block, scale profile accordingly.  Edges will be taken care of
   later.  */

static basic_block
copy_bb (copy_body_data *id, basic_block bb,
	 profile_count num, profile_count den)
{
  gimple_stmt_iterator gsi, copy_gsi, seq_gsi;
  basic_block copy_basic_block;
  tree decl;
  basic_block prev;

  profile_count::adjust_for_ipa_scaling (&num, &den);

  /* Search for previous copied basic block.  */
  prev = bb->prev_bb;
  while (!prev->aux)
    prev = prev->prev_bb;

  /* create_basic_block() will append every new block to
     basic_block_info automatically.  */
  copy_basic_block = create_basic_block (NULL, (basic_block) prev->aux);
  copy_basic_block->count = bb->count.apply_scale (num, den);

  copy_gsi = gsi_start_bb (copy_basic_block);

  for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      gimple_seq stmts;
      gimple *stmt = gsi_stmt (gsi);
      gimple *orig_stmt = stmt;
      gimple_stmt_iterator stmts_gsi;
      bool stmt_added = false;

      id->regimplify = false;
      stmts = remap_gimple_stmt (stmt, id);

      if (gimple_seq_empty_p (stmts))
	continue;

      seq_gsi = copy_gsi;

      for (stmts_gsi = gsi_start (stmts);
	   !gsi_end_p (stmts_gsi); )
	{
	  stmt = gsi_stmt (stmts_gsi);

	  /* Advance iterator now before stmt is moved to seq_gsi.  */
	  gsi_next (&stmts_gsi);

	  if (gimple_nop_p (stmt))
	    continue;

	  gimple_duplicate_stmt_histograms (cfun, stmt, id->src_cfun,
					    orig_stmt);

	  gsi_insert_after (&seq_gsi, stmt, GSI_NEW_STMT);

	  if (id->regimplify)
	    gimple_regimplify_operands (stmt, &seq_gsi);

	  stmt_added = true;
	}

      if (!stmt_added)
	continue;

      /* If copy_basic_block has been empty at the start of this iteration,
	 call gsi_start_bb again to get at the newly added statements.  */
      if (gsi_end_p (copy_gsi))
	copy_gsi = gsi_start_bb (copy_basic_block);
      else
	gsi_next (&copy_gsi);

      /* Process the new statement.  The call to gimple_regimplify_operands
	 possibly turned the statement into multiple statements, we
	 need to process all of them.  */
      do
	{
	  tree fn;
	  gcall *call_stmt;

	  stmt = gsi_stmt (copy_gsi);
	  call_stmt = dyn_cast <gcall *> (stmt);
	  if (call_stmt
	      && gimple_call_va_arg_pack_p (call_stmt)
	      && id->call_stmt
	      && ! gimple_call_va_arg_pack_p (id->call_stmt))
	    {
	      /* __builtin_va_arg_pack () should be replaced by
		 all arguments corresponding to ... in the caller.  */
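	      /* Illustrative example (a sketch, not from the original
		 source): when

		     int f (int x, ...)
		     { return g (5, __builtin_va_arg_pack ()); }

		 is inlined at the call f (1, 2, 3), the call to g becomes
		 g (5, 2, 3): the named argument binds to X and the two
		 anonymous arguments are spliced in place of
		 __builtin_va_arg_pack ().  */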
	      tree p;
	      gcall *new_call;
	      vec<tree> argarray;
	      size_t nargs_caller = gimple_call_num_args (id->call_stmt);
	      size_t nargs = nargs_caller;

	      for (p = DECL_ARGUMENTS (id->src_fn); p; p = DECL_CHAIN (p))
		{
		  /* Avoid crashing on invalid IL that doesn't have a
		     varargs function or that passes not enough arguments.  */
		  if (nargs == 0)
		    break;
		  nargs--;
		}

	      /* Create the new array of arguments.  */
	      size_t nargs_callee = gimple_call_num_args (call_stmt);
	      size_t n = nargs + nargs_callee;
	      argarray.create (n);
	      argarray.safe_grow_cleared (n, true);

	      /* Copy all the arguments before '...'  */
	      if (nargs_callee)
		memcpy (argarray.address (),
			gimple_call_arg_ptr (call_stmt, 0),
			nargs_callee * sizeof (tree));

	      /* Append the arguments passed in '...'  */
	      if (nargs)
		memcpy (argarray.address () + nargs_callee,
			gimple_call_arg_ptr (id->call_stmt, 0)
			+ (nargs_caller - nargs), nargs * sizeof (tree));

	      new_call = gimple_build_call_vec (gimple_call_fn (call_stmt),
						argarray);

	      argarray.release ();

	      /* Copy all GIMPLE_CALL flags, location and block, except
		 GF_CALL_VA_ARG_PACK.  */
	      gimple_call_copy_flags (new_call, call_stmt);
	      gimple_call_set_va_arg_pack (new_call, false);
	      gimple_call_set_fntype (new_call,
				      gimple_call_fntype (call_stmt));
	      /* location includes block.  */
	      gimple_set_location (new_call, gimple_location (stmt));
	      gimple_call_set_lhs (new_call, gimple_call_lhs (call_stmt));

	      gsi_replace (&copy_gsi, new_call, false);
	      stmt = new_call;
	    }
	  else if (call_stmt
		   && id->call_stmt
		   && (decl = gimple_call_fndecl (stmt))
		   && fndecl_built_in_p (decl, BUILT_IN_VA_ARG_PACK_LEN))
	    {
	      /* __builtin_va_arg_pack_len () should be replaced by
		 the number of anonymous arguments.  */
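	      /* Example sketch (not from the original source): inlining

		     int f (int x, ...)
		     { return __builtin_va_arg_pack_len (); }

		 at the call f (1, 2, 3) replaces the builtin call with
		 the constant 2, the number of arguments matching the
		 '...'.  */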
	      size_t nargs = gimple_call_num_args (id->call_stmt);
	      tree count, p;
	      gimple *new_stmt;

	      for (p = DECL_ARGUMENTS (id->src_fn); p; p = DECL_CHAIN (p))
		nargs--;

	      if (!gimple_call_lhs (stmt))
		{
		  /* Drop unused calls.  */
		  gsi_remove (&copy_gsi, false);
		  continue;
		}
	      else if (!gimple_call_va_arg_pack_p (id->call_stmt))
		{
		  count = build_int_cst (integer_type_node, nargs);
		  new_stmt
		    = gimple_build_assign (gimple_call_lhs (stmt), count);
		  gsi_replace (&copy_gsi, new_stmt, false);
		  stmt = new_stmt;
		}
	      else if (nargs != 0)
		{
		  tree newlhs
		    = create_tmp_reg_or_ssa_name (integer_type_node);
		  count = build_int_cst (integer_type_node, nargs);
		  new_stmt = gimple_build_assign (gimple_call_lhs (stmt),
						  PLUS_EXPR, newlhs, count);
		  gimple_call_set_lhs (stmt, newlhs);
		  gsi_insert_after (&copy_gsi, new_stmt, GSI_NEW_STMT);
		}
	    }
	  else if (call_stmt
		   && id->call_stmt
		   && gimple_call_internal_p (stmt)
		   && gimple_call_internal_fn (stmt) == IFN_TSAN_FUNC_EXIT)
	    {
	      /* Drop TSAN_FUNC_EXIT () internal calls during inlining.  */
	      gsi_remove (&copy_gsi, false);
	      continue;
	    }

	  /* Statements produced by inlining can be unfolded, especially
	     when we constant propagated some operands.  We can't fold
	     them right now for two reasons:
	     1) folding requires SSA_NAME_DEF_STMTs to be correct
	     2) we can't change function calls to builtins.
	     So we just mark the statement for later folding.  We mark
	     all new statements, instead of just statements that have
	     changed by some nontrivial substitution, so that even
	     statements made foldable indirectly are updated.  If this
	     turns out to be expensive, copy_body can be told to watch
	     for nontrivial changes.  */
	  if (id->statements_to_fold)
	    id->statements_to_fold->add (stmt);

	  /* We're duplicating a CALL_EXPR.  Find any corresponding
	     callgraph edges and update or duplicate them.  */
	  if (gcall *call_stmt = dyn_cast <gcall *> (stmt))
	    {
	      struct cgraph_edge *edge;

	      switch (id->transform_call_graph_edges)
		{
		case CB_CGE_DUPLICATE:
		  edge = id->src_node->get_edge (orig_stmt);
		  if (edge)
		    {
		      struct cgraph_edge *old_edge = edge;

		      /* A speculative call consists of multiple edges -
			 an indirect edge and one or more direct edges.
			 Duplicate the whole thing and distribute
			 frequencies accordingly.  */
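		      /* Illustrative shape (not from the original source):
			 a speculative call site

			     f_ptr ();   // indirect edge, count Ci
					 // plus direct edges to guessed
					 // targets t1..tk, counts C1..Ck

			 is cloned edge by edge below; each clone's count
			 becomes new_bb_count * Cj / (C1 + ... + Ck + Ci),
			 which is what the probability_in/apply_probability
			 pair computes.  */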
		      if (edge->speculative)
			{
			  int n = 0;
			  profile_count direct_cnt
			    = profile_count::zero ();

			  /* First figure out the distribution of counts
			     so we can re-scale BB profile accordingly.  */
			  for (cgraph_edge *e = old_edge; e;
			       e = e->next_speculative_call_target ())
			    direct_cnt = direct_cnt + e->count;

			  cgraph_edge *indirect
			    = old_edge->speculative_call_indirect_edge ();
			  profile_count indir_cnt = indirect->count;

			  /* Next iterate all direct edges, clone each one
			     and its corresponding reference and update the
			     profile.  */
			  for (cgraph_edge *e = old_edge;
			       e;
			       e = e->next_speculative_call_target ())
			    {
			      profile_count cnt = e->count;

			      id->dst_node->clone_reference
				(e->speculative_call_target_ref (), stmt);
			      edge = e->clone (id->dst_node, call_stmt,
					       gimple_uid (stmt), num, den,
					       true);
			      profile_probability prob
				= cnt.probability_in (direct_cnt
						      + indir_cnt);
			      edge->count
				= copy_basic_block->count.apply_probability
					(prob);
			      n++;
			    }
			  gcc_checking_assert
			    (indirect->num_speculative_call_targets_p ()
			     == n);

			  /* Duplicate the indirect edge after all direct
			     edges are cloned.  */
			  indirect = indirect->clone (id->dst_node, call_stmt,
						      gimple_uid (stmt),
						      num, den,
						      true);

			  profile_probability prob
			    = indir_cnt.probability_in (direct_cnt
							+ indir_cnt);
			  indirect->count
			    = copy_basic_block->count.apply_probability (prob);
			}
		      else
			{
			  edge = edge->clone (id->dst_node, call_stmt,
					      gimple_uid (stmt),
					      num, den,
					      true);
			  edge->count = copy_basic_block->count;
			}
		    }
		  break;

		case CB_CGE_MOVE_CLONES:
		  id->dst_node->set_call_stmt_including_clones (orig_stmt,
								call_stmt);
		  edge = id->dst_node->get_edge (stmt);
		  break;

		case CB_CGE_MOVE:
		  edge = id->dst_node->get_edge (orig_stmt);
		  if (edge)
		    edge = cgraph_edge::set_call_stmt (edge, call_stmt);
		  break;

		default:
		  gcc_unreachable ();
		}

	      /* Constant propagation on argument done during inlining
		 may create new direct call.  Produce an edge for it.  */
	      if ((!edge
		   || (edge->indirect_inlining_edge
		       && id->transform_call_graph_edges
			  == CB_CGE_MOVE_CLONES))
		  && id->dst_node->definition
		  && (fn = gimple_call_fndecl (stmt)) != NULL)
		{
		  struct cgraph_node *dest = cgraph_node::get_create (fn);

		  /* We have missing edge in the callgraph.  This can happen
		     when previous inlining turned an indirect call into a
		     direct call by constant propagating arguments or we are
		     producing dead clone (for further cloning).  In all
		     other cases we hit a bug (incorrect node sharing is the
		     most common reason for missing edges).  */
		  gcc_assert (!dest->definition
			      || dest->address_taken
			      || !id->src_node->definition
			      || !id->dst_node->definition);
		  if (id->transform_call_graph_edges == CB_CGE_MOVE_CLONES)
		    id->dst_node->create_edge_including_clones
		      (dest, orig_stmt, call_stmt, bb->count,
		       CIF_ORIGINALLY_INDIRECT_CALL);
		  else
		    id->dst_node->create_edge (dest, call_stmt,
					       bb->count)->inline_failed
		      = CIF_ORIGINALLY_INDIRECT_CALL;
		  if (dump_file)
		    fprintf (dump_file, "Created new direct edge to %s\n",
			     dest->dump_name ());
		}

	      notice_special_calls (as_a <gcall *> (stmt));
	    }

	  maybe_duplicate_eh_stmt_fn (cfun, stmt, id->src_cfun, orig_stmt,
				      id->eh_map, id->eh_lp_nr);

	  gsi_next (&copy_gsi);
	}
      while (!gsi_end_p (copy_gsi));

      copy_gsi = gsi_last_bb (copy_basic_block);
    }

  return copy_basic_block;
}
/* Inserting Single Entry Multiple Exit region in SSA form into code in SSA
   form is quite easy, since dominator relationship for old basic blocks does
   not change.

   There is, however, an exception: inlining might change the dominator
   relation across EH edges from basic blocks within the inlined function
   to landing pads in the function we inline into.

   The function fills in PHI_RESULTs of such PHI nodes if they refer
   to gimple regs.  Otherwise, the function marks the PHI_RESULT of such
   PHI nodes for renaming.  For non-gimple regs, renaming is safe: the
   EH edges are abnormal and SSA_NAME_OCCURS_IN_ABNORMAL_PHI must be
   set, and this means that there will be no overlapping live ranges
   for the underlying symbol.

   This might change in future if we allow redirecting of EH edges and
   we might want to change the way we build the CFG pre-inlining to
   include all the possible edges then.  */

static void
update_ssa_across_abnormal_edges (basic_block bb, basic_block ret_bb,
				  bool can_throw, bool nonlocal_goto)
{
  edge e;
  edge_iterator ei;

  FOR_EACH_EDGE (e, ei, bb->succs)
    if (!e->dest->aux
	|| ((basic_block)e->dest->aux)->index == ENTRY_BLOCK)
      {
	gphi *phi;
	gphi_iterator si;

	if (!nonlocal_goto)
	  gcc_assert (e->flags & EDGE_EH);

	if (!can_throw)
	  gcc_assert (!(e->flags & EDGE_EH));

	for (si = gsi_start_phis (e->dest); !gsi_end_p (si); gsi_next (&si))
	  {
	    edge re;

	    phi = si.phi ();

	    /* For abnormal goto/call edges the receiver can be the
	       ENTRY_BLOCK.  Do not assert this cannot happen.  */

	    gcc_assert ((e->flags & EDGE_EH)
			|| SSA_NAME_OCCURS_IN_ABNORMAL_PHI (PHI_RESULT (phi)));

	    re = find_edge (ret_bb, e->dest);
	    gcc_checking_assert (re);
	    gcc_assert ((re->flags & (EDGE_EH | EDGE_ABNORMAL))
			== (e->flags & (EDGE_EH | EDGE_ABNORMAL)));

	    SET_USE (PHI_ARG_DEF_PTR_FROM_EDGE (phi, e),
		     USE_FROM_PTR (PHI_ARG_DEF_PTR_FROM_EDGE (phi, re)));
	  }
      }
}
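
/* Illustrative sketch (not from the original source): if the landing pad
   block has

       x_3 = PHI <x_1(re), ...>

   where RE is the pre-existing edge from RET_BB, a new abnormal or EH
   edge E coming from the inlined body simply reuses x_1 as its PHI
   argument; the SSA_NAME_OCCURS_IN_ABNORMAL_PHI rule described above is
   what makes that reuse safe.  */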
/* Insert clobbers for automatic variables of inlined ID->src_fn
   function at the start of basic block ID->eh_landing_pad_dest.  */

static void
add_clobbers_to_eh_landing_pad (copy_body_data *id)
{
  tree var;
  basic_block bb = id->eh_landing_pad_dest;
  live_vars_map *vars = NULL;
  unsigned int cnt = 0;
  unsigned int i;

  FOR_EACH_VEC_SAFE_ELT (id->src_cfun->local_decls, i, var)
    if (VAR_P (var)
	&& !DECL_HARD_REGISTER (var)
	&& !TREE_THIS_VOLATILE (var)
	&& !DECL_HAS_VALUE_EXPR_P (var)
	&& !is_gimple_reg (var)
	&& auto_var_in_fn_p (var, id->src_fn)
	&& !lookup_attribute ("omp simd array", DECL_ATTRIBUTES (var)))
      {
	tree *t = id->decl_map->get (var);
	if (!t)
	  continue;
	tree new_var = *t;
	if (VAR_P (new_var)
	    && !DECL_HARD_REGISTER (new_var)
	    && !TREE_THIS_VOLATILE (new_var)
	    && !DECL_HAS_VALUE_EXPR_P (new_var)
	    && !is_gimple_reg (new_var)
	    && auto_var_in_fn_p (new_var, id->dst_fn))
	  {
	    if (vars == NULL)
	      vars = new live_vars_map;
	    vars->put (DECL_UID (var), cnt++);
	  }
      }
  if (vars == NULL)
    return;

  vec<bitmap_head> live = compute_live_vars (id->src_cfun, vars);
  FOR_EACH_VEC_SAFE_ELT (id->src_cfun->local_decls, i, var)
    if (VAR_P (var))
      {
	edge e;
	edge_iterator ei;
	bool needed = false;
	unsigned int *v = vars->get (DECL_UID (var));
	if (v == NULL)
	  continue;
	FOR_EACH_EDGE (e, ei, bb->preds)
	  if ((e->flags & EDGE_EH) != 0
	      && e->src->index >= id->add_clobbers_to_eh_landing_pads)
	    {
	      basic_block src_bb = (basic_block) e->src->aux;

	      if (bitmap_bit_p (&live[src_bb->index], *v))
		{
		  needed = true;
		  break;
		}
	    }
	if (needed)
	  {
	    tree new_var = *id->decl_map->get (var);
	    gimple_stmt_iterator gsi = gsi_after_labels (bb);
	    tree clobber = build_clobber (TREE_TYPE (new_var));
	    gimple *clobber_stmt = gimple_build_assign (new_var, clobber);
	    gsi_insert_before (&gsi, clobber_stmt, GSI_NEW_STMT);
	  }
      }
  destroy_live_vars (live);
  delete vars;
}
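
/* Sketch of the effect (illustrative only): for an inlined local
   aggregate TMP that is still live on some EH path, the landing pad
   gains

       tmp ={v} {CLOBBER};

   right after its labels, telling later passes that the variable is dead
   once the exception is being handled, so its stack slot can be
   shared.  */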
/* Copy edges from BB into its copy constructed earlier, scale profile
   accordingly.  Edges will be taken care of later.  Assume aux
   pointers point to the copies of each BB.  Return true if any
   debug stmts are left after a statement that must end the basic block.  */

static bool
copy_edges_for_bb (basic_block bb, profile_count num, profile_count den,
		   basic_block ret_bb, basic_block abnormal_goto_dest,
		   copy_body_data *id)
{
  basic_block new_bb = (basic_block) bb->aux;
  edge_iterator ei;
  edge old_edge;
  gimple_stmt_iterator si;
  bool need_debug_cleanup = false;

  /* Use the indices from the original blocks to create edges for the
     new ones.  */
  FOR_EACH_EDGE (old_edge, ei, bb->succs)
    if (!(old_edge->flags & EDGE_EH))
      {
	edge new_edge;
	int flags = old_edge->flags;
	location_t locus = old_edge->goto_locus;

	/* Return edges do get a FALLTHRU flag when they get inlined.  */
	if (old_edge->dest->index == EXIT_BLOCK
	    && !(flags & (EDGE_TRUE_VALUE|EDGE_FALSE_VALUE|EDGE_FAKE))
	    && old_edge->dest->aux != EXIT_BLOCK_PTR_FOR_FN (cfun))
	  flags |= EDGE_FALLTHRU;

	new_edge
	  = make_edge (new_bb, (basic_block) old_edge->dest->aux, flags);
	new_edge->probability = old_edge->probability;
	if (!id->reset_location)
	  new_edge->goto_locus = remap_location (locus, id);
      }

  if (bb->index == ENTRY_BLOCK || bb->index == EXIT_BLOCK)
    return false;

  /* When doing function splitting, we must decrease count of the return block
     which was previously reachable by block we did not copy.  */
  if (single_succ_p (bb) && single_succ_edge (bb)->dest->index == EXIT_BLOCK)
    FOR_EACH_EDGE (old_edge, ei, bb->preds)
      if (old_edge->src->index != ENTRY_BLOCK
	  && !old_edge->src->aux)
	new_bb->count -= old_edge->count ().apply_scale (num, den);

  /* Walk stmts from end to start so that splitting will adjust the BB
     pointer for each stmt at most once, even when we split the block
     multiple times.  */
  bool seen_nondebug = false;
  for (si = gsi_last_bb (new_bb); !gsi_end_p (si);)
    {
      bool can_throw, nonlocal_goto;
      gimple *copy_stmt = gsi_stmt (si);

      /* Do this before the possible split_block.  */
      gsi_prev (&si);

      /* If this tree could throw an exception, there are two
	 cases where we need to add abnormal edge(s): the
	 tree wasn't in a region and there is a "current
	 region" in the caller; or the original tree had
	 EH edges.  In both cases split the block after the tree,
	 and add abnormal edge(s) as needed; we need both
	 those from the callee and the caller.
	 We check whether the copy can throw, because the const
	 propagation can change an INDIRECT_REF which throws
	 into a COMPONENT_REF which doesn't.  If the copy
	 can throw, the original could also throw.  */
      can_throw = stmt_can_throw_internal (cfun, copy_stmt);
      nonlocal_goto
	= (stmt_can_make_abnormal_goto (copy_stmt)
	   && !computed_goto_p (copy_stmt));

      if (can_throw || nonlocal_goto)
	{
	  /* If there's only debug insns after copy_stmt don't split
	     the block but instead mark the block for cleanup.  */
	  if (!seen_nondebug)
	    need_debug_cleanup = true;
	  else
	    {
	      /* Note that bb's predecessor edges aren't necessarily
		 right at this point; split_block doesn't care.  */
	      edge e = split_block (new_bb, copy_stmt);
	      e->dest->aux = new_bb->aux;
	      seen_nondebug = false;
	    }
	}

      if (!is_gimple_debug (copy_stmt))
	seen_nondebug = true;

      bool update_probs = false;

      if (gimple_code (copy_stmt) == GIMPLE_EH_DISPATCH)
	{
	  make_eh_dispatch_edges (as_a <geh_dispatch *> (copy_stmt));
	  update_probs = true;
	}
      else if (can_throw)
	{
	  make_eh_edges (copy_stmt);
	  update_probs = true;
	}

      /* EH edges may not match old edges.  Copy as much as possible.  */
      if (update_probs)
	{
	  edge e;
	  edge_iterator ei;
	  basic_block copy_stmt_bb = gimple_bb (copy_stmt);

	  FOR_EACH_EDGE (old_edge, ei, bb->succs)
	    if ((old_edge->flags & EDGE_EH)
		&& (e = find_edge (copy_stmt_bb,
				   (basic_block) old_edge->dest->aux))
		&& (e->flags & EDGE_EH))
	      e->probability = old_edge->probability;

	  FOR_EACH_EDGE (e, ei, copy_stmt_bb->succs)
	    if (e->flags & EDGE_EH)
	      {
		if (!e->probability.initialized_p ())
		  e->probability = profile_probability::never ();
		if (e->dest->index < id->add_clobbers_to_eh_landing_pads)
		  {
		    if (id->eh_landing_pad_dest == NULL)
		      id->eh_landing_pad_dest = e->dest;
		    else
		      gcc_assert (id->eh_landing_pad_dest == e->dest);
		  }
	      }
	}

      /* If the call we inline cannot make abnormal goto do not add
	 additional abnormal edges but only retain those already present
	 in the original function body.  */
      if (abnormal_goto_dest == NULL)
	nonlocal_goto = false;
      if (nonlocal_goto)
	{
	  basic_block copy_stmt_bb = gimple_bb (copy_stmt);

	  if (get_abnormal_succ_dispatcher (copy_stmt_bb))
	    nonlocal_goto = false;
	  /* ABNORMAL_DISPATCHER (1) is for longjmp/setjmp or nonlocal gotos
	     in OpenMP regions which aren't allowed to be left abnormally.
	     So, no need to add abnormal edge in that case.  */
	  else if (is_gimple_call (copy_stmt)
		   && gimple_call_internal_p (copy_stmt)
		   && (gimple_call_internal_fn (copy_stmt)
		       == IFN_ABNORMAL_DISPATCHER)
		   && gimple_call_arg (copy_stmt, 0) == boolean_true_node)
	    nonlocal_goto = false;
	  else
	    make_single_succ_edge (copy_stmt_bb, abnormal_goto_dest,
				   EDGE_ABNORMAL);
	}

      if ((can_throw || nonlocal_goto)
	  && gimple_in_ssa_p (cfun))
	update_ssa_across_abnormal_edges (gimple_bb (copy_stmt), ret_bb,
					  can_throw, nonlocal_goto);
    }
  return need_debug_cleanup;
}
/* Copy the PHIs.  All blocks and edges are copied, some blocks
   may have been split and new outgoing EH edges inserted.
   BB points to the block of the original function and AUX pointers
   link the original and newly copied blocks.  */

static void
copy_phis_for_bb (basic_block bb, copy_body_data *id)
{
  basic_block const new_bb = (basic_block) bb->aux;
  edge_iterator ei;
  gphi *phi;
  gphi_iterator si;
  edge new_edge;
  bool inserted = false;

  for (si = gsi_start_phis (bb); !gsi_end_p (si); gsi_next (&si))
    {
      tree res, new_res;
      gphi *new_phi;

      phi = si.phi ();
      res = PHI_RESULT (phi);
      new_res = res;
      if (!virtual_operand_p (res)
	  && (!id->param_body_adjs
	      || !id->param_body_adjs->m_dead_stmts.contains (phi)))
	{
	  walk_tree (&new_res, copy_tree_body_r, id, NULL);
	  if (EDGE_COUNT (new_bb->preds) == 0)
	    {
	      /* Technically we'd want a SSA_DEFAULT_DEF here...  */
	      SSA_NAME_DEF_STMT (new_res) = gimple_build_nop ();
	    }
	  else
	    {
	      new_phi = create_phi_node (new_res, new_bb);
	      FOR_EACH_EDGE (new_edge, ei, new_bb->preds)
		{
		  edge old_edge
		    = find_edge ((basic_block) new_edge->src->aux, bb);
		  tree arg;
		  tree new_arg;
		  edge_iterator ei2;
		  location_t locus;

		  /* When doing partial cloning, we allow PHIs on the entry
		     block as long as all the arguments are the same.
		     Find any input edge to see argument to copy.  */
		  if (!old_edge)
		    FOR_EACH_EDGE (old_edge, ei2, bb->preds)
		      if (!old_edge->src->aux)
			break;

		  arg = PHI_ARG_DEF_FROM_EDGE (phi, old_edge);
		  new_arg = arg;
		  walk_tree (&new_arg, copy_tree_body_r, id, NULL);
		  gcc_assert (new_arg);
		  /* With return slot optimization we can end up with
		     non-gimple (foo *)&this->m, fix that here.  */
		  if (TREE_CODE (new_arg) != SSA_NAME
		      && TREE_CODE (new_arg) != FUNCTION_DECL
		      && !is_gimple_val (new_arg))
		    {
		      gimple_seq stmts = NULL;
		      new_arg = force_gimple_operand (new_arg, &stmts, true,
						      NULL);
		      gsi_insert_seq_on_edge (new_edge, stmts);
		      inserted = true;
		    }
		  locus = gimple_phi_arg_location_from_edge (phi, old_edge);
		  if (id->reset_location)
		    locus = input_location;
		  else
		    locus = remap_location (locus, id);
		  add_phi_arg (new_phi, new_arg, new_edge, locus);
		}
	    }
	}
    }

  /* Commit the delayed edge insertions.  */
  if (inserted)
    FOR_EACH_EDGE (new_edge, ei, new_bb->preds)
      gsi_commit_one_edge_insert (new_edge, NULL);
}

/* Wrapper for remap_decl so it can be used as a callback.  */

static tree
remap_decl_1 (tree decl, void *data)
{
  return remap_decl (decl, (copy_body_data *) data);
}
/* Build struct function and associated data structures for the new clone
   NEW_FNDECL to be built.  CALLEE_FNDECL is the original.  The function
   changes cfun to the function of NEW_FNDECL (and current_function_decl
   too).  */

static void
initialize_cfun (tree new_fndecl, tree callee_fndecl, profile_count count)
{
  struct function *src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);

  /* Register specific tree functions.  */
  gimple_register_cfg_hooks ();

  /* Get clean struct function.  */
  push_struct_function (new_fndecl, true);
  targetm.target_option.relayout_function (new_fndecl);

  /* We will rebuild these, so just sanity check that they are empty.  */
  gcc_assert (VALUE_HISTOGRAMS (cfun) == NULL);
  gcc_assert (cfun->local_decls == NULL);
  gcc_assert (cfun->cfg == NULL);
  gcc_assert (cfun->decl == new_fndecl);

  /* Copy items we preserve during cloning.  */
  cfun->static_chain_decl = src_cfun->static_chain_decl;
  cfun->nonlocal_goto_save_area = src_cfun->nonlocal_goto_save_area;
  cfun->function_end_locus = src_cfun->function_end_locus;
  cfun->curr_properties = src_cfun->curr_properties;
  cfun->last_verified = src_cfun->last_verified;
  cfun->va_list_gpr_size = src_cfun->va_list_gpr_size;
  cfun->va_list_fpr_size = src_cfun->va_list_fpr_size;
  cfun->has_nonlocal_label = src_cfun->has_nonlocal_label;
  cfun->calls_eh_return = src_cfun->calls_eh_return;
  cfun->stdarg = src_cfun->stdarg;
  cfun->after_inlining = src_cfun->after_inlining;
  cfun->can_throw_non_call_exceptions
    = src_cfun->can_throw_non_call_exceptions;
  cfun->can_delete_dead_exceptions = src_cfun->can_delete_dead_exceptions;
  cfun->returns_struct = src_cfun->returns_struct;
  cfun->returns_pcc_struct = src_cfun->returns_pcc_struct;

  init_empty_tree_cfg ();

  profile_status_for_fn (cfun) = profile_status_for_fn (src_cfun);

  profile_count num = count;
  profile_count den = ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count;
  profile_count::adjust_for_ipa_scaling (&num, &den);

  ENTRY_BLOCK_PTR_FOR_FN (cfun)->count
    = ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count.apply_scale
	(count, ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count);
  EXIT_BLOCK_PTR_FOR_FN (cfun)->count
    = EXIT_BLOCK_PTR_FOR_FN (src_cfun)->count.apply_scale
	(count, ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count);
  if (src_cfun->eh)
    init_eh_for_function ();

  if (src_cfun->gimple_df)
    {
      init_tree_ssa (cfun);
      cfun->gimple_df->in_ssa_p = src_cfun->gimple_df->in_ssa_p;
      if (cfun->gimple_df->in_ssa_p)
	init_ssa_operands (cfun);
    }
}
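
/* Illustrative note (not from the original source): apply_scale (num, den)
   computes count * num / den in profile_count arithmetic.  So a block
   executed 1000 times in a callee with entry count 500, cloned for a call
   site with count 100, ends up with 1000 * 100 / 500 = 200.  */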
/* Helper function for copy_cfg_body.  Move debug stmts from the end
   of NEW_BB to the beginning of successor basic blocks when needed.  If the
   successor has multiple predecessors, reset them, otherwise keep
   their value.  */

static void
maybe_move_debug_stmts_to_successors (copy_body_data *id, basic_block new_bb)
{
  edge e;
  edge_iterator ei;
  gimple_stmt_iterator si = gsi_last_nondebug_bb (new_bb);

  if (gsi_end_p (si)
      || gsi_one_before_end_p (si)
      || !(stmt_can_throw_internal (cfun, gsi_stmt (si))
	   || stmt_can_make_abnormal_goto (gsi_stmt (si))))
    return;

  FOR_EACH_EDGE (e, ei, new_bb->succs)
    {
      gimple_stmt_iterator ssi = gsi_last_bb (new_bb);
      gimple_stmt_iterator dsi = gsi_after_labels (e->dest);
      while (is_gimple_debug (gsi_stmt (ssi)))
	{
	  gimple *stmt = gsi_stmt (ssi);
	  gdebug *new_stmt;
	  tree var;
	  tree value;

	  /* For the last edge move the debug stmts instead of copying
	     them.  */
	  if (ei_one_before_end_p (ei))
	    {
	      si = ssi;
	      gsi_prev (&ssi);
	      if (!single_pred_p (e->dest) && gimple_debug_bind_p (stmt))
		{
		  gimple_debug_bind_reset_value (stmt);
		  gimple_set_location (stmt, UNKNOWN_LOCATION);
		}
	      gsi_remove (&si, false);
	      gsi_insert_before (&dsi, stmt, GSI_NEW_STMT);
	      continue;
	    }

	  if (gimple_debug_bind_p (stmt))
	    {
	      var = gimple_debug_bind_get_var (stmt);
	      if (single_pred_p (e->dest))
		{
		  value = gimple_debug_bind_get_value (stmt);
		  value = unshare_expr (value);
		  new_stmt = gimple_build_debug_bind (var, value, stmt);
		}
	      else
		new_stmt = gimple_build_debug_bind (var, NULL_TREE, NULL);
	    }
	  else if (gimple_debug_source_bind_p (stmt))
	    {
	      var = gimple_debug_source_bind_get_var (stmt);
	      value = gimple_debug_source_bind_get_value (stmt);
	      new_stmt = gimple_build_debug_source_bind (var, value, stmt);
	    }
	  else if (gimple_debug_nonbind_marker_p (stmt))
	    new_stmt = as_a <gdebug *> (gimple_copy (stmt));
	  else
	    gcc_unreachable ();
	  gsi_insert_before (&dsi, new_stmt, GSI_NEW_STMT);
	  id->debug_stmts.safe_push (new_stmt);
	  gsi_prev (&ssi);
	}
    }
}
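
/* Example of the above (a sketch, not from the original source): if
   NEW_BB ends with

       foo ();              // can throw internally
       # DEBUG x => x_1

   the debug bind cannot stay after the block-ending statement, so it is
   moved (for the last successor edge) or copied (for the others) to the
   start of each successor; on successors with several predecessors the
   bound value is reset, since it is only valid on this incoming path.  */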
/* Make a copy of the sub-loops of SRC_PARENT and place them
   as siblings of DEST_PARENT.  */

static void
copy_loops (copy_body_data *id,
	    class loop *dest_parent, class loop *src_parent)
{
  class loop *src_loop = src_parent->inner;
  while (src_loop)
    {
      if (!id->blocks_to_copy
	  || bitmap_bit_p (id->blocks_to_copy, src_loop->header->index))
	{
	  class loop *dest_loop = alloc_loop ();

	  /* Assign the new loop its header and latch and associate
	     those with the new loop.  */
	  dest_loop->header = (basic_block)src_loop->header->aux;
	  dest_loop->header->loop_father = dest_loop;
	  if (src_loop->latch != NULL)
	    {
	      dest_loop->latch = (basic_block)src_loop->latch->aux;
	      dest_loop->latch->loop_father = dest_loop;
	    }

	  /* Copy loop meta-data.  */
	  copy_loop_info (src_loop, dest_loop);
	  if (dest_loop->unroll)
	    cfun->has_unroll = true;
	  if (dest_loop->force_vectorize)
	    cfun->has_force_vectorize_loops = true;
	  if (id->src_cfun->last_clique != 0)
	    dest_loop->owned_clique
	      = remap_dependence_clique (id,
					 src_loop->owned_clique
					 ? src_loop->owned_clique : 1);

	  /* Finally place it into the loop array and the loop tree.  */
	  place_new_loop (cfun, dest_loop);
	  flow_loop_tree_node_add (dest_parent, dest_loop);

	  if (src_loop->simduid)
	    {
	      dest_loop->simduid = remap_decl (src_loop->simduid, id);
	      cfun->has_simduid_loops = true;
	    }

	  /* Recurse into the sub-loops.  */
	  copy_loops (id, dest_loop, src_loop);
	}
      src_loop = src_loop->next;
    }
}
/* Call redirect_call_stmt_to_callee on all calls in BB.  */

static void
redirect_all_calls (copy_body_data * id, basic_block bb)
{
  gimple_stmt_iterator si;
  gimple *last = last_stmt (bb);
  for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
    {
      gimple *stmt = gsi_stmt (si);
      if (is_gimple_call (stmt))
	{
	  tree old_lhs = gimple_call_lhs (stmt);
	  struct cgraph_edge *edge = id->dst_node->get_edge (stmt);
	  if (edge)
	    {
	      gimple *new_stmt
		= cgraph_edge::redirect_call_stmt_to_callee (edge);
	      /* If the IPA-SRA transformation, run as part of edge
		 redirection, removed the LHS because it is unused, save it
		 to killed_new_ssa_names so that we can prune it from debug
		 statements.  */
	      if (old_lhs
		  && TREE_CODE (old_lhs) == SSA_NAME
		  && !gimple_call_lhs (new_stmt))
		{
		  if (!id->killed_new_ssa_names)
		    id->killed_new_ssa_names = new hash_set<tree> (16);
		  id->killed_new_ssa_names->add (old_lhs);
		}

	      if (stmt == last && id->call_stmt && maybe_clean_eh_stmt (stmt))
		gimple_purge_dead_eh_edges (bb);
	    }
	}
    }
}
/* Make a copy of the body of FN so that it can be inserted inline in
   another function.  Walks FN via CFG, returns new fndecl.  */

static tree
copy_cfg_body (copy_body_data * id,
	       basic_block entry_block_map, basic_block exit_block_map,
	       basic_block new_entry)
{
  tree callee_fndecl = id->src_fn;
  /* Original cfun for the callee, doesn't change.  */
  struct function *src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
  struct function *cfun_to_copy;
  basic_block bb;
  tree new_fndecl = NULL;
  bool need_debug_cleanup = false;
  int last;
  profile_count den = ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count;
  profile_count num = entry_block_map->count;

  cfun_to_copy = id->src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);

  /* Register specific tree functions.  */
  gimple_register_cfg_hooks ();

  /* If we are inlining just region of the function, make sure to connect
     new entry to ENTRY_BLOCK_PTR_FOR_FN (cfun).  Since new entry can be
     part of loop, we must compute frequency and probability of
     ENTRY_BLOCK_PTR_FOR_FN (cfun) based on the frequencies and
     probabilities of edges incoming from nonduplicated region.  */
  if (new_entry)
    {
      edge e;
      edge_iterator ei;
      den = profile_count::zero ();

      FOR_EACH_EDGE (e, ei, new_entry->preds)
	if (!e->src->aux)
	  den += e->count ();
      ENTRY_BLOCK_PTR_FOR_FN (cfun)->count = den;
    }

  profile_count::adjust_for_ipa_scaling (&num, &den);

  /* Must have a CFG here at this point.  */
  gcc_assert (ENTRY_BLOCK_PTR_FOR_FN
	      (DECL_STRUCT_FUNCTION (callee_fndecl)));

  ENTRY_BLOCK_PTR_FOR_FN (cfun_to_copy)->aux = entry_block_map;
  EXIT_BLOCK_PTR_FOR_FN (cfun_to_copy)->aux = exit_block_map;
  entry_block_map->aux = ENTRY_BLOCK_PTR_FOR_FN (cfun_to_copy);
  exit_block_map->aux = EXIT_BLOCK_PTR_FOR_FN (cfun_to_copy);

  /* Duplicate any exception-handling regions.  */
  if (cfun->eh)
    id->eh_map = duplicate_eh_regions (cfun_to_copy, NULL, id->eh_lp_nr,
				       remap_decl_1, id);

  /* Use aux pointers to map the original blocks to copy.  */
  FOR_EACH_BB_FN (bb, cfun_to_copy)
    if (!id->blocks_to_copy || bitmap_bit_p (id->blocks_to_copy, bb->index))
      {
	basic_block new_bb = copy_bb (id, bb, num, den);
	bb->aux = new_bb;
	new_bb->aux = bb;
	new_bb->loop_father = entry_block_map->loop_father;
      }

  last = last_basic_block_for_fn (cfun);

  /* Now that we've duplicated the blocks, duplicate their edges.  */
  basic_block abnormal_goto_dest = NULL;
  if (id->call_stmt
      && stmt_can_make_abnormal_goto (id->call_stmt))
    {
      gimple_stmt_iterator gsi = gsi_for_stmt (id->call_stmt);

      bb = gimple_bb (id->call_stmt);
      gsi_next (&gsi);
      if (gsi_end_p (gsi))
	abnormal_goto_dest = get_abnormal_succ_dispatcher (bb);
    }
  FOR_ALL_BB_FN (bb, cfun_to_copy)
    if (!id->blocks_to_copy
	|| (bb->index > 0 && bitmap_bit_p (id->blocks_to_copy, bb->index)))
      need_debug_cleanup |= copy_edges_for_bb (bb, num, den, exit_block_map,
					       abnormal_goto_dest, id);

  if (id->eh_landing_pad_dest)
    {
      add_clobbers_to_eh_landing_pad (id);
      id->eh_landing_pad_dest = NULL;
    }

  if (new_entry)
    {
      edge e = make_edge (entry_block_map, (basic_block)new_entry->aux,
			  EDGE_FALLTHRU);
      e->probability = profile_probability::always ();
    }

  /* Duplicate the loop tree, if available and wanted.  */
  if (loops_for_fn (src_cfun) != NULL
      && current_loops != NULL)
    {
      copy_loops (id, entry_block_map->loop_father,
		  get_loop (src_cfun, 0));
      /* Defer to cfgcleanup to update loop-father fields of basic-blocks.  */
      loops_state_set (LOOPS_NEED_FIXUP);
    }

  /* If the loop tree in the source function needed fixup, mark the
     destination loop tree for fixup, too.  */
  if (loops_for_fn (src_cfun)->state & LOOPS_NEED_FIXUP)
    loops_state_set (LOOPS_NEED_FIXUP);

  if (gimple_in_ssa_p (cfun))
    FOR_ALL_BB_FN (bb, cfun_to_copy)
      if (!id->blocks_to_copy
	  || (bb->index > 0 && bitmap_bit_p (id->blocks_to_copy, bb->index)))
	copy_phis_for_bb (bb, id);

  FOR_ALL_BB_FN (bb, cfun_to_copy)
    if (bb->aux)
      {
	if (need_debug_cleanup
	    && bb->index != ENTRY_BLOCK
	    && bb->index != EXIT_BLOCK)
	  maybe_move_debug_stmts_to_successors (id, (basic_block) bb->aux);
	/* Update call edge destinations.  This cannot be done before loop
	   info is updated, because we may split basic blocks.  */
	if (id->transform_call_graph_edges == CB_CGE_DUPLICATE
	    && bb->index != ENTRY_BLOCK
	    && bb->index != EXIT_BLOCK)
	  redirect_all_calls (id, (basic_block)bb->aux);
	((basic_block)bb->aux)->aux = NULL;
	bb->aux = NULL;
      }

  /* Zero out AUX fields of newly created blocks during EH edge
     insertion.  */
  for (; last < last_basic_block_for_fn (cfun); last++)
    {
      if (need_debug_cleanup)
	maybe_move_debug_stmts_to_successors (id,
					      BASIC_BLOCK_FOR_FN (cfun, last));
      BASIC_BLOCK_FOR_FN (cfun, last)->aux = NULL;
      /* Update call edge destinations.  This cannot be done before loop
	 info is updated, because we may split basic blocks.  */
      if (id->transform_call_graph_edges == CB_CGE_DUPLICATE)
	redirect_all_calls (id, BASIC_BLOCK_FOR_FN (cfun, last));
    }
  entry_block_map->aux = NULL;
  exit_block_map->aux = NULL;

  if (id->eh_map)
    {
      delete id->eh_map;
      id->eh_map = NULL;
    }
  if (id->dependence_map)
    {
      delete id->dependence_map;
      id->dependence_map = NULL;
    }

  return new_fndecl;
}
/* Copy the debug STMT using ID.  We deal with these statements in a
   special way: if any variable in their VALUE expression wasn't
   remapped yet, we won't remap it, because that would get decl uids
   out of sync, causing codegen differences between -g and -g0.  If
   this arises, we drop the VALUE expression altogether.  */

static void
copy_debug_stmt (gdebug *stmt, copy_body_data *id)
{
  tree t, *n;
  struct walk_stmt_info wi;

  if (tree block = gimple_block (stmt))
    {
      n = id->decl_map->get (block);
      gimple_set_block (stmt, n ? *n : id->block);
    }

  if (gimple_debug_nonbind_marker_p (stmt))
    {
      if (id->call_stmt && !gimple_block (stmt))
	{
	  gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
	  gsi_remove (&gsi, true);
	}
      return;
    }

  /* Remap all the operands in COPY.  */
  memset (&wi, 0, sizeof (wi));
  wi.info = id;

  processing_debug_stmt = 1;

  if (gimple_debug_source_bind_p (stmt))
    t = gimple_debug_source_bind_get_var (stmt);
  else if (gimple_debug_bind_p (stmt))
    t = gimple_debug_bind_get_var (stmt);
  else
    gcc_unreachable ();

  if (TREE_CODE (t) == PARM_DECL
      && id->debug_map
      && (n = id->debug_map->get (t)))
    {
      gcc_assert (VAR_P (*n));
      t = *n;
    }
  else if (VAR_P (t) && !is_global_var (t) && !id->decl_map->get (t))
    /* T is a non-localized variable.  */;
  else
    walk_tree (&t, remap_gimple_op_r, &wi, NULL);

  if (gimple_debug_bind_p (stmt))
    {
      gimple_debug_bind_set_var (stmt, t);

      if (gimple_debug_bind_has_value_p (stmt))
	walk_tree (gimple_debug_bind_get_value_ptr (stmt),
		   remap_gimple_op_r, &wi, NULL);

      /* Punt if any decl couldn't be remapped.  */
      if (processing_debug_stmt < 0)
	gimple_debug_bind_reset_value (stmt);
    }
  else if (gimple_debug_source_bind_p (stmt))
    {
      gimple_debug_source_bind_set_var (stmt, t);
      /* When inlining and source bind refers to one of the optimized
	 away parameters, change the source bind into normal debug bind
	 referring to the corresponding DEBUG_EXPR_DECL that should have
	 been bound before the call stmt.  */
      t = gimple_debug_source_bind_get_value (stmt);
      if (t != NULL_TREE
	  && TREE_CODE (t) == PARM_DECL
	  && id->call_stmt)
	{
	  vec<tree, va_gc> **debug_args
	    = decl_debug_args_lookup (id->src_fn);
	  unsigned int i;
	  if (debug_args != NULL)
	    {
	      for (i = 0; i < vec_safe_length (*debug_args); i += 2)
		if ((**debug_args)[i] == DECL_ORIGIN (t)
		    && TREE_CODE ((**debug_args)[i + 1]) == DEBUG_EXPR_DECL)
		  {
		    t = (**debug_args)[i + 1];
		    stmt->subcode = GIMPLE_DEBUG_BIND;
		    gimple_debug_bind_set_value (stmt, t);
		    break;
		  }
	    }
	}
      if (gimple_debug_source_bind_p (stmt))
	walk_tree (gimple_debug_source_bind_get_value_ptr (stmt),
		   remap_gimple_op_r, &wi, NULL);
    }

  processing_debug_stmt = 0;
}
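
/* Illustrative sketch (not from the original source): a debug bind

       # DEBUG y => a * 2

   whose A was never remapped during inlining cannot be remapped now
   without minting decl uids that differ between -g and -g0, so the value
   is dropped instead and the bind degrades to

       # DEBUG y => NULL

   keeping code generation independent of debug info.  */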
/* Process deferred debug stmts.  In order to give values better odds
   of being successfully remapped, we delay the processing of debug
   stmts until all other stmts that might require remapping are
   processed.  */

static void
copy_debug_stmts (copy_body_data *id)
{
  if (!id->debug_stmts.exists ())
    return;

  for (gdebug *stmt : id->debug_stmts)
    copy_debug_stmt (stmt, id);

  id->debug_stmts.release ();
}
/* Make a copy of the body of SRC_FN so that it can be inserted inline in
   another function.  */

static tree
copy_tree_body (copy_body_data *id)
{
  tree fndecl = id->src_fn;
  tree body = DECL_SAVED_TREE (fndecl);

  walk_tree (&body, copy_tree_body_r, id, NULL);

  return body;
}

/* Make a copy of the body of FN so that it can be inserted inline in
   another function.  */

static tree
copy_body (copy_body_data *id,
	   basic_block entry_block_map, basic_block exit_block_map,
	   basic_block new_entry)
{
  tree fndecl = id->src_fn;
  tree body;

  /* If this body has a CFG, walk CFG and copy.  */
  gcc_assert (ENTRY_BLOCK_PTR_FOR_FN (DECL_STRUCT_FUNCTION (fndecl)));
  body = copy_cfg_body (id, entry_block_map, exit_block_map,
			new_entry);
  copy_debug_stmts (id);
  delete id->killed_new_ssa_names;
  id->killed_new_ssa_names = NULL;

  return body;
}
/* Return true if VALUE is an ADDR_EXPR of an automatic variable
   defined in function FN, or of a data member thereof.  */

static bool
self_inlining_addr_expr (tree value, tree fn)
{
  tree var;

  if (TREE_CODE (value) != ADDR_EXPR)
    return false;

  var = get_base_address (TREE_OPERAND (value, 0));

  return var && auto_var_in_fn_p (var, fn);
}
/* Append to BB a debug annotation that binds VAR to VALUE, inheriting
   lexical block and line number information from BASE_STMT, if given,
   or from the last stmt of the block otherwise.  */

static gimple *
insert_init_debug_bind (copy_body_data *id,
			basic_block bb, tree var, tree value,
			gimple *base_stmt)
{
  gimple *note;
  gimple_stmt_iterator gsi;
  tree tracked_var;

  if (!gimple_in_ssa_p (id->src_cfun))
    return NULL;

  if (!opt_for_fn (id->dst_fn, flag_var_tracking_assignments))
    return NULL;

  tracked_var = target_for_debug_bind (var);
  if (!tracked_var)
    return NULL;

  if (bb)
    {
      gsi = gsi_last_bb (bb);
      if (!base_stmt && !gsi_end_p (gsi))
	base_stmt = gsi_stmt (gsi);
    }

  note = gimple_build_debug_bind (tracked_var,
				  value == error_mark_node
				  ? NULL_TREE : unshare_expr (value),
				  base_stmt);

  if (bb)
    {
      if (!gsi_end_p (gsi))
	gsi_insert_after (&gsi, note, GSI_SAME_STMT);
      else
	gsi_insert_before (&gsi, note, GSI_SAME_STMT);
    }

  return note;
}
static void
insert_init_stmt (copy_body_data *id, basic_block bb, gimple *init_stmt)
{
  /* If VAR represents a zero-sized variable, it's possible that the
     assignment statement may result in no gimple statements.  */
  if (init_stmt)
    {
      gimple_stmt_iterator si = gsi_last_bb (bb);

      /* We can end up with init statements that store to a non-register
	 from a rhs with a conversion.  Handle that here by forcing the
	 rhs into a temporary.  gimple_regimplify_operands is not
	 prepared to do this for us.  */
      if (!is_gimple_debug (init_stmt)
	  && !is_gimple_reg (gimple_assign_lhs (init_stmt))
	  && is_gimple_reg_type (TREE_TYPE (gimple_assign_lhs (init_stmt)))
	  && gimple_assign_rhs_class (init_stmt) == GIMPLE_UNARY_RHS)
	{
	  tree rhs = build1 (gimple_assign_rhs_code (init_stmt),
			     TREE_TYPE (gimple_assign_lhs (init_stmt)),
			     gimple_assign_rhs1 (init_stmt));
	  rhs = force_gimple_operand_gsi (&si, rhs, true, NULL_TREE, false,
					  GSI_NEW_STMT);
	  gimple_assign_set_rhs_code (init_stmt, TREE_CODE (rhs));
	  gimple_assign_set_rhs1 (init_stmt, rhs);
	}
      gsi_insert_after (&si, init_stmt, GSI_NEW_STMT);
      if (!is_gimple_debug (init_stmt))
	{
	  gimple_regimplify_operands (init_stmt, &si);

	  tree def = gimple_assign_lhs (init_stmt);
	  insert_init_debug_bind (id, bb, def, def, init_stmt);
	}
    }
}
/* Deal with mismatched formal/actual parameters, in a rather brute-force way
   if need be (which should only be necessary for invalid programs).  Attempt
   to convert VALUE to TYPE and return the result if it is possible; just
   return a zero constant of the given type if it fails.  */

static tree
force_value_to_type (tree type, tree value)
{
  /* If we can match up types by promotion/demotion do so.  */
  if (fold_convertible_p (type, value))
    return fold_convert (type, value);

  /* ??? For valid programs we should not end up here.
     Still if we end up with truly mismatched types here, fall back
     to using a VIEW_CONVERT_EXPR or a literal zero to not leak invalid
     GIMPLE to the following passes.  */
  if (TREE_CODE (value) == WITH_SIZE_EXPR)
    return error_mark_node;
  else if (!is_gimple_reg_type (TREE_TYPE (value))
	   || TYPE_SIZE (type) == TYPE_SIZE (TREE_TYPE (value)))
    return fold_build1 (VIEW_CONVERT_EXPR, type, value);

  return build_zero_cst (type);
}
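
/* Example behavior (a sketch, not from the original source): a 'double'
   actual for an 'int' formal is fold_convert-ed; an aggregate actual
   is wrapped in a VIEW_CONVERT_EXPR; a scalar mismatch that would also
   change the size gives up and yields a zero constant of the formal's
   type.  */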
/* Initialize parameter P with VALUE.  If needed, produce init statement
   at the end of BB.  When BB is NULL, we return init statement to be
   output later.  */

static gimple *
setup_one_parameter (copy_body_data *id, tree p, tree value, tree fn,
		     basic_block bb, tree *vars)
{
  gimple *init_stmt = NULL;
  tree var;
  tree def = (gimple_in_ssa_p (cfun)
	      ? ssa_default_def (id->src_cfun, p) : NULL);

  /* Make an equivalent VAR_DECL.  Note that we must NOT remap the type
     here since the type of this decl must be visible to the calling
     function.  */
  var = copy_decl_to_var (p, id);

  /* Declare this new variable.  */
  DECL_CHAIN (var) = *vars;
  *vars = var;

  /* Make gimplifier happy about this variable.  */
  DECL_SEEN_IN_BIND_EXPR_P (var) = 1;

  /* If the parameter is never assigned to, has no SSA_NAMEs created,
     we would not need to create a new variable here at all, if it
     weren't for debug info.  Still, we can just use the argument
     value.  */
  if (TREE_READONLY (p)
      && !TREE_ADDRESSABLE (p)
      && value
      && !TREE_SIDE_EFFECTS (value)
      && !def)
    {
      /* We may produce non-gimple trees by adding NOPs or introduce invalid
	 sharing when the value is not constant or DECL.  And we need to make
	 sure that it cannot be modified from another path in the callee.  */
      if (((is_gimple_min_invariant (value)
	    /* When the parameter is used in a context that forces it to
	       not be a GIMPLE register avoid substituting something that
	       is not a decl there.  */
	    && ! DECL_NOT_GIMPLE_REG_P (p))
	   || (DECL_P (value) && TREE_READONLY (value))
	   || (auto_var_in_fn_p (value, id->dst_fn)
	       && !TREE_ADDRESSABLE (value)))
	  && useless_type_conversion_p (TREE_TYPE (p), TREE_TYPE (value))
	  /* We have to be very careful about ADDR_EXPR.  Make sure
	     the base variable isn't a local variable of the inlined
	     function, e.g., when doing recursive inlining, direct or
	     mutually-recursive or whatever, which is why we don't
	     just test whether fn == current_function_decl.  */
	  && ! self_inlining_addr_expr (value, fn))
	{
	  insert_decl_map (id, p, value);
	  if (!id->debug_map)
	    id->debug_map = new hash_map<tree, tree>;
	  id->debug_map->put (p, var);
	  return insert_init_debug_bind (id, bb, var, value, NULL);
	}
    }

  /* Register the VAR_DECL as the equivalent for the PARM_DECL;
     that way, when the PARM_DECL is encountered, it will be
     automatically replaced by the VAR_DECL.  */
  insert_decl_map (id, p, var);

  /* Even if P was TREE_READONLY, the new VAR should not be.  In the original
     code, we would have constructed a temporary, and then the function body
     would have never changed the value of P.  However, now, we will be
     constructing VAR directly.  Therefore, it must not be TREE_READONLY.  */
  TREE_READONLY (var) = 0;

  tree rhs = value;
  if (value
      && value != error_mark_node
      && !useless_type_conversion_p (TREE_TYPE (p), TREE_TYPE (value)))
    rhs = force_value_to_type (TREE_TYPE (p), value);

  /* If there is no setup required and we are in SSA, take the easy route
     replacing all SSA names representing the function parameter by the
     SSA name passed to function.

     We need to construct map for the variable anyway as it might be used
     in different SSA names when parameter is set in function.

     Do replacement at -O0 for const arguments replaced by constant.
     This is important for builtin_constant_p and other construct requiring
     constant argument to be visible in inlined function body.  */
  if (gimple_in_ssa_p (cfun) && rhs && def && is_gimple_reg (p)
      && (optimize
	  || (TREE_READONLY (p)
	      && is_gimple_min_invariant (rhs)))
      && (TREE_CODE (rhs) == SSA_NAME
	  || is_gimple_min_invariant (rhs))
      && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (def))
    {
      insert_decl_map (id, def, rhs);
      return insert_init_debug_bind (id, bb, var, rhs, NULL);
    }

  /* If the value of argument is never used, don't care about initializing
     it.  */
  if (optimize && gimple_in_ssa_p (cfun) && !def && is_gimple_reg (p))
    {
      gcc_assert (!value || !TREE_SIDE_EFFECTS (value));
      return insert_init_debug_bind (id, bb, var, rhs, NULL);
    }

  /* Initialize this VAR_DECL from the equivalent argument.  Convert
     the argument to the proper type in case it was promoted.  */
  if (value)
    {
      if (rhs == error_mark_node)
	{
	  insert_decl_map (id, p, var);
	  return insert_init_debug_bind (id, bb, var, rhs, NULL);
	}

      STRIP_USELESS_TYPE_CONVERSION (rhs);

      /* If we are in SSA form properly remap the default definition
	 or assign to a dummy SSA name if the parameter is unused and
	 we are not optimizing.  */
      if (gimple_in_ssa_p (cfun) && is_gimple_reg (p))
	{
	  if (def)
	    {
	      def = remap_ssa_name (def, id);
	      init_stmt = gimple_build_assign (def, rhs);
	      SSA_NAME_IS_DEFAULT_DEF (def) = 0;
	      set_ssa_default_def (cfun, var, NULL);
	    }
	  else if (!optimize)
	    {
	      def = make_ssa_name (var);
	      init_stmt = gimple_build_assign (def, rhs);
	    }
	}
      else if (!is_empty_type (TREE_TYPE (var)))
	init_stmt = gimple_build_assign (var, rhs);

      if (bb && init_stmt)
	insert_init_stmt (id, bb, init_stmt);
    }
  return init_stmt;
}
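
/* Worked example (illustrative, not from the original source): inlining

       static int add1 (int x) { return x + 1; }

   at 'add1 (y_2)' in SSA form takes the easy route above and maps X's
   default definition straight to y_2, emitting no statement at all; if
   instead X's address were taken in the callee, a real VAR_DECL is
   created and an init statement

       x = y_2;

   is appended to BB.  */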
/* Generate code to initialize the parameters of the function at the
   top of the stack in ID from the GIMPLE_CALL STMT.  */

static void
initialize_inlined_parameters (copy_body_data *id, gimple *stmt,
			       tree fn, basic_block bb)
{
  tree parms;
  size_t i;
  tree p;
  tree vars = NULL_TREE;
  tree static_chain = gimple_call_chain (stmt);

  /* Figure out what the parameters are.  */
  parms = DECL_ARGUMENTS (fn);

  /* Loop through the parameter declarations, replacing each with an
     equivalent VAR_DECL, appropriately initialized.  */
  for (p = parms, i = 0; p; p = DECL_CHAIN (p), i++)
    {
      tree val;
      val = i < gimple_call_num_args (stmt)
	    ? gimple_call_arg (stmt, i) : NULL;
      setup_one_parameter (id, p, val, fn, bb, &vars);
    }

  /* After remapping parameters remap their types.  This has to be done
     in a second loop over all parameters to appropriately remap
     variable sized arrays when the size is specified in a
     parameter following the array.  */
  for (p = parms, i = 0; p; p = DECL_CHAIN (p), i++)
    {
      tree *varp = id->decl_map->get (p);
      if (varp && VAR_P (*varp))
	{
	  tree def = (gimple_in_ssa_p (cfun) && is_gimple_reg (p)
		      ? ssa_default_def (id->src_cfun, p) : NULL);
	  tree var = *varp;
	  TREE_TYPE (var) = remap_type (TREE_TYPE (var), id);
	  /* Also remap the default definition if it was remapped
	     to the default definition of the parameter replacement
	     by the parameter setup.  */
	  if (def)
	    {
	      tree *defp = id->decl_map->get (def);
	      if (defp
		  && TREE_CODE (*defp) == SSA_NAME
		  && SSA_NAME_VAR (*defp) == var)
		TREE_TYPE (*defp) = TREE_TYPE (var);
	    }
	}
    }

  /* Initialize the static chain.  */
  p = DECL_STRUCT_FUNCTION (fn)->static_chain_decl;
  gcc_assert (fn != current_function_decl);
  if (p)
    {
      /* No static chain?  Seems like a bug in tree-nested.cc.  */
      gcc_assert (static_chain);

      setup_one_parameter (id, p, static_chain, fn, bb, &vars);
    }

  declare_inline_vars (id->block, vars);
}
/* Declare a return variable to replace the RESULT_DECL for the
   function we are calling.  An appropriate DECL_STMT is returned.
   The USE_STMT is filled to contain a use of the declaration to
   indicate the return value of the function.

   RETURN_SLOT, if non-null, is the place where the result should be
   stored.  It is set only for CALL_EXPR_RETURN_SLOT_OPT.  MODIFY_DEST,
   if non-null, was the LHS of the MODIFY_EXPR to which this call is
   the RHS.

   The return value is a (possibly null) value that holds the result
   as seen by the caller.  */

static tree
declare_return_variable (copy_body_data *id, tree return_slot,
			 tree modify_dest, basic_block entry_bb)
{
  tree callee = id->src_fn;
  tree result = DECL_RESULT (callee);
  tree callee_type = TREE_TYPE (result);
  tree caller_type;
  tree var, use;

  /* Handle type-mismatches in the function declaration return type
     vs. the call expression.  */
  if (modify_dest)
    caller_type = TREE_TYPE (modify_dest);
  else if (return_slot)
    caller_type = TREE_TYPE (return_slot);
  else /* No LHS on the call.  */
    caller_type = TREE_TYPE (TREE_TYPE (callee));

  /* We don't need to do anything for functions that don't return
     anything.  */
  if (VOID_TYPE_P (callee_type))
    return NULL_TREE;

  /* If there was a return slot, then the return value is the
     dereferenced address of that object.  */
  if (return_slot)
    {
      /* The front end shouldn't have used both return_slot and
	 a modify expression.  */
      gcc_assert (!modify_dest);
      if (DECL_BY_REFERENCE (result))
	{
	  tree return_slot_addr = build_fold_addr_expr (return_slot);
	  STRIP_USELESS_TYPE_CONVERSION (return_slot_addr);

	  /* We are going to construct *&return_slot and we can't do that
	     for variables believed to be not addressable.

	     FIXME: This check possibly can match, because values returned
	     via return slot optimization are not believed to have address
	     taken by alias analysis.  */
	  gcc_assert (TREE_CODE (return_slot) != SSA_NAME);
	  var = return_slot_addr;
	  mark_addressable (return_slot);
	}
      else
	{
	  var = return_slot;
	  gcc_assert (TREE_CODE (var) != SSA_NAME);
	  if (TREE_ADDRESSABLE (result))
	    mark_addressable (var);
	}
      if (DECL_NOT_GIMPLE_REG_P (result)
	  && !DECL_NOT_GIMPLE_REG_P (var))
	DECL_NOT_GIMPLE_REG_P (var) = 1;

      if (!useless_type_conversion_p (callee_type, caller_type))
	var = build1 (VIEW_CONVERT_EXPR, callee_type, var);

      use = NULL;
      goto done;
    }

  /* All types requiring non-trivial constructors should have been
     handled.  */
  gcc_assert (!TREE_ADDRESSABLE (callee_type));

  /* Attempt to avoid creating a new temporary variable.  */
  if (modify_dest
      && TREE_CODE (modify_dest) != SSA_NAME)
    {
      bool use_it = false;

      /* We can't use MODIFY_DEST if there's type promotion involved.  */
      if (!useless_type_conversion_p (callee_type, caller_type))
	use_it = false;

      /* ??? If we're assigning to a variable sized type, then we must
	 reuse the destination variable, because we've no good way to
	 create variable sized temporaries at this point.  */
      else if (!poly_int_tree_p (TYPE_SIZE_UNIT (caller_type)))
	use_it = true;

      /* If the callee cannot possibly modify MODIFY_DEST, then we can
	 reuse it as the result of the call directly.  Don't do this if
	 it would promote MODIFY_DEST to addressable.  */
      else if (TREE_ADDRESSABLE (result))
	use_it = false;
      else
	{
	  tree base_m = get_base_address (modify_dest);

	  /* If the base isn't a decl, then it's a pointer, and we don't
	     know where that's going to go.  */
	  if (!DECL_P (base_m))
	    use_it = false;
	  else if (is_global_var (base_m))
	    use_it = false;
	  else if (DECL_NOT_GIMPLE_REG_P (result)
		   && !DECL_NOT_GIMPLE_REG_P (base_m))
	    use_it = false;
	  else if (!TREE_ADDRESSABLE (base_m))
	    use_it = true;
	}

      if (use_it)
	{
	  var = modify_dest;
	  use = NULL;
	  goto done;
	}
    }

  gcc_assert (poly_int_tree_p (TYPE_SIZE_UNIT (callee_type)));

  var = copy_result_decl_to_var (result, id);
  DECL_SEEN_IN_BIND_EXPR_P (var) = 1;

  /* Do not have the rest of GCC warn about this variable as it should
     not be visible to the user.  */
  suppress_warning (var /* OPT_Wuninitialized?  */);

  declare_inline_vars (id->block, var);

  /* Build the use expr.  If the return type of the function was
     promoted, convert it back to the expected type.  */
  use = var;
  if (!useless_type_conversion_p (caller_type, TREE_TYPE (var)))
    {
      /* If we can match up types by promotion/demotion do so.  */
      if (fold_convertible_p (caller_type, var))
	use = fold_convert (caller_type, var);
      else
	{
	  /* ??? For valid programs we should not end up here.
	     Still if we end up with truly mismatched types here, fall back
	     to using a MEM_REF to not leak invalid GIMPLE to the following
	     passes.  */
	  /* Prevent var from being written into SSA form.  */
	  if (is_gimple_reg_type (TREE_TYPE (var)))
	    DECL_NOT_GIMPLE_REG_P (var) = true;
	  use = fold_build2 (MEM_REF, caller_type,
			     build_fold_addr_expr (var),
			     build_int_cst (ptr_type_node, 0));
	}
    }

  STRIP_USELESS_TYPE_CONVERSION (use);

  if (DECL_BY_REFERENCE (result))
    {
      TREE_ADDRESSABLE (var) = 1;
      var = build_fold_addr_expr (var);
    }

 done:
  /* Register the VAR_DECL as the equivalent for the RESULT_DECL; that
     way, when the RESULT_DECL is encountered, it will be
     automatically replaced by the VAR_DECL.

     When returning by reference, ensure that RESULT_DECL remaps to
     gimple_val.  */
  if (DECL_BY_REFERENCE (result)
      && !is_gimple_val (var))
    {
      tree temp = create_tmp_var (TREE_TYPE (result), "retvalptr");
      insert_decl_map (id, result, temp);
      /* When RESULT_DECL is in SSA form, we need to remap and initialize
	 its default_def SSA_NAME.  */
      if (gimple_in_ssa_p (id->src_cfun)
	  && is_gimple_reg (result))
	if (tree default_def = ssa_default_def (id->src_cfun, result))
	  {
	    temp = make_ssa_name (temp);
	    insert_decl_map (id, default_def, temp);
	  }
      insert_init_stmt (id, entry_bb, gimple_build_assign (temp, var));
    }
  else
    insert_decl_map (id, result, var);

  /* Remember this so we can ignore it in remap_decls.  */
  id->retvar = var;
  return use;
}
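
/* Example (a sketch, not from the original source): for

       s = f ();    // struct return with CALL_EXPR_RETURN_SLOT_OPT

   RETURN_SLOT is 's', so the callee's RESULT_DECL remaps to 's' and the
   inlined body writes its result in place; with no usable destination
   the code above instead declares a fresh 'retval' variable and returns
   it as the USE seen by the caller.  */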
/* Determine if the function can be copied.  If so return NULL.  If
   not return a string describing the reason for failure.  */

const char *
copy_forbidden (struct function *fun)
{
  const char *reason = fun->cannot_be_copied_reason;

  /* Only examine the function once.  */
  if (fun->cannot_be_copied_set)
    return reason;

  /* We cannot copy a function that receives a non-local goto
     because we cannot remap the destination label used in the
     function that is performing the non-local goto.  */
  /* ??? Actually, this should be possible, if we work at it.
     No doubt there's just a handful of places that simply
     assume it doesn't happen and don't substitute properly.  */
  if (fun->has_nonlocal_label)
    {
      reason = G_("function %q+F can never be copied "
		  "because it receives a non-local goto");
      goto fail;
    }

  if (fun->has_forced_label_in_static)
    {
      reason = G_("function %q+F can never be copied because it saves "
		  "address of local label in a static variable");
      goto fail;
    }

 fail:
  fun->cannot_be_copied_reason = reason;
  fun->cannot_be_copied_set = true;
  return reason;
}

static const char *inline_forbidden_reason;
/* A callback for walk_gimple_seq to handle statements.  Returns non-null
   iff a function cannot be inlined.  Also sets the reason why.  */

static tree
inline_forbidden_p_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
			 struct walk_stmt_info *wip)
{
  tree fn = (tree) wip->info;
  tree t;
  gimple *stmt = gsi_stmt (*gsi);

  switch (gimple_code (stmt))
    {
    case GIMPLE_CALL:
      /* Refuse to inline an alloca call unless the user explicitly forced
	 it, as this may drastically change the program's memory overhead
	 when the function using alloca is called in a loop.  In the GCC
	 present in SPEC2000, inlining into schedule_block caused it to
	 require 2GB of RAM instead of 256MB.  Don't do so for alloca calls
	 emitted for VLA objects, as those can't cause unbounded growth
	 (they're always wrapped inside stack_save/stack_restore
	 regions).  */
      if (gimple_maybe_alloca_call_p (stmt)
	  && !gimple_call_alloca_for_var_p (as_a <gcall *> (stmt))
	  && !lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn)))
	{
	  inline_forbidden_reason
	    = G_("function %q+F can never be inlined because it uses "
		 "alloca (override using the always_inline attribute)");
	  *handled_ops_p = true;
	  return fn;
	}

      t = gimple_call_fndecl (stmt);
      if (t == NULL_TREE)
	break;

      /* We cannot inline functions that call setjmp.  */
      if (setjmp_call_p (t))
	{
	  inline_forbidden_reason
	    = G_("function %q+F can never be inlined because it uses setjmp");
	  *handled_ops_p = true;
	  return t;
	}

      if (DECL_BUILT_IN_CLASS (t) == BUILT_IN_NORMAL)
	switch (DECL_FUNCTION_CODE (t))
	  {
	    /* We cannot inline functions that take a variable number of
	       arguments.  */
	  case BUILT_IN_VA_START:
	  case BUILT_IN_NEXT_ARG:
	  case BUILT_IN_VA_END:
	    inline_forbidden_reason
	      = G_("function %q+F can never be inlined because it "
		   "uses variable argument lists");
	    *handled_ops_p = true;
	    return t;

	  case BUILT_IN_LONGJMP:
	    /* We can't inline functions that call __builtin_longjmp at
	       all.  The non-local goto machinery really requires the
	       destination be in a different function.  If we allow the
	       function calling __builtin_longjmp to be inlined into the
	       function calling __builtin_setjmp, Things will Go Awry.  */
	    inline_forbidden_reason
	      = G_("function %q+F can never be inlined because "
		   "it uses setjmp-longjmp exception handling");
	    *handled_ops_p = true;
	    return t;

	  case BUILT_IN_NONLOCAL_GOTO:
	    /* Similarly.  */
	    inline_forbidden_reason
	      = G_("function %q+F can never be inlined because "
		   "it uses non-local goto");
	    *handled_ops_p = true;
	    return t;

	  case BUILT_IN_RETURN:
	  case BUILT_IN_APPLY_ARGS:
	    /* If a __builtin_apply_args caller would be inlined,
	       it would be saving arguments of the function it has
	       been inlined into.  Similarly __builtin_return would
	       return from the function the inline has been inlined
	       into.  */
	    inline_forbidden_reason
	      = G_("function %q+F can never be inlined because "
		   "it uses %<__builtin_return%> or %<__builtin_apply_args%>");
	    *handled_ops_p = true;
	    return t;

	  default:
	    break;
	  }
      break;

    case GIMPLE_GOTO:
      t = gimple_goto_dest (stmt);

      /* We will not inline a function which uses computed goto.  The
	 addresses of its local labels, which may be tucked into
	 global storage, are of course not constant across
	 instantiations, which causes unexpected behavior.  */
      if (TREE_CODE (t) != LABEL_DECL)
	{
	  inline_forbidden_reason
	    = G_("function %q+F can never be inlined "
		 "because it contains a computed goto");
	  *handled_ops_p = true;
	  return t;
	}
      break;

    default:
      break;
    }

  *handled_ops_p = false;
  return NULL_TREE;
}
/* Return true if FNDECL is a function that cannot be inlined into
   another one.  */

static bool
inline_forbidden_p (tree fndecl)
{
  struct function *fun = DECL_STRUCT_FUNCTION (fndecl);
  struct walk_stmt_info wi;
  basic_block bb;
  bool forbidden_p = false;

  /* First check for shared reasons not to copy the code.  */
  inline_forbidden_reason = copy_forbidden (fun);
  if (inline_forbidden_reason != NULL)
    return true;

  /* Next, walk the statements of the function looking for
     constructs we can't handle, or are non-optimal for inlining.  */
  hash_set<tree> visited_nodes;
  memset (&wi, 0, sizeof (wi));
  wi.info = (void *) fndecl;
  wi.pset = &visited_nodes;

  /* We cannot inline a function with a variable-sized parameter because we
     cannot materialize a temporary of such a type in the caller if need be.
     Note that the return case is not symmetrical because we can guarantee
     that a temporary is not needed by means of CALL_EXPR_RETURN_SLOT_OPT.  */
  for (tree parm = DECL_ARGUMENTS (fndecl); parm; parm = DECL_CHAIN (parm))
    if (!poly_int_tree_p (DECL_SIZE (parm)))
      {
	inline_forbidden_reason
	  = G_("function %q+F can never be inlined because "
	       "it has a VLA argument");
	return true;
      }

  FOR_EACH_BB_FN (bb, fun)
    {
      gimple_seq seq = bb_seq (bb);
      tree ret = walk_gimple_seq (seq, inline_forbidden_p_stmt, NULL, &wi);
      forbidden_p = (ret != NULL);
      if (forbidden_p)
	break;
    }

  return forbidden_p;
}
/* Return false if the function FNDECL cannot be inlined on account of its
   attributes, true otherwise.  */

bool
function_attribute_inlinable_p (const_tree fndecl)
{
  if (targetm.attribute_table)
    {
      const_tree a;

      for (a = DECL_ATTRIBUTES (fndecl); a; a = TREE_CHAIN (a))
	{
	  const_tree name = get_attribute_name (a);
	  int i;

	  for (i = 0; targetm.attribute_table[i].name != NULL; i++)
	    if (is_attribute_p (targetm.attribute_table[i].name, name))
	      return targetm.function_attribute_inlinable_p (fndecl);
	}
    }

  return true;
}
/* Returns nonzero if FN is a function that does not have any
   fundamental inline blocking properties.  */

bool
tree_inlinable_function_p (tree fn)
{
  bool inlinable = true;
  bool do_warning;
  tree always_inline;

  /* If we've already decided this function shouldn't be inlined,
     there's no need to check again.  */
  if (DECL_UNINLINABLE (fn))
    return false;

  /* We only warn for functions declared `inline' by the user.  */
  do_warning = (opt_for_fn (fn, warn_inline)
		&& DECL_DECLARED_INLINE_P (fn)
		&& !DECL_NO_INLINE_WARNING_P (fn)
		&& !DECL_IN_SYSTEM_HEADER (fn));

  always_inline = lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn));

  if (flag_no_inline
      && always_inline == NULL)
    {
      if (do_warning)
	warning (OPT_Winline, "function %q+F can never be inlined because it "
		 "is suppressed using %<-fno-inline%>", fn);
      inlinable = false;
    }
  else if (!function_attribute_inlinable_p (fn))
    {
      if (do_warning)
	warning (OPT_Winline, "function %q+F can never be inlined because it "
		 "uses attributes conflicting with inlining", fn);
      inlinable = false;
    }
  else if (inline_forbidden_p (fn))
    {
      /* See if we should warn about uninlinable functions.  Previously,
	 some of these warnings would be issued while trying to expand
	 the function inline, but that would cause multiple warnings
	 about functions that would for example call alloca.  But since
	 this is a property of the function, just one warning is enough.
	 As a bonus we can now give more details about the reason why a
	 function is not inlinable.  */
      if (always_inline)
	error (inline_forbidden_reason, fn);
      else if (do_warning)
	warning (OPT_Winline, inline_forbidden_reason, fn);

      inlinable = false;
    }

  /* Squirrel away the result so that we don't have to check again.  */
  DECL_UNINLINABLE (fn) = !inlinable;

  return inlinable;
}
/* Estimate the cost of a memory move of type TYPE.  Use machine dependent
   word size and take possible memcpy call into account and return
   cost based on whether optimizing for size or speed according to SPEED_P.  */

int
estimate_move_cost (tree type, bool ARG_UNUSED (speed_p))
{
  HOST_WIDE_INT size;

  gcc_assert (!VOID_TYPE_P (type));

  if (TREE_CODE (type) == VECTOR_TYPE)
    {
      scalar_mode inner = SCALAR_TYPE_MODE (TREE_TYPE (type));
      machine_mode simd = targetm.vectorize.preferred_simd_mode (inner);
      int orig_mode_size
	= estimated_poly_value (GET_MODE_SIZE (TYPE_MODE (type)));
      int simd_mode_size = estimated_poly_value (GET_MODE_SIZE (simd));
      return ((orig_mode_size + simd_mode_size - 1)
	      / simd_mode_size);
    }

  size = int_size_in_bytes (type);

  if (size < 0 || size > MOVE_MAX_PIECES * MOVE_RATIO (speed_p))
    /* Cost of a memcpy call, 3 arguments and the call.  */
    return 4;
  else
    return ((size + MOVE_MAX_PIECES - 1) / MOVE_MAX_PIECES);
}
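/* Worked example (illustrative; assumes a target where
   MOVE_MAX_PIECES == 16 and MOVE_RATIO (speed_p) == 8): a 24-byte
   struct has size 24 <= 16 * 8, so its move costs
   (24 + 16 - 1) / 16 == 2 units, while a 1024-byte struct exceeds
   the limit and is costed as a memcpy call: 3 arguments plus the
   call itself, i.e. 4 units.  */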
/* Returns cost of operation CODE, according to WEIGHTS.  */

static int
estimate_operator_cost (enum tree_code code, eni_weights *weights,
			tree op1 ATTRIBUTE_UNUSED, tree op2)
{
  switch (code)
    {
    /* These are "free" conversions, or their presumed cost
       is folded into other operations.  */
    case RANGE_EXPR:
    CASE_CONVERT:
    case COMPLEX_EXPR:
    case PAREN_EXPR:
    case VIEW_CONVERT_EXPR:
      return 0;

    /* Assign cost of 1 to usual operations.
       ??? We may consider mapping RTL costs to this.  */
    case COND_EXPR:
    case VEC_COND_EXPR:
    case VEC_PERM_EXPR:

    case PLUS_EXPR:
    case POINTER_PLUS_EXPR:
    case POINTER_DIFF_EXPR:
    case MINUS_EXPR:
    case MULT_EXPR:
    case MULT_HIGHPART_EXPR:

    case ADDR_SPACE_CONVERT_EXPR:
    case FIXED_CONVERT_EXPR:
    case FIX_TRUNC_EXPR:

    case NEGATE_EXPR:
    case FLOAT_EXPR:
    case MIN_EXPR:
    case MAX_EXPR:
    case ABS_EXPR:
    case ABSU_EXPR:

    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:

    case BIT_IOR_EXPR:
    case BIT_XOR_EXPR:
    case BIT_AND_EXPR:
    case BIT_NOT_EXPR:

    case TRUTH_ANDIF_EXPR:
    case TRUTH_ORIF_EXPR:
    case TRUTH_AND_EXPR:
    case TRUTH_OR_EXPR:
    case TRUTH_XOR_EXPR:
    case TRUTH_NOT_EXPR:

    case LT_EXPR:
    case LE_EXPR:
    case GT_EXPR:
    case GE_EXPR:
    case EQ_EXPR:
    case NE_EXPR:
    case ORDERED_EXPR:
    case UNORDERED_EXPR:

    case UNLT_EXPR:
    case UNLE_EXPR:
    case UNGT_EXPR:
    case UNGE_EXPR:
    case UNEQ_EXPR:
    case LTGT_EXPR:

    case CONJ_EXPR:

    case PREDECREMENT_EXPR:
    case PREINCREMENT_EXPR:
    case POSTDECREMENT_EXPR:
    case POSTINCREMENT_EXPR:

    case REALIGN_LOAD_EXPR:

    case WIDEN_PLUS_EXPR:
    case WIDEN_MINUS_EXPR:
    case WIDEN_SUM_EXPR:
    case WIDEN_MULT_EXPR:
    case DOT_PROD_EXPR:
    case SAD_EXPR:
    case WIDEN_MULT_PLUS_EXPR:
    case WIDEN_MULT_MINUS_EXPR:
    case WIDEN_LSHIFT_EXPR:

    case VEC_WIDEN_PLUS_HI_EXPR:
    case VEC_WIDEN_PLUS_LO_EXPR:
    case VEC_WIDEN_MINUS_HI_EXPR:
    case VEC_WIDEN_MINUS_LO_EXPR:
    case VEC_WIDEN_MULT_HI_EXPR:
    case VEC_WIDEN_MULT_LO_EXPR:
    case VEC_WIDEN_MULT_EVEN_EXPR:
    case VEC_WIDEN_MULT_ODD_EXPR:
    case VEC_UNPACK_HI_EXPR:
    case VEC_UNPACK_LO_EXPR:
    case VEC_UNPACK_FLOAT_HI_EXPR:
    case VEC_UNPACK_FLOAT_LO_EXPR:
    case VEC_UNPACK_FIX_TRUNC_HI_EXPR:
    case VEC_UNPACK_FIX_TRUNC_LO_EXPR:
    case VEC_PACK_TRUNC_EXPR:
    case VEC_PACK_SAT_EXPR:
    case VEC_PACK_FIX_TRUNC_EXPR:
    case VEC_PACK_FLOAT_EXPR:
    case VEC_WIDEN_LSHIFT_HI_EXPR:
    case VEC_WIDEN_LSHIFT_LO_EXPR:
    case VEC_DUPLICATE_EXPR:
    case VEC_SERIES_EXPR:

      return 1;

    /* Few special cases of expensive operations.  This is useful
       to avoid inlining on functions having too many of these.  */
    case TRUNC_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case EXACT_DIV_EXPR:
    case TRUNC_MOD_EXPR:
    case CEIL_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case ROUND_MOD_EXPR:
    case RDIV_EXPR:
      if (TREE_CODE (op2) != INTEGER_CST)
	return weights->div_mod_cost;
      return 1;

    /* Bit-field insertion needs several shift and mask operations.  */
    case BIT_INSERT_EXPR:
      return 3;

    default:
      /* We expect a copy assignment with no operator.  */
      gcc_assert (get_gimple_rhs_class (code) == GIMPLE_SINGLE_RHS);
      return 0;
    }
}
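/* For example (illustrative): a division "x / y" whose divisor is not
   an INTEGER_CST is charged weights->div_mod_cost, whereas "x / 8"
   gets the generic cost of 1, because a division by a constant can
   usually be expanded into shifts and multiplies.  */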
/* Estimate number of instructions that will be created by expanding
   the statements in the statement sequence STMTS.
   WEIGHTS contains weights attributed to various constructs.  */

int
estimate_num_insns_seq (gimple_seq stmts, eni_weights *weights)
{
  int cost = 0;
  gimple_stmt_iterator gsi;

  for (gsi = gsi_start (stmts); !gsi_end_p (gsi); gsi_next (&gsi))
    cost += estimate_num_insns (gsi_stmt (gsi), weights);

  return cost;
}
/* Estimate number of instructions that will be created by expanding STMT.
   WEIGHTS contains weights attributed to various constructs.  */

int
estimate_num_insns (gimple *stmt, eni_weights *weights)
{
  unsigned cost, i;
  enum gimple_code code = gimple_code (stmt);
  tree lhs;
  tree rhs;

  switch (code)
    {
    case GIMPLE_ASSIGN:
      /* Try to estimate the cost of assignments.  We have two cases to
	 deal with:
	 1) Simple assignments to registers;
	 2) Stores to things that must live in memory.  This includes
	    "normal" stores to scalars, but also assignments of large
	    structures, or constructors of big arrays.

	 Let us look at these two cases, assuming we have "a = b + C":
	 <GIMPLE_ASSIGN <var_decl "a">
			<plus_expr <var_decl "b"> <constant C>>
	 If "a" is a GIMPLE register, the assignment to it is free on almost
	 any target, because "a" usually ends up in a real register.  Hence
	 the only cost of this expression comes from the PLUS_EXPR, and we
	 can ignore the GIMPLE_ASSIGN.
	 If "a" is not a GIMPLE register, the assignment to "a" will most
	 likely be a real store, so the cost of the GIMPLE_ASSIGN is the cost
	 of moving something into "a", which we compute using the function
	 estimate_move_cost.  */
      if (gimple_clobber_p (stmt))
	return 0;	/* ={v} {CLOBBER} stmt expands to nothing.  */

      lhs = gimple_assign_lhs (stmt);
      rhs = gimple_assign_rhs1 (stmt);

      cost = 0;

      /* Account for the cost of moving to / from memory.  */
      if (gimple_store_p (stmt))
	cost += estimate_move_cost (TREE_TYPE (lhs), weights->time_based);
      if (gimple_assign_load_p (stmt))
	cost += estimate_move_cost (TREE_TYPE (rhs), weights->time_based);

      cost += estimate_operator_cost (gimple_assign_rhs_code (stmt), weights,
				      gimple_assign_rhs1 (stmt),
				      get_gimple_rhs_class (gimple_assign_rhs_code (stmt))
				      == GIMPLE_BINARY_RHS
				      ? gimple_assign_rhs2 (stmt) : NULL);
      break;

    case GIMPLE_COND:
      cost = 1 + estimate_operator_cost (gimple_cond_code (stmt), weights,
					 gimple_op (stmt, 0),
					 gimple_op (stmt, 1));
      break;

    case GIMPLE_SWITCH:
      {
	gswitch *switch_stmt = as_a <gswitch *> (stmt);
	/* Take into account cost of the switch + guess 2 conditional jumps
	   for each case label.

	   TODO: once the switch expansion logic is sufficiently separated,
	   we can do a better job of estimating the cost of the switch.  */
	if (weights->time_based)
	  cost = floor_log2 (gimple_switch_num_labels (switch_stmt)) * 2;
	else
	  cost = gimple_switch_num_labels (switch_stmt) * 2;
      }
      break;

    case GIMPLE_CALL:
      {
	tree decl;

	if (gimple_call_internal_p (stmt))
	  return 0;
	else if ((decl = gimple_call_fndecl (stmt))
		 && fndecl_built_in_p (decl))
	  {
	    /* Do not special case builtins where we see the body.
	       This just confuses the inliner.  */
	    struct cgraph_node *node;
	    if ((node = cgraph_node::get (decl))
		&& node->definition)
	      ;
	    /* For builtins that are likely expanded to nothing or
	       inlined do not account operand costs.  */
	    else if (is_simple_builtin (decl))
	      return 0;
	    else if (is_inexpensive_builtin (decl))
	      return weights->target_builtin_call_cost;
	    else if (gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
	      {
		/* We canonicalize x * x to pow (x, 2.0) with -ffast-math, so
		   specialize the cheap expansion we do here.
		   ??? This asks for a more general solution.  */
		switch (DECL_FUNCTION_CODE (decl))
		  {
		  CASE_FLT_FN (BUILT_IN_POW):
		    if (TREE_CODE (gimple_call_arg (stmt, 1)) == REAL_CST
			&& (real_equal
			    (&TREE_REAL_CST (gimple_call_arg (stmt, 1)),
			     &dconst2)))
		      return estimate_operator_cost
			  (MULT_EXPR, weights, gimple_call_arg (stmt, 0),
			   gimple_call_arg (stmt, 0));
		    break;

		  default:
		    break;
		  }
	      }
	  }

	cost = decl ? weights->call_cost : weights->indirect_call_cost;
	if (gimple_call_lhs (stmt))
	  cost += estimate_move_cost (TREE_TYPE (gimple_call_lhs (stmt)),
				      weights->time_based);
	for (i = 0; i < gimple_call_num_args (stmt); i++)
	  {
	    tree arg = gimple_call_arg (stmt, i);
	    cost += estimate_move_cost (TREE_TYPE (arg),
					weights->time_based);
	  }
	break;
      }

    case GIMPLE_RETURN:
      return weights->return_cost;

    case GIMPLE_GOTO:
    case GIMPLE_LABEL:
    case GIMPLE_NOP:
    case GIMPLE_PHI:
    case GIMPLE_DEBUG:
    case GIMPLE_PREDICT:
      return 0;

    case GIMPLE_ASM:
      {
	int count = asm_str_count (gimple_asm_string (as_a <gasm *> (stmt)));
	/* 1000 means infinity.  This avoids overflows later
	   with very long asm statements.  */
	if (count > 1000)
	  count = 1000;
	/* If this asm is asm inline, count anything as minimum size.  */
	if (gimple_asm_inline_p (as_a <gasm *> (stmt)))
	  count = MIN (1, count);
	return MAX (1, count);
      }

    case GIMPLE_RESX:
      /* This is either going to be an external function call with one
	 argument, or two register copy statements plus a goto.  */
      return 2;

    case GIMPLE_EH_DISPATCH:
      /* ??? This is going to turn into a switch statement.  Ideally
	 we'd have a look at the eh region and estimate the number of
	 edges involved.  */
      return 10;

    case GIMPLE_BIND:
      return estimate_num_insns_seq (
	       gimple_bind_body (as_a <gbind *> (stmt)),
	       weights);

    case GIMPLE_EH_FILTER:
      return estimate_num_insns_seq (gimple_eh_filter_failure (stmt), weights);

    case GIMPLE_CATCH:
      return estimate_num_insns_seq (gimple_catch_handler (
				       as_a <gcatch *> (stmt)),
				     weights);

    case GIMPLE_TRY:
      return (estimate_num_insns_seq (gimple_try_eval (stmt), weights)
	      + estimate_num_insns_seq (gimple_try_cleanup (stmt), weights));

    /* OMP directives are generally very expensive.  */

    case GIMPLE_OMP_RETURN:
    case GIMPLE_OMP_SECTIONS_SWITCH:
    case GIMPLE_OMP_ATOMIC_STORE:
    case GIMPLE_OMP_CONTINUE:
      /* ...except these, which are cheap.  */
      return 0;

    case GIMPLE_OMP_ATOMIC_LOAD:
      return weights->omp_cost;

    case GIMPLE_OMP_FOR:
      return (weights->omp_cost
	      + estimate_num_insns_seq (gimple_omp_body (stmt), weights)
	      + estimate_num_insns_seq (gimple_omp_for_pre_body (stmt),
					weights));

    case GIMPLE_OMP_PARALLEL:
    case GIMPLE_OMP_TASK:
    case GIMPLE_OMP_CRITICAL:
    case GIMPLE_OMP_MASTER:
    case GIMPLE_OMP_MASKED:
    case GIMPLE_OMP_SCOPE:
    case GIMPLE_OMP_TASKGROUP:
    case GIMPLE_OMP_ORDERED:
    case GIMPLE_OMP_SCAN:
    case GIMPLE_OMP_SECTION:
    case GIMPLE_OMP_SECTIONS:
    case GIMPLE_OMP_SINGLE:
    case GIMPLE_OMP_TARGET:
    case GIMPLE_OMP_TEAMS:
      return (weights->omp_cost
	      + estimate_num_insns_seq (gimple_omp_body (stmt), weights));

    case GIMPLE_TRANSACTION:
      return (weights->tm_cost
	      + estimate_num_insns_seq (gimple_transaction_body (
					  as_a <gtransaction *> (stmt)),
					weights));

    default:
      gcc_unreachable ();
    }

  return cost;
}
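/* Putting the pieces together (illustrative): for "*p = a + b" under
   the time-based weights, the PLUS_EXPR contributes 1 unit and the
   store through *p adds estimate_move_cost of the stored type, while
   "c = a + b" with "c" a GIMPLE register costs only the 1 unit of the
   PLUS_EXPR itself.  */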
/* Estimate number of instructions that will be created by expanding
   function FNDECL.  WEIGHTS contains weights attributed to various
   constructs.  */

int
estimate_num_insns_fn (tree fndecl, eni_weights *weights)
{
  struct function *my_function = DECL_STRUCT_FUNCTION (fndecl);
  gimple_stmt_iterator bsi;
  basic_block bb;
  int n = 0;

  gcc_assert (my_function && my_function->cfg);
  FOR_EACH_BB_FN (bb, my_function)
    {
      for (bsi = gsi_start_bb (bb); !gsi_end_p (bsi); gsi_next (&bsi))
	n += estimate_num_insns (gsi_stmt (bsi), weights);
    }

  return n;
}
/* Initializes weights used by estimate_num_insns.  */

void
init_inline_once (void)
{
  eni_size_weights.call_cost = 1;
  eni_size_weights.indirect_call_cost = 3;
  eni_size_weights.target_builtin_call_cost = 1;
  eni_size_weights.div_mod_cost = 1;
  eni_size_weights.omp_cost = 40;
  eni_size_weights.tm_cost = 10;
  eni_size_weights.time_based = false;
  eni_size_weights.return_cost = 1;

  /* Estimating time for call is difficult, since we have no idea what the
     called function does.  In the current uses of eni_time_weights,
     underestimating the cost does less harm than overestimating it, so
     we choose a rather small value here.  */
  eni_time_weights.call_cost = 10;
  eni_time_weights.indirect_call_cost = 15;
  eni_time_weights.target_builtin_call_cost = 1;
  eni_time_weights.div_mod_cost = 10;
  eni_time_weights.omp_cost = 40;
  eni_time_weights.tm_cost = 40;
  eni_time_weights.time_based = true;
  eni_time_weights.return_cost = 2;
}
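/* Consequence of these weights (illustrative): a direct call "foo (x)"
   counts as 1 instruction for size estimates but 10 for time
   estimates, before the per-argument and return-value move costs are
   added, so the size and time estimates of the same body can diverge
   considerably.  */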
/* Install new lexical TREE_BLOCK underneath 'current_block'.  */

static void
prepend_lexical_block (tree current_block, tree new_block)
{
  BLOCK_CHAIN (new_block) = BLOCK_SUBBLOCKS (current_block);
  BLOCK_SUBBLOCKS (current_block) = new_block;
  BLOCK_SUPERCONTEXT (new_block) = current_block;
}
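/* Schematically (illustrative): if CURRENT_BLOCK has subblocks
   B1 -> B2, then prepend_lexical_block (CURRENT_BLOCK, N) produces
   N -> B1 -> B2, with BLOCK_SUPERCONTEXT (N) == CURRENT_BLOCK.  */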
/* Add local variables from CALLEE to CALLER.  */

static inline void
add_local_variables (struct function *callee, struct function *caller,
		     copy_body_data *id)
{
  tree var;
  unsigned ix;

  FOR_EACH_LOCAL_DECL (callee, ix, var)
    if (!can_be_nonlocal (var, id))
      {
	tree new_var = remap_decl (var, id);

	/* Remap debug-expressions.  */
	if (VAR_P (new_var)
	    && DECL_HAS_DEBUG_EXPR_P (var)
	    && new_var != var)
	  {
	    tree tem = DECL_DEBUG_EXPR (var);
	    bool old_regimplify = id->regimplify;
	    id->remapping_type_depth++;
	    walk_tree (&tem, copy_tree_body_r, id, NULL);
	    id->remapping_type_depth--;
	    id->regimplify = old_regimplify;
	    SET_DECL_DEBUG_EXPR (new_var, tem);
	    DECL_HAS_DEBUG_EXPR_P (new_var) = 1;
	  }
	add_local_decl (caller, new_var);
      }
}
/* Add to BINDINGS a debug stmt resetting SRCVAR if inlining might
   have brought in or introduced any debug stmts for SRCVAR.  */

static inline void
reset_debug_binding (copy_body_data *id, tree srcvar, gimple_seq *bindings)
{
  tree *remappedvarp = id->decl_map->get (srcvar);

  if (!remappedvarp)
    return;

  if (!VAR_P (*remappedvarp))
    return;

  if (*remappedvarp == id->retvar)
    return;

  tree tvar = target_for_debug_bind (*remappedvarp);
  if (!tvar)
    return;

  gdebug *stmt = gimple_build_debug_bind (tvar, NULL_TREE,
					  id->call_stmt);
  gimple_seq_add_stmt (bindings, stmt);
}
/* For each inlined variable for which we may have debug bind stmts,
   add before GSI a final debug stmt resetting it, marking the end of
   its life, so that var-tracking knows it doesn't have to compute
   further locations for it.  */

static inline void
reset_debug_bindings (copy_body_data *id, gimple_stmt_iterator gsi)
{
  tree var;
  unsigned ix;
  gimple_seq bindings = NULL;

  if (!gimple_in_ssa_p (id->src_cfun))
    return;

  if (!opt_for_fn (id->dst_fn, flag_var_tracking_assignments))
    return;

  for (var = DECL_ARGUMENTS (id->src_fn);
       var; var = DECL_CHAIN (var))
    reset_debug_binding (id, var, &bindings);

  FOR_EACH_LOCAL_DECL (id->src_cfun, ix, var)
    reset_debug_binding (id, var, &bindings);

  gsi_insert_seq_before_without_update (&gsi, bindings, GSI_SAME_STMT);
}
/* If STMT is a GIMPLE_CALL, replace it with its inline expansion.  */

static bool
expand_call_inline (basic_block bb, gimple *stmt, copy_body_data *id,
		    bitmap to_purge)
{
  tree use_retvar;
  tree fn;
  hash_map<tree, tree> *dst;
  hash_map<tree, tree> *st = NULL;
  tree return_slot;
  tree modify_dest;
  struct cgraph_edge *cg_edge;
  cgraph_inline_failed_t reason;
  basic_block return_block;
  edge e;
  gimple_stmt_iterator gsi, stmt_gsi;
  bool successfully_inlined = false;
  bool purge_dead_abnormal_edges;
  gcall *call_stmt;
  unsigned int prop_mask, src_properties;
  struct function *dst_cfun;
  tree simduid;
  use_operand_p use;
  gimple *simtenter_stmt = NULL;
  vec<tree> *simtvars_save;

  /* The gimplifier uses input_location in too many places, such as
     internal_get_tmp_var ().  */
  location_t saved_location = input_location;
  input_location = gimple_location (stmt);

  /* From here on, we're only interested in CALL_EXPRs.  */
  call_stmt = dyn_cast <gcall *> (stmt);
  if (!call_stmt)
    goto egress;

  cg_edge = id->dst_node->get_edge (stmt);
  gcc_checking_assert (cg_edge);
  /* First, see if we can figure out what function is being called.
     If we cannot, then there is no hope of inlining the function.  */
  if (cg_edge->indirect_unknown_callee)
    goto egress;
  fn = cg_edge->callee->decl;
  gcc_checking_assert (fn);

  /* If FN is a declaration of a function in a nested scope that was
     globally declared inline, we don't set its DECL_INITIAL.
     However, we can't blindly follow DECL_ABSTRACT_ORIGIN because the
     C++ front-end uses it for cdtors to refer to their internal
     declarations, that are not real functions.  Fortunately those
     don't have trees to be saved, so we can tell by checking their
     gimple_body.  */
  if (!DECL_INITIAL (fn)
      && DECL_ABSTRACT_ORIGIN (fn)
      && gimple_has_body_p (DECL_ABSTRACT_ORIGIN (fn)))
    fn = DECL_ABSTRACT_ORIGIN (fn);

  /* Don't try to inline functions that are not well-suited to inlining.  */
  if (cg_edge->inline_failed)
    {
      reason = cg_edge->inline_failed;
      /* If this call was originally indirect, we do not want to emit any
	 inlining related warnings or sorry messages because there are no
	 guarantees regarding those.  */
      if (cg_edge->indirect_inlining_edge)
	goto egress;

      if (lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn))
	  /* For extern inline functions that get redefined we always
	     silently ignored always_inline flag.  Better behavior would
	     be to be able to keep both bodies and use extern inline body
	     for inlining, but we can't do that because frontends overwrite
	     the body.  */
	  && !cg_edge->callee->redefined_extern_inline
	  /* During early inline pass, report only when optimization is
	     not turned on.  */
	  && (symtab->global_info_ready
	      || !optimize
	      || cgraph_inline_failed_type (reason) == CIF_FINAL_ERROR)
	  /* PR 20090218-1_0.c.  Body can be provided by another module.  */
	  && (reason != CIF_BODY_NOT_AVAILABLE || !flag_generate_lto))
	{
	  error ("inlining failed in call to %<always_inline%> %q+F: %s", fn,
		 cgraph_inline_failed_string (reason));
	  if (gimple_location (stmt) != UNKNOWN_LOCATION)
	    inform (gimple_location (stmt), "called from here");
	  else if (DECL_SOURCE_LOCATION (cfun->decl) != UNKNOWN_LOCATION)
	    inform (DECL_SOURCE_LOCATION (cfun->decl),
		    "called from this function");
	}
      else if (opt_for_fn (fn, warn_inline)
	       && DECL_DECLARED_INLINE_P (fn)
	       && !DECL_NO_INLINE_WARNING_P (fn)
	       && !DECL_IN_SYSTEM_HEADER (fn)
	       && reason != CIF_UNSPECIFIED
	       && !lookup_attribute ("noinline", DECL_ATTRIBUTES (fn))
	       /* Do not warn about not inlined recursive calls.  */
	       && !cg_edge->recursive_p ()
	       /* Avoid warnings during early inline pass.  */
	       && symtab->global_info_ready)
	{
	  auto_diagnostic_group d;
	  if (warning (OPT_Winline, "inlining failed in call to %q+F: %s",
		       fn, _(cgraph_inline_failed_string (reason))))
	    {
	      if (gimple_location (stmt) != UNKNOWN_LOCATION)
		inform (gimple_location (stmt), "called from here");
	      else if (DECL_SOURCE_LOCATION (cfun->decl) != UNKNOWN_LOCATION)
		inform (DECL_SOURCE_LOCATION (cfun->decl),
			"called from this function");
	    }
	}
      goto egress;
    }
  id->src_node = cg_edge->callee;

  /* If callee is thunk, all we need is to adjust the THIS pointer
     and redirect to function being thunked.  */
  if (id->src_node->thunk)
    {
      cgraph_edge *edge;
      tree virtual_offset = NULL;
      profile_count count = cg_edge->count;
      tree op;
      gimple_stmt_iterator iter = gsi_for_stmt (stmt);
      thunk_info *info = thunk_info::get (id->src_node);

      cgraph_edge::remove (cg_edge);
      edge = id->src_node->callees->clone (id->dst_node, call_stmt,
					   gimple_uid (stmt),
					   profile_count::one (),
					   profile_count::one (),
					   true);
      edge->count = count;
      if (info->virtual_offset_p)
	virtual_offset = size_int (info->virtual_value);
      op = create_tmp_reg_fn (cfun, TREE_TYPE (gimple_call_arg (stmt, 0)),
			      NULL);
      gsi_insert_before (&iter, gimple_build_assign (op,
						     gimple_call_arg (stmt, 0)),
			 GSI_NEW_STMT);
      gcc_assert (info->this_adjusting);
      op = thunk_adjust (&iter, op, 1, info->fixed_offset,
			 virtual_offset, info->indirect_offset);

      gimple_call_set_arg (stmt, 0, op);
      gimple_call_set_fndecl (stmt, edge->callee->decl);
      update_stmt (stmt);
      id->src_node->remove ();
      successfully_inlined = expand_call_inline (bb, stmt, id, to_purge);
      maybe_remove_unused_call_args (cfun, stmt);
      /* This used to return true even though we do fail to inline in
	 some cases.  See PR98525.  */
      goto egress;
    }
  fn = cg_edge->callee->decl;
  cg_edge->callee->get_untransformed_body ();

  if (flag_checking && cg_edge->callee->decl != id->dst_node->decl)
    cg_edge->callee->verify ();

  /* We will be inlining this callee.  */
  id->eh_lp_nr = lookup_stmt_eh_lp (stmt);

  /* Update the caller's EH personality.  */
  if (DECL_FUNCTION_PERSONALITY (fn))
    DECL_FUNCTION_PERSONALITY (cg_edge->caller->decl)
      = DECL_FUNCTION_PERSONALITY (fn);

  /* Split the block before the GIMPLE_CALL.  */
  stmt_gsi = gsi_for_stmt (stmt);
  gsi_prev (&stmt_gsi);
  e = split_block (bb, gsi_end_p (stmt_gsi) ? NULL : gsi_stmt (stmt_gsi));
  bb = e->src;
  return_block = e->dest;
  remove_edge (e);

  /* If the GIMPLE_CALL was in the last statement of BB, it may have
     been the source of abnormal edges.  In this case, schedule
     the removal of dead abnormal edges.  */
  gsi = gsi_start_bb (return_block);
  gsi_next (&gsi);
  purge_dead_abnormal_edges = gsi_end_p (gsi);

  stmt_gsi = gsi_start_bb (return_block);

  /* Build a block containing code to initialize the arguments, the
     actual inline expansion of the body, and a label for the return
     statements within the function to jump to.  The type of the
     statement expression is the return type of the function call.
     ??? If the call does not have an associated block then we will
     remap all callee blocks to NULL, effectively dropping most of
     its debug information.  This should only happen for calls to
     artificial decls inserted by the compiler itself.  We need to
     either link the inlined blocks into the caller block tree or
     not refer to them in any way to not break GC for locations.  */
  if (tree block = gimple_block (stmt))
    {
      /* We do want to assign a not UNKNOWN_LOCATION BLOCK_SOURCE_LOCATION
	 to make inlined_function_outer_scope_p return true on this BLOCK.  */
      location_t loc = LOCATION_LOCUS (gimple_location (stmt));
      if (loc == UNKNOWN_LOCATION)
	loc = LOCATION_LOCUS (DECL_SOURCE_LOCATION (fn));
      if (loc == UNKNOWN_LOCATION)
	loc = BUILTINS_LOCATION;
      id->block = make_node (BLOCK);
      BLOCK_ABSTRACT_ORIGIN (id->block) = DECL_ORIGIN (fn);
      BLOCK_SOURCE_LOCATION (id->block) = loc;
      prepend_lexical_block (block, id->block);
    }

  /* Local declarations will be replaced by their equivalents in this map.  */
  st = id->decl_map;
  id->decl_map = new hash_map<tree, tree>;
  dst = id->debug_map;
  id->debug_map = NULL;
  if (flag_stack_reuse != SR_NONE)
    id->add_clobbers_to_eh_landing_pads = last_basic_block_for_fn (cfun);

  /* Record the function we are about to inline.  */
  id->src_fn = fn;
  id->src_cfun = DECL_STRUCT_FUNCTION (fn);
  id->reset_location = DECL_IGNORED_P (fn);
  id->call_stmt = call_stmt;

  /* When inlining into an OpenMP SIMD-on-SIMT loop, arrange for new automatic
     variables to be added to IFN_GOMP_SIMT_ENTER argument list.  */
  dst_cfun = DECL_STRUCT_FUNCTION (id->dst_fn);
  simtvars_save = id->dst_simt_vars;
  if (!(dst_cfun->curr_properties & PROP_gimple_lomp_dev)
      && (simduid = bb->loop_father->simduid) != NULL_TREE
      && (simduid = ssa_default_def (dst_cfun, simduid)) != NULL_TREE
      && single_imm_use (simduid, &use, &simtenter_stmt)
      && is_gimple_call (simtenter_stmt)
      && gimple_call_internal_p (simtenter_stmt, IFN_GOMP_SIMT_ENTER))
    vec_alloc (id->dst_simt_vars, 0);
  else
    id->dst_simt_vars = NULL;

  if (profile_status_for_fn (id->src_cfun) == PROFILE_ABSENT)
    profile_status_for_fn (dst_cfun) = PROFILE_ABSENT;

  /* If the src function contains an IFN_VA_ARG, then so will the dst
     function after inlining.  Likewise for IFN_GOMP_USE_SIMT.  */
  prop_mask = PROP_gimple_lva | PROP_gimple_lomp_dev;
  src_properties = id->src_cfun->curr_properties & prop_mask;
  if (src_properties != prop_mask)
    dst_cfun->curr_properties &= src_properties | ~prop_mask;
  dst_cfun->calls_eh_return |= id->src_cfun->calls_eh_return;
  id->dst_node->calls_declare_variant_alt
    |= id->src_node->calls_declare_variant_alt;

  gcc_assert (!id->src_cfun->after_inlining);

  id->entry_bb = bb;
  if (lookup_attribute ("cold", DECL_ATTRIBUTES (fn)))
    {
      gimple_stmt_iterator si = gsi_last_bb (bb);
      gsi_insert_after (&si, gimple_build_predict (PRED_COLD_FUNCTION,
						   NOT_TAKEN),
			GSI_NEW_STMT);
    }
  initialize_inlined_parameters (id, stmt, fn, bb);
  if (debug_nonbind_markers_p && debug_inline_points && id->block
      && inlined_function_outer_scope_p (id->block))
    {
      gimple_stmt_iterator si = gsi_last_bb (bb);
      gsi_insert_after (&si, gimple_build_debug_inline_entry
			(id->block, DECL_SOURCE_LOCATION (id->src_fn)),
			GSI_NEW_STMT);
    }

  if (DECL_INITIAL (fn))
    {
      if (gimple_block (stmt))
	{
	  tree *var;

	  prepend_lexical_block (id->block,
				 remap_blocks (DECL_INITIAL (fn), id));
	  gcc_checking_assert (BLOCK_SUBBLOCKS (id->block)
			       && (BLOCK_CHAIN (BLOCK_SUBBLOCKS (id->block))
				   == NULL_TREE));
	  /* Move vars for PARM_DECLs from DECL_INITIAL block to id->block,
	     otherwise for DWARF DW_TAG_formal_parameter will not be children
	     of DW_TAG_inlined_subroutine, but of a DW_TAG_lexical_block
	     under it.  The parameters can be then evaluated in the debugger,
	     but don't show in backtraces.  */
	  for (var = &BLOCK_VARS (BLOCK_SUBBLOCKS (id->block)); *var; )
	    if (TREE_CODE (DECL_ORIGIN (*var)) == PARM_DECL)
	      {
		tree v = *var;
		*var = TREE_CHAIN (v);
		TREE_CHAIN (v) = BLOCK_VARS (id->block);
		BLOCK_VARS (id->block) = v;
	      }
	    else
	      var = &TREE_CHAIN (*var);
	}
      else
	remap_blocks_to_null (DECL_INITIAL (fn), id);
    }

  /* Return statements in the function body will be replaced by jumps
     to the RET_LABEL.  */
  gcc_assert (DECL_INITIAL (fn));
  gcc_assert (TREE_CODE (DECL_INITIAL (fn)) == BLOCK);

  /* Find the LHS to which the result of this call is assigned.  */
  return_slot = NULL;
  if (gimple_call_lhs (stmt))
    {
      modify_dest = gimple_call_lhs (stmt);

      /* The function which we are inlining might not return a value,
	 in which case we should issue a warning that the function
	 does not return a value.  In that case the optimizers will
	 see that the variable to which the value is assigned was not
	 initialized.  We do not want to issue a warning about that
	 uninitialized variable.  */
      if (DECL_P (modify_dest))
	suppress_warning (modify_dest, OPT_Wuninitialized);

      if (gimple_call_return_slot_opt_p (call_stmt))
	{
	  return_slot = modify_dest;
	  modify_dest = NULL;
	}
    }
  else
    modify_dest = NULL;

  /* If we are inlining a call to the C++ operator new, we don't want
     to use type based alias analysis on the return value.  Otherwise
     we may get confused if the compiler sees that the inlined new
     function returns a pointer which was just deleted.  See the
     corresponding bug report.  */
  if (DECL_IS_OPERATOR_NEW_P (fn))
    {
      return_slot = NULL;
      modify_dest = NULL;
    }

  /* Declare the return variable for the function.  */
  use_retvar = declare_return_variable (id, return_slot, modify_dest, bb);

  /* Add local vars in this inlined callee to caller.  */
  add_local_variables (id->src_cfun, cfun, id);

  if (dump_enabled_p ())
    {
      char buf[128];
      snprintf (buf, sizeof(buf), "%4.2f",
		cg_edge->sreal_frequency ().to_double ());
      dump_printf_loc (MSG_NOTE | MSG_PRIORITY_INTERNALS,
		       call_stmt,
		       "Inlining %C to %C with frequency %s\n",
		       id->src_node, id->dst_node, buf);
      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  id->src_node->dump (dump_file);
	  id->dst_node->dump (dump_file);
	}
    }

  /* This is it.  Duplicate the callee body.  Assume callee is
     pre-gimplified.  Note that we must not alter the caller
     function in any way before this point, as this CALL_EXPR may be
     a self-referential call; if we're calling ourselves, we need to
     duplicate our body before altering anything.  */
  copy_body (id, bb, return_block, NULL);

  reset_debug_bindings (id, stmt_gsi);

  if (flag_stack_reuse != SR_NONE)
    for (tree p = DECL_ARGUMENTS (id->src_fn); p; p = DECL_CHAIN (p))
      if (!TREE_THIS_VOLATILE (p))
	{
	  /* The value associated with P is a local temporary only if
	     there is no value associated with P in the debug map.  */
	  tree *varp = id->decl_map->get (p);
	  if (varp
	      && VAR_P (*varp)
	      && !is_gimple_reg (*varp)
	      && !(id->debug_map && id->debug_map->get (p)))
	    {
	      tree clobber = build_clobber (TREE_TYPE (*varp), CLOBBER_EOL);
	      gimple *clobber_stmt;
	      clobber_stmt = gimple_build_assign (*varp, clobber);
	      gimple_set_location (clobber_stmt, gimple_location (stmt));
	      gsi_insert_before (&stmt_gsi, clobber_stmt, GSI_SAME_STMT);
	    }
	}

  /* Reset the escaped solution.  */
  if (cfun->gimple_df)
    pt_solution_reset (&cfun->gimple_df->escaped);

  /* Add new automatic variables to IFN_GOMP_SIMT_ENTER arguments.  */
  if (id->dst_simt_vars && id->dst_simt_vars->length () > 0)
    {
      size_t nargs = gimple_call_num_args (simtenter_stmt);
      vec<tree> *vars = id->dst_simt_vars;
      auto_vec<tree> newargs (nargs + vars->length ());
      for (size_t i = 0; i < nargs; i++)
	newargs.quick_push (gimple_call_arg (simtenter_stmt, i));
      for (tree *pvar = vars->begin (); pvar != vars->end (); pvar++)
	{
	  tree ptrtype = build_pointer_type (TREE_TYPE (*pvar));
	  newargs.quick_push (build1 (ADDR_EXPR, ptrtype, *pvar));
	}
      gcall *g = gimple_build_call_internal_vec (IFN_GOMP_SIMT_ENTER, newargs);
      gimple_call_set_lhs (g, gimple_call_lhs (simtenter_stmt));
      gimple_stmt_iterator gsi = gsi_for_stmt (simtenter_stmt);
      gsi_replace (&gsi, g, false);
    }
  vec_free (id->dst_simt_vars);
  id->dst_simt_vars = simtvars_save;

  /* Clean up.  */
  if (id->debug_map)
    {
      delete id->debug_map;
      id->debug_map = dst;
    }
  delete id->decl_map;
  id->decl_map = st;

  /* Unlink the call's virtual operands before replacing it.  */
  unlink_stmt_vdef (stmt);
  if (gimple_vdef (stmt)
      && TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
    release_ssa_name (gimple_vdef (stmt));

  /* If the inlined function returns a result that we care about,
     substitute the GIMPLE_CALL with an assignment of the return
     variable to the LHS of the call.  That is, if STMT was
     'a = foo (...)', substitute the call with 'a = USE_RETVAR'.  */
  if (use_retvar && gimple_call_lhs (stmt))
    {
      gimple *old_stmt = stmt;
      stmt = gimple_build_assign (gimple_call_lhs (stmt), use_retvar);
      gimple_set_location (stmt, gimple_location (old_stmt));
      gsi_replace (&stmt_gsi, stmt, false);
      maybe_clean_or_replace_eh_stmt (old_stmt, stmt);
      /* Append a clobber for id->retvar if easily possible.  */
      if (flag_stack_reuse != SR_NONE
	  && id->retvar
	  && VAR_P (id->retvar)
	  && id->retvar != return_slot
	  && id->retvar != modify_dest
	  && !TREE_THIS_VOLATILE (id->retvar)
	  && !is_gimple_reg (id->retvar)
	  && !stmt_ends_bb_p (stmt))
	{
	  tree clobber = build_clobber (TREE_TYPE (id->retvar), CLOBBER_EOL);
	  gimple *clobber_stmt;
	  clobber_stmt = gimple_build_assign (id->retvar, clobber);
	  gimple_set_location (clobber_stmt, gimple_location (old_stmt));
	  gsi_insert_after (&stmt_gsi, clobber_stmt, GSI_SAME_STMT);
	}
    }
  else
    {
      /* Handle the case of inlining a function with no return
	 statement, which causes the return value to become undefined.  */
      if (gimple_call_lhs (stmt)
	  && TREE_CODE (gimple_call_lhs (stmt)) == SSA_NAME)
	{
	  tree name = gimple_call_lhs (stmt);
	  tree var = SSA_NAME_VAR (name);
	  tree def = var ? ssa_default_def (cfun, var) : NULL;

	  if (def)
	    {
	      /* If the variable is used undefined, make this name
		 undefined via a move.  */
	      stmt = gimple_build_assign (gimple_call_lhs (stmt), def);
	      gsi_replace (&stmt_gsi, stmt, true);
	    }
	  else
	    {
	      if (!var)
		{
		  var = create_tmp_reg_fn (cfun, TREE_TYPE (name), NULL);
		  SET_SSA_NAME_VAR_OR_IDENTIFIER (name, var);
		}
	      /* Otherwise make this variable undefined.  */
	      gsi_remove (&stmt_gsi, true);
	      set_ssa_default_def (cfun, var, name);
	      SSA_NAME_DEF_STMT (name) = gimple_build_nop ();
	    }
	}
      /* Replace with a clobber for id->retvar.  */
      else if (flag_stack_reuse != SR_NONE
	       && id->retvar
	       && VAR_P (id->retvar)
	       && id->retvar != return_slot
	       && id->retvar != modify_dest
	       && !TREE_THIS_VOLATILE (id->retvar)
	       && !is_gimple_reg (id->retvar))
	{
	  tree clobber = build_clobber (TREE_TYPE (id->retvar));
	  gimple *clobber_stmt;
	  clobber_stmt = gimple_build_assign (id->retvar, clobber);
	  gimple_set_location (clobber_stmt, gimple_location (stmt));
	  gsi_replace (&stmt_gsi, clobber_stmt, false);
	  maybe_clean_or_replace_eh_stmt (stmt, clobber_stmt);
	}
      else
	gsi_remove (&stmt_gsi, true);
    }

  if (purge_dead_abnormal_edges)
    bitmap_set_bit (to_purge, return_block->index);

  /* If the value of the new expression is ignored, that's OK.  We
     don't warn about this for CALL_EXPRs, so we shouldn't warn about
     the equivalent inlined version either.  */
  if (is_gimple_assign (stmt))
    {
      gcc_assert (gimple_assign_single_p (stmt)
		  || CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt)));
      TREE_USED (gimple_assign_rhs1 (stmt)) = 1;
    }

  id->add_clobbers_to_eh_landing_pads = 0;

  /* Output the inlining info for this abstract function, since it has been
     inlined.  If we don't do this now, we can lose the information about the
     variables in the function when the blocks get blown away as soon as we
     remove the cgraph node.  */
  if (gimple_block (stmt))
    (*debug_hooks->outlining_inline_function) (fn);

  /* Update callgraph if needed.  */
  cg_edge->callee->remove ();

  id->block = NULL_TREE;
  id->retvar = NULL_TREE;
  successfully_inlined = true;

 egress:
  input_location = saved_location;
  return successfully_inlined;
}
/* Expand call statements reachable from STMT_P.
   We can only have CALL_EXPRs as the "toplevel" tree code or nested
   in a MODIFY_EXPR.  */

static bool
gimple_expand_calls_inline (basic_block bb, copy_body_data *id,
			    bitmap to_purge)
{
  gimple_stmt_iterator gsi;
  bool inlined = false;

  for (gsi = gsi_last_bb (bb); !gsi_end_p (gsi);)
    {
      gimple *stmt = gsi_stmt (gsi);
      gsi_prev (&gsi);

      if (is_gimple_call (stmt)
	  && !gimple_call_internal_p (stmt))
	inlined |= expand_call_inline (bb, stmt, id, to_purge);
    }

  return inlined;
}
/* Walk all basic blocks created after FIRST and try to fold every statement
   in the STATEMENTS pointer set.  */

static void
fold_marked_statements (int first, hash_set<gimple *> *statements)
{
  auto_bitmap to_purge;

  auto_vec<edge, 20> stack (n_basic_blocks_for_fn (cfun) + 2);
  auto_sbitmap visited (last_basic_block_for_fn (cfun));
  bitmap_clear (visited);

  stack.quick_push (single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
  while (!stack.is_empty ())
    {
      /* Look at the edge on the top of the stack.  */
      edge e = stack.pop ();
      basic_block dest = e->dest;

      if (dest == EXIT_BLOCK_PTR_FOR_FN (cfun)
	  || bitmap_bit_p (visited, dest->index))
	continue;

      bitmap_set_bit (visited, dest->index);

      if (dest->index >= first)
	for (gimple_stmt_iterator gsi = gsi_start_bb (dest);
	     !gsi_end_p (gsi); gsi_next (&gsi))
	  {
	    if (!statements->contains (gsi_stmt (gsi)))
	      continue;

	    gimple *old_stmt = gsi_stmt (gsi);
	    tree old_decl = (is_gimple_call (old_stmt)
			     ? gimple_call_fndecl (old_stmt) : 0);
	    if (old_decl && fndecl_built_in_p (old_decl))
	      {
		/* Folding builtins can create multiple instructions,
		   we need to look at all of them.  */
		gimple_stmt_iterator i2 = gsi;
		gsi_prev (&i2);
		if (fold_stmt (&gsi))
		  {
		    gimple *new_stmt;
		    /* If a builtin at the end of a bb folded into nothing,
		       the following loop won't work.  */
		    if (gsi_end_p (gsi))
		      {
			cgraph_update_edges_for_call_stmt (old_stmt,
							   old_decl, NULL);
			break;
		      }
		    if (gsi_end_p (i2))
		      i2 = gsi_start_bb (dest);
		    else
		      gsi_next (&i2);
		    while (1)
		      {
			new_stmt = gsi_stmt (i2);
			update_stmt (new_stmt);
			cgraph_update_edges_for_call_stmt (old_stmt, old_decl,
							   new_stmt);

			if (new_stmt == gsi_stmt (gsi))
			  {
			    /* It is okay to check only for the very last
			       of these statements.  If it is a throwing
			       statement nothing will change.  If it isn't
			       this can remove EH edges.  If that weren't
			       correct -- because some intermediate stmts
			       throw but not the last one -- we'd have to
			       split the block, which we can't do here and
			       we'd lose anyway.  And as builtins probably
			       never throw, this all is moot anyway.  */
			    if (maybe_clean_or_replace_eh_stmt (old_stmt,
								new_stmt))
			      bitmap_set_bit (to_purge, dest->index);
			    break;
			  }
			gsi_next (&i2);
		      }
		  }
	      }
	    else if (fold_stmt (&gsi))
	      {
		/* Re-read the statement from GSI as fold_stmt() may
		   have changed it.  */
		gimple *new_stmt = gsi_stmt (gsi);
		update_stmt (new_stmt);

		if (is_gimple_call (old_stmt)
		    || is_gimple_call (new_stmt))
		  cgraph_update_edges_for_call_stmt (old_stmt, old_decl,
						     new_stmt);

		if (maybe_clean_or_replace_eh_stmt (old_stmt, new_stmt))
		  bitmap_set_bit (to_purge, dest->index);
	      }
	  }

      if (EDGE_COUNT (dest->succs) > 0)
	{
	  /* Avoid warnings emitted from folding statements that
	     became unreachable because of inlined function parameter
	     propagation.  */
	  e = find_taken_edge (dest, NULL_TREE);
	  if (e)
	    stack.quick_push (e);
	  else
	    {
	      edge_iterator ei;
	      FOR_EACH_EDGE (e, ei, dest->succs)
		stack.safe_push (e);
	    }
	}
    }

  gimple_purge_all_dead_eh_edges (to_purge);
}
/* Expand calls to inline functions in the body of FN.  */

unsigned int
optimize_inline_calls (tree fn)
{
  copy_body_data id;
  basic_block bb;
  int last = n_basic_blocks_for_fn (cfun);
  bool inlined_p = false;

  /* Clear out ID.  */
  memset (&id, 0, sizeof (id));

  id.src_node = id.dst_node = cgraph_node::get (fn);
  gcc_assert (id.dst_node->definition);
  id.dst_fn = fn;
  /* Or any functions that aren't finished yet.  */
  if (current_function_decl)
    id.dst_fn = current_function_decl;

  id.copy_decl = copy_decl_maybe_to_var;
  id.transform_call_graph_edges = CB_CGE_DUPLICATE;
  id.transform_new_cfg = false;
  id.transform_return_to_modify = true;
  id.transform_parameter = true;
  id.statements_to_fold = new hash_set<gimple *>;

  push_gimplify_context ();

  /* We make no attempts to keep dominance info up-to-date.  */
  free_dominance_info (CDI_DOMINATORS);
  free_dominance_info (CDI_POST_DOMINATORS);

  /* Register specific gimple functions.  */
  gimple_register_cfg_hooks ();

  /* Reach the trees by walking over the CFG, and note the
     enclosing basic-blocks in the call edges.  */
  /* We walk the blocks going forward, because inlined function bodies
     will split id->current_basic_block, and the new blocks will
     follow it; we'll trudge through them, processing their CALL_EXPRs
     along the way.  */
  auto_bitmap to_purge;
  FOR_EACH_BB_FN (bb, cfun)
    inlined_p |= gimple_expand_calls_inline (bb, &id, to_purge);

  pop_gimplify_context (NULL);

  if (flag_checking)
    {
      struct cgraph_edge *e;

      id.dst_node->verify ();

      /* Double check that we inlined everything we are supposed to inline.  */
      for (e = id.dst_node->callees; e; e = e->next_callee)
	gcc_assert (e->inline_failed);
    }

  /* If we didn't inline into the function there is nothing to do.  */
  if (!inlined_p)
    {
      delete id.statements_to_fold;
      return 0;
    }

  /* Fold queued statements.  */
  update_max_bb_count ();
  fold_marked_statements (last, id.statements_to_fold);
  delete id.statements_to_fold;

  /* Finally purge EH and abnormal edges from the call stmts we inlined.
     We need to do this after fold_marked_statements since that may walk
     the SSA use-def chain.  */
  unsigned i;
  bitmap_iterator bi;
  EXECUTE_IF_SET_IN_BITMAP (to_purge, 0, i, bi)
    {
      basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
      if (bb)
	{
	  gimple_purge_dead_eh_edges (bb);
	  gimple_purge_dead_abnormal_call_edges (bb);
	}
    }

  gcc_assert (!id.debug_stmts.exists ());

  /* Renumber the lexical scoping (non-code) blocks consecutively.  */
  number_blocks (fn);

  delete_unreachable_blocks_update_callgraph (id.dst_node, false);
  id.dst_node->calls_comdat_local = id.dst_node->check_calls_comdat_local_p ();

  if (flag_checking)
    id.dst_node->verify ();

  /* It would be nice to check SSA/CFG/statement consistency here, but it is
     not possible yet - the IPA passes might make various functions to not
     throw and they don't care to proactively update local EH info.  This is
     done later in the fixup_cfg pass that also executes the verification.  */
  return (TODO_update_ssa
	  | TODO_cleanup_cfg
	  | (gimple_in_ssa_p (cfun) ? TODO_remove_unused_locals : 0)
	  | (gimple_in_ssa_p (cfun) ? TODO_update_address_taken : 0)
	  | (profile_status_for_fn (cfun) != PROFILE_ABSENT
	     ? TODO_rebuild_frequencies : 0));
}
/* Passed to walk_tree.  Copies the node pointed to, if appropriate.  */

tree
copy_tree_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
{
  enum tree_code code = TREE_CODE (*tp);
  enum tree_code_class cl = TREE_CODE_CLASS (code);

  /* We make copies of most nodes.  */
  if (IS_EXPR_CODE_CLASS (cl)
      || code == TREE_LIST
      || code == TREE_VEC
      || code == TYPE_DECL
      || code == OMP_CLAUSE)
    {
      /* Because the chain gets clobbered when we make a copy, we save it
	 here.  */
      tree chain = NULL_TREE, new_tree;

      if (CODE_CONTAINS_STRUCT (code, TS_COMMON))
	chain = TREE_CHAIN (*tp);

      /* Copy the node.  */
      new_tree = copy_node (*tp);

      *tp = new_tree;

      /* Now, restore the chain, if appropriate.  That will cause
	 walk_tree to walk into the chain as well.  */
      if (code == PARM_DECL
	  || code == TREE_LIST
	  || code == OMP_CLAUSE)
	TREE_CHAIN (*tp) = chain;

      /* For now, we don't update BLOCKs when we make copies.  So, we
	 have to nullify all BIND_EXPRs.  */
      if (TREE_CODE (*tp) == BIND_EXPR)
	BIND_EXPR_BLOCK (*tp) = NULL_TREE;
    }
  else if (code == CONSTRUCTOR)
    {
      /* CONSTRUCTOR nodes need special handling because
	 we need to duplicate the vector of elements.  */
      tree new_tree;

      new_tree = copy_node (*tp);
      CONSTRUCTOR_ELTS (new_tree) = vec_safe_copy (CONSTRUCTOR_ELTS (*tp));
      *tp = new_tree;
    }
  else if (code == STATEMENT_LIST)
    /* We used to just abort on STATEMENT_LIST, but we can run into them
       with statement-expressions (c++/40975).  */
    copy_statement_list (tp);
  else if (TREE_CODE_CLASS (code) == tcc_type)
    *walk_subtrees = 0;
  else if (TREE_CODE_CLASS (code) == tcc_declaration)
    *walk_subtrees = 0;
  else if (TREE_CODE_CLASS (code) == tcc_constant)
    *walk_subtrees = 0;
  return NULL_TREE;
}
/* The SAVE_EXPR pointed to by TP is being copied.  If ST contains
   information indicating to what new SAVE_EXPR this one should be mapped,
   use that one.  Otherwise, create a new node and enter it in ST.  FN is
   the function into which the copy will be placed.  */

static void
remap_save_expr (tree *tp, hash_map<tree, tree> *st, int *walk_subtrees)
{
  tree *n;
  tree t;

  /* See if we already encountered this SAVE_EXPR.  */
  n = st->get (*tp);

  /* If we didn't already remap this SAVE_EXPR, do so now.  */
  if (!n)
    {
      t = copy_node (*tp);

      /* Remember this SAVE_EXPR.  */
      st->put (*tp, t);
      /* Make sure we don't remap an already-remapped SAVE_EXPR.  */
      st->put (t, t);
    }
  else
    {
      /* We've already walked into this SAVE_EXPR; don't do it again.  */
      *walk_subtrees = 0;
      t = *n;
    }

  /* Replace this SAVE_EXPR with the copy.  */
  *tp = t;
}
/* Called via walk_gimple_seq.  If *GSIP points to a GIMPLE_LABEL for a local
   label, copies the declaration and enters it in the splay_tree in DATA
   (which is really a 'copy_body_data *').  */

static tree
mark_local_labels_stmt (gimple_stmt_iterator *gsip,
			bool *handled_ops_p ATTRIBUTE_UNUSED,
			struct walk_stmt_info *wi)
{
  copy_body_data *id = (copy_body_data *) wi->info;
  glabel *stmt = dyn_cast <glabel *> (gsi_stmt (*gsip));

  if (stmt)
    {
      tree decl = gimple_label_label (stmt);

      /* Copy the decl and remember the copy.  */
      insert_decl_map (id, decl, id->copy_decl (decl, id));
    }

  return NULL_TREE;
}

static gimple_seq duplicate_remap_omp_clause_seq (gimple_seq seq,
						  struct walk_stmt_info *wi);
/* Called via walk_gimple_seq by copy_gimple_seq_and_replace_local.
   Using the splay_tree pointed to by ST (which is really a `splay_tree'),
   remaps all local declarations to appropriate replacements in gimple
   operands.  */

static tree
replace_locals_op (tree *tp, int *walk_subtrees, void *data)
{
  struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
  copy_body_data *id = (copy_body_data *) wi->info;
  hash_map<tree, tree> *st = id->decl_map;
  tree *n;
  tree expr = *tp;

  /* For recursive invocations this is no longer the LHS itself.  */
  bool is_lhs = wi->is_lhs;
  wi->is_lhs = false;

  if (TREE_CODE (expr) == SSA_NAME)
    {
      *tp = remap_ssa_name (*tp, id);
      *walk_subtrees = 0;
      if (is_lhs)
	SSA_NAME_DEF_STMT (*tp) = gsi_stmt (wi->gsi);
    }
  /* Only a local declaration (variable or label).  */
  else if ((VAR_P (expr) && !TREE_STATIC (expr))
	   || TREE_CODE (expr) == LABEL_DECL)
    {
      /* Lookup the declaration.  */
      n = st->get (expr);

      /* If it's there, remap it.  */
      if (n)
	*tp = *n;
      *walk_subtrees = 0;
    }
  else if (TREE_CODE (expr) == STATEMENT_LIST
	   || TREE_CODE (expr) == BIND_EXPR
	   || TREE_CODE (expr) == SAVE_EXPR)
    gcc_unreachable ();
  else if (TREE_CODE (expr) == TARGET_EXPR)
    {
      /* Don't mess with a TARGET_EXPR that hasn't been expanded.
	 It's OK for this to happen if it was part of a subtree that
	 isn't immediately expanded, such as operand 2 of another
	 TARGET_EXPR.  */
      if (!TREE_OPERAND (expr, 1))
	{
	  TREE_OPERAND (expr, 1) = TREE_OPERAND (expr, 3);
	  TREE_OPERAND (expr, 3) = NULL_TREE;
	}
    }
  else if (TREE_CODE (expr) == OMP_CLAUSE)
    {
      /* Before the omplower pass completes, some OMP clauses can contain
	 sequences that are neither copied by gimple_seq_copy nor walked by
	 walk_gimple_seq.  To make copy_gimple_seq_and_replace_locals work even
	 in those situations, we have to copy and process them explicitly.  */
      if (OMP_CLAUSE_CODE (expr) == OMP_CLAUSE_LASTPRIVATE)
	{
	  gimple_seq seq = OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (expr);
	  seq = duplicate_remap_omp_clause_seq (seq, wi);
	  OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (expr) = seq;
	}
      else if (OMP_CLAUSE_CODE (expr) == OMP_CLAUSE_LINEAR)
	{
	  gimple_seq seq = OMP_CLAUSE_LINEAR_GIMPLE_SEQ (expr);
	  seq = duplicate_remap_omp_clause_seq (seq, wi);
	  OMP_CLAUSE_LINEAR_GIMPLE_SEQ (expr) = seq;
	}
      else if (OMP_CLAUSE_CODE (expr) == OMP_CLAUSE_REDUCTION)
	{
	  gimple_seq seq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (expr);
	  seq = duplicate_remap_omp_clause_seq (seq, wi);
	  OMP_CLAUSE_REDUCTION_GIMPLE_INIT (expr) = seq;
	  seq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (expr);
	  seq = duplicate_remap_omp_clause_seq (seq, wi);
	  OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (expr) = seq;
	}
    }

  /* Keep iterating.  */
  return NULL_TREE;
}
/* Called via walk_gimple_seq by copy_gimple_seq_and_replace_local.
   Using the splay_tree pointed to by ST (which is really a `splay_tree'),
   remaps all local declarations to appropriate replacements in gimple
   statements.  */

static tree
replace_locals_stmt (gimple_stmt_iterator *gsip,
		     bool *handled_ops_p ATTRIBUTE_UNUSED,
		     struct walk_stmt_info *wi)
{
  copy_body_data *id = (copy_body_data *) wi->info;
  gimple *gs = gsi_stmt (*gsip);

  if (gbind *stmt = dyn_cast <gbind *> (gs))
    {
      tree block = gimple_bind_block (stmt);

      if (block)
	{
	  remap_block (&block, id);
	  gimple_bind_set_block (stmt, block);
	}

      /* This will remap a lot of the same decls again, but this should be
	 harmless.  */
      if (gimple_bind_vars (stmt))
	{
	  tree old_var, decls = gimple_bind_vars (stmt);

	  for (old_var = decls; old_var; old_var = DECL_CHAIN (old_var))
	    if (!can_be_nonlocal (old_var, id)
		&& ! variably_modified_type_p (TREE_TYPE (old_var),
					       id->src_fn))
	      remap_decl (old_var, id);

	  gcc_checking_assert (!id->prevent_decl_creation_for_types);
	  id->prevent_decl_creation_for_types = true;
	  gimple_bind_set_vars (stmt, remap_decls (decls, NULL, id));
	  id->prevent_decl_creation_for_types = false;
	}
    }

  /* Keep iterating.  */
  return NULL_TREE;
}
/* Create a copy of SEQ and remap all decls in it.  */

static gimple_seq
duplicate_remap_omp_clause_seq (gimple_seq seq, struct walk_stmt_info *wi)
{
  if (!seq)
    return NULL;

  /* If there are any labels in OMP sequences, they can be only referred to in
     the sequence itself and therefore we can do both here.  */
  walk_gimple_seq (seq, mark_local_labels_stmt, NULL, wi);
  gimple_seq copy = gimple_seq_copy (seq);
  walk_gimple_seq (copy, replace_locals_stmt, replace_locals_op, wi);
  return copy;
}
/* Copies everything in SEQ and replaces variables and labels local to
   current_function_decl.  */

gimple_seq
copy_gimple_seq_and_replace_locals (gimple_seq seq)
{
  copy_body_data id;
  struct walk_stmt_info wi;
  gimple_seq copy;

  /* There's nothing to do for NULL_TREE.  */
  if (seq == NULL)
    return seq;

  /* Set up ID.  */
  memset (&id, 0, sizeof (id));
  id.src_fn = current_function_decl;
  id.dst_fn = current_function_decl;
  id.src_cfun = cfun;
  id.decl_map = new hash_map<tree, tree>;
  id.debug_map = NULL;

  id.copy_decl = copy_decl_no_change;
  id.transform_call_graph_edges = CB_CGE_DUPLICATE;
  id.transform_new_cfg = false;
  id.transform_return_to_modify = false;
  id.transform_parameter = false;

  /* Walk the tree once to find local labels.  */
  memset (&wi, 0, sizeof (wi));
  hash_set<tree> visited;
  wi.info = &id;
  wi.pset = &visited;
  walk_gimple_seq (seq, mark_local_labels_stmt, NULL, &wi);

  copy = gimple_seq_copy (seq);

  /* Walk the copy, remapping decls.  */
  memset (&wi, 0, sizeof (wi));
  wi.info = &id;
  walk_gimple_seq (copy, replace_locals_stmt, replace_locals_op, &wi);

  /* Clean up.  */
  delete id.decl_map;
  if (id.debug_map)
    delete id.debug_map;
  if (id.dependence_map)
    {
      delete id.dependence_map;
      id.dependence_map = NULL;
    }

  return copy;
}
/* Allow someone to determine if SEARCH is a child of TOP from gdb.  */

DEBUG_FUNCTION tree
debug_find_tree_1 (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED, void *data)
{
  if (*tp == data)
    return (tree) data;
  else
    return NULL;
}

DEBUG_FUNCTION bool
debug_find_tree (tree top, tree search)
{
  return walk_tree_without_duplicates (&top, debug_find_tree_1, search) != 0;
}
/* Declare the variables created by the inliner.  Add all the variables in
   VARS to BIND_EXPR.  */

static void
declare_inline_vars (tree block, tree vars)
{
  tree t;
  for (t = vars; t; t = DECL_CHAIN (t))
    {
      DECL_SEEN_IN_BIND_EXPR_P (t) = 1;
      gcc_assert (!TREE_STATIC (t) && !TREE_ASM_WRITTEN (t));
      add_local_decl (cfun, t);
    }

  if (block)
    BLOCK_VARS (block) = chainon (BLOCK_VARS (block), vars);
}
/* Copy NODE (which must be a DECL).  The DECL originally was in the FROM_FN,
   but now it will be in the TO_FN.  PARM_TO_VAR means enable PARM_DECL to
   VAR_DECL translation.  */

static tree
copy_decl_for_dup_finish (copy_body_data *id, tree decl, tree copy)
{
  /* Don't generate debug information for the copy if we wouldn't have
     generated it for the original either.  */
  DECL_ARTIFICIAL (copy) = DECL_ARTIFICIAL (decl);
  DECL_IGNORED_P (copy) = DECL_IGNORED_P (decl);

  /* Set the DECL_ABSTRACT_ORIGIN so the debugging routines know what
     declaration inspired this copy.  */
  DECL_ABSTRACT_ORIGIN (copy) = DECL_ORIGIN (decl);

  /* The new variable/label has no RTL, yet.  */
  if (CODE_CONTAINS_STRUCT (TREE_CODE (copy), TS_DECL_WRTL)
      && !TREE_STATIC (copy) && !DECL_EXTERNAL (copy))
    SET_DECL_RTL (copy, 0);
  /* For vector typed decls make sure to update DECL_MODE according
     to the new function context.  */
  if (VECTOR_TYPE_P (TREE_TYPE (copy)))
    SET_DECL_MODE (copy, TYPE_MODE (TREE_TYPE (copy)));

  /* These args would always appear unused, if not for this.  */
  TREE_USED (copy) = 1;

  /* Set the context for the new declaration.  */
  if (!DECL_CONTEXT (decl))
    /* Globals stay global.  */
    ;
  else if (DECL_CONTEXT (decl) != id->src_fn)
    /* Things that weren't in the scope of the function we're inlining
       from aren't in the scope we're inlining to, either.  */
    ;
  else if (TREE_STATIC (decl))
    /* Function-scoped static variables should stay in the original
       function.  */
    ;
  else
    {
      /* Ordinary automatic local variables are now in the scope of the
	 new function.  */
      DECL_CONTEXT (copy) = id->dst_fn;
      if (VAR_P (copy) && id->dst_simt_vars && !is_gimple_reg (copy))
	{
	  if (!lookup_attribute ("omp simt private", DECL_ATTRIBUTES (copy)))
	    DECL_ATTRIBUTES (copy)
	      = tree_cons (get_identifier ("omp simt private"), NULL,
			   DECL_ATTRIBUTES (copy));
	  id->dst_simt_vars->safe_push (copy);
	}
    }

  return copy;
}

/* Create a new VAR_DECL that is identical in all respects to DECL, except
   that DECL may be a VAR_DECL, a PARM_DECL or a RESULT_DECL while the copy
   is always a VAR_DECL.  The original DECL must come from ID->src_fn and
   the copy will be part of ID->dst_fn.  */

tree
copy_decl_to_var (tree decl, copy_body_data *id)
{
  tree copy, type;

  gcc_assert (TREE_CODE (decl) == PARM_DECL
              || TREE_CODE (decl) == RESULT_DECL);

  type = TREE_TYPE (decl);

  copy = build_decl (DECL_SOURCE_LOCATION (id->dst_fn),
                     VAR_DECL, DECL_NAME (decl), type);
  if (DECL_PT_UID_SET_P (decl))
    SET_DECL_PT_UID (copy, DECL_PT_UID (decl));
  TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (decl);
  TREE_READONLY (copy) = TREE_READONLY (decl);
  TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (decl);
  DECL_NOT_GIMPLE_REG_P (copy) = DECL_NOT_GIMPLE_REG_P (decl);
  DECL_BY_REFERENCE (copy) = DECL_BY_REFERENCE (decl);

  return copy_decl_for_dup_finish (id, decl, copy);
}
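
/* Example (sketch; ID is assumed to be a fully initialized copy_body_data
   with src_fn/dst_fn set, PARM a parameter of ID->src_fn):

     tree local = copy_decl_to_var (parm, id);
     insert_decl_map (id, parm, local);

   Afterwards every reference to PARM remapped through ID's decl_map
   resolves to LOCAL in the destination body.  */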

/* Like copy_decl_to_var, but create a return slot object instead of a
   pointer variable for return by invisible reference.  */

static tree
copy_result_decl_to_var (tree decl, copy_body_data *id)
{
  tree copy, type;

  gcc_assert (TREE_CODE (decl) == PARM_DECL
              || TREE_CODE (decl) == RESULT_DECL);

  type = TREE_TYPE (decl);
  if (DECL_BY_REFERENCE (decl))
    type = TREE_TYPE (type);

  copy = build_decl (DECL_SOURCE_LOCATION (id->dst_fn),
                     VAR_DECL, DECL_NAME (decl), type);
  if (DECL_PT_UID_SET_P (decl))
    SET_DECL_PT_UID (copy, DECL_PT_UID (decl));
  TREE_READONLY (copy) = TREE_READONLY (decl);
  TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (decl);
  if (!DECL_BY_REFERENCE (decl))
    {
      TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (decl);
      DECL_NOT_GIMPLE_REG_P (copy)
        = (DECL_NOT_GIMPLE_REG_P (decl)
           /* RESULT_DECLs are treated special by needs_to_live_in_memory,
              mirror that to the created VAR_DECL.  */
           || (TREE_CODE (decl) == RESULT_DECL
               && aggregate_value_p (decl, id->src_fn)));
    }

  return copy_decl_for_dup_finish (id, decl, copy);
}
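
/* For a function like

     struct big { int a[32]; };
     struct big f (void);

   the result is returned by invisible reference, so DECL_BY_REFERENCE is
   set on the RESULT_DECL and its TREE_TYPE is a reference to 'struct big'.
   In that case the function above strips one level of indirection and
   builds a VAR_DECL of type 'struct big' that can serve as the actual
   return slot.  */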

/* Create a copy of DECL for use in ID->dst_fn without changing its kind.  */

tree
copy_decl_no_change (tree decl, copy_body_data *id)
{
  tree copy;

  copy = copy_node (decl);

  /* The COPY is not abstract; it will be generated in DST_FN.  */
  DECL_ABSTRACT_P (copy) = false;
  lang_hooks.dup_lang_specific_decl (copy);

  /* TREE_ADDRESSABLE isn't used to indicate that a label's address has
     been taken; it's for internal bookkeeping in expand_goto_internal.  */
  if (TREE_CODE (copy) == LABEL_DECL)
    {
      TREE_ADDRESSABLE (copy) = 0;
      LABEL_DECL_UID (copy) = -1;
    }

  return copy_decl_for_dup_finish (id, decl, copy);
}

static tree
copy_decl_maybe_to_var (tree decl, copy_body_data *id)
{
  if (TREE_CODE (decl) == PARM_DECL || TREE_CODE (decl) == RESULT_DECL)
    return copy_decl_to_var (decl, id);
  else
    return copy_decl_no_change (decl, id);
}
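
/* A client that wants this dispatch installs the function as the
   copy_body_data hook (sketch):

     id.copy_decl = copy_decl_maybe_to_var;

   which turns PARM_DECLs and RESULT_DECLs into locals of the destination
   function while leaving other decls as structurally unchanged copies.  */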

/* Return a copy of the function's argument tree without any modifications.  */

static tree
copy_arguments_nochange (tree orig_parm, copy_body_data * id)
{
  tree arg, *parg;
  tree new_parm = NULL;

  parg = &new_parm;
  for (arg = orig_parm; arg; arg = DECL_CHAIN (arg))
    {
      tree new_tree = remap_decl (arg, id);
      if (TREE_CODE (new_tree) != PARM_DECL)
        new_tree = id->copy_decl (arg, id);
      lang_hooks.dup_lang_specific_decl (new_tree);
      *parg = new_tree;
      parg = &DECL_CHAIN (new_tree);
    }
  return new_parm;
}

/* Return a copy of the function's static chain.  */

static tree
copy_static_chain (tree static_chain, copy_body_data * id)
{
  tree *chain_copy, *pvar;

  chain_copy = &static_chain;
  for (pvar = chain_copy; *pvar; pvar = &DECL_CHAIN (*pvar))
    {
      tree new_tree = remap_decl (*pvar, id);
      lang_hooks.dup_lang_specific_decl (new_tree);
      DECL_CHAIN (new_tree) = DECL_CHAIN (*pvar);
      *pvar = new_tree;
    }
  return static_chain;
}

/* Return true if the function is allowed to be versioned.
   This is a guard for the versioning functionality.  */

bool
tree_versionable_function_p (tree fndecl)
{
  return (!lookup_attribute ("noclone", DECL_ATTRIBUTES (fndecl))
          && copy_forbidden (DECL_STRUCT_FUNCTION (fndecl)) == NULL);
}
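
/* Example guard (sketch): an IPA pass would typically check

     if (tree_versionable_function_p (node->decl))
       ... create the clone ...

   before calling tree_function_versioning, since "noclone" and the
   various copy_forbidden conditions make a copy either invalid or
   undesirable.  */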

/* Update clone info after duplication.  */

static void
update_clone_info (copy_body_data * id)
{
  struct cgraph_node *this_node = id->dst_node;
  if (!this_node->clones)
    return;
  for (cgraph_node *node = this_node->clones; node != this_node;)
    {
      /* First update replace maps to match the new body.  */
      clone_info *info = clone_info::get (node);
      if (info && info->tree_map)
        {
          unsigned int i;
          for (i = 0; i < vec_safe_length (info->tree_map); i++)
            {
              struct ipa_replace_map *replace_info;
              replace_info = (*info->tree_map)[i];
              walk_tree (&replace_info->new_tree, copy_tree_body_r, id, NULL);
            }
        }

      if (node->clones)
        node = node->clones;
      else if (node->next_sibling_clone)
        node = node->next_sibling_clone;
      else
        {
          while (node != id->dst_node && !node->next_sibling_clone)
            node = node->clone_of;
          if (node != id->dst_node)
            node = node->next_sibling_clone;
        }
    }
}
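
/* The traversal above is a standard preorder walk of the clone tree:
   descend into NODE->clones first, then NODE->next_sibling_clone, and
   climb back up through NODE->clone_of when a subtree is exhausted.  The
   same pattern in isolation (sketch, with visit () standing in for the
   replace-map rewrite done here):

     for (cgraph_node *node = root->clones; node && node != root;)
       {
         visit (node);
         if (node->clones)
           node = node->clones;
         else if (node->next_sibling_clone)
           node = node->next_sibling_clone;
         else
           {
             while (node != root && !node->next_sibling_clone)
               node = node->clone_of;
             if (node != root)
               node = node->next_sibling_clone;
           }
       }  */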

/* Create a copy of a function's tree.
   OLD_DECL and NEW_DECL are FUNCTION_DECL tree nodes
   of the original function and the new copied function
   respectively.  In case we want to replace a DECL
   tree with another tree while duplicating the function's
   body, TREE_MAP represents the mapping between these
   trees.  If UPDATE_CLONES is set, the call_stmt fields
   of edges of clones of the function will be updated.

   If non-NULL, PARAM_ADJUSTMENTS determines how the function prototype
   (i.e. the function parameters and return value) should be modified.
   If non-NULL, BLOCKS_TO_COPY determines which basic blocks to copy.
   If non-NULL, NEW_ENTRY determines the new entry BB of the clone.  */
void
tree_function_versioning (tree old_decl, tree new_decl,
                          vec<ipa_replace_map *, va_gc> *tree_map,
                          ipa_param_adjustments *param_adjustments,
                          bool update_clones, bitmap blocks_to_copy,
                          basic_block new_entry)
{
  struct cgraph_node *old_version_node;
  struct cgraph_node *new_version_node;
  copy_body_data id;
  tree p;
  unsigned i;
  struct ipa_replace_map *replace_info;
  basic_block old_entry_block, bb;
  auto_vec<gimple *, 10> init_stmts;
  tree vars = NULL_TREE;

  /* We can get called recursively from expand_call_inline via clone
     materialization.  While expand_call_inline maintains input_location
     we cannot tolerate it to leak into the materialized clone.  */
  location_t saved_location = input_location;
  input_location = UNKNOWN_LOCATION;

  gcc_assert (TREE_CODE (old_decl) == FUNCTION_DECL
              && TREE_CODE (new_decl) == FUNCTION_DECL);
  DECL_POSSIBLY_INLINED (old_decl) = 1;

  old_version_node = cgraph_node::get (old_decl);
  gcc_checking_assert (old_version_node);
  new_version_node = cgraph_node::get (new_decl);
  gcc_checking_assert (new_version_node);

  /* Copy over debug args.  */
  if (DECL_HAS_DEBUG_ARGS_P (old_decl))
    {
      vec<tree, va_gc> **new_debug_args, **old_debug_args;
      gcc_checking_assert (decl_debug_args_lookup (new_decl) == NULL);
      DECL_HAS_DEBUG_ARGS_P (new_decl) = 0;
      old_debug_args = decl_debug_args_lookup (old_decl);
      if (old_debug_args)
        {
          new_debug_args = decl_debug_args_insert (new_decl);
          *new_debug_args = vec_safe_copy (*old_debug_args);
        }
    }

  /* Output the inlining info for this abstract function, since it has been
     inlined.  If we don't do this now, we can lose the information about the
     variables in the function when the blocks get blown away as soon as we
     remove the cgraph node.  */
  (*debug_hooks->outlining_inline_function) (old_decl);

  DECL_ARTIFICIAL (new_decl) = 1;
  DECL_ABSTRACT_ORIGIN (new_decl) = DECL_ORIGIN (old_decl);
  if (DECL_ORIGIN (old_decl) == old_decl)
    old_version_node->used_as_abstract_origin = true;
  DECL_FUNCTION_PERSONALITY (new_decl) = DECL_FUNCTION_PERSONALITY (old_decl);

  /* Prepare the data structures for the tree copy.  */
  memset (&id, 0, sizeof (id));

  /* Generate a new name for the new version.  */
  id.statements_to_fold = new hash_set<gimple *>;

  id.decl_map = new hash_map<tree, tree>;
  id.debug_map = NULL;
  id.src_fn = old_decl;
  id.dst_fn = new_decl;
  id.src_node = old_version_node;
  id.dst_node = new_version_node;
  id.src_cfun = DECL_STRUCT_FUNCTION (old_decl);
  id.blocks_to_copy = blocks_to_copy;

  id.copy_decl = copy_decl_no_change;
  id.transform_call_graph_edges
    = update_clones ? CB_CGE_MOVE_CLONES : CB_CGE_MOVE;
  id.transform_new_cfg = true;
  id.transform_return_to_modify = false;
  id.transform_parameter = false;

  old_entry_block = ENTRY_BLOCK_PTR_FOR_FN (DECL_STRUCT_FUNCTION (old_decl));
  DECL_RESULT (new_decl) = DECL_RESULT (old_decl);
  DECL_ARGUMENTS (new_decl) = DECL_ARGUMENTS (old_decl);
  initialize_cfun (new_decl, old_decl,
                   new_entry ? new_entry->count : old_entry_block->count);
  new_version_node->calls_declare_variant_alt
    = old_version_node->calls_declare_variant_alt;
  if (DECL_STRUCT_FUNCTION (new_decl)->gimple_df)
    DECL_STRUCT_FUNCTION (new_decl)->gimple_df->ipa_pta
      = id.src_cfun->gimple_df->ipa_pta;

  /* Copy the function's static chain.  */
  p = DECL_STRUCT_FUNCTION (old_decl)->static_chain_decl;
  if (p)
    DECL_STRUCT_FUNCTION (new_decl)->static_chain_decl
      = copy_static_chain (p, &id);

  auto_vec<int, 16> new_param_indices;
  clone_info *info = clone_info::get (old_version_node);
  ipa_param_adjustments *old_param_adjustments
    = info ? info->param_adjustments : NULL;
  if (old_param_adjustments)
    old_param_adjustments->get_updated_indices (&new_param_indices);

  /* If there's a tree_map, prepare for substitution.  */
  if (tree_map)
    for (i = 0; i < tree_map->length (); i++)
      {
        gimple *init;
        replace_info = (*tree_map)[i];

        int p = replace_info->parm_num;
        if (old_param_adjustments)
          p = new_param_indices[p];

        tree parm;
        for (parm = DECL_ARGUMENTS (old_decl); p;
             parm = DECL_CHAIN (parm))
          p--;
        gcc_assert (parm);
        init = setup_one_parameter (&id, parm, replace_info->new_tree,
                                    id.src_fn, NULL, &vars);
        if (init)
          init_stmts.safe_push (init);
      }

  ipa_param_body_adjustments *param_body_adjs = NULL;
  if (param_adjustments)
    {
      param_body_adjs = new ipa_param_body_adjustments (param_adjustments,
                                                        new_decl, old_decl,
                                                        &id, &vars, tree_map);
      id.param_body_adjs = param_body_adjs;
      DECL_ARGUMENTS (new_decl) = param_body_adjs->get_new_param_chain ();
    }
  else if (DECL_ARGUMENTS (old_decl) != NULL_TREE)
    DECL_ARGUMENTS (new_decl)
      = copy_arguments_nochange (DECL_ARGUMENTS (old_decl), &id);

  DECL_INITIAL (new_decl) = remap_blocks (DECL_INITIAL (id.src_fn), &id);
  BLOCK_SUPERCONTEXT (DECL_INITIAL (new_decl)) = new_decl;

  declare_inline_vars (DECL_INITIAL (new_decl), vars);

  if (!vec_safe_is_empty (DECL_STRUCT_FUNCTION (old_decl)->local_decls))
    /* Add local vars.  */
    add_local_variables (DECL_STRUCT_FUNCTION (old_decl), cfun, &id);

  if (DECL_RESULT (old_decl) == NULL_TREE)
    ;
  else if (param_adjustments && param_adjustments->m_skip_return
           && !VOID_TYPE_P (TREE_TYPE (DECL_RESULT (old_decl))))
    {
      tree resdecl_repl = copy_result_decl_to_var (DECL_RESULT (old_decl),
                                                   &id);
      declare_inline_vars (NULL, resdecl_repl);
      if (DECL_BY_REFERENCE (DECL_RESULT (old_decl)))
        resdecl_repl = build_fold_addr_expr (resdecl_repl);
      insert_decl_map (&id, DECL_RESULT (old_decl), resdecl_repl);

      DECL_RESULT (new_decl)
        = build_decl (DECL_SOURCE_LOCATION (DECL_RESULT (old_decl)),
                      RESULT_DECL, NULL_TREE, void_type_node);
      DECL_CONTEXT (DECL_RESULT (new_decl)) = new_decl;
      DECL_IS_MALLOC (new_decl) = false;
      cfun->returns_struct = 0;
      cfun->returns_pcc_struct = 0;
    }
  else
    {
      tree old_name;
      DECL_RESULT (new_decl) = remap_decl (DECL_RESULT (old_decl), &id);
      lang_hooks.dup_lang_specific_decl (DECL_RESULT (new_decl));
      if (gimple_in_ssa_p (id.src_cfun)
          && DECL_BY_REFERENCE (DECL_RESULT (old_decl))
          && (old_name = ssa_default_def (id.src_cfun,
                                          DECL_RESULT (old_decl))))
        {
          tree new_name = make_ssa_name (DECL_RESULT (new_decl));
          insert_decl_map (&id, old_name, new_name);
          SSA_NAME_DEF_STMT (new_name) = gimple_build_nop ();
          set_ssa_default_def (cfun, DECL_RESULT (new_decl), new_name);
        }
    }

  /* Set up the destination function's loop tree.  */
  if (loops_for_fn (DECL_STRUCT_FUNCTION (old_decl)) != NULL)
    {
      cfun->curr_properties &= ~PROP_loops;
      loop_optimizer_init (AVOID_CFG_MODIFICATIONS);
      cfun->curr_properties |= PROP_loops;
    }

  /* Copy the function's body.  */
  copy_body (&id, ENTRY_BLOCK_PTR_FOR_FN (cfun), EXIT_BLOCK_PTR_FOR_FN (cfun),
             new_entry);

  /* Renumber the lexical scoping (non-code) blocks consecutively.  */
  number_blocks (new_decl);

  /* We want to create the BB unconditionally, so that the addition of
     debug stmts doesn't affect BB count, which may in the end cause
     codegen differences.  */
  bb = split_edge (single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
  while (init_stmts.length ())
    insert_init_stmt (&id, bb, init_stmts.pop ());
  if (param_body_adjs)
    param_body_adjs->append_init_stmts (bb);
  update_clone_info (&id);

  /* Remap the nonlocal_goto_save_area, if any.  */
  if (cfun->nonlocal_goto_save_area)
    {
      struct walk_stmt_info wi;

      memset (&wi, 0, sizeof (wi));
      wi.info = &id;
      walk_tree (&cfun->nonlocal_goto_save_area, remap_gimple_op_r, &wi, NULL);
    }

  /* Clean up.  */
  delete id.decl_map;
  if (id.debug_map)
    delete id.debug_map;
  free_dominance_info (CDI_DOMINATORS);
  free_dominance_info (CDI_POST_DOMINATORS);

  update_max_bb_count ();
  fold_marked_statements (0, id.statements_to_fold);
  delete id.statements_to_fold;
  delete_unreachable_blocks_update_callgraph (id.dst_node, update_clones);
  if (id.dst_node->definition)
    cgraph_edge::rebuild_references ();
  if (loops_state_satisfies_p (LOOPS_NEED_FIXUP))
    {
      calculate_dominance_info (CDI_DOMINATORS);
      fix_loop_structure (NULL);
    }
  update_ssa (TODO_update_ssa);

  /* After partial cloning we need to rescale frequencies, so they are
     within proper range in the cloned function.  */
  if (new_entry)
    {
      struct cgraph_edge *e;
      rebuild_frequencies ();

      new_version_node->count = ENTRY_BLOCK_PTR_FOR_FN (cfun)->count;
      for (e = new_version_node->callees; e; e = e->next_callee)
        {
          basic_block bb = gimple_bb (e->call_stmt);
          e->count = bb->count;
        }
      for (e = new_version_node->indirect_calls; e; e = e->next_callee)
        {
          basic_block bb = gimple_bb (e->call_stmt);
          e->count = bb->count;
        }
    }

  if (param_body_adjs && MAY_HAVE_DEBUG_BIND_STMTS)
    {
      vec<tree, va_gc> **debug_args = NULL;
      unsigned int len = 0;
      unsigned reset_len = param_body_adjs->m_reset_debug_decls.length ();

      for (i = 0; i < reset_len; i++)
        {
          tree parm = param_body_adjs->m_reset_debug_decls[i];
          gcc_assert (is_gimple_reg (parm));
          tree ddecl;

          if (debug_args == NULL)
            {
              debug_args = decl_debug_args_insert (new_decl);
              len = vec_safe_length (*debug_args);
            }
          ddecl = build_debug_expr_decl (TREE_TYPE (parm));
          /* FIXME: Is setting the mode really necessary?  */
          SET_DECL_MODE (ddecl, DECL_MODE (parm));
          vec_safe_push (*debug_args, DECL_ORIGIN (parm));
          vec_safe_push (*debug_args, ddecl);
        }
      if (debug_args != NULL)
        {
          /* On the callee side, add
               DEBUG D#Y s=> parm
               DEBUG var => D#Y
             stmts to the first bb where var is a VAR_DECL created for the
             optimized away parameter in DECL_INITIAL block.  This hints
             in the debug info that var (whose DECL_ORIGIN is the parm
             PARM_DECL) is optimized away, but could be looked up at the
             call site as value of D#X there.  */
          gimple_stmt_iterator cgsi
            = gsi_after_labels (single_succ (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
          gimple *def_temp;
          tree var = vars;
          i = vec_safe_length (*debug_args);
          do
            {
              tree vexpr = NULL_TREE;
              i -= 2;
              while (var != NULL_TREE
                     && DECL_ABSTRACT_ORIGIN (var) != (**debug_args)[i])
                var = TREE_CHAIN (var);
              if (var == NULL_TREE)
                break;
              tree parm = (**debug_args)[i];
              if (tree parm_ddef = ssa_default_def (id.src_cfun, parm))
                if (tree *d
                    = param_body_adjs->m_dead_ssa_debug_equiv.get (parm_ddef))
                  vexpr = *d;
              if (!vexpr)
                {
                  vexpr = build_debug_expr_decl (TREE_TYPE (parm));
                  /* FIXME: Is setting the mode really necessary?  */
                  SET_DECL_MODE (vexpr, DECL_MODE (parm));
                }
              def_temp = gimple_build_debug_bind (var, vexpr, NULL);
              gsi_insert_before (&cgsi, def_temp, GSI_NEW_STMT);
              def_temp = gimple_build_debug_source_bind (vexpr, parm, NULL);
              gsi_insert_before (&cgsi, def_temp, GSI_NEW_STMT);
            }
          while (i > len);
        }
    }
  delete param_body_adjs;
  free_dominance_info (CDI_DOMINATORS);
  free_dominance_info (CDI_POST_DOMINATORS);

  gcc_assert (!id.debug_stmts.exists ());
  pop_cfun ();
  input_location = saved_location;
}
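
/* Example call (a sketch mirroring how clone materialization drives this
   function): given an original NODE and a prepared clone NEW_NODE whose
   clone_info is INFO,

     tree_function_versioning (node->decl, new_node->decl,
                               info ? info->tree_map : NULL,
                               info ? info->param_adjustments : NULL,
                               true, NULL, NULL);

   copies the whole body, substitutes the constants recorded in the
   replace map, and, because UPDATE_CLONES is true, moves the callgraph
   edges of the clones onto the new body as well.  */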

/* EXP is a CALL_EXPR present in a GENERIC expression tree.  Try to integrate
   the callee and return the inlined body on success.  */

tree
maybe_inline_call_in_expr (tree exp)
{
  tree fn = get_callee_fndecl (exp);

  /* We can only try to inline "const" functions.  */
  if (fn && TREE_READONLY (fn) && DECL_SAVED_TREE (fn))
    {
      call_expr_arg_iterator iter;
      copy_body_data id;
      tree param, arg, t;
      hash_map<tree, tree> decl_map;

      /* Remap the parameters.  */
      for (param = DECL_ARGUMENTS (fn), arg = first_call_expr_arg (exp, &iter);
           param;
           param = DECL_CHAIN (param), arg = next_call_expr_arg (&iter))
        decl_map.put (param, arg);

      memset (&id, 0, sizeof (id));
      id.src_fn = fn;
      id.dst_fn = current_function_decl;
      id.src_cfun = DECL_STRUCT_FUNCTION (fn);
      id.decl_map = &decl_map;

      id.copy_decl = copy_decl_no_change;
      id.transform_call_graph_edges = CB_CGE_DUPLICATE;
      id.transform_new_cfg = false;
      id.transform_return_to_modify = true;
      id.transform_parameter = true;

      /* Make sure not to unshare trees behind the front-end's back
         since front-end specific mechanisms may rely on sharing.  */
      id.regimplify = false;
      id.do_not_unshare = true;

      /* We're not inside any EH region.  */
      id.eh_lp_nr = 0;

      t = copy_tree_body (&id);

      /* We can only return something suitable for use in a GENERIC
         expression tree.  */
      if (TREE_CODE (t) == MODIFY_EXPR)
        return TREE_OPERAND (t, 1);
    }

  return NULL_TREE;
}
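
/* Example (sketch): for a callee like

     int sq (int x) { return x * x; }

   and a GENERIC CALL_EXPR invoking sq with argument 3, a front end can
   attempt

     tree folded = maybe_inline_call_in_expr (call);

   and, when SQ is TREE_READONLY with DECL_SAVED_TREE still available,
   FOLDED is the remapped body expression (here 3 * 3) rather than
   NULL_TREE.  */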

/* Duplicate a type, fields and all.  */

tree
build_duplicate_type (tree type)
{
  struct copy_body_data id;

  memset (&id, 0, sizeof (id));
  id.src_fn = current_function_decl;
  id.dst_fn = current_function_decl;
  id.src_cfun = cfun;
  id.decl_map = new hash_map<tree, tree>;
  id.debug_map = NULL;
  id.copy_decl = copy_decl_no_change;

  type = remap_type_1 (type, &id);

  delete id.decl_map;
  if (id.debug_map)
    delete id.debug_map;

  TYPE_CANONICAL (type) = type;

  return type;
}
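
/* Note on the TYPE_CANONICAL assignment above: making the duplicate its
   own canonical type marks it as structurally independent, so the alias
   machinery does not conflate it with the type it was copied from.  */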

/* Unshare the entire DECL_SAVED_TREE of FN and return the remapped
   parameters and RESULT_DECL in PARMS and RESULT.  Used by C++ constexpr
   evaluation.  */

tree
copy_fn (tree fn, tree& parms, tree& result)
{
  copy_body_data id;
  tree param;
  hash_map<tree, tree> decl_map;

  tree *p = &parms;
  *p = NULL_TREE;

  memset (&id, 0, sizeof (id));

  id.src_fn = fn;
  id.dst_fn = current_function_decl;
  id.src_cfun = DECL_STRUCT_FUNCTION (fn);
  id.decl_map = &decl_map;

  id.copy_decl = copy_decl_no_change;
  id.transform_call_graph_edges = CB_CGE_DUPLICATE;
  id.transform_new_cfg = false;
  id.transform_return_to_modify = false;
  id.transform_parameter = true;

  /* Make sure not to unshare trees behind the front-end's back
     since front-end specific mechanisms may rely on sharing.  */
  id.regimplify = false;
  id.do_not_unshare = true;
  id.do_not_fold = true;

  /* We're not inside any EH region.  */
  id.eh_lp_nr = 0;

  /* Remap the parameters and result and return them to the caller.  */
  for (param = DECL_ARGUMENTS (fn);
       param;
       param = DECL_CHAIN (param))
    {
      *p = remap_decl (param, &id);
      p = &DECL_CHAIN (*p);
    }

  if (DECL_RESULT (fn))
    result = remap_decl (DECL_RESULT (fn), &id);
  else
    result = NULL_TREE;

  return copy_tree_body (&id);
}
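
/* Example use (sketch of how a constexpr-style evaluator calls this):

     tree parms, result;
     tree body = copy_fn (fndecl, parms, result);

   BODY is an unshared copy of DECL_SAVED_TREE (fndecl); PARMS receives
   the chain of remapped parameters and RESULT the remapped RESULT_DECL,
   so the evaluator can bind argument values to PARMS and read the return
   value out of RESULT without mutating the original function.  */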