2013-11-12 Andrew MacLeod <amacleod@redhat.com>
[official-gcc.git] / gcc / tree-inline.c
blob5d30caa6f6bb333ddb524f96460adbab0546c67f
1 /* Tree inlining.
2 Copyright (C) 2001-2013 Free Software Foundation, Inc.
3 Contributed by Alexandre Oliva <aoliva@redhat.com>
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
10 any later version.
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "tm.h"
25 #include "diagnostic-core.h"
26 #include "tree.h"
27 #include "tree-inline.h"
28 #include "flags.h"
29 #include "params.h"
30 #include "input.h"
31 #include "insn-config.h"
32 #include "hashtab.h"
33 #include "langhooks.h"
34 #include "basic-block.h"
35 #include "tree-iterator.h"
36 #include "intl.h"
37 #include "gimplify.h"
38 #include "gimple-ssa.h"
39 #include "tree-cfg.h"
40 #include "tree-phinodes.h"
41 #include "ssa-iterators.h"
42 #include "tree-ssanames.h"
43 #include "tree-into-ssa.h"
44 #include "tree-dfa.h"
45 #include "tree-ssa.h"
46 #include "function.h"
47 #include "tree-pretty-print.h"
48 #include "except.h"
49 #include "debug.h"
50 #include "pointer-set.h"
51 #include "ipa-prop.h"
52 #include "value-prof.h"
53 #include "tree-pass.h"
54 #include "target.h"
55 #include "cfgloop.h"
57 #include "rtl.h" /* FIXME: For asm_str_count. */
59 /* I'm not real happy about this, but we need to handle gimple and
60 non-gimple trees. */
62 /* Inlining, Cloning, Versioning, Parallelization
64 Inlining: a function body is duplicated, but the PARM_DECLs are
65 remapped into VAR_DECLs, and non-void RETURN_EXPRs become
66 MODIFY_EXPRs that store to a dedicated returned-value variable.
67 The duplicated eh_region info of the copy will later be appended
68 to the info for the caller; the eh_region info in copied throwing
69 statements and RESX statements are adjusted accordingly.
71 Cloning: (only in C++) We have one body for a con/de/structor, and
72 multiple function decls, each with a unique parameter list.
73 Duplicate the body, using the given splay tree; some parameters
74 will become constants (like 0 or 1).
76 Versioning: a function body is duplicated and the result is a new
77 function rather than into blocks of an existing function as with
78 inlining. Some parameters will become constants.
80 Parallelization: a region of a function is duplicated resulting in
81 a new function. Variables may be replaced with complex expressions
82 to enable shared variable semantics.
84 All of these will simultaneously lookup any callgraph edges. If
85 we're going to inline the duplicated function body, and the given
86 function has some cloned callgraph nodes (one for each place this
87 function will be inlined) those callgraph edges will be duplicated.
88 If we're cloning the body, those callgraph edges will be
89 updated to point into the new body. (Note that the original
90 callgraph node and edge list will not be altered.)
92 See the CALL_EXPR handling case in copy_tree_body_r (). */
94 /* To Do:
96 o In order to make inlining-on-trees work, we pessimized
97 function-local static constants. In particular, they are now
98 always output, even when not addressed. Fix this by treating
99 function-local static constants just like global static
100 constants; the back-end already knows not to output them if they
101 are not needed.
103 o Provide heuristics to clamp inlining of recursive template
104 calls? */
107 /* Weights that estimate_num_insns uses to estimate the size of the
108 produced code. */
110 eni_weights eni_size_weights;
112 /* Weights that estimate_num_insns uses to estimate the time necessary
113 to execute the produced code. */
115 eni_weights eni_time_weights;
117 /* Prototypes. */
119 static tree declare_return_variable (copy_body_data *, tree, tree, basic_block);
120 static void remap_block (tree *, copy_body_data *);
121 static void copy_bind_expr (tree *, int *, copy_body_data *);
122 static void declare_inline_vars (tree, tree);
123 static void remap_save_expr (tree *, void *, int *);
124 static void prepend_lexical_block (tree current_block, tree new_block);
125 static tree copy_decl_to_var (tree, copy_body_data *);
126 static tree copy_result_decl_to_var (tree, copy_body_data *);
127 static tree copy_decl_maybe_to_var (tree, copy_body_data *);
128 static gimple remap_gimple_stmt (gimple, copy_body_data *);
129 static bool delete_unreachable_blocks_update_callgraph (copy_body_data *id);
131 /* Insert a tree->tree mapping for ID. Despite the name suggests
132 that the trees should be variables, it is used for more than that. */
134 void
135 insert_decl_map (copy_body_data *id, tree key, tree value)
137 *pointer_map_insert (id->decl_map, key) = value;
139 /* Always insert an identity map as well. If we see this same new
140 node again, we won't want to duplicate it a second time. */
141 if (key != value)
142 *pointer_map_insert (id->decl_map, value) = value;
145 /* Insert a tree->tree mapping for ID. This is only used for
146 variables. */
148 static void
149 insert_debug_decl_map (copy_body_data *id, tree key, tree value)
151 if (!gimple_in_ssa_p (id->src_cfun))
152 return;
154 if (!MAY_HAVE_DEBUG_STMTS)
155 return;
157 if (!target_for_debug_bind (key))
158 return;
160 gcc_assert (TREE_CODE (key) == PARM_DECL);
161 gcc_assert (TREE_CODE (value) == VAR_DECL);
163 if (!id->debug_map)
164 id->debug_map = pointer_map_create ();
166 *pointer_map_insert (id->debug_map, key) = value;
169 /* If nonzero, we're remapping the contents of inlined debug
170 statements. If negative, an error has occurred, such as a
171 reference to a variable that isn't available in the inlined
172 context. */
173 static int processing_debug_stmt = 0;
175 /* Construct new SSA name for old NAME. ID is the inline context. */
177 static tree
178 remap_ssa_name (tree name, copy_body_data *id)
180 tree new_tree, var;
181 tree *n;
183 gcc_assert (TREE_CODE (name) == SSA_NAME);
185 n = (tree *) pointer_map_contains (id->decl_map, name);
186 if (n)
187 return unshare_expr (*n);
189 if (processing_debug_stmt)
191 if (SSA_NAME_IS_DEFAULT_DEF (name)
192 && TREE_CODE (SSA_NAME_VAR (name)) == PARM_DECL
193 && id->entry_bb == NULL
194 && single_succ_p (ENTRY_BLOCK_PTR))
196 tree vexpr = make_node (DEBUG_EXPR_DECL);
197 gimple def_temp;
198 gimple_stmt_iterator gsi;
199 tree val = SSA_NAME_VAR (name);
201 n = (tree *) pointer_map_contains (id->decl_map, val);
202 if (n != NULL)
203 val = *n;
204 if (TREE_CODE (val) != PARM_DECL)
206 processing_debug_stmt = -1;
207 return name;
209 def_temp = gimple_build_debug_source_bind (vexpr, val, NULL);
210 DECL_ARTIFICIAL (vexpr) = 1;
211 TREE_TYPE (vexpr) = TREE_TYPE (name);
212 DECL_MODE (vexpr) = DECL_MODE (SSA_NAME_VAR (name));
213 gsi = gsi_after_labels (single_succ (ENTRY_BLOCK_PTR));
214 gsi_insert_before (&gsi, def_temp, GSI_SAME_STMT);
215 return vexpr;
218 processing_debug_stmt = -1;
219 return name;
222 /* Remap anonymous SSA names or SSA names of anonymous decls. */
223 var = SSA_NAME_VAR (name);
224 if (!var
225 || (!SSA_NAME_IS_DEFAULT_DEF (name)
226 && TREE_CODE (var) == VAR_DECL
227 && !VAR_DECL_IS_VIRTUAL_OPERAND (var)
228 && DECL_ARTIFICIAL (var)
229 && DECL_IGNORED_P (var)
230 && !DECL_NAME (var)))
232 struct ptr_info_def *pi;
233 new_tree = make_ssa_name (remap_type (TREE_TYPE (name), id), NULL);
234 if (!var && SSA_NAME_IDENTIFIER (name))
235 SET_SSA_NAME_VAR_OR_IDENTIFIER (new_tree, SSA_NAME_IDENTIFIER (name));
236 insert_decl_map (id, name, new_tree);
237 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (new_tree)
238 = SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name);
239 /* At least IPA points-to info can be directly transferred. */
240 if (id->src_cfun->gimple_df
241 && id->src_cfun->gimple_df->ipa_pta
242 && (pi = SSA_NAME_PTR_INFO (name))
243 && !pi->pt.anything)
245 struct ptr_info_def *new_pi = get_ptr_info (new_tree);
246 new_pi->pt = pi->pt;
248 return new_tree;
251 /* Do not set DEF_STMT yet as statement is not copied yet. We do that
252 in copy_bb. */
253 new_tree = remap_decl (var, id);
255 /* We might've substituted constant or another SSA_NAME for
256 the variable.
258 Replace the SSA name representing RESULT_DECL by variable during
259 inlining: this saves us from need to introduce PHI node in a case
260 return value is just partly initialized. */
261 if ((TREE_CODE (new_tree) == VAR_DECL || TREE_CODE (new_tree) == PARM_DECL)
262 && (!SSA_NAME_VAR (name)
263 || TREE_CODE (SSA_NAME_VAR (name)) != RESULT_DECL
264 || !id->transform_return_to_modify))
266 struct ptr_info_def *pi;
267 new_tree = make_ssa_name (new_tree, NULL);
268 insert_decl_map (id, name, new_tree);
269 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (new_tree)
270 = SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name);
271 /* At least IPA points-to info can be directly transferred. */
272 if (id->src_cfun->gimple_df
273 && id->src_cfun->gimple_df->ipa_pta
274 && (pi = SSA_NAME_PTR_INFO (name))
275 && !pi->pt.anything)
277 struct ptr_info_def *new_pi = get_ptr_info (new_tree);
278 new_pi->pt = pi->pt;
280 if (SSA_NAME_IS_DEFAULT_DEF (name))
282 /* By inlining function having uninitialized variable, we might
283 extend the lifetime (variable might get reused). This cause
284 ICE in the case we end up extending lifetime of SSA name across
285 abnormal edge, but also increase register pressure.
287 We simply initialize all uninitialized vars by 0 except
288 for case we are inlining to very first BB. We can avoid
289 this for all BBs that are not inside strongly connected
290 regions of the CFG, but this is expensive to test. */
291 if (id->entry_bb
292 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name)
293 && (!SSA_NAME_VAR (name)
294 || TREE_CODE (SSA_NAME_VAR (name)) != PARM_DECL)
295 && (id->entry_bb != EDGE_SUCC (ENTRY_BLOCK_PTR, 0)->dest
296 || EDGE_COUNT (id->entry_bb->preds) != 1))
298 gimple_stmt_iterator gsi = gsi_last_bb (id->entry_bb);
299 gimple init_stmt;
300 tree zero = build_zero_cst (TREE_TYPE (new_tree));
302 init_stmt = gimple_build_assign (new_tree, zero);
303 gsi_insert_after (&gsi, init_stmt, GSI_NEW_STMT);
304 SSA_NAME_IS_DEFAULT_DEF (new_tree) = 0;
306 else
308 SSA_NAME_DEF_STMT (new_tree) = gimple_build_nop ();
309 set_ssa_default_def (cfun, SSA_NAME_VAR (new_tree), new_tree);
313 else
314 insert_decl_map (id, name, new_tree);
315 return new_tree;
318 /* Remap DECL during the copying of the BLOCK tree for the function. */
320 tree
321 remap_decl (tree decl, copy_body_data *id)
323 tree *n;
325 /* We only remap local variables in the current function. */
327 /* See if we have remapped this declaration. */
329 n = (tree *) pointer_map_contains (id->decl_map, decl);
331 if (!n && processing_debug_stmt)
333 processing_debug_stmt = -1;
334 return decl;
337 /* If we didn't already have an equivalent for this declaration,
338 create one now. */
339 if (!n)
341 /* Make a copy of the variable or label. */
342 tree t = id->copy_decl (decl, id);
344 /* Remember it, so that if we encounter this local entity again
345 we can reuse this copy. Do this early because remap_type may
346 need this decl for TYPE_STUB_DECL. */
347 insert_decl_map (id, decl, t);
349 if (!DECL_P (t))
350 return t;
352 /* Remap types, if necessary. */
353 TREE_TYPE (t) = remap_type (TREE_TYPE (t), id);
354 if (TREE_CODE (t) == TYPE_DECL)
355 DECL_ORIGINAL_TYPE (t) = remap_type (DECL_ORIGINAL_TYPE (t), id);
357 /* Remap sizes as necessary. */
358 walk_tree (&DECL_SIZE (t), copy_tree_body_r, id, NULL);
359 walk_tree (&DECL_SIZE_UNIT (t), copy_tree_body_r, id, NULL);
361 /* If fields, do likewise for offset and qualifier. */
362 if (TREE_CODE (t) == FIELD_DECL)
364 walk_tree (&DECL_FIELD_OFFSET (t), copy_tree_body_r, id, NULL);
365 if (TREE_CODE (DECL_CONTEXT (t)) == QUAL_UNION_TYPE)
366 walk_tree (&DECL_QUALIFIER (t), copy_tree_body_r, id, NULL);
369 return t;
372 if (id->do_not_unshare)
373 return *n;
374 else
375 return unshare_expr (*n);
378 static tree
379 remap_type_1 (tree type, copy_body_data *id)
381 tree new_tree, t;
383 /* We do need a copy. build and register it now. If this is a pointer or
384 reference type, remap the designated type and make a new pointer or
385 reference type. */
386 if (TREE_CODE (type) == POINTER_TYPE)
388 new_tree = build_pointer_type_for_mode (remap_type (TREE_TYPE (type), id),
389 TYPE_MODE (type),
390 TYPE_REF_CAN_ALIAS_ALL (type));
391 if (TYPE_ATTRIBUTES (type) || TYPE_QUALS (type))
392 new_tree = build_type_attribute_qual_variant (new_tree,
393 TYPE_ATTRIBUTES (type),
394 TYPE_QUALS (type));
395 insert_decl_map (id, type, new_tree);
396 return new_tree;
398 else if (TREE_CODE (type) == REFERENCE_TYPE)
400 new_tree = build_reference_type_for_mode (remap_type (TREE_TYPE (type), id),
401 TYPE_MODE (type),
402 TYPE_REF_CAN_ALIAS_ALL (type));
403 if (TYPE_ATTRIBUTES (type) || TYPE_QUALS (type))
404 new_tree = build_type_attribute_qual_variant (new_tree,
405 TYPE_ATTRIBUTES (type),
406 TYPE_QUALS (type));
407 insert_decl_map (id, type, new_tree);
408 return new_tree;
410 else
411 new_tree = copy_node (type);
413 insert_decl_map (id, type, new_tree);
415 /* This is a new type, not a copy of an old type. Need to reassociate
416 variants. We can handle everything except the main variant lazily. */
417 t = TYPE_MAIN_VARIANT (type);
418 if (type != t)
420 t = remap_type (t, id);
421 TYPE_MAIN_VARIANT (new_tree) = t;
422 TYPE_NEXT_VARIANT (new_tree) = TYPE_NEXT_VARIANT (t);
423 TYPE_NEXT_VARIANT (t) = new_tree;
425 else
427 TYPE_MAIN_VARIANT (new_tree) = new_tree;
428 TYPE_NEXT_VARIANT (new_tree) = NULL;
431 if (TYPE_STUB_DECL (type))
432 TYPE_STUB_DECL (new_tree) = remap_decl (TYPE_STUB_DECL (type), id);
434 /* Lazily create pointer and reference types. */
435 TYPE_POINTER_TO (new_tree) = NULL;
436 TYPE_REFERENCE_TO (new_tree) = NULL;
438 switch (TREE_CODE (new_tree))
440 case INTEGER_TYPE:
441 case REAL_TYPE:
442 case FIXED_POINT_TYPE:
443 case ENUMERAL_TYPE:
444 case BOOLEAN_TYPE:
445 t = TYPE_MIN_VALUE (new_tree);
446 if (t && TREE_CODE (t) != INTEGER_CST)
447 walk_tree (&TYPE_MIN_VALUE (new_tree), copy_tree_body_r, id, NULL);
449 t = TYPE_MAX_VALUE (new_tree);
450 if (t && TREE_CODE (t) != INTEGER_CST)
451 walk_tree (&TYPE_MAX_VALUE (new_tree), copy_tree_body_r, id, NULL);
452 return new_tree;
454 case FUNCTION_TYPE:
455 TREE_TYPE (new_tree) = remap_type (TREE_TYPE (new_tree), id);
456 walk_tree (&TYPE_ARG_TYPES (new_tree), copy_tree_body_r, id, NULL);
457 return new_tree;
459 case ARRAY_TYPE:
460 TREE_TYPE (new_tree) = remap_type (TREE_TYPE (new_tree), id);
461 TYPE_DOMAIN (new_tree) = remap_type (TYPE_DOMAIN (new_tree), id);
462 break;
464 case RECORD_TYPE:
465 case UNION_TYPE:
466 case QUAL_UNION_TYPE:
468 tree f, nf = NULL;
470 for (f = TYPE_FIELDS (new_tree); f ; f = DECL_CHAIN (f))
472 t = remap_decl (f, id);
473 DECL_CONTEXT (t) = new_tree;
474 DECL_CHAIN (t) = nf;
475 nf = t;
477 TYPE_FIELDS (new_tree) = nreverse (nf);
479 break;
481 case OFFSET_TYPE:
482 default:
483 /* Shouldn't have been thought variable sized. */
484 gcc_unreachable ();
487 walk_tree (&TYPE_SIZE (new_tree), copy_tree_body_r, id, NULL);
488 walk_tree (&TYPE_SIZE_UNIT (new_tree), copy_tree_body_r, id, NULL);
490 return new_tree;
493 tree
494 remap_type (tree type, copy_body_data *id)
496 tree *node;
497 tree tmp;
499 if (type == NULL)
500 return type;
502 /* See if we have remapped this type. */
503 node = (tree *) pointer_map_contains (id->decl_map, type);
504 if (node)
505 return *node;
507 /* The type only needs remapping if it's variably modified. */
508 if (! variably_modified_type_p (type, id->src_fn))
510 insert_decl_map (id, type, type);
511 return type;
514 id->remapping_type_depth++;
515 tmp = remap_type_1 (type, id);
516 id->remapping_type_depth--;
518 return tmp;
521 /* Decide if DECL can be put into BLOCK_NONLOCAL_VARs. */
523 static bool
524 can_be_nonlocal (tree decl, copy_body_data *id)
526 /* We can not duplicate function decls. */
527 if (TREE_CODE (decl) == FUNCTION_DECL)
528 return true;
530 /* Local static vars must be non-local or we get multiple declaration
531 problems. */
532 if (TREE_CODE (decl) == VAR_DECL
533 && !auto_var_in_fn_p (decl, id->src_fn))
534 return true;
536 return false;
539 static tree
540 remap_decls (tree decls, vec<tree, va_gc> **nonlocalized_list,
541 copy_body_data *id)
543 tree old_var;
544 tree new_decls = NULL_TREE;
546 /* Remap its variables. */
547 for (old_var = decls; old_var; old_var = DECL_CHAIN (old_var))
549 tree new_var;
551 if (can_be_nonlocal (old_var, id))
553 /* We need to add this variable to the local decls as otherwise
554 nothing else will do so. */
555 if (TREE_CODE (old_var) == VAR_DECL
556 && ! DECL_EXTERNAL (old_var))
557 add_local_decl (cfun, old_var);
558 if ((!optimize || debug_info_level > DINFO_LEVEL_TERSE)
559 && !DECL_IGNORED_P (old_var)
560 && nonlocalized_list)
561 vec_safe_push (*nonlocalized_list, old_var);
562 continue;
565 /* Remap the variable. */
566 new_var = remap_decl (old_var, id);
568 /* If we didn't remap this variable, we can't mess with its
569 TREE_CHAIN. If we remapped this variable to the return slot, it's
570 already declared somewhere else, so don't declare it here. */
572 if (new_var == id->retvar)
574 else if (!new_var)
576 if ((!optimize || debug_info_level > DINFO_LEVEL_TERSE)
577 && !DECL_IGNORED_P (old_var)
578 && nonlocalized_list)
579 vec_safe_push (*nonlocalized_list, old_var);
581 else
583 gcc_assert (DECL_P (new_var));
584 DECL_CHAIN (new_var) = new_decls;
585 new_decls = new_var;
587 /* Also copy value-expressions. */
588 if (TREE_CODE (new_var) == VAR_DECL
589 && DECL_HAS_VALUE_EXPR_P (new_var))
591 tree tem = DECL_VALUE_EXPR (new_var);
592 bool old_regimplify = id->regimplify;
593 id->remapping_type_depth++;
594 walk_tree (&tem, copy_tree_body_r, id, NULL);
595 id->remapping_type_depth--;
596 id->regimplify = old_regimplify;
597 SET_DECL_VALUE_EXPR (new_var, tem);
602 return nreverse (new_decls);
605 /* Copy the BLOCK to contain remapped versions of the variables
606 therein. And hook the new block into the block-tree. */
608 static void
609 remap_block (tree *block, copy_body_data *id)
611 tree old_block;
612 tree new_block;
614 /* Make the new block. */
615 old_block = *block;
616 new_block = make_node (BLOCK);
617 TREE_USED (new_block) = TREE_USED (old_block);
618 BLOCK_ABSTRACT_ORIGIN (new_block) = old_block;
619 BLOCK_SOURCE_LOCATION (new_block) = BLOCK_SOURCE_LOCATION (old_block);
620 BLOCK_NONLOCALIZED_VARS (new_block)
621 = vec_safe_copy (BLOCK_NONLOCALIZED_VARS (old_block));
622 *block = new_block;
624 /* Remap its variables. */
625 BLOCK_VARS (new_block) = remap_decls (BLOCK_VARS (old_block),
626 &BLOCK_NONLOCALIZED_VARS (new_block),
627 id);
629 if (id->transform_lang_insert_block)
630 id->transform_lang_insert_block (new_block);
632 /* Remember the remapped block. */
633 insert_decl_map (id, old_block, new_block);
636 /* Copy the whole block tree and root it in id->block. */
637 static tree
638 remap_blocks (tree block, copy_body_data *id)
640 tree t;
641 tree new_tree = block;
643 if (!block)
644 return NULL;
646 remap_block (&new_tree, id);
647 gcc_assert (new_tree != block);
648 for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
649 prepend_lexical_block (new_tree, remap_blocks (t, id));
650 /* Blocks are in arbitrary order, but make things slightly prettier and do
651 not swap order when producing a copy. */
652 BLOCK_SUBBLOCKS (new_tree) = blocks_nreverse (BLOCK_SUBBLOCKS (new_tree));
653 return new_tree;
656 /* Remap the block tree rooted at BLOCK to nothing. */
657 static void
658 remap_blocks_to_null (tree block, copy_body_data *id)
660 tree t;
661 insert_decl_map (id, block, NULL_TREE);
662 for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
663 remap_blocks_to_null (t, id);
666 static void
667 copy_statement_list (tree *tp)
669 tree_stmt_iterator oi, ni;
670 tree new_tree;
672 new_tree = alloc_stmt_list ();
673 ni = tsi_start (new_tree);
674 oi = tsi_start (*tp);
675 TREE_TYPE (new_tree) = TREE_TYPE (*tp);
676 *tp = new_tree;
678 for (; !tsi_end_p (oi); tsi_next (&oi))
680 tree stmt = tsi_stmt (oi);
681 if (TREE_CODE (stmt) == STATEMENT_LIST)
682 /* This copy is not redundant; tsi_link_after will smash this
683 STATEMENT_LIST into the end of the one we're building, and we
684 don't want to do that with the original. */
685 copy_statement_list (&stmt);
686 tsi_link_after (&ni, stmt, TSI_CONTINUE_LINKING);
690 static void
691 copy_bind_expr (tree *tp, int *walk_subtrees, copy_body_data *id)
693 tree block = BIND_EXPR_BLOCK (*tp);
694 /* Copy (and replace) the statement. */
695 copy_tree_r (tp, walk_subtrees, NULL);
696 if (block)
698 remap_block (&block, id);
699 BIND_EXPR_BLOCK (*tp) = block;
702 if (BIND_EXPR_VARS (*tp))
703 /* This will remap a lot of the same decls again, but this should be
704 harmless. */
705 BIND_EXPR_VARS (*tp) = remap_decls (BIND_EXPR_VARS (*tp), NULL, id);
709 /* Create a new gimple_seq by remapping all the statements in BODY
710 using the inlining information in ID. */
712 static gimple_seq
713 remap_gimple_seq (gimple_seq body, copy_body_data *id)
715 gimple_stmt_iterator si;
716 gimple_seq new_body = NULL;
718 for (si = gsi_start (body); !gsi_end_p (si); gsi_next (&si))
720 gimple new_stmt = remap_gimple_stmt (gsi_stmt (si), id);
721 gimple_seq_add_stmt (&new_body, new_stmt);
724 return new_body;
728 /* Copy a GIMPLE_BIND statement STMT, remapping all the symbols in its
729 block using the mapping information in ID. */
731 static gimple
732 copy_gimple_bind (gimple stmt, copy_body_data *id)
734 gimple new_bind;
735 tree new_block, new_vars;
736 gimple_seq body, new_body;
738 /* Copy the statement. Note that we purposely don't use copy_stmt
739 here because we need to remap statements as we copy. */
740 body = gimple_bind_body (stmt);
741 new_body = remap_gimple_seq (body, id);
743 new_block = gimple_bind_block (stmt);
744 if (new_block)
745 remap_block (&new_block, id);
747 /* This will remap a lot of the same decls again, but this should be
748 harmless. */
749 new_vars = gimple_bind_vars (stmt);
750 if (new_vars)
751 new_vars = remap_decls (new_vars, NULL, id);
753 new_bind = gimple_build_bind (new_vars, new_body, new_block);
755 return new_bind;
758 /* Return true if DECL is a parameter or a SSA_NAME for a parameter. */
760 static bool
761 is_parm (tree decl)
763 if (TREE_CODE (decl) == SSA_NAME)
765 decl = SSA_NAME_VAR (decl);
766 if (!decl)
767 return false;
770 return (TREE_CODE (decl) == PARM_DECL);
773 /* Remap the GIMPLE operand pointed to by *TP. DATA is really a
774 'struct walk_stmt_info *'. DATA->INFO is a 'copy_body_data *'.
775 WALK_SUBTREES is used to indicate walk_gimple_op whether to keep
776 recursing into the children nodes of *TP. */
778 static tree
779 remap_gimple_op_r (tree *tp, int *walk_subtrees, void *data)
781 struct walk_stmt_info *wi_p = (struct walk_stmt_info *) data;
782 copy_body_data *id = (copy_body_data *) wi_p->info;
783 tree fn = id->src_fn;
785 if (TREE_CODE (*tp) == SSA_NAME)
787 *tp = remap_ssa_name (*tp, id);
788 *walk_subtrees = 0;
789 return NULL;
791 else if (auto_var_in_fn_p (*tp, fn))
793 /* Local variables and labels need to be replaced by equivalent
794 variables. We don't want to copy static variables; there's
795 only one of those, no matter how many times we inline the
796 containing function. Similarly for globals from an outer
797 function. */
798 tree new_decl;
800 /* Remap the declaration. */
801 new_decl = remap_decl (*tp, id);
802 gcc_assert (new_decl);
803 /* Replace this variable with the copy. */
804 STRIP_TYPE_NOPS (new_decl);
805 /* ??? The C++ frontend uses void * pointer zero to initialize
806 any other type. This confuses the middle-end type verification.
807 As cloned bodies do not go through gimplification again the fixup
808 there doesn't trigger. */
809 if (TREE_CODE (new_decl) == INTEGER_CST
810 && !useless_type_conversion_p (TREE_TYPE (*tp), TREE_TYPE (new_decl)))
811 new_decl = fold_convert (TREE_TYPE (*tp), new_decl);
812 *tp = new_decl;
813 *walk_subtrees = 0;
815 else if (TREE_CODE (*tp) == STATEMENT_LIST)
816 gcc_unreachable ();
817 else if (TREE_CODE (*tp) == SAVE_EXPR)
818 gcc_unreachable ();
819 else if (TREE_CODE (*tp) == LABEL_DECL
820 && (!DECL_CONTEXT (*tp)
821 || decl_function_context (*tp) == id->src_fn))
822 /* These may need to be remapped for EH handling. */
823 *tp = remap_decl (*tp, id);
824 else if (TREE_CODE (*tp) == FIELD_DECL)
826 /* If the enclosing record type is variably_modified_type_p, the field
827 has already been remapped. Otherwise, it need not be. */
828 tree *n = (tree *) pointer_map_contains (id->decl_map, *tp);
829 if (n)
830 *tp = *n;
831 *walk_subtrees = 0;
833 else if (TYPE_P (*tp))
834 /* Types may need remapping as well. */
835 *tp = remap_type (*tp, id);
836 else if (CONSTANT_CLASS_P (*tp))
838 /* If this is a constant, we have to copy the node iff the type
839 will be remapped. copy_tree_r will not copy a constant. */
840 tree new_type = remap_type (TREE_TYPE (*tp), id);
842 if (new_type == TREE_TYPE (*tp))
843 *walk_subtrees = 0;
845 else if (TREE_CODE (*tp) == INTEGER_CST)
846 *tp = build_int_cst_wide (new_type, TREE_INT_CST_LOW (*tp),
847 TREE_INT_CST_HIGH (*tp));
848 else
850 *tp = copy_node (*tp);
851 TREE_TYPE (*tp) = new_type;
854 else
856 /* Otherwise, just copy the node. Note that copy_tree_r already
857 knows not to copy VAR_DECLs, etc., so this is safe. */
859 if (TREE_CODE (*tp) == MEM_REF)
861 /* We need to re-canonicalize MEM_REFs from inline substitutions
862 that can happen when a pointer argument is an ADDR_EXPR.
863 Recurse here manually to allow that. */
864 tree ptr = TREE_OPERAND (*tp, 0);
865 tree type = remap_type (TREE_TYPE (*tp), id);
866 tree old = *tp;
867 walk_tree (&ptr, remap_gimple_op_r, data, NULL);
868 *tp = fold_build2 (MEM_REF, type, ptr, TREE_OPERAND (*tp, 1));
869 TREE_THIS_VOLATILE (*tp) = TREE_THIS_VOLATILE (old);
870 TREE_SIDE_EFFECTS (*tp) = TREE_SIDE_EFFECTS (old);
871 TREE_NO_WARNING (*tp) = TREE_NO_WARNING (old);
872 /* We cannot propagate the TREE_THIS_NOTRAP flag if we have
873 remapped a parameter as the property might be valid only
874 for the parameter itself. */
875 if (TREE_THIS_NOTRAP (old)
876 && (!is_parm (TREE_OPERAND (old, 0))
877 || (!id->transform_parameter && is_parm (ptr))))
878 TREE_THIS_NOTRAP (*tp) = 1;
879 *walk_subtrees = 0;
880 return NULL;
883 /* Here is the "usual case". Copy this tree node, and then
884 tweak some special cases. */
885 copy_tree_r (tp, walk_subtrees, NULL);
887 if (TREE_CODE (*tp) != OMP_CLAUSE)
888 TREE_TYPE (*tp) = remap_type (TREE_TYPE (*tp), id);
890 if (TREE_CODE (*tp) == TARGET_EXPR && TREE_OPERAND (*tp, 3))
892 /* The copied TARGET_EXPR has never been expanded, even if the
893 original node was expanded already. */
894 TREE_OPERAND (*tp, 1) = TREE_OPERAND (*tp, 3);
895 TREE_OPERAND (*tp, 3) = NULL_TREE;
897 else if (TREE_CODE (*tp) == ADDR_EXPR)
899 /* Variable substitution need not be simple. In particular,
900 the MEM_REF substitution above. Make sure that
901 TREE_CONSTANT and friends are up-to-date. */
902 int invariant = is_gimple_min_invariant (*tp);
903 walk_tree (&TREE_OPERAND (*tp, 0), remap_gimple_op_r, data, NULL);
904 recompute_tree_invariant_for_addr_expr (*tp);
906 /* If this used to be invariant, but is not any longer,
907 then regimplification is probably needed. */
908 if (invariant && !is_gimple_min_invariant (*tp))
909 id->regimplify = true;
911 *walk_subtrees = 0;
915 /* Update the TREE_BLOCK for the cloned expr. */
916 if (EXPR_P (*tp))
918 tree new_block = id->remapping_type_depth == 0 ? id->block : NULL;
919 tree old_block = TREE_BLOCK (*tp);
920 if (old_block)
922 tree *n;
923 n = (tree *) pointer_map_contains (id->decl_map,
924 TREE_BLOCK (*tp));
925 if (n)
926 new_block = *n;
928 TREE_SET_BLOCK (*tp, new_block);
931 /* Keep iterating. */
932 return NULL_TREE;
936 /* Called from copy_body_id via walk_tree. DATA is really a
937 `copy_body_data *'. */
939 tree
940 copy_tree_body_r (tree *tp, int *walk_subtrees, void *data)
942 copy_body_data *id = (copy_body_data *) data;
943 tree fn = id->src_fn;
944 tree new_block;
946 /* Begin by recognizing trees that we'll completely rewrite for the
947 inlining context. Our output for these trees is completely
948 different from out input (e.g. RETURN_EXPR is deleted, and morphs
949 into an edge). Further down, we'll handle trees that get
950 duplicated and/or tweaked. */
952 /* When requested, RETURN_EXPRs should be transformed to just the
953 contained MODIFY_EXPR. The branch semantics of the return will
954 be handled elsewhere by manipulating the CFG rather than a statement. */
955 if (TREE_CODE (*tp) == RETURN_EXPR && id->transform_return_to_modify)
957 tree assignment = TREE_OPERAND (*tp, 0);
959 /* If we're returning something, just turn that into an
960 assignment into the equivalent of the original RESULT_DECL.
961 If the "assignment" is just the result decl, the result
962 decl has already been set (e.g. a recent "foo (&result_decl,
963 ...)"); just toss the entire RETURN_EXPR. */
964 if (assignment && TREE_CODE (assignment) == MODIFY_EXPR)
966 /* Replace the RETURN_EXPR with (a copy of) the
967 MODIFY_EXPR hanging underneath. */
968 *tp = copy_node (assignment);
970 else /* Else the RETURN_EXPR returns no value. */
972 *tp = NULL;
973 return (tree) (void *)1;
976 else if (TREE_CODE (*tp) == SSA_NAME)
978 *tp = remap_ssa_name (*tp, id);
979 *walk_subtrees = 0;
980 return NULL;
983 /* Local variables and labels need to be replaced by equivalent
984 variables. We don't want to copy static variables; there's only
985 one of those, no matter how many times we inline the containing
986 function. Similarly for globals from an outer function. */
987 else if (auto_var_in_fn_p (*tp, fn))
989 tree new_decl;
991 /* Remap the declaration. */
992 new_decl = remap_decl (*tp, id);
993 gcc_assert (new_decl);
994 /* Replace this variable with the copy. */
995 STRIP_TYPE_NOPS (new_decl);
996 *tp = new_decl;
997 *walk_subtrees = 0;
999 else if (TREE_CODE (*tp) == STATEMENT_LIST)
1000 copy_statement_list (tp);
1001 else if (TREE_CODE (*tp) == SAVE_EXPR
1002 || TREE_CODE (*tp) == TARGET_EXPR)
1003 remap_save_expr (tp, id->decl_map, walk_subtrees);
1004 else if (TREE_CODE (*tp) == LABEL_DECL
1005 && (! DECL_CONTEXT (*tp)
1006 || decl_function_context (*tp) == id->src_fn))
1007 /* These may need to be remapped for EH handling. */
1008 *tp = remap_decl (*tp, id);
1009 else if (TREE_CODE (*tp) == BIND_EXPR)
1010 copy_bind_expr (tp, walk_subtrees, id);
1011 /* Types may need remapping as well. */
1012 else if (TYPE_P (*tp))
1013 *tp = remap_type (*tp, id);
1015 /* If this is a constant, we have to copy the node iff the type will be
1016 remapped. copy_tree_r will not copy a constant. */
1017 else if (CONSTANT_CLASS_P (*tp))
1019 tree new_type = remap_type (TREE_TYPE (*tp), id);
1021 if (new_type == TREE_TYPE (*tp))
1022 *walk_subtrees = 0;
1024 else if (TREE_CODE (*tp) == INTEGER_CST)
1025 *tp = build_int_cst_wide (new_type, TREE_INT_CST_LOW (*tp),
1026 TREE_INT_CST_HIGH (*tp));
1027 else
1029 *tp = copy_node (*tp);
1030 TREE_TYPE (*tp) = new_type;
1034 /* Otherwise, just copy the node. Note that copy_tree_r already
1035 knows not to copy VAR_DECLs, etc., so this is safe. */
1036 else
1038 /* Here we handle trees that are not completely rewritten.
1039 First we detect some inlining-induced bogosities for
1040 discarding. */
1041 if (TREE_CODE (*tp) == MODIFY_EXPR
1042 && TREE_OPERAND (*tp, 0) == TREE_OPERAND (*tp, 1)
1043 && (auto_var_in_fn_p (TREE_OPERAND (*tp, 0), fn)))
1045 /* Some assignments VAR = VAR; don't generate any rtl code
1046 and thus don't count as variable modification. Avoid
1047 keeping bogosities like 0 = 0. */
1048 tree decl = TREE_OPERAND (*tp, 0), value;
1049 tree *n;
1051 n = (tree *) pointer_map_contains (id->decl_map, decl);
1052 if (n)
1054 value = *n;
1055 STRIP_TYPE_NOPS (value);
1056 if (TREE_CONSTANT (value) || TREE_READONLY (value))
1058 *tp = build_empty_stmt (EXPR_LOCATION (*tp));
1059 return copy_tree_body_r (tp, walk_subtrees, data);
1063 else if (TREE_CODE (*tp) == INDIRECT_REF)
1065 /* Get rid of *& from inline substitutions that can happen when a
1066 pointer argument is an ADDR_EXPR. */
1067 tree decl = TREE_OPERAND (*tp, 0);
1068 tree *n = (tree *) pointer_map_contains (id->decl_map, decl);
1069 if (n)
1071 /* If we happen to get an ADDR_EXPR in n->value, strip
1072 it manually here as we'll eventually get ADDR_EXPRs
1073 which lie about their types pointed to. In this case
1074 build_fold_indirect_ref wouldn't strip the INDIRECT_REF,
1075 but we absolutely rely on that. As fold_indirect_ref
1076 does other useful transformations, try that first, though. */
1077 tree type = TREE_TYPE (*tp);
1078 tree ptr = id->do_not_unshare ? *n : unshare_expr (*n);
1079 tree old = *tp;
1080 *tp = gimple_fold_indirect_ref (ptr);
1081 if (! *tp)
1083 if (TREE_CODE (ptr) == ADDR_EXPR)
1086 = fold_indirect_ref_1 (EXPR_LOCATION (ptr), type, ptr);
1087 /* ??? We should either assert here or build
1088 a VIEW_CONVERT_EXPR instead of blindly leaking
1089 incompatible types to our IL. */
1090 if (! *tp)
1091 *tp = TREE_OPERAND (ptr, 0);
1093 else
1095 *tp = build1 (INDIRECT_REF, type, ptr);
1096 TREE_THIS_VOLATILE (*tp) = TREE_THIS_VOLATILE (old);
1097 TREE_SIDE_EFFECTS (*tp) = TREE_SIDE_EFFECTS (old);
1098 TREE_READONLY (*tp) = TREE_READONLY (old);
1099 /* We cannot propagate the TREE_THIS_NOTRAP flag if we
1100 have remapped a parameter as the property might be
1101 valid only for the parameter itself. */
1102 if (TREE_THIS_NOTRAP (old)
1103 && (!is_parm (TREE_OPERAND (old, 0))
1104 || (!id->transform_parameter && is_parm (ptr))))
1105 TREE_THIS_NOTRAP (*tp) = 1;
1108 *walk_subtrees = 0;
1109 return NULL;
1112 else if (TREE_CODE (*tp) == MEM_REF)
1114 /* We need to re-canonicalize MEM_REFs from inline substitutions
1115 that can happen when a pointer argument is an ADDR_EXPR.
1116 Recurse here manually to allow that. */
1117 tree ptr = TREE_OPERAND (*tp, 0);
1118 tree type = remap_type (TREE_TYPE (*tp), id);
1119 tree old = *tp;
1120 walk_tree (&ptr, copy_tree_body_r, data, NULL);
1121 *tp = fold_build2 (MEM_REF, type, ptr, TREE_OPERAND (*tp, 1));
1122 TREE_THIS_VOLATILE (*tp) = TREE_THIS_VOLATILE (old);
1123 TREE_SIDE_EFFECTS (*tp) = TREE_SIDE_EFFECTS (old);
1124 TREE_NO_WARNING (*tp) = TREE_NO_WARNING (old);
1125 /* We cannot propagate the TREE_THIS_NOTRAP flag if we have
1126 remapped a parameter as the property might be valid only
1127 for the parameter itself. */
1128 if (TREE_THIS_NOTRAP (old)
1129 && (!is_parm (TREE_OPERAND (old, 0))
1130 || (!id->transform_parameter && is_parm (ptr))))
1131 TREE_THIS_NOTRAP (*tp) = 1;
1132 *walk_subtrees = 0;
1133 return NULL;
1136 /* Here is the "usual case". Copy this tree node, and then
1137 tweak some special cases. */
1138 copy_tree_r (tp, walk_subtrees, NULL);
1140 /* If EXPR has block defined, map it to newly constructed block.
1141 When inlining we want EXPRs without block appear in the block
1142 of function call if we are not remapping a type. */
1143 if (EXPR_P (*tp))
1145 new_block = id->remapping_type_depth == 0 ? id->block : NULL;
1146 if (TREE_BLOCK (*tp))
1148 tree *n;
1149 n = (tree *) pointer_map_contains (id->decl_map,
1150 TREE_BLOCK (*tp));
1151 if (n)
1152 new_block = *n;
1154 TREE_SET_BLOCK (*tp, new_block);
1157 if (TREE_CODE (*tp) != OMP_CLAUSE)
1158 TREE_TYPE (*tp) = remap_type (TREE_TYPE (*tp), id);
1160 /* The copied TARGET_EXPR has never been expanded, even if the
1161 original node was expanded already. */
1162 if (TREE_CODE (*tp) == TARGET_EXPR && TREE_OPERAND (*tp, 3))
1164 TREE_OPERAND (*tp, 1) = TREE_OPERAND (*tp, 3);
1165 TREE_OPERAND (*tp, 3) = NULL_TREE;
1168 /* Variable substitution need not be simple. In particular, the
1169 INDIRECT_REF substitution above. Make sure that TREE_CONSTANT
1170 and friends are up-to-date. */
1171 else if (TREE_CODE (*tp) == ADDR_EXPR)
1173 int invariant = is_gimple_min_invariant (*tp);
1174 walk_tree (&TREE_OPERAND (*tp, 0), copy_tree_body_r, id, NULL);
1176 /* Handle the case where we substituted an INDIRECT_REF
1177 into the operand of the ADDR_EXPR. */
1178 if (TREE_CODE (TREE_OPERAND (*tp, 0)) == INDIRECT_REF)
1179 *tp = TREE_OPERAND (TREE_OPERAND (*tp, 0), 0);
1180 else
1181 recompute_tree_invariant_for_addr_expr (*tp);
1183 /* If this used to be invariant, but is not any longer,
1184 then regimplification is probably needed. */
1185 if (invariant && !is_gimple_min_invariant (*tp))
1186 id->regimplify = true;
1188 *walk_subtrees = 0;
1192 /* Keep iterating. */
1193 return NULL_TREE;
1196 /* Helper for remap_gimple_stmt. Given an EH region number for the
1197 source function, map that to the duplicate EH region number in
1198 the destination function. */
1200 static int
1201 remap_eh_region_nr (int old_nr, copy_body_data *id)
1203 eh_region old_r, new_r;
1204 void **slot;
1206 old_r = get_eh_region_from_number_fn (id->src_cfun, old_nr);
1207 slot = pointer_map_contains (id->eh_map, old_r);
1208 new_r = (eh_region) *slot;
1210 return new_r->index;
1213 /* Similar, but operate on INTEGER_CSTs. */
1215 static tree
1216 remap_eh_region_tree_nr (tree old_t_nr, copy_body_data *id)
1218 int old_nr, new_nr;
1220 old_nr = tree_low_cst (old_t_nr, 0);
1221 new_nr = remap_eh_region_nr (old_nr, id);
1223 return build_int_cst (integer_type_node, new_nr);
1226 /* Helper for copy_bb. Remap statement STMT using the inlining
1227 information in ID. Return the new statement copy. */
1229 static gimple
1230 remap_gimple_stmt (gimple stmt, copy_body_data *id)
1232 gimple copy = NULL;
1233 struct walk_stmt_info wi;
1234 bool skip_first = false;
1236 /* Begin by recognizing trees that we'll completely rewrite for the
1237 inlining context. Our output for these trees is completely
1238 different from out input (e.g. RETURN_EXPR is deleted, and morphs
1239 into an edge). Further down, we'll handle trees that get
1240 duplicated and/or tweaked. */
1242 /* When requested, GIMPLE_RETURNs should be transformed to just the
1243 contained GIMPLE_ASSIGN. The branch semantics of the return will
1244 be handled elsewhere by manipulating the CFG rather than the
1245 statement. */
1246 if (gimple_code (stmt) == GIMPLE_RETURN && id->transform_return_to_modify)
1248 tree retval = gimple_return_retval (stmt);
1250 /* If we're returning something, just turn that into an
1251 assignment into the equivalent of the original RESULT_DECL.
1252 If RETVAL is just the result decl, the result decl has
1253 already been set (e.g. a recent "foo (&result_decl, ...)");
1254 just toss the entire GIMPLE_RETURN. */
1255 if (retval
1256 && (TREE_CODE (retval) != RESULT_DECL
1257 && (TREE_CODE (retval) != SSA_NAME
1258 || ! SSA_NAME_VAR (retval)
1259 || TREE_CODE (SSA_NAME_VAR (retval)) != RESULT_DECL)))
1261 copy = gimple_build_assign (id->retvar, retval);
1262 /* id->retvar is already substituted. Skip it on later remapping. */
1263 skip_first = true;
1265 else
1266 return gimple_build_nop ();
1268 else if (gimple_has_substatements (stmt))
1270 gimple_seq s1, s2;
1272 /* When cloning bodies from the C++ front end, we will be handed bodies
1273 in High GIMPLE form. Handle here all the High GIMPLE statements that
1274 have embedded statements. */
1275 switch (gimple_code (stmt))
1277 case GIMPLE_BIND:
1278 copy = copy_gimple_bind (stmt, id);
1279 break;
1281 case GIMPLE_CATCH:
1282 s1 = remap_gimple_seq (gimple_catch_handler (stmt), id);
1283 copy = gimple_build_catch (gimple_catch_types (stmt), s1);
1284 break;
1286 case GIMPLE_EH_FILTER:
1287 s1 = remap_gimple_seq (gimple_eh_filter_failure (stmt), id);
1288 copy = gimple_build_eh_filter (gimple_eh_filter_types (stmt), s1);
1289 break;
1291 case GIMPLE_TRY:
1292 s1 = remap_gimple_seq (gimple_try_eval (stmt), id);
1293 s2 = remap_gimple_seq (gimple_try_cleanup (stmt), id);
1294 copy = gimple_build_try (s1, s2, gimple_try_kind (stmt));
1295 break;
1297 case GIMPLE_WITH_CLEANUP_EXPR:
1298 s1 = remap_gimple_seq (gimple_wce_cleanup (stmt), id);
1299 copy = gimple_build_wce (s1);
1300 break;
1302 case GIMPLE_OMP_PARALLEL:
1303 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1304 copy = gimple_build_omp_parallel
1305 (s1,
1306 gimple_omp_parallel_clauses (stmt),
1307 gimple_omp_parallel_child_fn (stmt),
1308 gimple_omp_parallel_data_arg (stmt));
1309 break;
1311 case GIMPLE_OMP_TASK:
1312 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1313 copy = gimple_build_omp_task
1314 (s1,
1315 gimple_omp_task_clauses (stmt),
1316 gimple_omp_task_child_fn (stmt),
1317 gimple_omp_task_data_arg (stmt),
1318 gimple_omp_task_copy_fn (stmt),
1319 gimple_omp_task_arg_size (stmt),
1320 gimple_omp_task_arg_align (stmt));
1321 break;
1323 case GIMPLE_OMP_FOR:
1324 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1325 s2 = remap_gimple_seq (gimple_omp_for_pre_body (stmt), id);
1326 copy = gimple_build_omp_for (s1, gimple_omp_for_kind (stmt),
1327 gimple_omp_for_clauses (stmt),
1328 gimple_omp_for_collapse (stmt), s2);
1330 size_t i;
1331 for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
1333 gimple_omp_for_set_index (copy, i,
1334 gimple_omp_for_index (stmt, i));
1335 gimple_omp_for_set_initial (copy, i,
1336 gimple_omp_for_initial (stmt, i));
1337 gimple_omp_for_set_final (copy, i,
1338 gimple_omp_for_final (stmt, i));
1339 gimple_omp_for_set_incr (copy, i,
1340 gimple_omp_for_incr (stmt, i));
1341 gimple_omp_for_set_cond (copy, i,
1342 gimple_omp_for_cond (stmt, i));
1345 break;
1347 case GIMPLE_OMP_MASTER:
1348 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1349 copy = gimple_build_omp_master (s1);
1350 break;
1352 case GIMPLE_OMP_TASKGROUP:
1353 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1354 copy = gimple_build_omp_taskgroup (s1);
1355 break;
1357 case GIMPLE_OMP_ORDERED:
1358 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1359 copy = gimple_build_omp_ordered (s1);
1360 break;
1362 case GIMPLE_OMP_SECTION:
1363 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1364 copy = gimple_build_omp_section (s1);
1365 break;
1367 case GIMPLE_OMP_SECTIONS:
1368 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1369 copy = gimple_build_omp_sections
1370 (s1, gimple_omp_sections_clauses (stmt));
1371 break;
1373 case GIMPLE_OMP_SINGLE:
1374 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1375 copy = gimple_build_omp_single
1376 (s1, gimple_omp_single_clauses (stmt));
1377 break;
1379 case GIMPLE_OMP_TARGET:
1380 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1381 copy = gimple_build_omp_target
1382 (s1, gimple_omp_target_kind (stmt),
1383 gimple_omp_target_clauses (stmt));
1384 break;
1386 case GIMPLE_OMP_TEAMS:
1387 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1388 copy = gimple_build_omp_teams
1389 (s1, gimple_omp_teams_clauses (stmt));
1390 break;
1392 case GIMPLE_OMP_CRITICAL:
1393 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1394 copy
1395 = gimple_build_omp_critical (s1, gimple_omp_critical_name (stmt));
1396 break;
1398 case GIMPLE_TRANSACTION:
1399 s1 = remap_gimple_seq (gimple_transaction_body (stmt), id);
1400 copy = gimple_build_transaction (s1, gimple_transaction_label (stmt));
1401 gimple_transaction_set_subcode (copy, gimple_transaction_subcode (stmt));
1402 break;
1404 default:
1405 gcc_unreachable ();
1408 else
1410 if (gimple_assign_copy_p (stmt)
1411 && gimple_assign_lhs (stmt) == gimple_assign_rhs1 (stmt)
1412 && auto_var_in_fn_p (gimple_assign_lhs (stmt), id->src_fn))
1414 /* Here we handle statements that are not completely rewritten.
1415 First we detect some inlining-induced bogosities for
1416 discarding. */
1418 /* Some assignments VAR = VAR; don't generate any rtl code
1419 and thus don't count as variable modification. Avoid
1420 keeping bogosities like 0 = 0. */
1421 tree decl = gimple_assign_lhs (stmt), value;
1422 tree *n;
1424 n = (tree *) pointer_map_contains (id->decl_map, decl);
1425 if (n)
1427 value = *n;
1428 STRIP_TYPE_NOPS (value);
1429 if (TREE_CONSTANT (value) || TREE_READONLY (value))
1430 return gimple_build_nop ();
1434 /* For *ptr_N ={v} {CLOBBER}, if ptr_N is SSA_NAME defined
1435 in a block that we aren't copying during tree_function_versioning,
1436 just drop the clobber stmt. */
1437 if (id->blocks_to_copy && gimple_clobber_p (stmt))
1439 tree lhs = gimple_assign_lhs (stmt);
1440 if (TREE_CODE (lhs) == MEM_REF
1441 && TREE_CODE (TREE_OPERAND (lhs, 0)) == SSA_NAME)
1443 gimple def_stmt = SSA_NAME_DEF_STMT (TREE_OPERAND (lhs, 0));
1444 if (gimple_bb (def_stmt)
1445 && !bitmap_bit_p (id->blocks_to_copy,
1446 gimple_bb (def_stmt)->index))
1447 return gimple_build_nop ();
1451 if (gimple_debug_bind_p (stmt))
1453 copy = gimple_build_debug_bind (gimple_debug_bind_get_var (stmt),
1454 gimple_debug_bind_get_value (stmt),
1455 stmt);
1456 id->debug_stmts.safe_push (copy);
1457 return copy;
1459 if (gimple_debug_source_bind_p (stmt))
1461 copy = gimple_build_debug_source_bind
1462 (gimple_debug_source_bind_get_var (stmt),
1463 gimple_debug_source_bind_get_value (stmt), stmt);
1464 id->debug_stmts.safe_push (copy);
1465 return copy;
1468 /* Create a new deep copy of the statement. */
1469 copy = gimple_copy (stmt);
1471 /* Remap the region numbers for __builtin_eh_{pointer,filter},
1472 RESX and EH_DISPATCH. */
1473 if (id->eh_map)
1474 switch (gimple_code (copy))
1476 case GIMPLE_CALL:
1478 tree r, fndecl = gimple_call_fndecl (copy);
1479 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
1480 switch (DECL_FUNCTION_CODE (fndecl))
1482 case BUILT_IN_EH_COPY_VALUES:
1483 r = gimple_call_arg (copy, 1);
1484 r = remap_eh_region_tree_nr (r, id);
1485 gimple_call_set_arg (copy, 1, r);
1486 /* FALLTHRU */
1488 case BUILT_IN_EH_POINTER:
1489 case BUILT_IN_EH_FILTER:
1490 r = gimple_call_arg (copy, 0);
1491 r = remap_eh_region_tree_nr (r, id);
1492 gimple_call_set_arg (copy, 0, r);
1493 break;
1495 default:
1496 break;
1499 /* Reset alias info if we didn't apply measures to
1500 keep it valid over inlining by setting DECL_PT_UID. */
1501 if (!id->src_cfun->gimple_df
1502 || !id->src_cfun->gimple_df->ipa_pta)
1503 gimple_call_reset_alias_info (copy);
1505 break;
1507 case GIMPLE_RESX:
1509 int r = gimple_resx_region (copy);
1510 r = remap_eh_region_nr (r, id);
1511 gimple_resx_set_region (copy, r);
1513 break;
1515 case GIMPLE_EH_DISPATCH:
1517 int r = gimple_eh_dispatch_region (copy);
1518 r = remap_eh_region_nr (r, id);
1519 gimple_eh_dispatch_set_region (copy, r);
1521 break;
1523 default:
1524 break;
1528 /* If STMT has a block defined, map it to the newly constructed
1529 block. */
1530 if (gimple_block (copy))
1532 tree *n;
1533 n = (tree *) pointer_map_contains (id->decl_map, gimple_block (copy));
1534 gcc_assert (n);
1535 gimple_set_block (copy, *n);
1538 if (gimple_debug_bind_p (copy) || gimple_debug_source_bind_p (copy))
1539 return copy;
1541 /* Remap all the operands in COPY. */
1542 memset (&wi, 0, sizeof (wi));
1543 wi.info = id;
1544 if (skip_first)
1545 walk_tree (gimple_op_ptr (copy, 1), remap_gimple_op_r, &wi, NULL);
1546 else
1547 walk_gimple_op (copy, remap_gimple_op_r, &wi);
1549 /* Clear the copied virtual operands. We are not remapping them here
1550 but are going to recreate them from scratch. */
1551 if (gimple_has_mem_ops (copy))
1553 gimple_set_vdef (copy, NULL_TREE);
1554 gimple_set_vuse (copy, NULL_TREE);
1557 return copy;
1561 /* Copy basic block, scale profile accordingly. Edges will be taken care of
1562 later */
1564 static basic_block
1565 copy_bb (copy_body_data *id, basic_block bb, int frequency_scale,
1566 gcov_type count_scale)
1568 gimple_stmt_iterator gsi, copy_gsi, seq_gsi;
1569 basic_block copy_basic_block;
1570 tree decl;
1571 gcov_type freq;
1572 basic_block prev;
1574 /* Search for previous copied basic block. */
1575 prev = bb->prev_bb;
1576 while (!prev->aux)
1577 prev = prev->prev_bb;
1579 /* create_basic_block() will append every new block to
1580 basic_block_info automatically. */
1581 copy_basic_block = create_basic_block (NULL, (void *) 0,
1582 (basic_block) prev->aux);
1583 copy_basic_block->count = apply_scale (bb->count, count_scale);
1585 /* We are going to rebuild frequencies from scratch. These values
1586 have just small importance to drive canonicalize_loop_headers. */
1587 freq = apply_scale ((gcov_type)bb->frequency, frequency_scale);
1589 /* We recompute frequencies after inlining, so this is quite safe. */
1590 if (freq > BB_FREQ_MAX)
1591 freq = BB_FREQ_MAX;
1592 copy_basic_block->frequency = freq;
1594 copy_gsi = gsi_start_bb (copy_basic_block);
1596 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
1598 gimple stmt = gsi_stmt (gsi);
1599 gimple orig_stmt = stmt;
1601 id->regimplify = false;
1602 stmt = remap_gimple_stmt (stmt, id);
1603 if (gimple_nop_p (stmt))
1604 continue;
1606 gimple_duplicate_stmt_histograms (cfun, stmt, id->src_cfun, orig_stmt);
1607 seq_gsi = copy_gsi;
1609 /* With return slot optimization we can end up with
1610 non-gimple (foo *)&this->m, fix that here. */
1611 if (is_gimple_assign (stmt)
1612 && gimple_assign_rhs_code (stmt) == NOP_EXPR
1613 && !is_gimple_val (gimple_assign_rhs1 (stmt)))
1615 tree new_rhs;
1616 new_rhs = force_gimple_operand_gsi (&seq_gsi,
1617 gimple_assign_rhs1 (stmt),
1618 true, NULL, false,
1619 GSI_CONTINUE_LINKING);
1620 gimple_assign_set_rhs1 (stmt, new_rhs);
1621 id->regimplify = false;
1624 gsi_insert_after (&seq_gsi, stmt, GSI_NEW_STMT);
1626 if (id->regimplify)
1627 gimple_regimplify_operands (stmt, &seq_gsi);
1629 /* If copy_basic_block has been empty at the start of this iteration,
1630 call gsi_start_bb again to get at the newly added statements. */
1631 if (gsi_end_p (copy_gsi))
1632 copy_gsi = gsi_start_bb (copy_basic_block);
1633 else
1634 gsi_next (&copy_gsi);
1636 /* Process the new statement. The call to gimple_regimplify_operands
1637 possibly turned the statement into multiple statements, we
1638 need to process all of them. */
1641 tree fn;
1643 stmt = gsi_stmt (copy_gsi);
1644 if (is_gimple_call (stmt)
1645 && gimple_call_va_arg_pack_p (stmt)
1646 && id->gimple_call)
1648 /* __builtin_va_arg_pack () should be replaced by
1649 all arguments corresponding to ... in the caller. */
1650 tree p;
1651 gimple new_call;
1652 vec<tree> argarray;
1653 size_t nargs = gimple_call_num_args (id->gimple_call);
1654 size_t n;
1656 for (p = DECL_ARGUMENTS (id->src_fn); p; p = DECL_CHAIN (p))
1657 nargs--;
1659 /* Create the new array of arguments. */
1660 n = nargs + gimple_call_num_args (stmt);
1661 argarray.create (n);
1662 argarray.safe_grow_cleared (n);
1664 /* Copy all the arguments before '...' */
1665 memcpy (argarray.address (),
1666 gimple_call_arg_ptr (stmt, 0),
1667 gimple_call_num_args (stmt) * sizeof (tree));
1669 /* Append the arguments passed in '...' */
1670 memcpy (argarray.address () + gimple_call_num_args (stmt),
1671 gimple_call_arg_ptr (id->gimple_call, 0)
1672 + (gimple_call_num_args (id->gimple_call) - nargs),
1673 nargs * sizeof (tree));
1675 new_call = gimple_build_call_vec (gimple_call_fn (stmt),
1676 argarray);
1678 argarray.release ();
1680 /* Copy all GIMPLE_CALL flags, location and block, except
1681 GF_CALL_VA_ARG_PACK. */
1682 gimple_call_copy_flags (new_call, stmt);
1683 gimple_call_set_va_arg_pack (new_call, false);
1684 gimple_set_location (new_call, gimple_location (stmt));
1685 gimple_set_block (new_call, gimple_block (stmt));
1686 gimple_call_set_lhs (new_call, gimple_call_lhs (stmt));
1688 gsi_replace (&copy_gsi, new_call, false);
1689 stmt = new_call;
1691 else if (is_gimple_call (stmt)
1692 && id->gimple_call
1693 && (decl = gimple_call_fndecl (stmt))
1694 && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
1695 && DECL_FUNCTION_CODE (decl) == BUILT_IN_VA_ARG_PACK_LEN)
1697 /* __builtin_va_arg_pack_len () should be replaced by
1698 the number of anonymous arguments. */
1699 size_t nargs = gimple_call_num_args (id->gimple_call);
1700 tree count, p;
1701 gimple new_stmt;
1703 for (p = DECL_ARGUMENTS (id->src_fn); p; p = DECL_CHAIN (p))
1704 nargs--;
1706 count = build_int_cst (integer_type_node, nargs);
1707 new_stmt = gimple_build_assign (gimple_call_lhs (stmt), count);
1708 gsi_replace (&copy_gsi, new_stmt, false);
1709 stmt = new_stmt;
1712 /* Statements produced by inlining can be unfolded, especially
1713 when we constant propagated some operands. We can't fold
1714 them right now for two reasons:
1715 1) folding require SSA_NAME_DEF_STMTs to be correct
1716 2) we can't change function calls to builtins.
1717 So we just mark statement for later folding. We mark
1718 all new statements, instead just statements that has changed
1719 by some nontrivial substitution so even statements made
1720 foldable indirectly are updated. If this turns out to be
1721 expensive, copy_body can be told to watch for nontrivial
1722 changes. */
1723 if (id->statements_to_fold)
1724 pointer_set_insert (id->statements_to_fold, stmt);
1726 /* We're duplicating a CALL_EXPR. Find any corresponding
1727 callgraph edges and update or duplicate them. */
1728 if (is_gimple_call (stmt))
1730 struct cgraph_edge *edge;
1731 int flags;
1733 switch (id->transform_call_graph_edges)
1735 case CB_CGE_DUPLICATE:
1736 edge = cgraph_edge (id->src_node, orig_stmt);
1737 if (edge)
1739 int edge_freq = edge->frequency;
1740 int new_freq;
1741 struct cgraph_edge *old_edge = edge;
1742 edge = cgraph_clone_edge (edge, id->dst_node, stmt,
1743 gimple_uid (stmt),
1744 REG_BR_PROB_BASE, CGRAPH_FREQ_BASE,
1745 true);
1746 /* We could also just rescale the frequency, but
1747 doing so would introduce roundoff errors and make
1748 verifier unhappy. */
1749 new_freq = compute_call_stmt_bb_frequency (id->dst_node->decl,
1750 copy_basic_block);
1752 /* Speculative calls consist of two edges - direct and indirect.
1753 Duplicate the whole thing and distribute frequencies accordingly. */
1754 if (edge->speculative)
1756 struct cgraph_edge *direct, *indirect;
1757 struct ipa_ref *ref;
1759 gcc_assert (!edge->indirect_unknown_callee);
1760 cgraph_speculative_call_info (old_edge, direct, indirect, ref);
1761 indirect = cgraph_clone_edge (indirect, id->dst_node, stmt,
1762 gimple_uid (stmt),
1763 REG_BR_PROB_BASE, CGRAPH_FREQ_BASE,
1764 true);
1765 if (old_edge->frequency + indirect->frequency)
1767 edge->frequency = MIN (RDIV ((gcov_type)new_freq * old_edge->frequency,
1768 (old_edge->frequency + indirect->frequency)),
1769 CGRAPH_FREQ_MAX);
1770 indirect->frequency = MIN (RDIV ((gcov_type)new_freq * indirect->frequency,
1771 (old_edge->frequency + indirect->frequency)),
1772 CGRAPH_FREQ_MAX);
1774 ipa_clone_ref (ref, id->dst_node, stmt);
1776 else
1778 edge->frequency = new_freq;
1779 if (dump_file
1780 && profile_status_for_function (cfun) != PROFILE_ABSENT
1781 && (edge_freq > edge->frequency + 10
1782 || edge_freq < edge->frequency - 10))
1784 fprintf (dump_file, "Edge frequency estimated by "
1785 "cgraph %i diverge from inliner's estimate %i\n",
1786 edge_freq,
1787 edge->frequency);
1788 fprintf (dump_file,
1789 "Orig bb: %i, orig bb freq %i, new bb freq %i\n",
1790 bb->index,
1791 bb->frequency,
1792 copy_basic_block->frequency);
1796 break;
1798 case CB_CGE_MOVE_CLONES:
1799 cgraph_set_call_stmt_including_clones (id->dst_node,
1800 orig_stmt, stmt);
1801 edge = cgraph_edge (id->dst_node, stmt);
1802 break;
1804 case CB_CGE_MOVE:
1805 edge = cgraph_edge (id->dst_node, orig_stmt);
1806 if (edge)
1807 cgraph_set_call_stmt (edge, stmt);
1808 break;
1810 default:
1811 gcc_unreachable ();
1814 /* Constant propagation on argument done during inlining
1815 may create new direct call. Produce an edge for it. */
1816 if ((!edge
1817 || (edge->indirect_inlining_edge
1818 && id->transform_call_graph_edges == CB_CGE_MOVE_CLONES))
1819 && id->dst_node->definition
1820 && (fn = gimple_call_fndecl (stmt)) != NULL)
1822 struct cgraph_node *dest = cgraph_get_node (fn);
1824 /* We have missing edge in the callgraph. This can happen
1825 when previous inlining turned an indirect call into a
1826 direct call by constant propagating arguments or we are
1827 producing dead clone (for further cloning). In all
1828 other cases we hit a bug (incorrect node sharing is the
1829 most common reason for missing edges). */
1830 gcc_assert (!dest->definition
1831 || dest->address_taken
1832 || !id->src_node->definition
1833 || !id->dst_node->definition);
1834 if (id->transform_call_graph_edges == CB_CGE_MOVE_CLONES)
1835 cgraph_create_edge_including_clones
1836 (id->dst_node, dest, orig_stmt, stmt, bb->count,
1837 compute_call_stmt_bb_frequency (id->dst_node->decl,
1838 copy_basic_block),
1839 CIF_ORIGINALLY_INDIRECT_CALL);
1840 else
1841 cgraph_create_edge (id->dst_node, dest, stmt,
1842 bb->count,
1843 compute_call_stmt_bb_frequency
1844 (id->dst_node->decl,
1845 copy_basic_block))->inline_failed
1846 = CIF_ORIGINALLY_INDIRECT_CALL;
1847 if (dump_file)
1849 fprintf (dump_file, "Created new direct edge to %s\n",
1850 cgraph_node_name (dest));
1854 flags = gimple_call_flags (stmt);
1855 if (flags & ECF_MAY_BE_ALLOCA)
1856 cfun->calls_alloca = true;
1857 if (flags & ECF_RETURNS_TWICE)
1858 cfun->calls_setjmp = true;
1861 maybe_duplicate_eh_stmt_fn (cfun, stmt, id->src_cfun, orig_stmt,
1862 id->eh_map, id->eh_lp_nr);
1864 if (gimple_in_ssa_p (cfun) && !is_gimple_debug (stmt))
1866 ssa_op_iter i;
1867 tree def;
1869 FOR_EACH_SSA_TREE_OPERAND (def, stmt, i, SSA_OP_DEF)
1870 if (TREE_CODE (def) == SSA_NAME)
1871 SSA_NAME_DEF_STMT (def) = stmt;
1874 gsi_next (&copy_gsi);
1876 while (!gsi_end_p (copy_gsi));
1878 copy_gsi = gsi_last_bb (copy_basic_block);
1881 return copy_basic_block;
1884 /* Inserting Single Entry Multiple Exit region in SSA form into code in SSA
1885 form is quite easy, since dominator relationship for old basic blocks does
1886 not change.
1888 There is however exception where inlining might change dominator relation
1889 across EH edges from basic block within inlined functions destinating
1890 to landing pads in function we inline into.
1892 The function fills in PHI_RESULTs of such PHI nodes if they refer
1893 to gimple regs. Otherwise, the function mark PHI_RESULT of such
1894 PHI nodes for renaming. For non-gimple regs, renaming is safe: the
1895 EH edges are abnormal and SSA_NAME_OCCURS_IN_ABNORMAL_PHI must be
1896 set, and this means that there will be no overlapping live ranges
1897 for the underlying symbol.
1899 This might change in future if we allow redirecting of EH edges and
1900 we might want to change way build CFG pre-inlining to include
1901 all the possible edges then. */
1902 static void
1903 update_ssa_across_abnormal_edges (basic_block bb, basic_block ret_bb,
1904 bool can_throw, bool nonlocal_goto)
1906 edge e;
1907 edge_iterator ei;
1909 FOR_EACH_EDGE (e, ei, bb->succs)
1910 if (!e->dest->aux
1911 || ((basic_block)e->dest->aux)->index == ENTRY_BLOCK)
1913 gimple phi;
1914 gimple_stmt_iterator si;
1916 if (!nonlocal_goto)
1917 gcc_assert (e->flags & EDGE_EH);
1919 if (!can_throw)
1920 gcc_assert (!(e->flags & EDGE_EH));
1922 for (si = gsi_start_phis (e->dest); !gsi_end_p (si); gsi_next (&si))
1924 edge re;
1926 phi = gsi_stmt (si);
1928 /* For abnormal goto/call edges the receiver can be the
1929 ENTRY_BLOCK. Do not assert this cannot happen. */
1931 gcc_assert ((e->flags & EDGE_EH)
1932 || SSA_NAME_OCCURS_IN_ABNORMAL_PHI (PHI_RESULT (phi)));
1934 re = find_edge (ret_bb, e->dest);
1935 gcc_checking_assert (re);
1936 gcc_assert ((re->flags & (EDGE_EH | EDGE_ABNORMAL))
1937 == (e->flags & (EDGE_EH | EDGE_ABNORMAL)));
1939 SET_USE (PHI_ARG_DEF_PTR_FROM_EDGE (phi, e),
1940 USE_FROM_PTR (PHI_ARG_DEF_PTR_FROM_EDGE (phi, re)));
1946 /* Copy edges from BB into its copy constructed earlier, scale profile
1947 accordingly. Edges will be taken care of later. Assume aux
1948 pointers to point to the copies of each BB. Return true if any
1949 debug stmts are left after a statement that must end the basic block. */
1951 static bool
1952 copy_edges_for_bb (basic_block bb, gcov_type count_scale, basic_block ret_bb,
1953 bool can_make_abnormal_goto)
1955 basic_block new_bb = (basic_block) bb->aux;
1956 edge_iterator ei;
1957 edge old_edge;
1958 gimple_stmt_iterator si;
1959 int flags;
1960 bool need_debug_cleanup = false;
1962 /* Use the indices from the original blocks to create edges for the
1963 new ones. */
1964 FOR_EACH_EDGE (old_edge, ei, bb->succs)
1965 if (!(old_edge->flags & EDGE_EH))
1967 edge new_edge;
1969 flags = old_edge->flags;
1971 /* Return edges do get a FALLTHRU flag when the get inlined. */
1972 if (old_edge->dest->index == EXIT_BLOCK && !old_edge->flags
1973 && old_edge->dest->aux != EXIT_BLOCK_PTR)
1974 flags |= EDGE_FALLTHRU;
1975 new_edge = make_edge (new_bb, (basic_block) old_edge->dest->aux, flags);
1976 new_edge->count = apply_scale (old_edge->count, count_scale);
1977 new_edge->probability = old_edge->probability;
1980 if (bb->index == ENTRY_BLOCK || bb->index == EXIT_BLOCK)
1981 return false;
1983 for (si = gsi_start_bb (new_bb); !gsi_end_p (si);)
1985 gimple copy_stmt;
1986 bool can_throw, nonlocal_goto;
1988 copy_stmt = gsi_stmt (si);
1989 if (!is_gimple_debug (copy_stmt))
1990 update_stmt (copy_stmt);
1992 /* Do this before the possible split_block. */
1993 gsi_next (&si);
1995 /* If this tree could throw an exception, there are two
1996 cases where we need to add abnormal edge(s): the
1997 tree wasn't in a region and there is a "current
1998 region" in the caller; or the original tree had
1999 EH edges. In both cases split the block after the tree,
2000 and add abnormal edge(s) as needed; we need both
2001 those from the callee and the caller.
2002 We check whether the copy can throw, because the const
2003 propagation can change an INDIRECT_REF which throws
2004 into a COMPONENT_REF which doesn't. If the copy
2005 can throw, the original could also throw. */
2006 can_throw = stmt_can_throw_internal (copy_stmt);
2007 nonlocal_goto = stmt_can_make_abnormal_goto (copy_stmt);
2009 if (can_throw || nonlocal_goto)
2011 if (!gsi_end_p (si))
2013 while (!gsi_end_p (si) && is_gimple_debug (gsi_stmt (si)))
2014 gsi_next (&si);
2015 if (gsi_end_p (si))
2016 need_debug_cleanup = true;
2018 if (!gsi_end_p (si))
2019 /* Note that bb's predecessor edges aren't necessarily
2020 right at this point; split_block doesn't care. */
2022 edge e = split_block (new_bb, copy_stmt);
2024 new_bb = e->dest;
2025 new_bb->aux = e->src->aux;
2026 si = gsi_start_bb (new_bb);
2030 if (gimple_code (copy_stmt) == GIMPLE_EH_DISPATCH)
2031 make_eh_dispatch_edges (copy_stmt);
2032 else if (can_throw)
2033 make_eh_edges (copy_stmt);
2035 /* If the call we inline cannot make abnormal goto do not add
2036 additional abnormal edges but only retain those already present
2037 in the original function body. */
2038 nonlocal_goto &= can_make_abnormal_goto;
2039 if (nonlocal_goto)
2040 make_abnormal_goto_edges (gimple_bb (copy_stmt), true);
2042 if ((can_throw || nonlocal_goto)
2043 && gimple_in_ssa_p (cfun))
2044 update_ssa_across_abnormal_edges (gimple_bb (copy_stmt), ret_bb,
2045 can_throw, nonlocal_goto);
2047 return need_debug_cleanup;
2050 /* Copy the PHIs. All blocks and edges are copied, some blocks
2051 was possibly split and new outgoing EH edges inserted.
2052 BB points to the block of original function and AUX pointers links
2053 the original and newly copied blocks. */
2055 static void
2056 copy_phis_for_bb (basic_block bb, copy_body_data *id)
2058 basic_block const new_bb = (basic_block) bb->aux;
2059 edge_iterator ei;
2060 gimple phi;
2061 gimple_stmt_iterator si;
2062 edge new_edge;
2063 bool inserted = false;
2065 for (si = gsi_start_phis (bb); !gsi_end_p (si); gsi_next (&si))
2067 tree res, new_res;
2068 gimple new_phi;
2070 phi = gsi_stmt (si);
2071 res = PHI_RESULT (phi);
2072 new_res = res;
2073 if (!virtual_operand_p (res))
2075 walk_tree (&new_res, copy_tree_body_r, id, NULL);
2076 new_phi = create_phi_node (new_res, new_bb);
2077 FOR_EACH_EDGE (new_edge, ei, new_bb->preds)
2079 edge old_edge = find_edge ((basic_block) new_edge->src->aux, bb);
2080 tree arg;
2081 tree new_arg;
2082 edge_iterator ei2;
2083 location_t locus;
2085 /* When doing partial cloning, we allow PHIs on the entry block
2086 as long as all the arguments are the same. Find any input
2087 edge to see argument to copy. */
2088 if (!old_edge)
2089 FOR_EACH_EDGE (old_edge, ei2, bb->preds)
2090 if (!old_edge->src->aux)
2091 break;
2093 arg = PHI_ARG_DEF_FROM_EDGE (phi, old_edge);
2094 new_arg = arg;
2095 walk_tree (&new_arg, copy_tree_body_r, id, NULL);
2096 gcc_assert (new_arg);
2097 /* With return slot optimization we can end up with
2098 non-gimple (foo *)&this->m, fix that here. */
2099 if (TREE_CODE (new_arg) != SSA_NAME
2100 && TREE_CODE (new_arg) != FUNCTION_DECL
2101 && !is_gimple_val (new_arg))
2103 gimple_seq stmts = NULL;
2104 new_arg = force_gimple_operand (new_arg, &stmts, true, NULL);
2105 gsi_insert_seq_on_edge (new_edge, stmts);
2106 inserted = true;
2108 locus = gimple_phi_arg_location_from_edge (phi, old_edge);
2109 if (LOCATION_BLOCK (locus))
2111 tree *n;
2112 n = (tree *) pointer_map_contains (id->decl_map,
2113 LOCATION_BLOCK (locus));
2114 gcc_assert (n);
2115 if (*n)
2116 locus = COMBINE_LOCATION_DATA (line_table, locus, *n);
2117 else
2118 locus = LOCATION_LOCUS (locus);
2120 else
2121 locus = LOCATION_LOCUS (locus);
2123 add_phi_arg (new_phi, new_arg, new_edge, locus);
2128 /* Commit the delayed edge insertions. */
2129 if (inserted)
2130 FOR_EACH_EDGE (new_edge, ei, new_bb->preds)
2131 gsi_commit_one_edge_insert (new_edge, NULL);
2135 /* Wrapper for remap_decl so it can be used as a callback. */
2137 static tree
2138 remap_decl_1 (tree decl, void *data)
2140 return remap_decl (decl, (copy_body_data *) data);
2143 /* Build struct function and associated datastructures for the new clone
2144 NEW_FNDECL to be build. CALLEE_FNDECL is the original. Function changes
2145 the cfun to the function of new_fndecl (and current_function_decl too). */
2147 static void
2148 initialize_cfun (tree new_fndecl, tree callee_fndecl, gcov_type count)
2150 struct function *src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
2151 gcov_type count_scale;
2153 if (!DECL_ARGUMENTS (new_fndecl))
2154 DECL_ARGUMENTS (new_fndecl) = DECL_ARGUMENTS (callee_fndecl);
2155 if (!DECL_RESULT (new_fndecl))
2156 DECL_RESULT (new_fndecl) = DECL_RESULT (callee_fndecl);
2158 if (ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count)
2159 count_scale
2160 = GCOV_COMPUTE_SCALE (count,
2161 ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count);
2162 else
2163 count_scale = REG_BR_PROB_BASE;
2165 /* Register specific tree functions. */
2166 gimple_register_cfg_hooks ();
2168 /* Get clean struct function. */
2169 push_struct_function (new_fndecl);
2171 /* We will rebuild these, so just sanity check that they are empty. */
2172 gcc_assert (VALUE_HISTOGRAMS (cfun) == NULL);
2173 gcc_assert (cfun->local_decls == NULL);
2174 gcc_assert (cfun->cfg == NULL);
2175 gcc_assert (cfun->decl == new_fndecl);
2177 /* Copy items we preserve during cloning. */
2178 cfun->static_chain_decl = src_cfun->static_chain_decl;
2179 cfun->nonlocal_goto_save_area = src_cfun->nonlocal_goto_save_area;
2180 cfun->function_end_locus = src_cfun->function_end_locus;
2181 cfun->curr_properties = src_cfun->curr_properties;
2182 cfun->last_verified = src_cfun->last_verified;
2183 cfun->va_list_gpr_size = src_cfun->va_list_gpr_size;
2184 cfun->va_list_fpr_size = src_cfun->va_list_fpr_size;
2185 cfun->has_nonlocal_label = src_cfun->has_nonlocal_label;
2186 cfun->stdarg = src_cfun->stdarg;
2187 cfun->after_inlining = src_cfun->after_inlining;
2188 cfun->can_throw_non_call_exceptions
2189 = src_cfun->can_throw_non_call_exceptions;
2190 cfun->can_delete_dead_exceptions = src_cfun->can_delete_dead_exceptions;
2191 cfun->returns_struct = src_cfun->returns_struct;
2192 cfun->returns_pcc_struct = src_cfun->returns_pcc_struct;
2194 init_empty_tree_cfg ();
2196 profile_status_for_function (cfun) = profile_status_for_function (src_cfun);
2197 ENTRY_BLOCK_PTR->count =
2198 (ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count * count_scale /
2199 REG_BR_PROB_BASE);
2200 ENTRY_BLOCK_PTR->frequency
2201 = ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->frequency;
2202 EXIT_BLOCK_PTR->count =
2203 (EXIT_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count * count_scale /
2204 REG_BR_PROB_BASE);
2205 EXIT_BLOCK_PTR->frequency =
2206 EXIT_BLOCK_PTR_FOR_FUNCTION (src_cfun)->frequency;
2207 if (src_cfun->eh)
2208 init_eh_for_function ();
2210 if (src_cfun->gimple_df)
2212 init_tree_ssa (cfun);
2213 cfun->gimple_df->in_ssa_p = true;
2214 init_ssa_operands (cfun);
2218 /* Helper function for copy_cfg_body. Move debug stmts from the end
2219 of NEW_BB to the beginning of successor basic blocks when needed. If the
2220 successor has multiple predecessors, reset them, otherwise keep
2221 their value. */
2223 static void
2224 maybe_move_debug_stmts_to_successors (copy_body_data *id, basic_block new_bb)
2226 edge e;
2227 edge_iterator ei;
2228 gimple_stmt_iterator si = gsi_last_nondebug_bb (new_bb);
2230 if (gsi_end_p (si)
2231 || gsi_one_before_end_p (si)
2232 || !(stmt_can_throw_internal (gsi_stmt (si))
2233 || stmt_can_make_abnormal_goto (gsi_stmt (si))))
2234 return;
2236 FOR_EACH_EDGE (e, ei, new_bb->succs)
2238 gimple_stmt_iterator ssi = gsi_last_bb (new_bb);
2239 gimple_stmt_iterator dsi = gsi_after_labels (e->dest);
2240 while (is_gimple_debug (gsi_stmt (ssi)))
2242 gimple stmt = gsi_stmt (ssi), new_stmt;
2243 tree var;
2244 tree value;
2246 /* For the last edge move the debug stmts instead of copying
2247 them. */
2248 if (ei_one_before_end_p (ei))
2250 si = ssi;
2251 gsi_prev (&ssi);
2252 if (!single_pred_p (e->dest) && gimple_debug_bind_p (stmt))
2253 gimple_debug_bind_reset_value (stmt);
2254 gsi_remove (&si, false);
2255 gsi_insert_before (&dsi, stmt, GSI_SAME_STMT);
2256 continue;
2259 if (gimple_debug_bind_p (stmt))
2261 var = gimple_debug_bind_get_var (stmt);
2262 if (single_pred_p (e->dest))
2264 value = gimple_debug_bind_get_value (stmt);
2265 value = unshare_expr (value);
2267 else
2268 value = NULL_TREE;
2269 new_stmt = gimple_build_debug_bind (var, value, stmt);
2271 else if (gimple_debug_source_bind_p (stmt))
2273 var = gimple_debug_source_bind_get_var (stmt);
2274 value = gimple_debug_source_bind_get_value (stmt);
2275 new_stmt = gimple_build_debug_source_bind (var, value, stmt);
2277 else
2278 gcc_unreachable ();
2279 gsi_insert_before (&dsi, new_stmt, GSI_SAME_STMT);
2280 id->debug_stmts.safe_push (new_stmt);
2281 gsi_prev (&ssi);
2286 /* Make a copy of the sub-loops of SRC_PARENT and place them
2287 as siblings of DEST_PARENT. */
2289 static void
2290 copy_loops (copy_body_data *id,
2291 struct loop *dest_parent, struct loop *src_parent)
2293 struct loop *src_loop = src_parent->inner;
2294 while (src_loop)
2296 if (!id->blocks_to_copy
2297 || bitmap_bit_p (id->blocks_to_copy, src_loop->header->index))
2299 struct loop *dest_loop = alloc_loop ();
2301 /* Assign the new loop its header and latch and associate
2302 those with the new loop. */
2303 if (src_loop->header != NULL)
2305 dest_loop->header = (basic_block)src_loop->header->aux;
2306 dest_loop->header->loop_father = dest_loop;
2308 if (src_loop->latch != NULL)
2310 dest_loop->latch = (basic_block)src_loop->latch->aux;
2311 dest_loop->latch->loop_father = dest_loop;
2314 /* Copy loop meta-data. */
2315 copy_loop_info (src_loop, dest_loop);
2317 /* Finally place it into the loop array and the loop tree. */
2318 place_new_loop (cfun, dest_loop);
2319 flow_loop_tree_node_add (dest_parent, dest_loop);
2321 if (src_loop->simduid)
2323 dest_loop->simduid = remap_decl (src_loop->simduid, id);
2324 cfun->has_simduid_loops = true;
2326 if (src_loop->force_vect)
2328 dest_loop->force_vect = true;
2329 cfun->has_force_vect_loops = true;
2332 /* Recurse. */
2333 copy_loops (id, dest_loop, src_loop);
2335 src_loop = src_loop->next;
2339 /* Call cgraph_redirect_edge_call_stmt_to_callee on all calls in BB */
2341 void
2342 redirect_all_calls (copy_body_data * id, basic_block bb)
2344 gimple_stmt_iterator si;
2345 for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
2347 if (is_gimple_call (gsi_stmt (si)))
2349 struct cgraph_edge *edge = cgraph_edge (id->dst_node, gsi_stmt (si));
2350 if (edge)
2351 cgraph_redirect_edge_call_stmt_to_callee (edge);
2356 /* Convert estimated frequencies into counts for NODE, scaling COUNT
2357 with each bb's frequency. Used when NODE has a 0-weight entry
2358 but we are about to inline it into a non-zero count call bb.
2359 See the comments for handle_missing_profiles() in predict.c for
2360 when this can happen for COMDATs. */
2362 void
2363 freqs_to_counts (struct cgraph_node *node, gcov_type count)
2365 basic_block bb;
2366 edge_iterator ei;
2367 edge e;
2368 struct function *fn = DECL_STRUCT_FUNCTION (node->decl);
2370 FOR_ALL_BB_FN(bb, fn)
2372 bb->count = apply_scale (count,
2373 GCOV_COMPUTE_SCALE (bb->frequency, BB_FREQ_MAX));
2374 FOR_EACH_EDGE (e, ei, bb->succs)
2375 e->count = apply_probability (e->src->count, e->probability);
2379 /* Make a copy of the body of FN so that it can be inserted inline in
2380 another function. Walks FN via CFG, returns new fndecl. */
2382 static tree
2383 copy_cfg_body (copy_body_data * id, gcov_type count, int frequency_scale,
2384 basic_block entry_block_map, basic_block exit_block_map,
2385 basic_block new_entry)
2387 tree callee_fndecl = id->src_fn;
2388 /* Original cfun for the callee, doesn't change. */
2389 struct function *src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
2390 struct function *cfun_to_copy;
2391 basic_block bb;
2392 tree new_fndecl = NULL;
2393 bool need_debug_cleanup = false;
2394 gcov_type count_scale;
2395 int last;
2396 int incoming_frequency = 0;
2397 gcov_type incoming_count = 0;
2399 /* This can happen for COMDAT routines that end up with 0 counts
2400 despite being called (see the comments for handle_missing_profiles()
2401 in predict.c as to why). Apply counts to the blocks in the callee
2402 before inlining, using the guessed edge frequencies, so that we don't
2403 end up with a 0-count inline body which can confuse downstream
2404 optimizations such as function splitting. */
2405 if (!ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count && count)
2407 /* Apply the larger of the call bb count and the total incoming
2408 call edge count to the callee. */
2409 gcov_type in_count = 0;
2410 struct cgraph_edge *in_edge;
2411 for (in_edge = id->src_node->callers; in_edge;
2412 in_edge = in_edge->next_caller)
2413 in_count += in_edge->count;
2414 freqs_to_counts (id->src_node, count > in_count ? count : in_count);
2417 if (ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count)
2418 count_scale
2419 = GCOV_COMPUTE_SCALE (count,
2420 ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count);
2421 else
2422 count_scale = REG_BR_PROB_BASE;
2424 /* Register specific tree functions. */
2425 gimple_register_cfg_hooks ();
2427 /* If we are inlining just region of the function, make sure to connect new entry
2428 to ENTRY_BLOCK_PTR. Since new entry can be part of loop, we must compute
2429 frequency and probability of ENTRY_BLOCK_PTR based on the frequencies and
2430 probabilities of edges incoming from nonduplicated region. */
2431 if (new_entry)
2433 edge e;
2434 edge_iterator ei;
2436 FOR_EACH_EDGE (e, ei, new_entry->preds)
2437 if (!e->src->aux)
2439 incoming_frequency += EDGE_FREQUENCY (e);
2440 incoming_count += e->count;
2442 incoming_count = apply_scale (incoming_count, count_scale);
2443 incoming_frequency
2444 = apply_scale ((gcov_type)incoming_frequency, frequency_scale);
2445 ENTRY_BLOCK_PTR->count = incoming_count;
2446 ENTRY_BLOCK_PTR->frequency = incoming_frequency;
2449 /* Must have a CFG here at this point. */
2450 gcc_assert (ENTRY_BLOCK_PTR_FOR_FUNCTION
2451 (DECL_STRUCT_FUNCTION (callee_fndecl)));
2453 cfun_to_copy = id->src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
2455 ENTRY_BLOCK_PTR_FOR_FUNCTION (cfun_to_copy)->aux = entry_block_map;
2456 EXIT_BLOCK_PTR_FOR_FUNCTION (cfun_to_copy)->aux = exit_block_map;
2457 entry_block_map->aux = ENTRY_BLOCK_PTR_FOR_FUNCTION (cfun_to_copy);
2458 exit_block_map->aux = EXIT_BLOCK_PTR_FOR_FUNCTION (cfun_to_copy);
2460 /* Duplicate any exception-handling regions. */
2461 if (cfun->eh)
2462 id->eh_map = duplicate_eh_regions (cfun_to_copy, NULL, id->eh_lp_nr,
2463 remap_decl_1, id);
2465 /* Use aux pointers to map the original blocks to copy. */
2466 FOR_EACH_BB_FN (bb, cfun_to_copy)
2467 if (!id->blocks_to_copy || bitmap_bit_p (id->blocks_to_copy, bb->index))
2469 basic_block new_bb = copy_bb (id, bb, frequency_scale, count_scale);
2470 bb->aux = new_bb;
2471 new_bb->aux = bb;
2472 new_bb->loop_father = entry_block_map->loop_father;
2475 last = last_basic_block;
2477 /* Now that we've duplicated the blocks, duplicate their edges. */
2478 bool can_make_abormal_goto
2479 = id->gimple_call && stmt_can_make_abnormal_goto (id->gimple_call);
2480 FOR_ALL_BB_FN (bb, cfun_to_copy)
2481 if (!id->blocks_to_copy
2482 || (bb->index > 0 && bitmap_bit_p (id->blocks_to_copy, bb->index)))
2483 need_debug_cleanup |= copy_edges_for_bb (bb, count_scale, exit_block_map,
2484 can_make_abormal_goto);
2486 if (new_entry)
2488 edge e = make_edge (entry_block_map, (basic_block)new_entry->aux, EDGE_FALLTHRU);
2489 e->probability = REG_BR_PROB_BASE;
2490 e->count = incoming_count;
2493 /* Duplicate the loop tree, if available and wanted. */
2494 if (loops_for_fn (src_cfun) != NULL
2495 && current_loops != NULL)
2497 copy_loops (id, entry_block_map->loop_father,
2498 get_loop (src_cfun, 0));
2499 /* Defer to cfgcleanup to update loop-father fields of basic-blocks. */
2500 loops_state_set (LOOPS_NEED_FIXUP);
2503 /* If the loop tree in the source function needed fixup, mark the
2504 destination loop tree for fixup, too. */
2505 if (loops_for_fn (src_cfun)->state & LOOPS_NEED_FIXUP)
2506 loops_state_set (LOOPS_NEED_FIXUP);
2508 if (gimple_in_ssa_p (cfun))
2509 FOR_ALL_BB_FN (bb, cfun_to_copy)
2510 if (!id->blocks_to_copy
2511 || (bb->index > 0 && bitmap_bit_p (id->blocks_to_copy, bb->index)))
2512 copy_phis_for_bb (bb, id);
2514 FOR_ALL_BB_FN (bb, cfun_to_copy)
2515 if (bb->aux)
2517 if (need_debug_cleanup
2518 && bb->index != ENTRY_BLOCK
2519 && bb->index != EXIT_BLOCK)
2520 maybe_move_debug_stmts_to_successors (id, (basic_block) bb->aux);
2521 /* Update call edge destinations. This can not be done before loop
2522 info is updated, because we may split basic blocks. */
2523 if (id->transform_call_graph_edges == CB_CGE_DUPLICATE)
2524 redirect_all_calls (id, (basic_block)bb->aux);
2525 ((basic_block)bb->aux)->aux = NULL;
2526 bb->aux = NULL;
2529 /* Zero out AUX fields of newly created block during EH edge
2530 insertion. */
2531 for (; last < last_basic_block; last++)
2533 if (need_debug_cleanup)
2534 maybe_move_debug_stmts_to_successors (id, BASIC_BLOCK (last));
2535 BASIC_BLOCK (last)->aux = NULL;
2536 /* Update call edge destinations. This can not be done before loop
2537 info is updated, because we may split basic blocks. */
2538 if (id->transform_call_graph_edges == CB_CGE_DUPLICATE)
2539 redirect_all_calls (id, BASIC_BLOCK (last));
2541 entry_block_map->aux = NULL;
2542 exit_block_map->aux = NULL;
2544 if (id->eh_map)
2546 pointer_map_destroy (id->eh_map);
2547 id->eh_map = NULL;
2550 return new_fndecl;
2553 /* Copy the debug STMT using ID. We deal with these statements in a
2554 special way: if any variable in their VALUE expression wasn't
2555 remapped yet, we won't remap it, because that would get decl uids
2556 out of sync, causing codegen differences between -g and -g0. If
2557 this arises, we drop the VALUE expression altogether. */
2559 static void
2560 copy_debug_stmt (gimple stmt, copy_body_data *id)
2562 tree t, *n;
2563 struct walk_stmt_info wi;
2565 if (gimple_block (stmt))
2567 n = (tree *) pointer_map_contains (id->decl_map, gimple_block (stmt));
2568 gimple_set_block (stmt, n ? *n : id->block);
2571 /* Remap all the operands in COPY. */
2572 memset (&wi, 0, sizeof (wi));
2573 wi.info = id;
2575 processing_debug_stmt = 1;
2577 if (gimple_debug_source_bind_p (stmt))
2578 t = gimple_debug_source_bind_get_var (stmt);
2579 else
2580 t = gimple_debug_bind_get_var (stmt);
2582 if (TREE_CODE (t) == PARM_DECL && id->debug_map
2583 && (n = (tree *) pointer_map_contains (id->debug_map, t)))
2585 gcc_assert (TREE_CODE (*n) == VAR_DECL);
2586 t = *n;
2588 else if (TREE_CODE (t) == VAR_DECL
2589 && !is_global_var (t)
2590 && !pointer_map_contains (id->decl_map, t))
2591 /* T is a non-localized variable. */;
2592 else
2593 walk_tree (&t, remap_gimple_op_r, &wi, NULL);
2595 if (gimple_debug_bind_p (stmt))
2597 gimple_debug_bind_set_var (stmt, t);
2599 if (gimple_debug_bind_has_value_p (stmt))
2600 walk_tree (gimple_debug_bind_get_value_ptr (stmt),
2601 remap_gimple_op_r, &wi, NULL);
2603 /* Punt if any decl couldn't be remapped. */
2604 if (processing_debug_stmt < 0)
2605 gimple_debug_bind_reset_value (stmt);
2607 else if (gimple_debug_source_bind_p (stmt))
2609 gimple_debug_source_bind_set_var (stmt, t);
2610 walk_tree (gimple_debug_source_bind_get_value_ptr (stmt),
2611 remap_gimple_op_r, &wi, NULL);
2612 /* When inlining and source bind refers to one of the optimized
2613 away parameters, change the source bind into normal debug bind
2614 referring to the corresponding DEBUG_EXPR_DECL that should have
2615 been bound before the call stmt. */
2616 t = gimple_debug_source_bind_get_value (stmt);
2617 if (t != NULL_TREE
2618 && TREE_CODE (t) == PARM_DECL
2619 && id->gimple_call)
2621 vec<tree, va_gc> **debug_args = decl_debug_args_lookup (id->src_fn);
2622 unsigned int i;
2623 if (debug_args != NULL)
2625 for (i = 0; i < vec_safe_length (*debug_args); i += 2)
2626 if ((**debug_args)[i] == DECL_ORIGIN (t)
2627 && TREE_CODE ((**debug_args)[i + 1]) == DEBUG_EXPR_DECL)
2629 t = (**debug_args)[i + 1];
2630 stmt->gsbase.subcode = GIMPLE_DEBUG_BIND;
2631 gimple_debug_bind_set_value (stmt, t);
2632 break;
2638 processing_debug_stmt = 0;
2640 update_stmt (stmt);
2643 /* Process deferred debug stmts. In order to give values better odds
2644 of being successfully remapped, we delay the processing of debug
2645 stmts until all other stmts that might require remapping are
2646 processed. */
2648 static void
2649 copy_debug_stmts (copy_body_data *id)
2651 size_t i;
2652 gimple stmt;
2654 if (!id->debug_stmts.exists ())
2655 return;
2657 FOR_EACH_VEC_ELT (id->debug_stmts, i, stmt)
2658 copy_debug_stmt (stmt, id);
2660 id->debug_stmts.release ();
2663 /* Make a copy of the body of SRC_FN so that it can be inserted inline in
2664 another function. */
2666 static tree
2667 copy_tree_body (copy_body_data *id)
2669 tree fndecl = id->src_fn;
2670 tree body = DECL_SAVED_TREE (fndecl);
2672 walk_tree (&body, copy_tree_body_r, id, NULL);
2674 return body;
2677 /* Make a copy of the body of FN so that it can be inserted inline in
2678 another function. */
2680 static tree
2681 copy_body (copy_body_data *id, gcov_type count, int frequency_scale,
2682 basic_block entry_block_map, basic_block exit_block_map,
2683 basic_block new_entry)
2685 tree fndecl = id->src_fn;
2686 tree body;
2688 /* If this body has a CFG, walk CFG and copy. */
2689 gcc_assert (ENTRY_BLOCK_PTR_FOR_FUNCTION (DECL_STRUCT_FUNCTION (fndecl)));
2690 body = copy_cfg_body (id, count, frequency_scale, entry_block_map, exit_block_map,
2691 new_entry);
2692 copy_debug_stmts (id);
2694 return body;
2697 /* Return true if VALUE is an ADDR_EXPR of an automatic variable
2698 defined in function FN, or of a data member thereof. */
2700 static bool
2701 self_inlining_addr_expr (tree value, tree fn)
2703 tree var;
2705 if (TREE_CODE (value) != ADDR_EXPR)
2706 return false;
2708 var = get_base_address (TREE_OPERAND (value, 0));
2710 return var && auto_var_in_fn_p (var, fn);
2713 /* Append to BB a debug annotation that binds VAR to VALUE, inheriting
2714 lexical block and line number information from base_stmt, if given,
2715 or from the last stmt of the block otherwise. */
2717 static gimple
2718 insert_init_debug_bind (copy_body_data *id,
2719 basic_block bb, tree var, tree value,
2720 gimple base_stmt)
2722 gimple note;
2723 gimple_stmt_iterator gsi;
2724 tree tracked_var;
2726 if (!gimple_in_ssa_p (id->src_cfun))
2727 return NULL;
2729 if (!MAY_HAVE_DEBUG_STMTS)
2730 return NULL;
2732 tracked_var = target_for_debug_bind (var);
2733 if (!tracked_var)
2734 return NULL;
2736 if (bb)
2738 gsi = gsi_last_bb (bb);
2739 if (!base_stmt && !gsi_end_p (gsi))
2740 base_stmt = gsi_stmt (gsi);
2743 note = gimple_build_debug_bind (tracked_var, value, base_stmt);
2745 if (bb)
2747 if (!gsi_end_p (gsi))
2748 gsi_insert_after (&gsi, note, GSI_SAME_STMT);
2749 else
2750 gsi_insert_before (&gsi, note, GSI_SAME_STMT);
2753 return note;
2756 static void
2757 insert_init_stmt (copy_body_data *id, basic_block bb, gimple init_stmt)
2759 /* If VAR represents a zero-sized variable, it's possible that the
2760 assignment statement may result in no gimple statements. */
2761 if (init_stmt)
2763 gimple_stmt_iterator si = gsi_last_bb (bb);
2765 /* We can end up with init statements that store to a non-register
2766 from a rhs with a conversion. Handle that here by forcing the
2767 rhs into a temporary. gimple_regimplify_operands is not
2768 prepared to do this for us. */
2769 if (!is_gimple_debug (init_stmt)
2770 && !is_gimple_reg (gimple_assign_lhs (init_stmt))
2771 && is_gimple_reg_type (TREE_TYPE (gimple_assign_lhs (init_stmt)))
2772 && gimple_assign_rhs_class (init_stmt) == GIMPLE_UNARY_RHS)
2774 tree rhs = build1 (gimple_assign_rhs_code (init_stmt),
2775 gimple_expr_type (init_stmt),
2776 gimple_assign_rhs1 (init_stmt));
2777 rhs = force_gimple_operand_gsi (&si, rhs, true, NULL_TREE, false,
2778 GSI_NEW_STMT);
2779 gimple_assign_set_rhs_code (init_stmt, TREE_CODE (rhs));
2780 gimple_assign_set_rhs1 (init_stmt, rhs);
2782 gsi_insert_after (&si, init_stmt, GSI_NEW_STMT);
2783 gimple_regimplify_operands (init_stmt, &si);
2785 if (!is_gimple_debug (init_stmt) && MAY_HAVE_DEBUG_STMTS)
2787 tree def = gimple_assign_lhs (init_stmt);
2788 insert_init_debug_bind (id, bb, def, def, init_stmt);
2793 /* Initialize parameter P with VALUE. If needed, produce init statement
2794 at the end of BB. When BB is NULL, we return init statement to be
2795 output later. */
2796 static gimple
2797 setup_one_parameter (copy_body_data *id, tree p, tree value, tree fn,
2798 basic_block bb, tree *vars)
2800 gimple init_stmt = NULL;
2801 tree var;
2802 tree rhs = value;
2803 tree def = (gimple_in_ssa_p (cfun)
2804 ? ssa_default_def (id->src_cfun, p) : NULL);
2806 if (value
2807 && value != error_mark_node
2808 && !useless_type_conversion_p (TREE_TYPE (p), TREE_TYPE (value)))
2810 /* If we can match up types by promotion/demotion do so. */
2811 if (fold_convertible_p (TREE_TYPE (p), value))
2812 rhs = fold_convert (TREE_TYPE (p), value);
2813 else
2815 /* ??? For valid programs we should not end up here.
2816 Still if we end up with truly mismatched types here, fall back
2817 to using a VIEW_CONVERT_EXPR or a literal zero to not leak invalid
2818 GIMPLE to the following passes. */
2819 if (!is_gimple_reg_type (TREE_TYPE (value))
2820 || TYPE_SIZE (TREE_TYPE (p)) == TYPE_SIZE (TREE_TYPE (value)))
2821 rhs = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (p), value);
2822 else
2823 rhs = build_zero_cst (TREE_TYPE (p));
2827 /* Make an equivalent VAR_DECL. Note that we must NOT remap the type
2828 here since the type of this decl must be visible to the calling
2829 function. */
2830 var = copy_decl_to_var (p, id);
2832 /* Declare this new variable. */
2833 DECL_CHAIN (var) = *vars;
2834 *vars = var;
2836 /* Make gimplifier happy about this variable. */
2837 DECL_SEEN_IN_BIND_EXPR_P (var) = 1;
2839 /* If the parameter is never assigned to, has no SSA_NAMEs created,
2840 we would not need to create a new variable here at all, if it
2841 weren't for debug info. Still, we can just use the argument
2842 value. */
2843 if (TREE_READONLY (p)
2844 && !TREE_ADDRESSABLE (p)
2845 && value && !TREE_SIDE_EFFECTS (value)
2846 && !def)
2848 /* We may produce non-gimple trees by adding NOPs or introduce
2849 invalid sharing when operand is not really constant.
2850 It is not big deal to prohibit constant propagation here as
2851 we will constant propagate in DOM1 pass anyway. */
2852 if (is_gimple_min_invariant (value)
2853 && useless_type_conversion_p (TREE_TYPE (p),
2854 TREE_TYPE (value))
2855 /* We have to be very careful about ADDR_EXPR. Make sure
2856 the base variable isn't a local variable of the inlined
2857 function, e.g., when doing recursive inlining, direct or
2858 mutually-recursive or whatever, which is why we don't
2859 just test whether fn == current_function_decl. */
2860 && ! self_inlining_addr_expr (value, fn))
2862 insert_decl_map (id, p, value);
2863 insert_debug_decl_map (id, p, var);
2864 return insert_init_debug_bind (id, bb, var, value, NULL);
2868 /* Register the VAR_DECL as the equivalent for the PARM_DECL;
2869 that way, when the PARM_DECL is encountered, it will be
2870 automatically replaced by the VAR_DECL. */
2871 insert_decl_map (id, p, var);
2873 /* Even if P was TREE_READONLY, the new VAR should not be.
2874 In the original code, we would have constructed a
2875 temporary, and then the function body would have never
2876 changed the value of P. However, now, we will be
2877 constructing VAR directly. The constructor body may
2878 change its value multiple times as it is being
2879 constructed. Therefore, it must not be TREE_READONLY;
2880 the back-end assumes that TREE_READONLY variable is
2881 assigned to only once. */
2882 if (TYPE_NEEDS_CONSTRUCTING (TREE_TYPE (p)))
2883 TREE_READONLY (var) = 0;
2885 /* If there is no setup required and we are in SSA, take the easy route
2886 replacing all SSA names representing the function parameter by the
2887 SSA name passed to function.
2889 We need to construct map for the variable anyway as it might be used
2890 in different SSA names when parameter is set in function.
2892 Do replacement at -O0 for const arguments replaced by constant.
2893 This is important for builtin_constant_p and other construct requiring
2894 constant argument to be visible in inlined function body. */
2895 if (gimple_in_ssa_p (cfun) && rhs && def && is_gimple_reg (p)
2896 && (optimize
2897 || (TREE_READONLY (p)
2898 && is_gimple_min_invariant (rhs)))
2899 && (TREE_CODE (rhs) == SSA_NAME
2900 || is_gimple_min_invariant (rhs))
2901 && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (def))
2903 insert_decl_map (id, def, rhs);
2904 return insert_init_debug_bind (id, bb, var, rhs, NULL);
2907 /* If the value of argument is never used, don't care about initializing
2908 it. */
2909 if (optimize && gimple_in_ssa_p (cfun) && !def && is_gimple_reg (p))
2911 gcc_assert (!value || !TREE_SIDE_EFFECTS (value));
2912 return insert_init_debug_bind (id, bb, var, rhs, NULL);
2915 /* Initialize this VAR_DECL from the equivalent argument. Convert
2916 the argument to the proper type in case it was promoted. */
2917 if (value)
2919 if (rhs == error_mark_node)
2921 insert_decl_map (id, p, var);
2922 return insert_init_debug_bind (id, bb, var, rhs, NULL);
2925 STRIP_USELESS_TYPE_CONVERSION (rhs);
2927 /* If we are in SSA form properly remap the default definition
2928 or assign to a dummy SSA name if the parameter is unused and
2929 we are not optimizing. */
2930 if (gimple_in_ssa_p (cfun) && is_gimple_reg (p))
2932 if (def)
2934 def = remap_ssa_name (def, id);
2935 init_stmt = gimple_build_assign (def, rhs);
2936 SSA_NAME_IS_DEFAULT_DEF (def) = 0;
2937 set_ssa_default_def (cfun, var, NULL);
2939 else if (!optimize)
2941 def = make_ssa_name (var, NULL);
2942 init_stmt = gimple_build_assign (def, rhs);
2945 else
2946 init_stmt = gimple_build_assign (var, rhs);
2948 if (bb && init_stmt)
2949 insert_init_stmt (id, bb, init_stmt);
2951 return init_stmt;
2954 /* Generate code to initialize the parameters of the function at the
2955 top of the stack in ID from the GIMPLE_CALL STMT. */
2957 static void
2958 initialize_inlined_parameters (copy_body_data *id, gimple stmt,
2959 tree fn, basic_block bb)
2961 tree parms;
2962 size_t i;
2963 tree p;
2964 tree vars = NULL_TREE;
2965 tree static_chain = gimple_call_chain (stmt);
2967 /* Figure out what the parameters are. */
2968 parms = DECL_ARGUMENTS (fn);
2970 /* Loop through the parameter declarations, replacing each with an
2971 equivalent VAR_DECL, appropriately initialized. */
2972 for (p = parms, i = 0; p; p = DECL_CHAIN (p), i++)
2974 tree val;
2975 val = i < gimple_call_num_args (stmt) ? gimple_call_arg (stmt, i) : NULL;
2976 setup_one_parameter (id, p, val, fn, bb, &vars);
2978 /* After remapping parameters remap their types. This has to be done
2979 in a second loop over all parameters to appropriately remap
2980 variable sized arrays when the size is specified in a
2981 parameter following the array. */
2982 for (p = parms, i = 0; p; p = DECL_CHAIN (p), i++)
2984 tree *varp = (tree *) pointer_map_contains (id->decl_map, p);
2985 if (varp
2986 && TREE_CODE (*varp) == VAR_DECL)
2988 tree def = (gimple_in_ssa_p (cfun) && is_gimple_reg (p)
2989 ? ssa_default_def (id->src_cfun, p) : NULL);
2990 tree var = *varp;
2991 TREE_TYPE (var) = remap_type (TREE_TYPE (var), id);
2992 /* Also remap the default definition if it was remapped
2993 to the default definition of the parameter replacement
2994 by the parameter setup. */
2995 if (def)
2997 tree *defp = (tree *) pointer_map_contains (id->decl_map, def);
2998 if (defp
2999 && TREE_CODE (*defp) == SSA_NAME
3000 && SSA_NAME_VAR (*defp) == var)
3001 TREE_TYPE (*defp) = TREE_TYPE (var);
3006 /* Initialize the static chain. */
3007 p = DECL_STRUCT_FUNCTION (fn)->static_chain_decl;
3008 gcc_assert (fn != current_function_decl);
3009 if (p)
3011 /* No static chain? Seems like a bug in tree-nested.c. */
3012 gcc_assert (static_chain);
3014 setup_one_parameter (id, p, static_chain, fn, bb, &vars);
3017 declare_inline_vars (id->block, vars);
3021 /* Declare a return variable to replace the RESULT_DECL for the
3022 function we are calling. An appropriate DECL_STMT is returned.
3023 The USE_STMT is filled to contain a use of the declaration to
3024 indicate the return value of the function.
3026 RETURN_SLOT, if non-null is place where to store the result. It
3027 is set only for CALL_EXPR_RETURN_SLOT_OPT. MODIFY_DEST, if non-null,
3028 was the LHS of the MODIFY_EXPR to which this call is the RHS.
3030 The return value is a (possibly null) value that holds the result
3031 as seen by the caller. */
3033 static tree
3034 declare_return_variable (copy_body_data *id, tree return_slot, tree modify_dest,
3035 basic_block entry_bb)
3037 tree callee = id->src_fn;
3038 tree result = DECL_RESULT (callee);
3039 tree callee_type = TREE_TYPE (result);
3040 tree caller_type;
3041 tree var, use;
3043 /* Handle type-mismatches in the function declaration return type
3044 vs. the call expression. */
3045 if (modify_dest)
3046 caller_type = TREE_TYPE (modify_dest);
3047 else
3048 caller_type = TREE_TYPE (TREE_TYPE (callee));
3050 /* We don't need to do anything for functions that don't return anything. */
3051 if (VOID_TYPE_P (callee_type))
3052 return NULL_TREE;
3054 /* If there was a return slot, then the return value is the
3055 dereferenced address of that object. */
3056 if (return_slot)
3058 /* The front end shouldn't have used both return_slot and
3059 a modify expression. */
3060 gcc_assert (!modify_dest);
3061 if (DECL_BY_REFERENCE (result))
3063 tree return_slot_addr = build_fold_addr_expr (return_slot);
3064 STRIP_USELESS_TYPE_CONVERSION (return_slot_addr);
3066 /* We are going to construct *&return_slot and we can't do that
3067 for variables believed to be not addressable.
3069 FIXME: This check possibly can match, because values returned
3070 via return slot optimization are not believed to have address
3071 taken by alias analysis. */
3072 gcc_assert (TREE_CODE (return_slot) != SSA_NAME);
3073 var = return_slot_addr;
3075 else
3077 var = return_slot;
3078 gcc_assert (TREE_CODE (var) != SSA_NAME);
3079 TREE_ADDRESSABLE (var) |= TREE_ADDRESSABLE (result);
3081 if ((TREE_CODE (TREE_TYPE (result)) == COMPLEX_TYPE
3082 || TREE_CODE (TREE_TYPE (result)) == VECTOR_TYPE)
3083 && !DECL_GIMPLE_REG_P (result)
3084 && DECL_P (var))
3085 DECL_GIMPLE_REG_P (var) = 0;
3086 use = NULL;
3087 goto done;
3090 /* All types requiring non-trivial constructors should have been handled. */
3091 gcc_assert (!TREE_ADDRESSABLE (callee_type));
3093 /* Attempt to avoid creating a new temporary variable. */
3094 if (modify_dest
3095 && TREE_CODE (modify_dest) != SSA_NAME)
3097 bool use_it = false;
3099 /* We can't use MODIFY_DEST if there's type promotion involved. */
3100 if (!useless_type_conversion_p (callee_type, caller_type))
3101 use_it = false;
3103 /* ??? If we're assigning to a variable sized type, then we must
3104 reuse the destination variable, because we've no good way to
3105 create variable sized temporaries at this point. */
3106 else if (TREE_CODE (TYPE_SIZE_UNIT (caller_type)) != INTEGER_CST)
3107 use_it = true;
3109 /* If the callee cannot possibly modify MODIFY_DEST, then we can
3110 reuse it as the result of the call directly. Don't do this if
3111 it would promote MODIFY_DEST to addressable. */
3112 else if (TREE_ADDRESSABLE (result))
3113 use_it = false;
3114 else
3116 tree base_m = get_base_address (modify_dest);
3118 /* If the base isn't a decl, then it's a pointer, and we don't
3119 know where that's going to go. */
3120 if (!DECL_P (base_m))
3121 use_it = false;
3122 else if (is_global_var (base_m))
3123 use_it = false;
3124 else if ((TREE_CODE (TREE_TYPE (result)) == COMPLEX_TYPE
3125 || TREE_CODE (TREE_TYPE (result)) == VECTOR_TYPE)
3126 && !DECL_GIMPLE_REG_P (result)
3127 && DECL_GIMPLE_REG_P (base_m))
3128 use_it = false;
3129 else if (!TREE_ADDRESSABLE (base_m))
3130 use_it = true;
3133 if (use_it)
3135 var = modify_dest;
3136 use = NULL;
3137 goto done;
3141 gcc_assert (TREE_CODE (TYPE_SIZE_UNIT (callee_type)) == INTEGER_CST);
3143 var = copy_result_decl_to_var (result, id);
3144 DECL_SEEN_IN_BIND_EXPR_P (var) = 1;
3146 /* Do not have the rest of GCC warn about this variable as it should
3147 not be visible to the user. */
3148 TREE_NO_WARNING (var) = 1;
3150 declare_inline_vars (id->block, var);
3152 /* Build the use expr. If the return type of the function was
3153 promoted, convert it back to the expected type. */
3154 use = var;
3155 if (!useless_type_conversion_p (caller_type, TREE_TYPE (var)))
3157 /* If we can match up types by promotion/demotion do so. */
3158 if (fold_convertible_p (caller_type, var))
3159 use = fold_convert (caller_type, var);
3160 else
3162 /* ??? For valid programs we should not end up here.
3163 Still if we end up with truly mismatched types here, fall back
3164 to using a MEM_REF to not leak invalid GIMPLE to the following
3165 passes. */
3166 /* Prevent var from being written into SSA form. */
3167 if (TREE_CODE (TREE_TYPE (var)) == VECTOR_TYPE
3168 || TREE_CODE (TREE_TYPE (var)) == COMPLEX_TYPE)
3169 DECL_GIMPLE_REG_P (var) = false;
3170 else if (is_gimple_reg_type (TREE_TYPE (var)))
3171 TREE_ADDRESSABLE (var) = true;
3172 use = fold_build2 (MEM_REF, caller_type,
3173 build_fold_addr_expr (var),
3174 build_int_cst (ptr_type_node, 0));
3178 STRIP_USELESS_TYPE_CONVERSION (use);
3180 if (DECL_BY_REFERENCE (result))
3182 TREE_ADDRESSABLE (var) = 1;
3183 var = build_fold_addr_expr (var);
3186 done:
3187 /* Register the VAR_DECL as the equivalent for the RESULT_DECL; that
3188 way, when the RESULT_DECL is encountered, it will be
3189 automatically replaced by the VAR_DECL.
3191 When returning by reference, ensure that RESULT_DECL remaps to
3192 gimple_val. */
3193 if (DECL_BY_REFERENCE (result)
3194 && !is_gimple_val (var))
3196 tree temp = create_tmp_var (TREE_TYPE (result), "retvalptr");
3197 insert_decl_map (id, result, temp);
3198 /* When RESULT_DECL is in SSA form, we need to remap and initialize
3199 it's default_def SSA_NAME. */
3200 if (gimple_in_ssa_p (id->src_cfun)
3201 && is_gimple_reg (result))
3203 temp = make_ssa_name (temp, NULL);
3204 insert_decl_map (id, ssa_default_def (id->src_cfun, result), temp);
3206 insert_init_stmt (id, entry_bb, gimple_build_assign (temp, var));
3208 else
3209 insert_decl_map (id, result, var);
3211 /* Remember this so we can ignore it in remap_decls. */
3212 id->retvar = var;
3214 return use;
3217 /* Callback through walk_tree. Determine if a DECL_INITIAL makes reference
3218 to a local label. */
3220 static tree
3221 has_label_address_in_static_1 (tree *nodep, int *walk_subtrees, void *fnp)
3223 tree node = *nodep;
3224 tree fn = (tree) fnp;
3226 if (TREE_CODE (node) == LABEL_DECL && DECL_CONTEXT (node) == fn)
3227 return node;
3229 if (TYPE_P (node))
3230 *walk_subtrees = 0;
3232 return NULL_TREE;
3235 /* Determine if the function can be copied. If so return NULL. If
3236 not return a string describng the reason for failure. */
3238 static const char *
3239 copy_forbidden (struct function *fun, tree fndecl)
3241 const char *reason = fun->cannot_be_copied_reason;
3242 tree decl;
3243 unsigned ix;
3245 /* Only examine the function once. */
3246 if (fun->cannot_be_copied_set)
3247 return reason;
3249 /* We cannot copy a function that receives a non-local goto
3250 because we cannot remap the destination label used in the
3251 function that is performing the non-local goto. */
3252 /* ??? Actually, this should be possible, if we work at it.
3253 No doubt there's just a handful of places that simply
3254 assume it doesn't happen and don't substitute properly. */
3255 if (fun->has_nonlocal_label)
3257 reason = G_("function %q+F can never be copied "
3258 "because it receives a non-local goto");
3259 goto fail;
3262 FOR_EACH_LOCAL_DECL (fun, ix, decl)
3263 if (TREE_CODE (decl) == VAR_DECL
3264 && TREE_STATIC (decl)
3265 && !DECL_EXTERNAL (decl)
3266 && DECL_INITIAL (decl)
3267 && walk_tree_without_duplicates (&DECL_INITIAL (decl),
3268 has_label_address_in_static_1,
3269 fndecl))
3271 reason = G_("function %q+F can never be copied because it saves "
3272 "address of local label in a static variable");
3273 goto fail;
3276 fail:
3277 fun->cannot_be_copied_reason = reason;
3278 fun->cannot_be_copied_set = true;
3279 return reason;
3283 static const char *inline_forbidden_reason;
3285 /* A callback for walk_gimple_seq to handle statements. Returns non-null
3286 iff a function can not be inlined. Also sets the reason why. */
3288 static tree
3289 inline_forbidden_p_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
3290 struct walk_stmt_info *wip)
3292 tree fn = (tree) wip->info;
3293 tree t;
3294 gimple stmt = gsi_stmt (*gsi);
3296 switch (gimple_code (stmt))
3298 case GIMPLE_CALL:
3299 /* Refuse to inline alloca call unless user explicitly forced so as
3300 this may change program's memory overhead drastically when the
3301 function using alloca is called in loop. In GCC present in
3302 SPEC2000 inlining into schedule_block cause it to require 2GB of
3303 RAM instead of 256MB. Don't do so for alloca calls emitted for
3304 VLA objects as those can't cause unbounded growth (they're always
3305 wrapped inside stack_save/stack_restore regions. */
3306 if (gimple_alloca_call_p (stmt)
3307 && !gimple_call_alloca_for_var_p (stmt)
3308 && !lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn)))
3310 inline_forbidden_reason
3311 = G_("function %q+F can never be inlined because it uses "
3312 "alloca (override using the always_inline attribute)");
3313 *handled_ops_p = true;
3314 return fn;
3317 t = gimple_call_fndecl (stmt);
3318 if (t == NULL_TREE)
3319 break;
3321 /* We cannot inline functions that call setjmp. */
3322 if (setjmp_call_p (t))
3324 inline_forbidden_reason
3325 = G_("function %q+F can never be inlined because it uses setjmp");
3326 *handled_ops_p = true;
3327 return t;
3330 if (DECL_BUILT_IN_CLASS (t) == BUILT_IN_NORMAL)
3331 switch (DECL_FUNCTION_CODE (t))
3333 /* We cannot inline functions that take a variable number of
3334 arguments. */
3335 case BUILT_IN_VA_START:
3336 case BUILT_IN_NEXT_ARG:
3337 case BUILT_IN_VA_END:
3338 inline_forbidden_reason
3339 = G_("function %q+F can never be inlined because it "
3340 "uses variable argument lists");
3341 *handled_ops_p = true;
3342 return t;
3344 case BUILT_IN_LONGJMP:
3345 /* We can't inline functions that call __builtin_longjmp at
3346 all. The non-local goto machinery really requires the
3347 destination be in a different function. If we allow the
3348 function calling __builtin_longjmp to be inlined into the
3349 function calling __builtin_setjmp, Things will Go Awry. */
3350 inline_forbidden_reason
3351 = G_("function %q+F can never be inlined because "
3352 "it uses setjmp-longjmp exception handling");
3353 *handled_ops_p = true;
3354 return t;
3356 case BUILT_IN_NONLOCAL_GOTO:
3357 /* Similarly. */
3358 inline_forbidden_reason
3359 = G_("function %q+F can never be inlined because "
3360 "it uses non-local goto");
3361 *handled_ops_p = true;
3362 return t;
3364 case BUILT_IN_RETURN:
3365 case BUILT_IN_APPLY_ARGS:
3366 /* If a __builtin_apply_args caller would be inlined,
3367 it would be saving arguments of the function it has
3368 been inlined into. Similarly __builtin_return would
3369 return from the function the inline has been inlined into. */
3370 inline_forbidden_reason
3371 = G_("function %q+F can never be inlined because "
3372 "it uses __builtin_return or __builtin_apply_args");
3373 *handled_ops_p = true;
3374 return t;
3376 default:
3377 break;
3379 break;
3381 case GIMPLE_GOTO:
3382 t = gimple_goto_dest (stmt);
3384 /* We will not inline a function which uses computed goto. The
3385 addresses of its local labels, which may be tucked into
3386 global storage, are of course not constant across
3387 instantiations, which causes unexpected behavior. */
3388 if (TREE_CODE (t) != LABEL_DECL)
3390 inline_forbidden_reason
3391 = G_("function %q+F can never be inlined "
3392 "because it contains a computed goto");
3393 *handled_ops_p = true;
3394 return t;
3396 break;
3398 default:
3399 break;
3402 *handled_ops_p = false;
3403 return NULL_TREE;
3406 /* Return true if FNDECL is a function that cannot be inlined into
3407 another one. */
3409 static bool
3410 inline_forbidden_p (tree fndecl)
3412 struct function *fun = DECL_STRUCT_FUNCTION (fndecl);
3413 struct walk_stmt_info wi;
3414 struct pointer_set_t *visited_nodes;
3415 basic_block bb;
3416 bool forbidden_p = false;
3418 /* First check for shared reasons not to copy the code. */
3419 inline_forbidden_reason = copy_forbidden (fun, fndecl);
3420 if (inline_forbidden_reason != NULL)
3421 return true;
3423 /* Next, walk the statements of the function looking for
3424 constraucts we can't handle, or are non-optimal for inlining. */
3425 visited_nodes = pointer_set_create ();
3426 memset (&wi, 0, sizeof (wi));
3427 wi.info = (void *) fndecl;
3428 wi.pset = visited_nodes;
3430 FOR_EACH_BB_FN (bb, fun)
3432 gimple ret;
3433 gimple_seq seq = bb_seq (bb);
3434 ret = walk_gimple_seq (seq, inline_forbidden_p_stmt, NULL, &wi);
3435 forbidden_p = (ret != NULL);
3436 if (forbidden_p)
3437 break;
3440 pointer_set_destroy (visited_nodes);
3441 return forbidden_p;
3444 /* Return false if the function FNDECL cannot be inlined on account of its
3445 attributes, true otherwise. */
3446 static bool
3447 function_attribute_inlinable_p (const_tree fndecl)
3449 if (targetm.attribute_table)
3451 const_tree a;
3453 for (a = DECL_ATTRIBUTES (fndecl); a; a = TREE_CHAIN (a))
3455 const_tree name = TREE_PURPOSE (a);
3456 int i;
3458 for (i = 0; targetm.attribute_table[i].name != NULL; i++)
3459 if (is_attribute_p (targetm.attribute_table[i].name, name))
3460 return targetm.function_attribute_inlinable_p (fndecl);
3464 return true;
3467 /* Returns nonzero if FN is a function that does not have any
3468 fundamental inline blocking properties. */
3470 bool
3471 tree_inlinable_function_p (tree fn)
3473 bool inlinable = true;
3474 bool do_warning;
3475 tree always_inline;
3477 /* If we've already decided this function shouldn't be inlined,
3478 there's no need to check again. */
3479 if (DECL_UNINLINABLE (fn))
3480 return false;
3482 /* We only warn for functions declared `inline' by the user. */
3483 do_warning = (warn_inline
3484 && DECL_DECLARED_INLINE_P (fn)
3485 && !DECL_NO_INLINE_WARNING_P (fn)
3486 && !DECL_IN_SYSTEM_HEADER (fn));
3488 always_inline = lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn));
3490 if (flag_no_inline
3491 && always_inline == NULL)
3493 if (do_warning)
3494 warning (OPT_Winline, "function %q+F can never be inlined because it "
3495 "is suppressed using -fno-inline", fn);
3496 inlinable = false;
3499 else if (!function_attribute_inlinable_p (fn))
3501 if (do_warning)
3502 warning (OPT_Winline, "function %q+F can never be inlined because it "
3503 "uses attributes conflicting with inlining", fn);
3504 inlinable = false;
3507 else if (inline_forbidden_p (fn))
3509 /* See if we should warn about uninlinable functions. Previously,
3510 some of these warnings would be issued while trying to expand
3511 the function inline, but that would cause multiple warnings
3512 about functions that would for example call alloca. But since
3513 this a property of the function, just one warning is enough.
3514 As a bonus we can now give more details about the reason why a
3515 function is not inlinable. */
3516 if (always_inline)
3517 error (inline_forbidden_reason, fn);
3518 else if (do_warning)
3519 warning (OPT_Winline, inline_forbidden_reason, fn);
3521 inlinable = false;
3524 /* Squirrel away the result so that we don't have to check again. */
3525 DECL_UNINLINABLE (fn) = !inlinable;
3527 return inlinable;
3530 /* Estimate the cost of a memory move. Use machine dependent
3531 word size and take possible memcpy call into account. */
3534 estimate_move_cost (tree type)
3536 HOST_WIDE_INT size;
3538 gcc_assert (!VOID_TYPE_P (type));
3540 if (TREE_CODE (type) == VECTOR_TYPE)
3542 enum machine_mode inner = TYPE_MODE (TREE_TYPE (type));
3543 enum machine_mode simd
3544 = targetm.vectorize.preferred_simd_mode (inner);
3545 int simd_mode_size = GET_MODE_SIZE (simd);
3546 return ((GET_MODE_SIZE (TYPE_MODE (type)) + simd_mode_size - 1)
3547 / simd_mode_size);
3550 size = int_size_in_bytes (type);
3552 if (size < 0 || size > MOVE_MAX_PIECES * MOVE_RATIO (!optimize_size))
3553 /* Cost of a memcpy call, 3 arguments and the call. */
3554 return 4;
3555 else
3556 return ((size + MOVE_MAX_PIECES - 1) / MOVE_MAX_PIECES);
3559 /* Returns cost of operation CODE, according to WEIGHTS */
3561 static int
3562 estimate_operator_cost (enum tree_code code, eni_weights *weights,
3563 tree op1 ATTRIBUTE_UNUSED, tree op2)
3565 switch (code)
3567 /* These are "free" conversions, or their presumed cost
3568 is folded into other operations. */
3569 case RANGE_EXPR:
3570 CASE_CONVERT:
3571 case COMPLEX_EXPR:
3572 case PAREN_EXPR:
3573 case VIEW_CONVERT_EXPR:
3574 return 0;
3576 /* Assign cost of 1 to usual operations.
3577 ??? We may consider mapping RTL costs to this. */
3578 case COND_EXPR:
3579 case VEC_COND_EXPR:
3580 case VEC_PERM_EXPR:
3582 case PLUS_EXPR:
3583 case POINTER_PLUS_EXPR:
3584 case MINUS_EXPR:
3585 case MULT_EXPR:
3586 case MULT_HIGHPART_EXPR:
3587 case FMA_EXPR:
3589 case ADDR_SPACE_CONVERT_EXPR:
3590 case FIXED_CONVERT_EXPR:
3591 case FIX_TRUNC_EXPR:
3593 case NEGATE_EXPR:
3594 case FLOAT_EXPR:
3595 case MIN_EXPR:
3596 case MAX_EXPR:
3597 case ABS_EXPR:
3599 case LSHIFT_EXPR:
3600 case RSHIFT_EXPR:
3601 case LROTATE_EXPR:
3602 case RROTATE_EXPR:
3603 case VEC_LSHIFT_EXPR:
3604 case VEC_RSHIFT_EXPR:
3606 case BIT_IOR_EXPR:
3607 case BIT_XOR_EXPR:
3608 case BIT_AND_EXPR:
3609 case BIT_NOT_EXPR:
3611 case TRUTH_ANDIF_EXPR:
3612 case TRUTH_ORIF_EXPR:
3613 case TRUTH_AND_EXPR:
3614 case TRUTH_OR_EXPR:
3615 case TRUTH_XOR_EXPR:
3616 case TRUTH_NOT_EXPR:
3618 case LT_EXPR:
3619 case LE_EXPR:
3620 case GT_EXPR:
3621 case GE_EXPR:
3622 case EQ_EXPR:
3623 case NE_EXPR:
3624 case ORDERED_EXPR:
3625 case UNORDERED_EXPR:
3627 case UNLT_EXPR:
3628 case UNLE_EXPR:
3629 case UNGT_EXPR:
3630 case UNGE_EXPR:
3631 case UNEQ_EXPR:
3632 case LTGT_EXPR:
3634 case CONJ_EXPR:
3636 case PREDECREMENT_EXPR:
3637 case PREINCREMENT_EXPR:
3638 case POSTDECREMENT_EXPR:
3639 case POSTINCREMENT_EXPR:
3641 case REALIGN_LOAD_EXPR:
3643 case REDUC_MAX_EXPR:
3644 case REDUC_MIN_EXPR:
3645 case REDUC_PLUS_EXPR:
3646 case WIDEN_SUM_EXPR:
3647 case WIDEN_MULT_EXPR:
3648 case DOT_PROD_EXPR:
3649 case WIDEN_MULT_PLUS_EXPR:
3650 case WIDEN_MULT_MINUS_EXPR:
3651 case WIDEN_LSHIFT_EXPR:
3653 case VEC_WIDEN_MULT_HI_EXPR:
3654 case VEC_WIDEN_MULT_LO_EXPR:
3655 case VEC_WIDEN_MULT_EVEN_EXPR:
3656 case VEC_WIDEN_MULT_ODD_EXPR:
3657 case VEC_UNPACK_HI_EXPR:
3658 case VEC_UNPACK_LO_EXPR:
3659 case VEC_UNPACK_FLOAT_HI_EXPR:
3660 case VEC_UNPACK_FLOAT_LO_EXPR:
3661 case VEC_PACK_TRUNC_EXPR:
3662 case VEC_PACK_SAT_EXPR:
3663 case VEC_PACK_FIX_TRUNC_EXPR:
3664 case VEC_WIDEN_LSHIFT_HI_EXPR:
3665 case VEC_WIDEN_LSHIFT_LO_EXPR:
3667 return 1;
3669 /* Few special cases of expensive operations. This is useful
3670 to avoid inlining on functions having too many of these. */
3671 case TRUNC_DIV_EXPR:
3672 case CEIL_DIV_EXPR:
3673 case FLOOR_DIV_EXPR:
3674 case ROUND_DIV_EXPR:
3675 case EXACT_DIV_EXPR:
3676 case TRUNC_MOD_EXPR:
3677 case CEIL_MOD_EXPR:
3678 case FLOOR_MOD_EXPR:
3679 case ROUND_MOD_EXPR:
3680 case RDIV_EXPR:
3681 if (TREE_CODE (op2) != INTEGER_CST)
3682 return weights->div_mod_cost;
3683 return 1;
3685 default:
3686 /* We expect a copy assignment with no operator. */
3687 gcc_assert (get_gimple_rhs_class (code) == GIMPLE_SINGLE_RHS);
3688 return 0;
3693 /* Estimate number of instructions that will be created by expanding
3694 the statements in the statement sequence STMTS.
3695 WEIGHTS contains weights attributed to various constructs. */
3697 static
3698 int estimate_num_insns_seq (gimple_seq stmts, eni_weights *weights)
3700 int cost;
3701 gimple_stmt_iterator gsi;
3703 cost = 0;
3704 for (gsi = gsi_start (stmts); !gsi_end_p (gsi); gsi_next (&gsi))
3705 cost += estimate_num_insns (gsi_stmt (gsi), weights);
3707 return cost;
3711 /* Estimate number of instructions that will be created by expanding STMT.
3712 WEIGHTS contains weights attributed to various constructs. */
3715 estimate_num_insns (gimple stmt, eni_weights *weights)
3717 unsigned cost, i;
3718 enum gimple_code code = gimple_code (stmt);
3719 tree lhs;
3720 tree rhs;
3722 switch (code)
3724 case GIMPLE_ASSIGN:
3725 /* Try to estimate the cost of assignments. We have three cases to
3726 deal with:
3727 1) Simple assignments to registers;
3728 2) Stores to things that must live in memory. This includes
3729 "normal" stores to scalars, but also assignments of large
3730 structures, or constructors of big arrays;
3732 Let us look at the first two cases, assuming we have "a = b + C":
3733 <GIMPLE_ASSIGN <var_decl "a">
3734 <plus_expr <var_decl "b"> <constant C>>
3735 If "a" is a GIMPLE register, the assignment to it is free on almost
3736 any target, because "a" usually ends up in a real register. Hence
3737 the only cost of this expression comes from the PLUS_EXPR, and we
3738 can ignore the GIMPLE_ASSIGN.
3739 If "a" is not a GIMPLE register, the assignment to "a" will most
3740 likely be a real store, so the cost of the GIMPLE_ASSIGN is the cost
3741 of moving something into "a", which we compute using the function
3742 estimate_move_cost. */
3743 if (gimple_clobber_p (stmt))
3744 return 0; /* ={v} {CLOBBER} stmt expands to nothing. */
3746 lhs = gimple_assign_lhs (stmt);
3747 rhs = gimple_assign_rhs1 (stmt);
3749 cost = 0;
3751 /* Account for the cost of moving to / from memory. */
3752 if (gimple_store_p (stmt))
3753 cost += estimate_move_cost (TREE_TYPE (lhs));
3754 if (gimple_assign_load_p (stmt))
3755 cost += estimate_move_cost (TREE_TYPE (rhs));
3757 cost += estimate_operator_cost (gimple_assign_rhs_code (stmt), weights,
3758 gimple_assign_rhs1 (stmt),
3759 get_gimple_rhs_class (gimple_assign_rhs_code (stmt))
3760 == GIMPLE_BINARY_RHS
3761 ? gimple_assign_rhs2 (stmt) : NULL);
3762 break;
3764 case GIMPLE_COND:
3765 cost = 1 + estimate_operator_cost (gimple_cond_code (stmt), weights,
3766 gimple_op (stmt, 0),
3767 gimple_op (stmt, 1));
3768 break;
3770 case GIMPLE_SWITCH:
3771 /* Take into account cost of the switch + guess 2 conditional jumps for
3772 each case label.
3774 TODO: once the switch expansion logic is sufficiently separated, we can
3775 do better job on estimating cost of the switch. */
3776 if (weights->time_based)
3777 cost = floor_log2 (gimple_switch_num_labels (stmt)) * 2;
3778 else
3779 cost = gimple_switch_num_labels (stmt) * 2;
3780 break;
3782 case GIMPLE_CALL:
3784 tree decl = gimple_call_fndecl (stmt);
3785 struct cgraph_node *node = NULL;
3787 /* Do not special case builtins where we see the body.
3788 This just confuse inliner. */
3789 if (!decl || !(node = cgraph_get_node (decl)) || node->definition)
3791 /* For buitins that are likely expanded to nothing or
3792 inlined do not account operand costs. */
3793 else if (is_simple_builtin (decl))
3794 return 0;
3795 else if (is_inexpensive_builtin (decl))
3796 return weights->target_builtin_call_cost;
3797 else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
3799 /* We canonicalize x * x to pow (x, 2.0) with -ffast-math, so
3800 specialize the cheap expansion we do here.
3801 ??? This asks for a more general solution. */
3802 switch (DECL_FUNCTION_CODE (decl))
3804 case BUILT_IN_POW:
3805 case BUILT_IN_POWF:
3806 case BUILT_IN_POWL:
3807 if (TREE_CODE (gimple_call_arg (stmt, 1)) == REAL_CST
3808 && REAL_VALUES_EQUAL
3809 (TREE_REAL_CST (gimple_call_arg (stmt, 1)), dconst2))
3810 return estimate_operator_cost (MULT_EXPR, weights,
3811 gimple_call_arg (stmt, 0),
3812 gimple_call_arg (stmt, 0));
3813 break;
3815 default:
3816 break;
3820 cost = node ? weights->call_cost : weights->indirect_call_cost;
3821 if (gimple_call_lhs (stmt))
3822 cost += estimate_move_cost (TREE_TYPE (gimple_call_lhs (stmt)));
3823 for (i = 0; i < gimple_call_num_args (stmt); i++)
3825 tree arg = gimple_call_arg (stmt, i);
3826 cost += estimate_move_cost (TREE_TYPE (arg));
3828 break;
3831 case GIMPLE_RETURN:
3832 return weights->return_cost;
3834 case GIMPLE_GOTO:
3835 case GIMPLE_LABEL:
3836 case GIMPLE_NOP:
3837 case GIMPLE_PHI:
3838 case GIMPLE_PREDICT:
3839 case GIMPLE_DEBUG:
3840 return 0;
3842 case GIMPLE_ASM:
3844 int count = asm_str_count (gimple_asm_string (stmt));
3845 /* 1000 means infinity. This avoids overflows later
3846 with very long asm statements. */
3847 if (count > 1000)
3848 count = 1000;
3849 return count;
3852 case GIMPLE_RESX:
3853 /* This is either going to be an external function call with one
3854 argument, or two register copy statements plus a goto. */
3855 return 2;
3857 case GIMPLE_EH_DISPATCH:
3858 /* ??? This is going to turn into a switch statement. Ideally
3859 we'd have a look at the eh region and estimate the number of
3860 edges involved. */
3861 return 10;
3863 case GIMPLE_BIND:
3864 return estimate_num_insns_seq (gimple_bind_body (stmt), weights);
3866 case GIMPLE_EH_FILTER:
3867 return estimate_num_insns_seq (gimple_eh_filter_failure (stmt), weights);
3869 case GIMPLE_CATCH:
3870 return estimate_num_insns_seq (gimple_catch_handler (stmt), weights);
3872 case GIMPLE_TRY:
3873 return (estimate_num_insns_seq (gimple_try_eval (stmt), weights)
3874 + estimate_num_insns_seq (gimple_try_cleanup (stmt), weights));
3876 /* OpenMP directives are generally very expensive. */
3878 case GIMPLE_OMP_RETURN:
3879 case GIMPLE_OMP_SECTIONS_SWITCH:
3880 case GIMPLE_OMP_ATOMIC_STORE:
3881 case GIMPLE_OMP_CONTINUE:
3882 /* ...except these, which are cheap. */
3883 return 0;
3885 case GIMPLE_OMP_ATOMIC_LOAD:
3886 return weights->omp_cost;
3888 case GIMPLE_OMP_FOR:
3889 return (weights->omp_cost
3890 + estimate_num_insns_seq (gimple_omp_body (stmt), weights)
3891 + estimate_num_insns_seq (gimple_omp_for_pre_body (stmt), weights));
3893 case GIMPLE_OMP_PARALLEL:
3894 case GIMPLE_OMP_TASK:
3895 case GIMPLE_OMP_CRITICAL:
3896 case GIMPLE_OMP_MASTER:
3897 case GIMPLE_OMP_TASKGROUP:
3898 case GIMPLE_OMP_ORDERED:
3899 case GIMPLE_OMP_SECTION:
3900 case GIMPLE_OMP_SECTIONS:
3901 case GIMPLE_OMP_SINGLE:
3902 case GIMPLE_OMP_TARGET:
3903 case GIMPLE_OMP_TEAMS:
3904 return (weights->omp_cost
3905 + estimate_num_insns_seq (gimple_omp_body (stmt), weights));
3907 case GIMPLE_TRANSACTION:
3908 return (weights->tm_cost
3909 + estimate_num_insns_seq (gimple_transaction_body (stmt),
3910 weights));
3912 default:
3913 gcc_unreachable ();
3916 return cost;
3919 /* Estimate number of instructions that will be created by expanding
3920 function FNDECL. WEIGHTS contains weights attributed to various
3921 constructs. */
3924 estimate_num_insns_fn (tree fndecl, eni_weights *weights)
3926 struct function *my_function = DECL_STRUCT_FUNCTION (fndecl);
3927 gimple_stmt_iterator bsi;
3928 basic_block bb;
3929 int n = 0;
3931 gcc_assert (my_function && my_function->cfg);
3932 FOR_EACH_BB_FN (bb, my_function)
3934 for (bsi = gsi_start_bb (bb); !gsi_end_p (bsi); gsi_next (&bsi))
3935 n += estimate_num_insns (gsi_stmt (bsi), weights);
3938 return n;
3942 /* Initializes weights used by estimate_num_insns. */
3944 void
3945 init_inline_once (void)
3947 eni_size_weights.call_cost = 1;
3948 eni_size_weights.indirect_call_cost = 3;
3949 eni_size_weights.target_builtin_call_cost = 1;
3950 eni_size_weights.div_mod_cost = 1;
3951 eni_size_weights.omp_cost = 40;
3952 eni_size_weights.tm_cost = 10;
3953 eni_size_weights.time_based = false;
3954 eni_size_weights.return_cost = 1;
3956 /* Estimating time for call is difficult, since we have no idea what the
3957 called function does. In the current uses of eni_time_weights,
3958 underestimating the cost does less harm than overestimating it, so
3959 we choose a rather small value here. */
3960 eni_time_weights.call_cost = 10;
3961 eni_time_weights.indirect_call_cost = 15;
3962 eni_time_weights.target_builtin_call_cost = 1;
3963 eni_time_weights.div_mod_cost = 10;
3964 eni_time_weights.omp_cost = 40;
3965 eni_time_weights.tm_cost = 40;
3966 eni_time_weights.time_based = true;
3967 eni_time_weights.return_cost = 2;
3970 /* Estimate the number of instructions in a gimple_seq. */
3973 count_insns_seq (gimple_seq seq, eni_weights *weights)
3975 gimple_stmt_iterator gsi;
3976 int n = 0;
3977 for (gsi = gsi_start (seq); !gsi_end_p (gsi); gsi_next (&gsi))
3978 n += estimate_num_insns (gsi_stmt (gsi), weights);
3980 return n;
3984 /* Install new lexical TREE_BLOCK underneath 'current_block'. */
3986 static void
3987 prepend_lexical_block (tree current_block, tree new_block)
3989 BLOCK_CHAIN (new_block) = BLOCK_SUBBLOCKS (current_block);
3990 BLOCK_SUBBLOCKS (current_block) = new_block;
3991 BLOCK_SUPERCONTEXT (new_block) = current_block;
3994 /* Add local variables from CALLEE to CALLER. */
3996 static inline void
3997 add_local_variables (struct function *callee, struct function *caller,
3998 copy_body_data *id)
4000 tree var;
4001 unsigned ix;
4003 FOR_EACH_LOCAL_DECL (callee, ix, var)
4004 if (!can_be_nonlocal (var, id))
4006 tree new_var = remap_decl (var, id);
4008 /* Remap debug-expressions. */
4009 if (TREE_CODE (new_var) == VAR_DECL
4010 && DECL_HAS_DEBUG_EXPR_P (var)
4011 && new_var != var)
4013 tree tem = DECL_DEBUG_EXPR (var);
4014 bool old_regimplify = id->regimplify;
4015 id->remapping_type_depth++;
4016 walk_tree (&tem, copy_tree_body_r, id, NULL);
4017 id->remapping_type_depth--;
4018 id->regimplify = old_regimplify;
4019 SET_DECL_DEBUG_EXPR (new_var, tem);
4020 DECL_HAS_DEBUG_EXPR_P (new_var) = 1;
4022 add_local_decl (caller, new_var);
4026 /* If STMT is a GIMPLE_CALL, replace it with its inline expansion. */
4028 static bool
4029 expand_call_inline (basic_block bb, gimple stmt, copy_body_data *id)
4031 tree use_retvar;
4032 tree fn;
4033 struct pointer_map_t *st, *dst;
4034 tree return_slot;
4035 tree modify_dest;
4036 location_t saved_location;
4037 struct cgraph_edge *cg_edge;
4038 cgraph_inline_failed_t reason;
4039 basic_block return_block;
4040 edge e;
4041 gimple_stmt_iterator gsi, stmt_gsi;
4042 bool successfully_inlined = FALSE;
4043 bool purge_dead_abnormal_edges;
4045 /* Set input_location here so we get the right instantiation context
4046 if we call instantiate_decl from inlinable_function_p. */
4047 /* FIXME: instantiate_decl isn't called by inlinable_function_p. */
4048 saved_location = input_location;
4049 input_location = gimple_location (stmt);
4051 /* From here on, we're only interested in CALL_EXPRs. */
4052 if (gimple_code (stmt) != GIMPLE_CALL)
4053 goto egress;
4055 cg_edge = cgraph_edge (id->dst_node, stmt);
4056 gcc_checking_assert (cg_edge);
4057 /* First, see if we can figure out what function is being called.
4058 If we cannot, then there is no hope of inlining the function. */
4059 if (cg_edge->indirect_unknown_callee)
4060 goto egress;
4061 fn = cg_edge->callee->decl;
4062 gcc_checking_assert (fn);
4064 /* If FN is a declaration of a function in a nested scope that was
4065 globally declared inline, we don't set its DECL_INITIAL.
4066 However, we can't blindly follow DECL_ABSTRACT_ORIGIN because the
4067 C++ front-end uses it for cdtors to refer to their internal
4068 declarations, that are not real functions. Fortunately those
4069 don't have trees to be saved, so we can tell by checking their
4070 gimple_body. */
4071 if (!DECL_INITIAL (fn)
4072 && DECL_ABSTRACT_ORIGIN (fn)
4073 && gimple_has_body_p (DECL_ABSTRACT_ORIGIN (fn)))
4074 fn = DECL_ABSTRACT_ORIGIN (fn);
4076 /* Don't try to inline functions that are not well-suited to inlining. */
4077 if (cg_edge->inline_failed)
4079 reason = cg_edge->inline_failed;
4080 /* If this call was originally indirect, we do not want to emit any
4081 inlining related warnings or sorry messages because there are no
4082 guarantees regarding those. */
4083 if (cg_edge->indirect_inlining_edge)
4084 goto egress;
4086 if (lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn))
4087 /* For extern inline functions that get redefined we always
4088 silently ignored always_inline flag. Better behaviour would
4089 be to be able to keep both bodies and use extern inline body
4090 for inlining, but we can't do that because frontends overwrite
4091 the body. */
4092 && !cg_edge->callee->local.redefined_extern_inline
4093 /* During early inline pass, report only when optimization is
4094 not turned on. */
4095 && (cgraph_global_info_ready
4096 || !optimize)
4097 /* PR 20090218-1_0.c. Body can be provided by another module. */
4098 && (reason != CIF_BODY_NOT_AVAILABLE || !flag_generate_lto))
4100 error ("inlining failed in call to always_inline %q+F: %s", fn,
4101 cgraph_inline_failed_string (reason));
4102 error ("called from here");
4104 else if (warn_inline
4105 && DECL_DECLARED_INLINE_P (fn)
4106 && !DECL_NO_INLINE_WARNING_P (fn)
4107 && !DECL_IN_SYSTEM_HEADER (fn)
4108 && reason != CIF_UNSPECIFIED
4109 && !lookup_attribute ("noinline", DECL_ATTRIBUTES (fn))
4110 /* Do not warn about not inlined recursive calls. */
4111 && !cgraph_edge_recursive_p (cg_edge)
4112 /* Avoid warnings during early inline pass. */
4113 && cgraph_global_info_ready)
4115 warning (OPT_Winline, "inlining failed in call to %q+F: %s",
4116 fn, _(cgraph_inline_failed_string (reason)));
4117 warning (OPT_Winline, "called from here");
4119 goto egress;
4121 fn = cg_edge->callee->decl;
4122 cgraph_get_body (cg_edge->callee);
4124 #ifdef ENABLE_CHECKING
4125 if (cg_edge->callee->decl != id->dst_node->decl)
4126 verify_cgraph_node (cg_edge->callee);
4127 #endif
4129 /* We will be inlining this callee. */
4130 id->eh_lp_nr = lookup_stmt_eh_lp (stmt);
4132 /* Update the callers EH personality. */
4133 if (DECL_FUNCTION_PERSONALITY (cg_edge->callee->decl))
4134 DECL_FUNCTION_PERSONALITY (cg_edge->caller->decl)
4135 = DECL_FUNCTION_PERSONALITY (cg_edge->callee->decl);
4137 /* Split the block holding the GIMPLE_CALL. */
4138 e = split_block (bb, stmt);
4139 bb = e->src;
4140 return_block = e->dest;
4141 remove_edge (e);
4143 /* split_block splits after the statement; work around this by
4144 moving the call into the second block manually. Not pretty,
4145 but seems easier than doing the CFG manipulation by hand
4146 when the GIMPLE_CALL is in the last statement of BB. */
4147 stmt_gsi = gsi_last_bb (bb);
4148 gsi_remove (&stmt_gsi, false);
4150 /* If the GIMPLE_CALL was in the last statement of BB, it may have
4151 been the source of abnormal edges. In this case, schedule
4152 the removal of dead abnormal edges. */
4153 gsi = gsi_start_bb (return_block);
4154 if (gsi_end_p (gsi))
4156 gsi_insert_after (&gsi, stmt, GSI_NEW_STMT);
4157 purge_dead_abnormal_edges = true;
4159 else
4161 gsi_insert_before (&gsi, stmt, GSI_NEW_STMT);
4162 purge_dead_abnormal_edges = false;
4165 stmt_gsi = gsi_start_bb (return_block);
4167 /* Build a block containing code to initialize the arguments, the
4168 actual inline expansion of the body, and a label for the return
4169 statements within the function to jump to. The type of the
4170 statement expression is the return type of the function call.
4171 ??? If the call does not have an associated block then we will
4172 remap all callee blocks to NULL, effectively dropping most of
4173 its debug information. This should only happen for calls to
4174 artificial decls inserted by the compiler itself. We need to
4175 either link the inlined blocks into the caller block tree or
4176 not refer to them in any way to not break GC for locations. */
4177 if (gimple_block (stmt))
4179 id->block = make_node (BLOCK);
4180 BLOCK_ABSTRACT_ORIGIN (id->block) = fn;
4181 BLOCK_SOURCE_LOCATION (id->block) = LOCATION_LOCUS (input_location);
4182 prepend_lexical_block (gimple_block (stmt), id->block);
4185 /* Local declarations will be replaced by their equivalents in this
4186 map. */
4187 st = id->decl_map;
4188 id->decl_map = pointer_map_create ();
4189 dst = id->debug_map;
4190 id->debug_map = NULL;
4192 /* Record the function we are about to inline. */
4193 id->src_fn = fn;
4194 id->src_node = cg_edge->callee;
4195 id->src_cfun = DECL_STRUCT_FUNCTION (fn);
4196 id->gimple_call = stmt;
4198 gcc_assert (!id->src_cfun->after_inlining);
4200 id->entry_bb = bb;
4201 if (lookup_attribute ("cold", DECL_ATTRIBUTES (fn)))
4203 gimple_stmt_iterator si = gsi_last_bb (bb);
4204 gsi_insert_after (&si, gimple_build_predict (PRED_COLD_FUNCTION,
4205 NOT_TAKEN),
4206 GSI_NEW_STMT);
4208 initialize_inlined_parameters (id, stmt, fn, bb);
4210 if (DECL_INITIAL (fn))
4212 if (gimple_block (stmt))
4214 tree *var;
4216 prepend_lexical_block (id->block,
4217 remap_blocks (DECL_INITIAL (fn), id));
4218 gcc_checking_assert (BLOCK_SUBBLOCKS (id->block)
4219 && (BLOCK_CHAIN (BLOCK_SUBBLOCKS (id->block))
4220 == NULL_TREE));
4221 /* Move vars for PARM_DECLs from DECL_INITIAL block to id->block,
4222 otherwise for DWARF DW_TAG_formal_parameter will not be children of
4223 DW_TAG_inlined_subroutine, but of a DW_TAG_lexical_block
4224 under it. The parameters can be then evaluated in the debugger,
4225 but don't show in backtraces. */
4226 for (var = &BLOCK_VARS (BLOCK_SUBBLOCKS (id->block)); *var; )
4227 if (TREE_CODE (DECL_ORIGIN (*var)) == PARM_DECL)
4229 tree v = *var;
4230 *var = TREE_CHAIN (v);
4231 TREE_CHAIN (v) = BLOCK_VARS (id->block);
4232 BLOCK_VARS (id->block) = v;
4234 else
4235 var = &TREE_CHAIN (*var);
4237 else
4238 remap_blocks_to_null (DECL_INITIAL (fn), id);
4241 /* Return statements in the function body will be replaced by jumps
4242 to the RET_LABEL. */
4243 gcc_assert (DECL_INITIAL (fn));
4244 gcc_assert (TREE_CODE (DECL_INITIAL (fn)) == BLOCK);
4246 /* Find the LHS to which the result of this call is assigned. */
4247 return_slot = NULL;
4248 if (gimple_call_lhs (stmt))
4250 modify_dest = gimple_call_lhs (stmt);
4252 /* The function which we are inlining might not return a value,
4253 in which case we should issue a warning that the function
4254 does not return a value. In that case the optimizers will
4255 see that the variable to which the value is assigned was not
4256 initialized. We do not want to issue a warning about that
4257 uninitialized variable. */
4258 if (DECL_P (modify_dest))
4259 TREE_NO_WARNING (modify_dest) = 1;
4261 if (gimple_call_return_slot_opt_p (stmt))
4263 return_slot = modify_dest;
4264 modify_dest = NULL;
4267 else
4268 modify_dest = NULL;
4270 /* If we are inlining a call to the C++ operator new, we don't want
4271 to use type based alias analysis on the return value. Otherwise
4272 we may get confused if the compiler sees that the inlined new
4273 function returns a pointer which was just deleted. See bug
4274 33407. */
4275 if (DECL_IS_OPERATOR_NEW (fn))
4277 return_slot = NULL;
4278 modify_dest = NULL;
4281 /* Declare the return variable for the function. */
4282 use_retvar = declare_return_variable (id, return_slot, modify_dest, bb);
4284 /* Add local vars in this inlined callee to caller. */
4285 add_local_variables (id->src_cfun, cfun, id);
4287 if (dump_file && (dump_flags & TDF_DETAILS))
4289 fprintf (dump_file, "Inlining ");
4290 print_generic_expr (dump_file, id->src_fn, 0);
4291 fprintf (dump_file, " to ");
4292 print_generic_expr (dump_file, id->dst_fn, 0);
4293 fprintf (dump_file, " with frequency %i\n", cg_edge->frequency);
4296 /* This is it. Duplicate the callee body. Assume callee is
4297 pre-gimplified. Note that we must not alter the caller
4298 function in any way before this point, as this CALL_EXPR may be
4299 a self-referential call; if we're calling ourselves, we need to
4300 duplicate our body before altering anything. */
4301 copy_body (id, bb->count,
4302 GCOV_COMPUTE_SCALE (cg_edge->frequency, CGRAPH_FREQ_BASE),
4303 bb, return_block, NULL);
4305 /* Reset the escaped solution. */
4306 if (cfun->gimple_df)
4307 pt_solution_reset (&cfun->gimple_df->escaped);
4309 /* Clean up. */
4310 if (id->debug_map)
4312 pointer_map_destroy (id->debug_map);
4313 id->debug_map = dst;
4315 pointer_map_destroy (id->decl_map);
4316 id->decl_map = st;
4318 /* Unlink the calls virtual operands before replacing it. */
4319 unlink_stmt_vdef (stmt);
4321 /* If the inlined function returns a result that we care about,
4322 substitute the GIMPLE_CALL with an assignment of the return
4323 variable to the LHS of the call. That is, if STMT was
4324 'a = foo (...)', substitute the call with 'a = USE_RETVAR'. */
4325 if (use_retvar && gimple_call_lhs (stmt))
4327 gimple old_stmt = stmt;
4328 stmt = gimple_build_assign (gimple_call_lhs (stmt), use_retvar);
4329 gsi_replace (&stmt_gsi, stmt, false);
4330 maybe_clean_or_replace_eh_stmt (old_stmt, stmt);
4332 else
4334 /* Handle the case of inlining a function with no return
4335 statement, which causes the return value to become undefined. */
4336 if (gimple_call_lhs (stmt)
4337 && TREE_CODE (gimple_call_lhs (stmt)) == SSA_NAME)
4339 tree name = gimple_call_lhs (stmt);
4340 tree var = SSA_NAME_VAR (name);
4341 tree def = ssa_default_def (cfun, var);
4343 if (def)
4345 /* If the variable is used undefined, make this name
4346 undefined via a move. */
4347 stmt = gimple_build_assign (gimple_call_lhs (stmt), def);
4348 gsi_replace (&stmt_gsi, stmt, true);
4350 else
4352 /* Otherwise make this variable undefined. */
4353 gsi_remove (&stmt_gsi, true);
4354 set_ssa_default_def (cfun, var, name);
4355 SSA_NAME_DEF_STMT (name) = gimple_build_nop ();
4358 else
4359 gsi_remove (&stmt_gsi, true);
4362 if (purge_dead_abnormal_edges)
4364 gimple_purge_dead_eh_edges (return_block);
4365 gimple_purge_dead_abnormal_call_edges (return_block);
4368 /* If the value of the new expression is ignored, that's OK. We
4369 don't warn about this for CALL_EXPRs, so we shouldn't warn about
4370 the equivalent inlined version either. */
4371 if (is_gimple_assign (stmt))
4373 gcc_assert (gimple_assign_single_p (stmt)
4374 || CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt)));
4375 TREE_USED (gimple_assign_rhs1 (stmt)) = 1;
4378 /* Output the inlining info for this abstract function, since it has been
4379 inlined. If we don't do this now, we can lose the information about the
4380 variables in the function when the blocks get blown away as soon as we
4381 remove the cgraph node. */
4382 if (gimple_block (stmt))
4383 (*debug_hooks->outlining_inline_function) (cg_edge->callee->decl);
4385 /* Update callgraph if needed. */
4386 cgraph_remove_node (cg_edge->callee);
4388 id->block = NULL_TREE;
4389 successfully_inlined = TRUE;
4391 egress:
4392 input_location = saved_location;
4393 return successfully_inlined;
4396 /* Expand call statements reachable from STMT_P.
4397 We can only have CALL_EXPRs as the "toplevel" tree code or nested
4398 in a MODIFY_EXPR. */
4400 static bool
4401 gimple_expand_calls_inline (basic_block bb, copy_body_data *id)
4403 gimple_stmt_iterator gsi;
4405 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
4407 gimple stmt = gsi_stmt (gsi);
4409 if (is_gimple_call (stmt)
4410 && expand_call_inline (bb, stmt, id))
4411 return true;
4414 return false;
4418 /* Walk all basic blocks created after FIRST and try to fold every statement
4419 in the STATEMENTS pointer set. */
4421 static void
4422 fold_marked_statements (int first, struct pointer_set_t *statements)
4424 for (; first < n_basic_blocks; first++)
4425 if (BASIC_BLOCK (first))
4427 gimple_stmt_iterator gsi;
4429 for (gsi = gsi_start_bb (BASIC_BLOCK (first));
4430 !gsi_end_p (gsi);
4431 gsi_next (&gsi))
4432 if (pointer_set_contains (statements, gsi_stmt (gsi)))
4434 gimple old_stmt = gsi_stmt (gsi);
4435 tree old_decl = is_gimple_call (old_stmt) ? gimple_call_fndecl (old_stmt) : 0;
4437 if (old_decl && DECL_BUILT_IN (old_decl))
4439 /* Folding builtins can create multiple instructions,
4440 we need to look at all of them. */
4441 gimple_stmt_iterator i2 = gsi;
4442 gsi_prev (&i2);
4443 if (fold_stmt (&gsi))
4445 gimple new_stmt;
4446 /* If a builtin at the end of a bb folded into nothing,
4447 the following loop won't work. */
4448 if (gsi_end_p (gsi))
4450 cgraph_update_edges_for_call_stmt (old_stmt,
4451 old_decl, NULL);
4452 break;
4454 if (gsi_end_p (i2))
4455 i2 = gsi_start_bb (BASIC_BLOCK (first));
4456 else
4457 gsi_next (&i2);
4458 while (1)
4460 new_stmt = gsi_stmt (i2);
4461 update_stmt (new_stmt);
4462 cgraph_update_edges_for_call_stmt (old_stmt, old_decl,
4463 new_stmt);
4465 if (new_stmt == gsi_stmt (gsi))
4467 /* It is okay to check only for the very last
4468 of these statements. If it is a throwing
4469 statement nothing will change. If it isn't
4470 this can remove EH edges. If that weren't
4471 correct then because some intermediate stmts
4472 throw, but not the last one. That would mean
4473 we'd have to split the block, which we can't
4474 here and we'd loose anyway. And as builtins
4475 probably never throw, this all
4476 is mood anyway. */
4477 if (maybe_clean_or_replace_eh_stmt (old_stmt,
4478 new_stmt))
4479 gimple_purge_dead_eh_edges (BASIC_BLOCK (first));
4480 break;
4482 gsi_next (&i2);
4486 else if (fold_stmt (&gsi))
4488 /* Re-read the statement from GSI as fold_stmt() may
4489 have changed it. */
4490 gimple new_stmt = gsi_stmt (gsi);
4491 update_stmt (new_stmt);
4493 if (is_gimple_call (old_stmt)
4494 || is_gimple_call (new_stmt))
4495 cgraph_update_edges_for_call_stmt (old_stmt, old_decl,
4496 new_stmt);
4498 if (maybe_clean_or_replace_eh_stmt (old_stmt, new_stmt))
4499 gimple_purge_dead_eh_edges (BASIC_BLOCK (first));
4505 /* Return true if BB has at least one abnormal outgoing edge. */
4507 static inline bool
4508 has_abnormal_outgoing_edge_p (basic_block bb)
4510 edge e;
4511 edge_iterator ei;
4513 FOR_EACH_EDGE (e, ei, bb->succs)
4514 if (e->flags & EDGE_ABNORMAL)
4515 return true;
4517 return false;
4520 /* Expand calls to inline functions in the body of FN. */
4522 unsigned int
4523 optimize_inline_calls (tree fn)
4525 copy_body_data id;
4526 basic_block bb;
4527 int last = n_basic_blocks;
4528 struct gimplify_ctx gctx;
4529 bool inlined_p = false;
4531 /* Clear out ID. */
4532 memset (&id, 0, sizeof (id));
4534 id.src_node = id.dst_node = cgraph_get_node (fn);
4535 gcc_assert (id.dst_node->definition);
4536 id.dst_fn = fn;
4537 /* Or any functions that aren't finished yet. */
4538 if (current_function_decl)
4539 id.dst_fn = current_function_decl;
4541 id.copy_decl = copy_decl_maybe_to_var;
4542 id.transform_call_graph_edges = CB_CGE_DUPLICATE;
4543 id.transform_new_cfg = false;
4544 id.transform_return_to_modify = true;
4545 id.transform_parameter = true;
4546 id.transform_lang_insert_block = NULL;
4547 id.statements_to_fold = pointer_set_create ();
4549 push_gimplify_context (&gctx);
4551 /* We make no attempts to keep dominance info up-to-date. */
4552 free_dominance_info (CDI_DOMINATORS);
4553 free_dominance_info (CDI_POST_DOMINATORS);
4555 /* Register specific gimple functions. */
4556 gimple_register_cfg_hooks ();
4558 /* Reach the trees by walking over the CFG, and note the
4559 enclosing basic-blocks in the call edges. */
4560 /* We walk the blocks going forward, because inlined function bodies
4561 will split id->current_basic_block, and the new blocks will
4562 follow it; we'll trudge through them, processing their CALL_EXPRs
4563 along the way. */
4564 FOR_EACH_BB (bb)
4565 inlined_p |= gimple_expand_calls_inline (bb, &id);
4567 pop_gimplify_context (NULL);
4569 #ifdef ENABLE_CHECKING
4571 struct cgraph_edge *e;
4573 verify_cgraph_node (id.dst_node);
4575 /* Double check that we inlined everything we are supposed to inline. */
4576 for (e = id.dst_node->callees; e; e = e->next_callee)
4577 gcc_assert (e->inline_failed);
4579 #endif
4581 /* Fold queued statements. */
4582 fold_marked_statements (last, id.statements_to_fold);
4583 pointer_set_destroy (id.statements_to_fold);
4585 gcc_assert (!id.debug_stmts.exists ());
4587 /* If we didn't inline into the function there is nothing to do. */
4588 if (!inlined_p)
4589 return 0;
4591 /* Renumber the lexical scoping (non-code) blocks consecutively. */
4592 number_blocks (fn);
4594 delete_unreachable_blocks_update_callgraph (&id);
4595 #ifdef ENABLE_CHECKING
4596 verify_cgraph_node (id.dst_node);
4597 #endif
4599 /* It would be nice to check SSA/CFG/statement consistency here, but it is
4600 not possible yet - the IPA passes might make various functions to not
4601 throw and they don't care to proactively update local EH info. This is
4602 done later in fixup_cfg pass that also execute the verification. */
4603 return (TODO_update_ssa
4604 | TODO_cleanup_cfg
4605 | (gimple_in_ssa_p (cfun) ? TODO_remove_unused_locals : 0)
4606 | (gimple_in_ssa_p (cfun) ? TODO_update_address_taken : 0)
4607 | (profile_status != PROFILE_ABSENT ? TODO_rebuild_frequencies : 0));
4610 /* Passed to walk_tree. Copies the node pointed to, if appropriate. */
4612 tree
4613 copy_tree_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
4615 enum tree_code code = TREE_CODE (*tp);
4616 enum tree_code_class cl = TREE_CODE_CLASS (code);
4618 /* We make copies of most nodes. */
4619 if (IS_EXPR_CODE_CLASS (cl)
4620 || code == TREE_LIST
4621 || code == TREE_VEC
4622 || code == TYPE_DECL
4623 || code == OMP_CLAUSE)
4625 /* Because the chain gets clobbered when we make a copy, we save it
4626 here. */
4627 tree chain = NULL_TREE, new_tree;
4629 if (CODE_CONTAINS_STRUCT (code, TS_COMMON))
4630 chain = TREE_CHAIN (*tp);
4632 /* Copy the node. */
4633 new_tree = copy_node (*tp);
4635 *tp = new_tree;
4637 /* Now, restore the chain, if appropriate. That will cause
4638 walk_tree to walk into the chain as well. */
4639 if (code == PARM_DECL
4640 || code == TREE_LIST
4641 || code == OMP_CLAUSE)
4642 TREE_CHAIN (*tp) = chain;
4644 /* For now, we don't update BLOCKs when we make copies. So, we
4645 have to nullify all BIND_EXPRs. */
4646 if (TREE_CODE (*tp) == BIND_EXPR)
4647 BIND_EXPR_BLOCK (*tp) = NULL_TREE;
4649 else if (code == CONSTRUCTOR)
4651 /* CONSTRUCTOR nodes need special handling because
4652 we need to duplicate the vector of elements. */
4653 tree new_tree;
4655 new_tree = copy_node (*tp);
4656 CONSTRUCTOR_ELTS (new_tree) = vec_safe_copy (CONSTRUCTOR_ELTS (*tp));
4657 *tp = new_tree;
4659 else if (code == STATEMENT_LIST)
4660 /* We used to just abort on STATEMENT_LIST, but we can run into them
4661 with statement-expressions (c++/40975). */
4662 copy_statement_list (tp);
4663 else if (TREE_CODE_CLASS (code) == tcc_type)
4664 *walk_subtrees = 0;
4665 else if (TREE_CODE_CLASS (code) == tcc_declaration)
4666 *walk_subtrees = 0;
4667 else if (TREE_CODE_CLASS (code) == tcc_constant)
4668 *walk_subtrees = 0;
4669 return NULL_TREE;
4672 /* The SAVE_EXPR pointed to by TP is being copied. If ST contains
4673 information indicating to what new SAVE_EXPR this one should be mapped,
4674 use that one. Otherwise, create a new node and enter it in ST. FN is
4675 the function into which the copy will be placed. */
4677 static void
4678 remap_save_expr (tree *tp, void *st_, int *walk_subtrees)
4680 struct pointer_map_t *st = (struct pointer_map_t *) st_;
4681 tree *n;
4682 tree t;
4684 /* See if we already encountered this SAVE_EXPR. */
4685 n = (tree *) pointer_map_contains (st, *tp);
4687 /* If we didn't already remap this SAVE_EXPR, do so now. */
4688 if (!n)
4690 t = copy_node (*tp);
4692 /* Remember this SAVE_EXPR. */
4693 *pointer_map_insert (st, *tp) = t;
4694 /* Make sure we don't remap an already-remapped SAVE_EXPR. */
4695 *pointer_map_insert (st, t) = t;
4697 else
4699 /* We've already walked into this SAVE_EXPR; don't do it again. */
4700 *walk_subtrees = 0;
4701 t = *n;
4704 /* Replace this SAVE_EXPR with the copy. */
4705 *tp = t;
4708 /* Called via walk_gimple_seq. If *GSIP points to a GIMPLE_LABEL for a local
4709 label, copies the declaration and enters it in the splay_tree in DATA (which
4710 is really a 'copy_body_data *'. */
4712 static tree
4713 mark_local_labels_stmt (gimple_stmt_iterator *gsip,
4714 bool *handled_ops_p ATTRIBUTE_UNUSED,
4715 struct walk_stmt_info *wi)
4717 copy_body_data *id = (copy_body_data *) wi->info;
4718 gimple stmt = gsi_stmt (*gsip);
4720 if (gimple_code (stmt) == GIMPLE_LABEL)
4722 tree decl = gimple_label_label (stmt);
4724 /* Copy the decl and remember the copy. */
4725 insert_decl_map (id, decl, id->copy_decl (decl, id));
4728 return NULL_TREE;
4732 /* Called via walk_gimple_seq by copy_gimple_seq_and_replace_local.
4733 Using the splay_tree pointed to by ST (which is really a `splay_tree'),
4734 remaps all local declarations to appropriate replacements in gimple
4735 operands. */
4737 static tree
4738 replace_locals_op (tree *tp, int *walk_subtrees, void *data)
4740 struct walk_stmt_info *wi = (struct walk_stmt_info*) data;
4741 copy_body_data *id = (copy_body_data *) wi->info;
4742 struct pointer_map_t *st = id->decl_map;
4743 tree *n;
4744 tree expr = *tp;
4746 /* Only a local declaration (variable or label). */
4747 if ((TREE_CODE (expr) == VAR_DECL
4748 && !TREE_STATIC (expr))
4749 || TREE_CODE (expr) == LABEL_DECL)
4751 /* Lookup the declaration. */
4752 n = (tree *) pointer_map_contains (st, expr);
4754 /* If it's there, remap it. */
4755 if (n)
4756 *tp = *n;
4757 *walk_subtrees = 0;
4759 else if (TREE_CODE (expr) == STATEMENT_LIST
4760 || TREE_CODE (expr) == BIND_EXPR
4761 || TREE_CODE (expr) == SAVE_EXPR)
4762 gcc_unreachable ();
4763 else if (TREE_CODE (expr) == TARGET_EXPR)
4765 /* Don't mess with a TARGET_EXPR that hasn't been expanded.
4766 It's OK for this to happen if it was part of a subtree that
4767 isn't immediately expanded, such as operand 2 of another
4768 TARGET_EXPR. */
4769 if (!TREE_OPERAND (expr, 1))
4771 TREE_OPERAND (expr, 1) = TREE_OPERAND (expr, 3);
4772 TREE_OPERAND (expr, 3) = NULL_TREE;
4776 /* Keep iterating. */
4777 return NULL_TREE;
4781 /* Called via walk_gimple_seq by copy_gimple_seq_and_replace_local.
4782 Using the splay_tree pointed to by ST (which is really a `splay_tree'),
4783 remaps all local declarations to appropriate replacements in gimple
4784 statements. */
4786 static tree
4787 replace_locals_stmt (gimple_stmt_iterator *gsip,
4788 bool *handled_ops_p ATTRIBUTE_UNUSED,
4789 struct walk_stmt_info *wi)
4791 copy_body_data *id = (copy_body_data *) wi->info;
4792 gimple stmt = gsi_stmt (*gsip);
4794 if (gimple_code (stmt) == GIMPLE_BIND)
4796 tree block = gimple_bind_block (stmt);
4798 if (block)
4800 remap_block (&block, id);
4801 gimple_bind_set_block (stmt, block);
4804 /* This will remap a lot of the same decls again, but this should be
4805 harmless. */
4806 if (gimple_bind_vars (stmt))
4807 gimple_bind_set_vars (stmt, remap_decls (gimple_bind_vars (stmt),
4808 NULL, id));
4811 /* Keep iterating. */
4812 return NULL_TREE;
4816 /* Copies everything in SEQ and replaces variables and labels local to
4817 current_function_decl. */
4819 gimple_seq
4820 copy_gimple_seq_and_replace_locals (gimple_seq seq)
4822 copy_body_data id;
4823 struct walk_stmt_info wi;
4824 struct pointer_set_t *visited;
4825 gimple_seq copy;
4827 /* There's nothing to do for NULL_TREE. */
4828 if (seq == NULL)
4829 return seq;
4831 /* Set up ID. */
4832 memset (&id, 0, sizeof (id));
4833 id.src_fn = current_function_decl;
4834 id.dst_fn = current_function_decl;
4835 id.decl_map = pointer_map_create ();
4836 id.debug_map = NULL;
4838 id.copy_decl = copy_decl_no_change;
4839 id.transform_call_graph_edges = CB_CGE_DUPLICATE;
4840 id.transform_new_cfg = false;
4841 id.transform_return_to_modify = false;
4842 id.transform_parameter = false;
4843 id.transform_lang_insert_block = NULL;
4845 /* Walk the tree once to find local labels. */
4846 memset (&wi, 0, sizeof (wi));
4847 visited = pointer_set_create ();
4848 wi.info = &id;
4849 wi.pset = visited;
4850 walk_gimple_seq (seq, mark_local_labels_stmt, NULL, &wi);
4851 pointer_set_destroy (visited);
4853 copy = gimple_seq_copy (seq);
4855 /* Walk the copy, remapping decls. */
4856 memset (&wi, 0, sizeof (wi));
4857 wi.info = &id;
4858 walk_gimple_seq (copy, replace_locals_stmt, replace_locals_op, &wi);
4860 /* Clean up. */
4861 pointer_map_destroy (id.decl_map);
4862 if (id.debug_map)
4863 pointer_map_destroy (id.debug_map);
4865 return copy;
4869 /* Allow someone to determine if SEARCH is a child of TOP from gdb. */
4871 static tree
4872 debug_find_tree_1 (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED, void *data)
4874 if (*tp == data)
4875 return (tree) data;
4876 else
4877 return NULL;
4880 DEBUG_FUNCTION bool
4881 debug_find_tree (tree top, tree search)
4883 return walk_tree_without_duplicates (&top, debug_find_tree_1, search) != 0;
4887 /* Declare the variables created by the inliner. Add all the variables in
4888 VARS to BIND_EXPR. */
4890 static void
4891 declare_inline_vars (tree block, tree vars)
4893 tree t;
4894 for (t = vars; t; t = DECL_CHAIN (t))
4896 DECL_SEEN_IN_BIND_EXPR_P (t) = 1;
4897 gcc_assert (!TREE_STATIC (t) && !TREE_ASM_WRITTEN (t));
4898 add_local_decl (cfun, t);
4901 if (block)
4902 BLOCK_VARS (block) = chainon (BLOCK_VARS (block), vars);
4905 /* Copy NODE (which must be a DECL). The DECL originally was in the FROM_FN,
4906 but now it will be in the TO_FN. PARM_TO_VAR means enable PARM_DECL to
4907 VAR_DECL translation. */
4909 static tree
4910 copy_decl_for_dup_finish (copy_body_data *id, tree decl, tree copy)
4912 /* Don't generate debug information for the copy if we wouldn't have
4913 generated it for the copy either. */
4914 DECL_ARTIFICIAL (copy) = DECL_ARTIFICIAL (decl);
4915 DECL_IGNORED_P (copy) = DECL_IGNORED_P (decl);
4917 /* Set the DECL_ABSTRACT_ORIGIN so the debugging routines know what
4918 declaration inspired this copy. */
4919 DECL_ABSTRACT_ORIGIN (copy) = DECL_ORIGIN (decl);
4921 /* The new variable/label has no RTL, yet. */
4922 if (CODE_CONTAINS_STRUCT (TREE_CODE (copy), TS_DECL_WRTL)
4923 && !TREE_STATIC (copy) && !DECL_EXTERNAL (copy))
4924 SET_DECL_RTL (copy, 0);
4926 /* These args would always appear unused, if not for this. */
4927 TREE_USED (copy) = 1;
4929 /* Set the context for the new declaration. */
4930 if (!DECL_CONTEXT (decl))
4931 /* Globals stay global. */
4933 else if (DECL_CONTEXT (decl) != id->src_fn)
4934 /* Things that weren't in the scope of the function we're inlining
4935 from aren't in the scope we're inlining to, either. */
4937 else if (TREE_STATIC (decl))
4938 /* Function-scoped static variables should stay in the original
4939 function. */
4941 else
4942 /* Ordinary automatic local variables are now in the scope of the
4943 new function. */
4944 DECL_CONTEXT (copy) = id->dst_fn;
4946 return copy;
4949 static tree
4950 copy_decl_to_var (tree decl, copy_body_data *id)
4952 tree copy, type;
4954 gcc_assert (TREE_CODE (decl) == PARM_DECL
4955 || TREE_CODE (decl) == RESULT_DECL);
4957 type = TREE_TYPE (decl);
4959 copy = build_decl (DECL_SOURCE_LOCATION (id->dst_fn),
4960 VAR_DECL, DECL_NAME (decl), type);
4961 if (DECL_PT_UID_SET_P (decl))
4962 SET_DECL_PT_UID (copy, DECL_PT_UID (decl));
4963 TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (decl);
4964 TREE_READONLY (copy) = TREE_READONLY (decl);
4965 TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (decl);
4966 DECL_GIMPLE_REG_P (copy) = DECL_GIMPLE_REG_P (decl);
4968 return copy_decl_for_dup_finish (id, decl, copy);
4971 /* Like copy_decl_to_var, but create a return slot object instead of a
4972 pointer variable for return by invisible reference. */
4974 static tree
4975 copy_result_decl_to_var (tree decl, copy_body_data *id)
4977 tree copy, type;
4979 gcc_assert (TREE_CODE (decl) == PARM_DECL
4980 || TREE_CODE (decl) == RESULT_DECL);
4982 type = TREE_TYPE (decl);
4983 if (DECL_BY_REFERENCE (decl))
4984 type = TREE_TYPE (type);
4986 copy = build_decl (DECL_SOURCE_LOCATION (id->dst_fn),
4987 VAR_DECL, DECL_NAME (decl), type);
4988 if (DECL_PT_UID_SET_P (decl))
4989 SET_DECL_PT_UID (copy, DECL_PT_UID (decl));
4990 TREE_READONLY (copy) = TREE_READONLY (decl);
4991 TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (decl);
4992 if (!DECL_BY_REFERENCE (decl))
4994 TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (decl);
4995 DECL_GIMPLE_REG_P (copy) = DECL_GIMPLE_REG_P (decl);
4998 return copy_decl_for_dup_finish (id, decl, copy);
5001 tree
5002 copy_decl_no_change (tree decl, copy_body_data *id)
5004 tree copy;
5006 copy = copy_node (decl);
5008 /* The COPY is not abstract; it will be generated in DST_FN. */
5009 DECL_ABSTRACT (copy) = 0;
5010 lang_hooks.dup_lang_specific_decl (copy);
5012 /* TREE_ADDRESSABLE isn't used to indicate that a label's address has
5013 been taken; it's for internal bookkeeping in expand_goto_internal. */
5014 if (TREE_CODE (copy) == LABEL_DECL)
5016 TREE_ADDRESSABLE (copy) = 0;
5017 LABEL_DECL_UID (copy) = -1;
5020 return copy_decl_for_dup_finish (id, decl, copy);
5023 static tree
5024 copy_decl_maybe_to_var (tree decl, copy_body_data *id)
5026 if (TREE_CODE (decl) == PARM_DECL || TREE_CODE (decl) == RESULT_DECL)
5027 return copy_decl_to_var (decl, id);
5028 else
5029 return copy_decl_no_change (decl, id);
5032 /* Return a copy of the function's argument tree. */
5033 static tree
5034 copy_arguments_for_versioning (tree orig_parm, copy_body_data * id,
5035 bitmap args_to_skip, tree *vars)
5037 tree arg, *parg;
5038 tree new_parm = NULL;
5039 int i = 0;
5041 parg = &new_parm;
5043 for (arg = orig_parm; arg; arg = DECL_CHAIN (arg), i++)
5044 if (!args_to_skip || !bitmap_bit_p (args_to_skip, i))
5046 tree new_tree = remap_decl (arg, id);
5047 if (TREE_CODE (new_tree) != PARM_DECL)
5048 new_tree = id->copy_decl (arg, id);
5049 lang_hooks.dup_lang_specific_decl (new_tree);
5050 *parg = new_tree;
5051 parg = &DECL_CHAIN (new_tree);
5053 else if (!pointer_map_contains (id->decl_map, arg))
5055 /* Make an equivalent VAR_DECL. If the argument was used
5056 as temporary variable later in function, the uses will be
5057 replaced by local variable. */
5058 tree var = copy_decl_to_var (arg, id);
5059 insert_decl_map (id, arg, var);
5060 /* Declare this new variable. */
5061 DECL_CHAIN (var) = *vars;
5062 *vars = var;
5064 return new_parm;
5067 /* Return a copy of the function's static chain. */
5068 static tree
5069 copy_static_chain (tree static_chain, copy_body_data * id)
5071 tree *chain_copy, *pvar;
5073 chain_copy = &static_chain;
5074 for (pvar = chain_copy; *pvar; pvar = &DECL_CHAIN (*pvar))
5076 tree new_tree = remap_decl (*pvar, id);
5077 lang_hooks.dup_lang_specific_decl (new_tree);
5078 DECL_CHAIN (new_tree) = DECL_CHAIN (*pvar);
5079 *pvar = new_tree;
5081 return static_chain;
5084 /* Return true if the function is allowed to be versioned.
5085 This is a guard for the versioning functionality. */
5087 bool
5088 tree_versionable_function_p (tree fndecl)
5090 return (!lookup_attribute ("noclone", DECL_ATTRIBUTES (fndecl))
5091 && copy_forbidden (DECL_STRUCT_FUNCTION (fndecl), fndecl) == NULL);
5094 /* Delete all unreachable basic blocks and update callgraph.
5095 Doing so is somewhat nontrivial because we need to update all clones and
5096 remove inline function that become unreachable. */
5098 static bool
5099 delete_unreachable_blocks_update_callgraph (copy_body_data *id)
5101 bool changed = false;
5102 basic_block b, next_bb;
5104 find_unreachable_blocks ();
5106 /* Delete all unreachable basic blocks. */
5108 for (b = ENTRY_BLOCK_PTR->next_bb; b != EXIT_BLOCK_PTR; b = next_bb)
5110 next_bb = b->next_bb;
5112 if (!(b->flags & BB_REACHABLE))
5114 gimple_stmt_iterator bsi;
5116 for (bsi = gsi_start_bb (b); !gsi_end_p (bsi); gsi_next (&bsi))
5118 struct cgraph_edge *e;
5119 struct cgraph_node *node;
5121 ipa_remove_stmt_references (id->dst_node, gsi_stmt (bsi));
5123 if (gimple_code (gsi_stmt (bsi)) == GIMPLE_CALL
5124 &&(e = cgraph_edge (id->dst_node, gsi_stmt (bsi))) != NULL)
5126 if (!e->inline_failed)
5127 cgraph_remove_node_and_inline_clones (e->callee, id->dst_node);
5128 else
5129 cgraph_remove_edge (e);
5131 if (id->transform_call_graph_edges == CB_CGE_MOVE_CLONES
5132 && id->dst_node->clones)
5133 for (node = id->dst_node->clones; node != id->dst_node;)
5135 ipa_remove_stmt_references (node, gsi_stmt (bsi));
5136 if (gimple_code (gsi_stmt (bsi)) == GIMPLE_CALL
5137 && (e = cgraph_edge (node, gsi_stmt (bsi))) != NULL)
5139 if (!e->inline_failed)
5140 cgraph_remove_node_and_inline_clones (e->callee, id->dst_node);
5141 else
5142 cgraph_remove_edge (e);
5145 if (node->clones)
5146 node = node->clones;
5147 else if (node->next_sibling_clone)
5148 node = node->next_sibling_clone;
5149 else
5151 while (node != id->dst_node && !node->next_sibling_clone)
5152 node = node->clone_of;
5153 if (node != id->dst_node)
5154 node = node->next_sibling_clone;
5158 delete_basic_block (b);
5159 changed = true;
5163 return changed;
5166 /* Update clone info after duplication. */
5168 static void
5169 update_clone_info (copy_body_data * id)
5171 struct cgraph_node *node;
5172 if (!id->dst_node->clones)
5173 return;
5174 for (node = id->dst_node->clones; node != id->dst_node;)
5176 /* First update replace maps to match the new body. */
5177 if (node->clone.tree_map)
5179 unsigned int i;
5180 for (i = 0; i < vec_safe_length (node->clone.tree_map); i++)
5182 struct ipa_replace_map *replace_info;
5183 replace_info = (*node->clone.tree_map)[i];
5184 walk_tree (&replace_info->old_tree, copy_tree_body_r, id, NULL);
5185 walk_tree (&replace_info->new_tree, copy_tree_body_r, id, NULL);
5188 if (node->clones)
5189 node = node->clones;
5190 else if (node->next_sibling_clone)
5191 node = node->next_sibling_clone;
5192 else
5194 while (node != id->dst_node && !node->next_sibling_clone)
5195 node = node->clone_of;
5196 if (node != id->dst_node)
5197 node = node->next_sibling_clone;
5202 /* Create a copy of a function's tree.
5203 OLD_DECL and NEW_DECL are FUNCTION_DECL tree nodes
5204 of the original function and the new copied function
5205 respectively. In case we want to replace a DECL
5206 tree with another tree while duplicating the function's
5207 body, TREE_MAP represents the mapping between these
5208 trees. If UPDATE_CLONES is set, the call_stmt fields
5209 of edges of clones of the function will be updated.
5211 If non-NULL ARGS_TO_SKIP determine function parameters to remove
5212 from new version.
5213 If SKIP_RETURN is true, the new version will return void.
5214 If non-NULL BLOCK_TO_COPY determine what basic blocks to copy.
5215 If non_NULL NEW_ENTRY determine new entry BB of the clone.
5217 void
5218 tree_function_versioning (tree old_decl, tree new_decl,
5219 vec<ipa_replace_map_p, va_gc> *tree_map,
5220 bool update_clones, bitmap args_to_skip,
5221 bool skip_return, bitmap blocks_to_copy,
5222 basic_block new_entry)
5224 struct cgraph_node *old_version_node;
5225 struct cgraph_node *new_version_node;
5226 copy_body_data id;
5227 tree p;
5228 unsigned i;
5229 struct ipa_replace_map *replace_info;
5230 basic_block old_entry_block, bb;
5231 stack_vec<gimple, 10> init_stmts;
5232 tree vars = NULL_TREE;
5234 gcc_assert (TREE_CODE (old_decl) == FUNCTION_DECL
5235 && TREE_CODE (new_decl) == FUNCTION_DECL);
5236 DECL_POSSIBLY_INLINED (old_decl) = 1;
5238 old_version_node = cgraph_get_node (old_decl);
5239 gcc_checking_assert (old_version_node);
5240 new_version_node = cgraph_get_node (new_decl);
5241 gcc_checking_assert (new_version_node);
5243 /* Copy over debug args. */
5244 if (DECL_HAS_DEBUG_ARGS_P (old_decl))
5246 vec<tree, va_gc> **new_debug_args, **old_debug_args;
5247 gcc_checking_assert (decl_debug_args_lookup (new_decl) == NULL);
5248 DECL_HAS_DEBUG_ARGS_P (new_decl) = 0;
5249 old_debug_args = decl_debug_args_lookup (old_decl);
5250 if (old_debug_args)
5252 new_debug_args = decl_debug_args_insert (new_decl);
5253 *new_debug_args = vec_safe_copy (*old_debug_args);
5257 /* Output the inlining info for this abstract function, since it has been
5258 inlined. If we don't do this now, we can lose the information about the
5259 variables in the function when the blocks get blown away as soon as we
5260 remove the cgraph node. */
5261 (*debug_hooks->outlining_inline_function) (old_decl);
5263 DECL_ARTIFICIAL (new_decl) = 1;
5264 DECL_ABSTRACT_ORIGIN (new_decl) = DECL_ORIGIN (old_decl);
5265 if (DECL_ORIGIN (old_decl) == old_decl)
5266 old_version_node->used_as_abstract_origin = true;
5267 DECL_FUNCTION_PERSONALITY (new_decl) = DECL_FUNCTION_PERSONALITY (old_decl);
5269 /* Prepare the data structures for the tree copy. */
5270 memset (&id, 0, sizeof (id));
5272 /* Generate a new name for the new version. */
5273 id.statements_to_fold = pointer_set_create ();
5275 id.decl_map = pointer_map_create ();
5276 id.debug_map = NULL;
5277 id.src_fn = old_decl;
5278 id.dst_fn = new_decl;
5279 id.src_node = old_version_node;
5280 id.dst_node = new_version_node;
5281 id.src_cfun = DECL_STRUCT_FUNCTION (old_decl);
5282 id.blocks_to_copy = blocks_to_copy;
5283 if (id.src_node->ipa_transforms_to_apply.exists ())
5285 vec<ipa_opt_pass> old_transforms_to_apply
5286 = id.dst_node->ipa_transforms_to_apply;
5287 unsigned int i;
5289 id.dst_node->ipa_transforms_to_apply
5290 = id.src_node->ipa_transforms_to_apply.copy ();
5291 for (i = 0; i < old_transforms_to_apply.length (); i++)
5292 id.dst_node->ipa_transforms_to_apply.safe_push (old_transforms_to_apply[i]);
5293 old_transforms_to_apply.release ();
5296 id.copy_decl = copy_decl_no_change;
5297 id.transform_call_graph_edges
5298 = update_clones ? CB_CGE_MOVE_CLONES : CB_CGE_MOVE;
5299 id.transform_new_cfg = true;
5300 id.transform_return_to_modify = false;
5301 id.transform_parameter = false;
5302 id.transform_lang_insert_block = NULL;
5304 old_entry_block = ENTRY_BLOCK_PTR_FOR_FUNCTION
5305 (DECL_STRUCT_FUNCTION (old_decl));
5306 DECL_RESULT (new_decl) = DECL_RESULT (old_decl);
5307 DECL_ARGUMENTS (new_decl) = DECL_ARGUMENTS (old_decl);
5308 initialize_cfun (new_decl, old_decl,
5309 old_entry_block->count);
5310 DECL_STRUCT_FUNCTION (new_decl)->gimple_df->ipa_pta
5311 = id.src_cfun->gimple_df->ipa_pta;
5313 /* Copy the function's static chain. */
5314 p = DECL_STRUCT_FUNCTION (old_decl)->static_chain_decl;
5315 if (p)
5316 DECL_STRUCT_FUNCTION (new_decl)->static_chain_decl =
5317 copy_static_chain (DECL_STRUCT_FUNCTION (old_decl)->static_chain_decl,
5318 &id);
5320 /* If there's a tree_map, prepare for substitution. */
5321 if (tree_map)
5322 for (i = 0; i < tree_map->length (); i++)
5324 gimple init;
5325 replace_info = (*tree_map)[i];
5326 if (replace_info->replace_p)
5328 if (!replace_info->old_tree)
5330 int i = replace_info->parm_num;
5331 tree parm;
5332 tree req_type;
5334 for (parm = DECL_ARGUMENTS (old_decl); i; parm = DECL_CHAIN (parm))
5335 i --;
5336 replace_info->old_tree = parm;
5337 req_type = TREE_TYPE (parm);
5338 if (!useless_type_conversion_p (req_type, TREE_TYPE (replace_info->new_tree)))
5340 if (fold_convertible_p (req_type, replace_info->new_tree))
5341 replace_info->new_tree = fold_build1 (NOP_EXPR, req_type, replace_info->new_tree);
5342 else if (TYPE_SIZE (req_type) == TYPE_SIZE (TREE_TYPE (replace_info->new_tree)))
5343 replace_info->new_tree = fold_build1 (VIEW_CONVERT_EXPR, req_type, replace_info->new_tree);
5344 else
5346 if (dump_file)
5348 fprintf (dump_file, " const ");
5349 print_generic_expr (dump_file, replace_info->new_tree, 0);
5350 fprintf (dump_file, " can't be converted to param ");
5351 print_generic_expr (dump_file, parm, 0);
5352 fprintf (dump_file, "\n");
5354 replace_info->old_tree = NULL;
5358 else
5359 gcc_assert (TREE_CODE (replace_info->old_tree) == PARM_DECL);
5360 if (replace_info->old_tree)
5362 init = setup_one_parameter (&id, replace_info->old_tree,
5363 replace_info->new_tree, id.src_fn,
5364 NULL,
5365 &vars);
5366 if (init)
5367 init_stmts.safe_push (init);
5371 /* Copy the function's arguments. */
5372 if (DECL_ARGUMENTS (old_decl) != NULL_TREE)
5373 DECL_ARGUMENTS (new_decl) =
5374 copy_arguments_for_versioning (DECL_ARGUMENTS (old_decl), &id,
5375 args_to_skip, &vars);
5377 DECL_INITIAL (new_decl) = remap_blocks (DECL_INITIAL (id.src_fn), &id);
5378 BLOCK_SUPERCONTEXT (DECL_INITIAL (new_decl)) = new_decl;
5380 declare_inline_vars (DECL_INITIAL (new_decl), vars);
5382 if (!vec_safe_is_empty (DECL_STRUCT_FUNCTION (old_decl)->local_decls))
5383 /* Add local vars. */
5384 add_local_variables (DECL_STRUCT_FUNCTION (old_decl), cfun, &id);
5386 if (DECL_RESULT (old_decl) == NULL_TREE)
5388 else if (skip_return && !VOID_TYPE_P (TREE_TYPE (DECL_RESULT (old_decl))))
5390 DECL_RESULT (new_decl)
5391 = build_decl (DECL_SOURCE_LOCATION (DECL_RESULT (old_decl)),
5392 RESULT_DECL, NULL_TREE, void_type_node);
5393 DECL_CONTEXT (DECL_RESULT (new_decl)) = new_decl;
5394 cfun->returns_struct = 0;
5395 cfun->returns_pcc_struct = 0;
5397 else
5399 tree old_name;
5400 DECL_RESULT (new_decl) = remap_decl (DECL_RESULT (old_decl), &id);
5401 lang_hooks.dup_lang_specific_decl (DECL_RESULT (new_decl));
5402 if (gimple_in_ssa_p (id.src_cfun)
5403 && DECL_BY_REFERENCE (DECL_RESULT (old_decl))
5404 && (old_name = ssa_default_def (id.src_cfun, DECL_RESULT (old_decl))))
5406 tree new_name = make_ssa_name (DECL_RESULT (new_decl), NULL);
5407 insert_decl_map (&id, old_name, new_name);
5408 SSA_NAME_DEF_STMT (new_name) = gimple_build_nop ();
5409 set_ssa_default_def (cfun, DECL_RESULT (new_decl), new_name);
5413 /* Set up the destination functions loop tree. */
5414 if (loops_for_fn (DECL_STRUCT_FUNCTION (old_decl)) != NULL)
5416 cfun->curr_properties &= ~PROP_loops;
5417 loop_optimizer_init (AVOID_CFG_MODIFICATIONS);
5418 cfun->curr_properties |= PROP_loops;
5421 /* Copy the Function's body. */
5422 copy_body (&id, old_entry_block->count, REG_BR_PROB_BASE,
5423 ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR, new_entry);
5425 /* Renumber the lexical scoping (non-code) blocks consecutively. */
5426 number_blocks (new_decl);
5428 /* We want to create the BB unconditionally, so that the addition of
5429 debug stmts doesn't affect BB count, which may in the end cause
5430 codegen differences. */
5431 bb = split_edge (single_succ_edge (ENTRY_BLOCK_PTR));
5432 while (init_stmts.length ())
5433 insert_init_stmt (&id, bb, init_stmts.pop ());
5434 update_clone_info (&id);
5436 /* Remap the nonlocal_goto_save_area, if any. */
5437 if (cfun->nonlocal_goto_save_area)
5439 struct walk_stmt_info wi;
5441 memset (&wi, 0, sizeof (wi));
5442 wi.info = &id;
5443 walk_tree (&cfun->nonlocal_goto_save_area, remap_gimple_op_r, &wi, NULL);
5446 /* Clean up. */
5447 pointer_map_destroy (id.decl_map);
5448 if (id.debug_map)
5449 pointer_map_destroy (id.debug_map);
5450 free_dominance_info (CDI_DOMINATORS);
5451 free_dominance_info (CDI_POST_DOMINATORS);
5453 fold_marked_statements (0, id.statements_to_fold);
5454 pointer_set_destroy (id.statements_to_fold);
5455 fold_cond_expr_cond ();
5456 delete_unreachable_blocks_update_callgraph (&id);
5457 if (id.dst_node->definition)
5458 cgraph_rebuild_references ();
5459 update_ssa (TODO_update_ssa);
5461 /* After partial cloning we need to rescale frequencies, so they are
5462 within proper range in the cloned function. */
5463 if (new_entry)
5465 struct cgraph_edge *e;
5466 rebuild_frequencies ();
5468 new_version_node->count = ENTRY_BLOCK_PTR->count;
5469 for (e = new_version_node->callees; e; e = e->next_callee)
5471 basic_block bb = gimple_bb (e->call_stmt);
5472 e->frequency = compute_call_stmt_bb_frequency (current_function_decl,
5473 bb);
5474 e->count = bb->count;
5476 for (e = new_version_node->indirect_calls; e; e = e->next_callee)
5478 basic_block bb = gimple_bb (e->call_stmt);
5479 e->frequency = compute_call_stmt_bb_frequency (current_function_decl,
5480 bb);
5481 e->count = bb->count;
5485 free_dominance_info (CDI_DOMINATORS);
5486 free_dominance_info (CDI_POST_DOMINATORS);
5488 gcc_assert (!id.debug_stmts.exists ());
5489 pop_cfun ();
5490 return;
5493 /* EXP is CALL_EXPR present in a GENERIC expression tree. Try to integrate
5494 the callee and return the inlined body on success. */
5496 tree
5497 maybe_inline_call_in_expr (tree exp)
5499 tree fn = get_callee_fndecl (exp);
5501 /* We can only try to inline "const" functions. */
5502 if (fn && TREE_READONLY (fn) && DECL_SAVED_TREE (fn))
5504 struct pointer_map_t *decl_map = pointer_map_create ();
5505 call_expr_arg_iterator iter;
5506 copy_body_data id;
5507 tree param, arg, t;
5509 /* Remap the parameters. */
5510 for (param = DECL_ARGUMENTS (fn), arg = first_call_expr_arg (exp, &iter);
5511 param;
5512 param = DECL_CHAIN (param), arg = next_call_expr_arg (&iter))
5513 *pointer_map_insert (decl_map, param) = arg;
5515 memset (&id, 0, sizeof (id));
5516 id.src_fn = fn;
5517 id.dst_fn = current_function_decl;
5518 id.src_cfun = DECL_STRUCT_FUNCTION (fn);
5519 id.decl_map = decl_map;
5521 id.copy_decl = copy_decl_no_change;
5522 id.transform_call_graph_edges = CB_CGE_DUPLICATE;
5523 id.transform_new_cfg = false;
5524 id.transform_return_to_modify = true;
5525 id.transform_parameter = true;
5526 id.transform_lang_insert_block = NULL;
5528 /* Make sure not to unshare trees behind the front-end's back
5529 since front-end specific mechanisms may rely on sharing. */
5530 id.regimplify = false;
5531 id.do_not_unshare = true;
5533 /* We're not inside any EH region. */
5534 id.eh_lp_nr = 0;
5536 t = copy_tree_body (&id);
5537 pointer_map_destroy (decl_map);
5539 /* We can only return something suitable for use in a GENERIC
5540 expression tree. */
5541 if (TREE_CODE (t) == MODIFY_EXPR)
5542 return TREE_OPERAND (t, 1);
5545 return NULL_TREE;
5548 /* Duplicate a type, fields and all. */
5550 tree
5551 build_duplicate_type (tree type)
5553 struct copy_body_data id;
5555 memset (&id, 0, sizeof (id));
5556 id.src_fn = current_function_decl;
5557 id.dst_fn = current_function_decl;
5558 id.src_cfun = cfun;
5559 id.decl_map = pointer_map_create ();
5560 id.debug_map = NULL;
5561 id.copy_decl = copy_decl_no_change;
5563 type = remap_type_1 (type, &id);
5565 pointer_map_destroy (id.decl_map);
5566 if (id.debug_map)
5567 pointer_map_destroy (id.debug_map);
5569 TYPE_CANONICAL (type) = type;
5571 return type;