/* Tree inlining.
   Copyright (C) 2001-2013 Free Software Foundation, Inc.
   Contributed by Alexandre Oliva <aoliva@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "diagnostic-core.h"
#include "tree.h"
#include "tree-inline.h"
#include "flags.h"
#include "params.h"
#include "input.h"
#include "insn-config.h"
#include "hashtab.h"
#include "langhooks.h"
#include "basic-block.h"
#include "tree-iterator.h"
#include "intl.h"
#include "gimple.h"
#include "gimple-ssa.h"
#include "tree-cfg.h"
#include "tree-phinodes.h"
#include "ssa-iterators.h"
#include "tree-ssanames.h"
#include "tree-into-ssa.h"
#include "tree-dfa.h"
#include "tree-ssa.h"
#include "function.h"
#include "tree-pretty-print.h"
#include "except.h"
#include "debug.h"
#include "pointer-set.h"
#include "ipa-prop.h"
#include "value-prof.h"
#include "tree-pass.h"
#include "target.h"
#include "cfgloop.h"

#include "rtl.h"  /* FIXME: For asm_str_count.  */
/* I'm not real happy about this, but we need to handle gimple and
   non-gimple trees.  */

/* Inlining, Cloning, Versioning, Parallelization

   Inlining: a function body is duplicated, but the PARM_DECLs are
   remapped into VAR_DECLs, and non-void RETURN_EXPRs become
   MODIFY_EXPRs that store to a dedicated returned-value variable.
   The duplicated eh_region info of the copy will later be appended
   to the info for the caller; the eh_region info in copied throwing
   statements and RESX statements is adjusted accordingly.

   Cloning: (only in C++) We have one body for a con/de/structor, and
   multiple function decls, each with a unique parameter list.
   Duplicate the body, using the given splay tree; some parameters
   will become constants (like 0 or 1).

   Versioning: a function body is duplicated and the result is a new
   function, rather than being inlined into blocks of an existing
   function as with inlining.  Some parameters will become constants.

   Parallelization: a region of a function is duplicated resulting in
   a new function.  Variables may be replaced with complex expressions
   to enable shared variable semantics.

   All of these will simultaneously look up any callgraph edges.  If
   we're going to inline the duplicated function body, and the given
   function has some cloned callgraph nodes (one for each place this
   function will be inlined) those callgraph edges will be duplicated.
   If we're cloning the body, those callgraph edges will be
   updated to point into the new body.  (Note that the original
   callgraph node and edge list will not be altered.)

   See the CALL_EXPR handling case in copy_tree_body_r ().  */
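
/* As an illustrative sketch (not part of this file), given

     int callee (int p) { return p + 1; }
     ...
     x = callee (y);

   inlining remaps the PARM_DECL p into a local variable and turns the
   RETURN_EXPR into an assignment to a returned-value variable, roughly:

     p.1 = y;
     retval.2 = p.1 + 1;
     x = retval.2;

   The names p.1 and retval.2 are invented for the example.  */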
/* To Do:

   o In order to make inlining-on-trees work, we pessimized
     function-local static constants.  In particular, they are now
     always output, even when not addressed.  Fix this by treating
     function-local static constants just like global static
     constants; the back-end already knows not to output them if they
     are not needed.

   o Provide heuristics to clamp inlining of recursive template
     calls?  */
/* Weights that estimate_num_insns uses to estimate the size of the
   produced code.  */

eni_weights eni_size_weights;

/* Weights that estimate_num_insns uses to estimate the time necessary
   to execute the produced code.  */

eni_weights eni_time_weights;
/* Prototypes.  */

static tree declare_return_variable (copy_body_data *, tree, tree, basic_block);
static void remap_block (tree *, copy_body_data *);
static void copy_bind_expr (tree *, int *, copy_body_data *);
static void declare_inline_vars (tree, tree);
static void remap_save_expr (tree *, void *, int *);
static void prepend_lexical_block (tree current_block, tree new_block);
static tree copy_decl_to_var (tree, copy_body_data *);
static tree copy_result_decl_to_var (tree, copy_body_data *);
static tree copy_decl_maybe_to_var (tree, copy_body_data *);
static gimple remap_gimple_stmt (gimple, copy_body_data *);
static bool delete_unreachable_blocks_update_callgraph (copy_body_data *id);
/* Insert a tree->tree mapping for ID.  Although the name suggests
   that the trees should be variables, it is used for more than that.  */

void
insert_decl_map (copy_body_data *id, tree key, tree value)
{
  *pointer_map_insert (id->decl_map, key) = value;

  /* Always insert an identity map as well.  If we see this same new
     node again, we won't want to duplicate it a second time.  */
  if (key != value)
    *pointer_map_insert (id->decl_map, value) = value;
}
/* Insert a tree->tree mapping for ID.  This is only used for
   variables.  */

static void
insert_debug_decl_map (copy_body_data *id, tree key, tree value)
{
  if (!gimple_in_ssa_p (id->src_cfun))
    return;

  if (!MAY_HAVE_DEBUG_STMTS)
    return;

  if (!target_for_debug_bind (key))
    return;

  gcc_assert (TREE_CODE (key) == PARM_DECL);
  gcc_assert (TREE_CODE (value) == VAR_DECL);

  if (!id->debug_map)
    id->debug_map = pointer_map_create ();

  *pointer_map_insert (id->debug_map, key) = value;
}
/* If nonzero, we're remapping the contents of inlined debug
   statements.  If negative, an error has occurred, such as a
   reference to a variable that isn't available in the inlined
   context.  */
static int processing_debug_stmt = 0;
/* Construct new SSA name for old NAME.  ID is the inline context.  */

static tree
remap_ssa_name (tree name, copy_body_data *id)
{
  tree new_tree, var;
  tree *n;

  gcc_assert (TREE_CODE (name) == SSA_NAME);

  n = (tree *) pointer_map_contains (id->decl_map, name);
  if (n)
    return unshare_expr (*n);

  if (processing_debug_stmt)
    {
      if (SSA_NAME_IS_DEFAULT_DEF (name)
          && TREE_CODE (SSA_NAME_VAR (name)) == PARM_DECL
          && id->entry_bb == NULL
          && single_succ_p (ENTRY_BLOCK_PTR))
        {
          tree vexpr = make_node (DEBUG_EXPR_DECL);
          gimple def_temp;
          gimple_stmt_iterator gsi;
          tree val = SSA_NAME_VAR (name);

          n = (tree *) pointer_map_contains (id->decl_map, val);
          if (n != NULL)
            val = *n;
          if (TREE_CODE (val) != PARM_DECL)
            {
              processing_debug_stmt = -1;
              return name;
            }
          def_temp = gimple_build_debug_source_bind (vexpr, val, NULL);
          DECL_ARTIFICIAL (vexpr) = 1;
          TREE_TYPE (vexpr) = TREE_TYPE (name);
          DECL_MODE (vexpr) = DECL_MODE (SSA_NAME_VAR (name));
          gsi = gsi_after_labels (single_succ (ENTRY_BLOCK_PTR));
          gsi_insert_before (&gsi, def_temp, GSI_SAME_STMT);
          return vexpr;
        }

      processing_debug_stmt = -1;
      return name;
    }

  /* Remap anonymous SSA names or SSA names of anonymous decls.  */
  var = SSA_NAME_VAR (name);
  if (!var
      || (!SSA_NAME_IS_DEFAULT_DEF (name)
          && TREE_CODE (var) == VAR_DECL
          && !VAR_DECL_IS_VIRTUAL_OPERAND (var)
          && DECL_ARTIFICIAL (var)
          && DECL_IGNORED_P (var)
          && !DECL_NAME (var)))
    {
      struct ptr_info_def *pi;
      new_tree = make_ssa_name (remap_type (TREE_TYPE (name), id), NULL);
      if (!var && SSA_NAME_IDENTIFIER (name))
        SET_SSA_NAME_VAR_OR_IDENTIFIER (new_tree, SSA_NAME_IDENTIFIER (name));
      insert_decl_map (id, name, new_tree);
      SSA_NAME_OCCURS_IN_ABNORMAL_PHI (new_tree)
        = SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name);
      /* At least IPA points-to info can be directly transferred.  */
      if (id->src_cfun->gimple_df
          && id->src_cfun->gimple_df->ipa_pta
          && (pi = SSA_NAME_PTR_INFO (name))
          && !pi->pt.anything)
        {
          struct ptr_info_def *new_pi = get_ptr_info (new_tree);
          new_pi->pt = pi->pt;
        }
      return new_tree;
    }

  /* Do not set DEF_STMT yet as statement is not copied yet.  We do that
     in copy_bb.  */
  new_tree = remap_decl (var, id);

  /* We might've substituted a constant or another SSA_NAME for
     the variable.

     Replace the SSA name representing RESULT_DECL by the variable during
     inlining: this saves us from the need to introduce a PHI node in the
     case the return value is only partly initialized.  */
  if ((TREE_CODE (new_tree) == VAR_DECL || TREE_CODE (new_tree) == PARM_DECL)
      && (!SSA_NAME_VAR (name)
          || TREE_CODE (SSA_NAME_VAR (name)) != RESULT_DECL
          || !id->transform_return_to_modify))
    {
      struct ptr_info_def *pi;
      new_tree = make_ssa_name (new_tree, NULL);
      insert_decl_map (id, name, new_tree);
      SSA_NAME_OCCURS_IN_ABNORMAL_PHI (new_tree)
        = SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name);
      /* At least IPA points-to info can be directly transferred.  */
      if (id->src_cfun->gimple_df
          && id->src_cfun->gimple_df->ipa_pta
          && (pi = SSA_NAME_PTR_INFO (name))
          && !pi->pt.anything)
        {
          struct ptr_info_def *new_pi = get_ptr_info (new_tree);
          new_pi->pt = pi->pt;
        }
      if (SSA_NAME_IS_DEFAULT_DEF (name))
        {
          /* By inlining a function having an uninitialized variable, we
             might extend its lifetime (the variable might get reused).
             This causes an ICE in the case we end up extending the
             lifetime of an SSA name across an abnormal edge, and it also
             increases register pressure.

             We simply initialize all uninitialized vars by 0 except
             for the case we are inlining to the very first BB.  We can
             avoid this for all BBs that are not inside strongly connected
             regions of the CFG, but this is expensive to test.  */
          if (id->entry_bb
              && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name)
              && (!SSA_NAME_VAR (name)
                  || TREE_CODE (SSA_NAME_VAR (name)) != PARM_DECL)
              && (id->entry_bb != EDGE_SUCC (ENTRY_BLOCK_PTR, 0)->dest
                  || EDGE_COUNT (id->entry_bb->preds) != 1))
            {
              gimple_stmt_iterator gsi = gsi_last_bb (id->entry_bb);
              gimple init_stmt;
              tree zero = build_zero_cst (TREE_TYPE (new_tree));

              init_stmt = gimple_build_assign (new_tree, zero);
              gsi_insert_after (&gsi, init_stmt, GSI_NEW_STMT);
              SSA_NAME_IS_DEFAULT_DEF (new_tree) = 0;
            }
          else
            {
              SSA_NAME_DEF_STMT (new_tree) = gimple_build_nop ();
              set_ssa_default_def (cfun, SSA_NAME_VAR (new_tree), new_tree);
            }
        }
    }
  else
    insert_decl_map (id, name, new_tree);
  return new_tree;
}
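
/* Illustrative note (not from this file): if the inlined body uses an
   uninitialized local, e.g.

     int tmp;   (never assigned)
     use (tmp);

   the remapped default-def SSA name may get an explicit "tmp_N = 0"
   inserted at the end of id->entry_bb by the code above, to avoid
   extending the lifetime of an undefined value across abnormal edges.
   The name tmp_N is made up for the example.  */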
/* Remap DECL during the copying of the BLOCK tree for the function.  */

tree
remap_decl (tree decl, copy_body_data *id)
{
  tree *n;

  /* We only remap local variables in the current function.  */

  /* See if we have remapped this declaration.  */

  n = (tree *) pointer_map_contains (id->decl_map, decl);

  if (!n && processing_debug_stmt)
    {
      processing_debug_stmt = -1;
      return decl;
    }

  /* If we didn't already have an equivalent for this declaration,
     create one now.  */
  if (!n)
    {
      /* Make a copy of the variable or label.  */
      tree t = id->copy_decl (decl, id);

      /* Remember it, so that if we encounter this local entity again
         we can reuse this copy.  Do this early because remap_type may
         need this decl for TYPE_STUB_DECL.  */
      insert_decl_map (id, decl, t);

      if (!DECL_P (t))
        return t;

      /* Remap types, if necessary.  */
      TREE_TYPE (t) = remap_type (TREE_TYPE (t), id);
      if (TREE_CODE (t) == TYPE_DECL)
        DECL_ORIGINAL_TYPE (t) = remap_type (DECL_ORIGINAL_TYPE (t), id);

      /* Remap sizes as necessary.  */
      walk_tree (&DECL_SIZE (t), copy_tree_body_r, id, NULL);
      walk_tree (&DECL_SIZE_UNIT (t), copy_tree_body_r, id, NULL);

      /* If fields, do likewise for offset and qualifier.  */
      if (TREE_CODE (t) == FIELD_DECL)
        {
          walk_tree (&DECL_FIELD_OFFSET (t), copy_tree_body_r, id, NULL);
          if (TREE_CODE (DECL_CONTEXT (t)) == QUAL_UNION_TYPE)
            walk_tree (&DECL_QUALIFIER (t), copy_tree_body_r, id, NULL);
        }

      return t;
    }

  if (id->do_not_unshare)
    return *n;
  else
    return unshare_expr (*n);
}
static tree
remap_type_1 (tree type, copy_body_data *id)
{
  tree new_tree, t;

  /* We do need a copy.  Build and register it now.  If this is a pointer or
     reference type, remap the designated type and make a new pointer or
     reference type.  */
  if (TREE_CODE (type) == POINTER_TYPE)
    {
      new_tree = build_pointer_type_for_mode (remap_type (TREE_TYPE (type), id),
                                              TYPE_MODE (type),
                                              TYPE_REF_CAN_ALIAS_ALL (type));
      if (TYPE_ATTRIBUTES (type) || TYPE_QUALS (type))
        new_tree = build_type_attribute_qual_variant (new_tree,
                                                      TYPE_ATTRIBUTES (type),
                                                      TYPE_QUALS (type));
      insert_decl_map (id, type, new_tree);
      return new_tree;
    }
  else if (TREE_CODE (type) == REFERENCE_TYPE)
    {
      new_tree = build_reference_type_for_mode (remap_type (TREE_TYPE (type), id),
                                                TYPE_MODE (type),
                                                TYPE_REF_CAN_ALIAS_ALL (type));
      if (TYPE_ATTRIBUTES (type) || TYPE_QUALS (type))
        new_tree = build_type_attribute_qual_variant (new_tree,
                                                      TYPE_ATTRIBUTES (type),
                                                      TYPE_QUALS (type));
      insert_decl_map (id, type, new_tree);
      return new_tree;
    }
  else
    new_tree = copy_node (type);

  insert_decl_map (id, type, new_tree);

  /* This is a new type, not a copy of an old type.  Need to reassociate
     variants.  We can handle everything except the main variant lazily.  */
  t = TYPE_MAIN_VARIANT (type);
  if (type != t)
    {
      t = remap_type (t, id);
      TYPE_MAIN_VARIANT (new_tree) = t;
      TYPE_NEXT_VARIANT (new_tree) = TYPE_NEXT_VARIANT (t);
      TYPE_NEXT_VARIANT (t) = new_tree;
    }
  else
    {
      TYPE_MAIN_VARIANT (new_tree) = new_tree;
      TYPE_NEXT_VARIANT (new_tree) = NULL;
    }

  if (TYPE_STUB_DECL (type))
    TYPE_STUB_DECL (new_tree) = remap_decl (TYPE_STUB_DECL (type), id);

  /* Lazily create pointer and reference types.  */
  TYPE_POINTER_TO (new_tree) = NULL;
  TYPE_REFERENCE_TO (new_tree) = NULL;

  switch (TREE_CODE (new_tree))
    {
    case INTEGER_TYPE:
    case REAL_TYPE:
    case FIXED_POINT_TYPE:
    case ENUMERAL_TYPE:
    case BOOLEAN_TYPE:
      t = TYPE_MIN_VALUE (new_tree);
      if (t && TREE_CODE (t) != INTEGER_CST)
        walk_tree (&TYPE_MIN_VALUE (new_tree), copy_tree_body_r, id, NULL);

      t = TYPE_MAX_VALUE (new_tree);
      if (t && TREE_CODE (t) != INTEGER_CST)
        walk_tree (&TYPE_MAX_VALUE (new_tree), copy_tree_body_r, id, NULL);
      return new_tree;

    case FUNCTION_TYPE:
      TREE_TYPE (new_tree) = remap_type (TREE_TYPE (new_tree), id);
      walk_tree (&TYPE_ARG_TYPES (new_tree), copy_tree_body_r, id, NULL);
      return new_tree;

    case ARRAY_TYPE:
      TREE_TYPE (new_tree) = remap_type (TREE_TYPE (new_tree), id);
      TYPE_DOMAIN (new_tree) = remap_type (TYPE_DOMAIN (new_tree), id);
      break;

    case RECORD_TYPE:
    case UNION_TYPE:
    case QUAL_UNION_TYPE:
      {
        tree f, nf = NULL;

        for (f = TYPE_FIELDS (new_tree); f ; f = DECL_CHAIN (f))
          {
            t = remap_decl (f, id);
            DECL_CONTEXT (t) = new_tree;
            DECL_CHAIN (t) = nf;
            nf = t;
          }
        TYPE_FIELDS (new_tree) = nreverse (nf);
      }
      break;

    case OFFSET_TYPE:
    default:
      /* Shouldn't have been thought variable sized.  */
      gcc_unreachable ();
    }

  walk_tree (&TYPE_SIZE (new_tree), copy_tree_body_r, id, NULL);
  walk_tree (&TYPE_SIZE_UNIT (new_tree), copy_tree_body_r, id, NULL);

  return new_tree;
}
tree
remap_type (tree type, copy_body_data *id)
{
  tree *node;
  tree tmp;

  if (type == NULL)
    return type;

  /* See if we have remapped this type.  */
  node = (tree *) pointer_map_contains (id->decl_map, type);
  if (node)
    return *node;

  /* The type only needs remapping if it's variably modified.  */
  if (! variably_modified_type_p (type, id->src_fn))
    {
      insert_decl_map (id, type, type);
      return type;
    }

  id->remapping_type_depth++;
  tmp = remap_type_1 (type, id);
  id->remapping_type_depth--;

  return tmp;
}
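
/* For illustration (not from this file): a variably modified type is one
   whose size or domain depends on a function-local value, e.g. the type
   of A in

     void f (int n) { int a[n]; ... }

   When f's body is copied, N is remapped, so A's array type (and its
   TYPE_DOMAIN) must be remapped too; a fixed type like int[10] is simply
   mapped to itself above.  */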
/* Decide if DECL can be put into BLOCK_NONLOCAL_VARs.  */

static bool
can_be_nonlocal (tree decl, copy_body_data *id)
{
  /* We cannot duplicate function decls.  */
  if (TREE_CODE (decl) == FUNCTION_DECL)
    return true;

  /* Local static vars must be non-local or we get multiple declaration
     problems.  */
  if (TREE_CODE (decl) == VAR_DECL
      && !auto_var_in_fn_p (decl, id->src_fn))
    return true;

  return false;
}
static tree
remap_decls (tree decls, vec<tree, va_gc> **nonlocalized_list,
             copy_body_data *id)
{
  tree old_var;
  tree new_decls = NULL_TREE;

  /* Remap its variables.  */
  for (old_var = decls; old_var; old_var = DECL_CHAIN (old_var))
    {
      tree new_var;

      if (can_be_nonlocal (old_var, id))
        {
          /* We need to add this variable to the local decls as otherwise
             nothing else will do so.  */
          if (TREE_CODE (old_var) == VAR_DECL
              && ! DECL_EXTERNAL (old_var))
            add_local_decl (cfun, old_var);
          if ((!optimize || debug_info_level > DINFO_LEVEL_TERSE)
              && !DECL_IGNORED_P (old_var)
              && nonlocalized_list)
            vec_safe_push (*nonlocalized_list, old_var);
          continue;
        }

      /* Remap the variable.  */
      new_var = remap_decl (old_var, id);

      /* If we didn't remap this variable, we can't mess with its
         TREE_CHAIN.  If we remapped this variable to the return slot, it's
         already declared somewhere else, so don't declare it here.  */

      if (new_var == id->retvar)
        ;
      else if (!new_var)
        {
          if ((!optimize || debug_info_level > DINFO_LEVEL_TERSE)
              && !DECL_IGNORED_P (old_var)
              && nonlocalized_list)
            vec_safe_push (*nonlocalized_list, old_var);
        }
      else
        {
          gcc_assert (DECL_P (new_var));
          DECL_CHAIN (new_var) = new_decls;
          new_decls = new_var;

          /* Also copy value-expressions.  */
          if (TREE_CODE (new_var) == VAR_DECL
              && DECL_HAS_VALUE_EXPR_P (new_var))
            {
              tree tem = DECL_VALUE_EXPR (new_var);
              bool old_regimplify = id->regimplify;
              id->remapping_type_depth++;
              walk_tree (&tem, copy_tree_body_r, id, NULL);
              id->remapping_type_depth--;
              id->regimplify = old_regimplify;
              SET_DECL_VALUE_EXPR (new_var, tem);
            }
        }
    }

  return nreverse (new_decls);
}
/* Copy the BLOCK to contain remapped versions of the variables
   therein.  And hook the new block into the block-tree.  */

static void
remap_block (tree *block, copy_body_data *id)
{
  tree old_block;
  tree new_block;

  /* Make the new block.  */
  old_block = *block;
  new_block = make_node (BLOCK);
  TREE_USED (new_block) = TREE_USED (old_block);
  BLOCK_ABSTRACT_ORIGIN (new_block) = old_block;
  BLOCK_SOURCE_LOCATION (new_block) = BLOCK_SOURCE_LOCATION (old_block);
  BLOCK_NONLOCALIZED_VARS (new_block)
    = vec_safe_copy (BLOCK_NONLOCALIZED_VARS (old_block));
  *block = new_block;

  /* Remap its variables.  */
  BLOCK_VARS (new_block) = remap_decls (BLOCK_VARS (old_block),
                                        &BLOCK_NONLOCALIZED_VARS (new_block),
                                        id);

  if (id->transform_lang_insert_block)
    id->transform_lang_insert_block (new_block);

  /* Remember the remapped block.  */
  insert_decl_map (id, old_block, new_block);
}
/* Copy the whole block tree and root it in id->block.  */
static tree
remap_blocks (tree block, copy_body_data *id)
{
  tree t;
  tree new_tree = block;

  if (!block)
    return NULL;

  remap_block (&new_tree, id);
  gcc_assert (new_tree != block);
  for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
    prepend_lexical_block (new_tree, remap_blocks (t, id));
  /* Blocks are in arbitrary order, but make things slightly prettier and do
     not swap order when producing a copy.  */
  BLOCK_SUBBLOCKS (new_tree) = blocks_nreverse (BLOCK_SUBBLOCKS (new_tree));
  return new_tree;
}
/* Remap the block tree rooted at BLOCK to nothing.  */
static void
remap_blocks_to_null (tree block, copy_body_data *id)
{
  tree t;
  insert_decl_map (id, block, NULL_TREE);
  for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
    remap_blocks_to_null (t, id);
}
static void
copy_statement_list (tree *tp)
{
  tree_stmt_iterator oi, ni;
  tree new_tree;

  new_tree = alloc_stmt_list ();
  ni = tsi_start (new_tree);
  oi = tsi_start (*tp);
  TREE_TYPE (new_tree) = TREE_TYPE (*tp);
  *tp = new_tree;

  for (; !tsi_end_p (oi); tsi_next (&oi))
    {
      tree stmt = tsi_stmt (oi);
      if (TREE_CODE (stmt) == STATEMENT_LIST)
        /* This copy is not redundant; tsi_link_after will smash this
           STATEMENT_LIST into the end of the one we're building, and we
           don't want to do that with the original.  */
        copy_statement_list (&stmt);
      tsi_link_after (&ni, stmt, TSI_CONTINUE_LINKING);
    }
}
static void
copy_bind_expr (tree *tp, int *walk_subtrees, copy_body_data *id)
{
  tree block = BIND_EXPR_BLOCK (*tp);
  /* Copy (and replace) the statement.  */
  copy_tree_r (tp, walk_subtrees, NULL);
  if (block)
    {
      remap_block (&block, id);
      BIND_EXPR_BLOCK (*tp) = block;
    }

  if (BIND_EXPR_VARS (*tp))
    /* This will remap a lot of the same decls again, but this should be
       harmless.  */
    BIND_EXPR_VARS (*tp) = remap_decls (BIND_EXPR_VARS (*tp), NULL, id);
}
/* Create a new gimple_seq by remapping all the statements in BODY
   using the inlining information in ID.  */

static gimple_seq
remap_gimple_seq (gimple_seq body, copy_body_data *id)
{
  gimple_stmt_iterator si;
  gimple_seq new_body = NULL;

  for (si = gsi_start (body); !gsi_end_p (si); gsi_next (&si))
    {
      gimple new_stmt = remap_gimple_stmt (gsi_stmt (si), id);
      gimple_seq_add_stmt (&new_body, new_stmt);
    }

  return new_body;
}
/* Copy a GIMPLE_BIND statement STMT, remapping all the symbols in its
   block using the mapping information in ID.  */

static gimple
copy_gimple_bind (gimple stmt, copy_body_data *id)
{
  gimple new_bind;
  tree new_block, new_vars;
  gimple_seq body, new_body;

  /* Copy the statement.  Note that we purposely don't use copy_stmt
     here because we need to remap statements as we copy.  */
  body = gimple_bind_body (stmt);
  new_body = remap_gimple_seq (body, id);

  new_block = gimple_bind_block (stmt);
  if (new_block)
    remap_block (&new_block, id);

  /* This will remap a lot of the same decls again, but this should be
     harmless.  */
  new_vars = gimple_bind_vars (stmt);
  if (new_vars)
    new_vars = remap_decls (new_vars, NULL, id);

  new_bind = gimple_build_bind (new_vars, new_body, new_block);

  return new_bind;
}
/* Return true if DECL is a parameter or a SSA_NAME for a parameter.  */

static bool
is_parm (tree decl)
{
  if (TREE_CODE (decl) == SSA_NAME)
    {
      decl = SSA_NAME_VAR (decl);
      if (!decl)
        return false;
    }

  return (TREE_CODE (decl) == PARM_DECL);
}
/* Remap the GIMPLE operand pointed to by *TP.  DATA is really a
   'struct walk_stmt_info *'.  DATA->INFO is a 'copy_body_data *'.
   WALK_SUBTREES is used to indicate walk_gimple_op whether to keep
   recursing into the children nodes of *TP.  */

static tree
remap_gimple_op_r (tree *tp, int *walk_subtrees, void *data)
{
  struct walk_stmt_info *wi_p = (struct walk_stmt_info *) data;
  copy_body_data *id = (copy_body_data *) wi_p->info;
  tree fn = id->src_fn;

  if (TREE_CODE (*tp) == SSA_NAME)
    {
      *tp = remap_ssa_name (*tp, id);
      *walk_subtrees = 0;
      return NULL;
    }
  else if (auto_var_in_fn_p (*tp, fn))
    {
      /* Local variables and labels need to be replaced by equivalent
         variables.  We don't want to copy static variables; there's
         only one of those, no matter how many times we inline the
         containing function.  Similarly for globals from an outer
         function.  */
      tree new_decl;

      /* Remap the declaration.  */
      new_decl = remap_decl (*tp, id);
      gcc_assert (new_decl);
      /* Replace this variable with the copy.  */
      STRIP_TYPE_NOPS (new_decl);
      /* ???  The C++ frontend uses void * pointer zero to initialize
         any other type.  This confuses the middle-end type verification.
         As cloned bodies do not go through gimplification again the fixup
         there doesn't trigger.  */
      if (TREE_CODE (new_decl) == INTEGER_CST
          && !useless_type_conversion_p (TREE_TYPE (*tp), TREE_TYPE (new_decl)))
        new_decl = fold_convert (TREE_TYPE (*tp), new_decl);
      *tp = new_decl;
      *walk_subtrees = 0;
    }
  else if (TREE_CODE (*tp) == STATEMENT_LIST)
    gcc_unreachable ();
  else if (TREE_CODE (*tp) == SAVE_EXPR)
    gcc_unreachable ();
  else if (TREE_CODE (*tp) == LABEL_DECL
           && (!DECL_CONTEXT (*tp)
               || decl_function_context (*tp) == id->src_fn))
    /* These may need to be remapped for EH handling.  */
    *tp = remap_decl (*tp, id);
  else if (TREE_CODE (*tp) == FIELD_DECL)
    {
      /* If the enclosing record type is variably_modified_type_p, the field
         has already been remapped.  Otherwise, it need not be.  */
      tree *n = (tree *) pointer_map_contains (id->decl_map, *tp);
      if (n)
        *tp = *n;
      *walk_subtrees = 0;
    }
  else if (TYPE_P (*tp))
    /* Types may need remapping as well.  */
    *tp = remap_type (*tp, id);
  else if (CONSTANT_CLASS_P (*tp))
    {
      /* If this is a constant, we have to copy the node iff the type
         will be remapped.  copy_tree_r will not copy a constant.  */
      tree new_type = remap_type (TREE_TYPE (*tp), id);

      if (new_type == TREE_TYPE (*tp))
        *walk_subtrees = 0;

      else if (TREE_CODE (*tp) == INTEGER_CST)
        *tp = build_int_cst_wide (new_type, TREE_INT_CST_LOW (*tp),
                                  TREE_INT_CST_HIGH (*tp));
      else
        {
          *tp = copy_node (*tp);
          TREE_TYPE (*tp) = new_type;
        }
    }
  else
    {
      /* Otherwise, just copy the node.  Note that copy_tree_r already
         knows not to copy VAR_DECLs, etc., so this is safe.  */

      if (TREE_CODE (*tp) == MEM_REF)
        {
          /* We need to re-canonicalize MEM_REFs from inline substitutions
             that can happen when a pointer argument is an ADDR_EXPR.
             Recurse here manually to allow that.  */
          tree ptr = TREE_OPERAND (*tp, 0);
          tree type = remap_type (TREE_TYPE (*tp), id);
          tree old = *tp;
          walk_tree (&ptr, remap_gimple_op_r, data, NULL);
          *tp = fold_build2 (MEM_REF, type, ptr, TREE_OPERAND (*tp, 1));
          TREE_THIS_VOLATILE (*tp) = TREE_THIS_VOLATILE (old);
          TREE_SIDE_EFFECTS (*tp) = TREE_SIDE_EFFECTS (old);
          TREE_NO_WARNING (*tp) = TREE_NO_WARNING (old);
          /* We cannot propagate the TREE_THIS_NOTRAP flag if we have
             remapped a parameter as the property might be valid only
             for the parameter itself.  */
          if (TREE_THIS_NOTRAP (old)
              && (!is_parm (TREE_OPERAND (old, 0))
                  || (!id->transform_parameter && is_parm (ptr))))
            TREE_THIS_NOTRAP (*tp) = 1;
          *walk_subtrees = 0;
          return NULL;
        }

      /* Here is the "usual case".  Copy this tree node, and then
         tweak some special cases.  */
      copy_tree_r (tp, walk_subtrees, NULL);

      if (TREE_CODE (*tp) != OMP_CLAUSE)
        TREE_TYPE (*tp) = remap_type (TREE_TYPE (*tp), id);

      if (TREE_CODE (*tp) == TARGET_EXPR && TREE_OPERAND (*tp, 3))
        {
          /* The copied TARGET_EXPR has never been expanded, even if the
             original node was expanded already.  */
          TREE_OPERAND (*tp, 1) = TREE_OPERAND (*tp, 3);
          TREE_OPERAND (*tp, 3) = NULL_TREE;
        }
      else if (TREE_CODE (*tp) == ADDR_EXPR)
        {
          /* Variable substitution need not be simple.  In particular,
             the MEM_REF substitution above.  Make sure that
             TREE_CONSTANT and friends are up-to-date.  */
          int invariant = is_gimple_min_invariant (*tp);
          walk_tree (&TREE_OPERAND (*tp, 0), remap_gimple_op_r, data, NULL);
          recompute_tree_invariant_for_addr_expr (*tp);

          /* If this used to be invariant, but is not any longer,
             then regimplification is probably needed.  */
          if (invariant && !is_gimple_min_invariant (*tp))
            id->regimplify = true;

          *walk_subtrees = 0;
        }
    }

  /* Update the TREE_BLOCK for the cloned expr.  */
  if (EXPR_P (*tp))
    {
      tree new_block = id->remapping_type_depth == 0 ? id->block : NULL;
      tree old_block = TREE_BLOCK (*tp);
      if (old_block)
        {
          tree *n;
          n = (tree *) pointer_map_contains (id->decl_map,
                                             TREE_BLOCK (*tp));
          if (n)
            new_block = *n;
        }
      TREE_SET_BLOCK (*tp, new_block);
    }

  /* Keep iterating.  */
  return NULL_TREE;
}
/* Called from copy_body_id via walk_tree.  DATA is really a
   `copy_body_data *'.  */

tree
copy_tree_body_r (tree *tp, int *walk_subtrees, void *data)
{
  copy_body_data *id = (copy_body_data *) data;
  tree fn = id->src_fn;
  tree new_block;

  /* Begin by recognizing trees that we'll completely rewrite for the
     inlining context.  Our output for these trees is completely
     different from our input (e.g. RETURN_EXPR is deleted, and morphs
     into an edge).  Further down, we'll handle trees that get
     duplicated and/or tweaked.  */

  /* When requested, RETURN_EXPRs should be transformed to just the
     contained MODIFY_EXPR.  The branch semantics of the return will
     be handled elsewhere by manipulating the CFG rather than a statement.  */
  if (TREE_CODE (*tp) == RETURN_EXPR && id->transform_return_to_modify)
    {
      tree assignment = TREE_OPERAND (*tp, 0);

      /* If we're returning something, just turn that into an
         assignment into the equivalent of the original RESULT_DECL.
         If the "assignment" is just the result decl, the result
         decl has already been set (e.g. a recent "foo (&result_decl,
         ...)"); just toss the entire RETURN_EXPR.  */
      if (assignment && TREE_CODE (assignment) == MODIFY_EXPR)
        {
          /* Replace the RETURN_EXPR with (a copy of) the
             MODIFY_EXPR hanging underneath.  */
          *tp = copy_node (assignment);
        }
      else /* Else the RETURN_EXPR returns no value.  */
        {
          *tp = NULL;
          return (tree) (void *)1;
        }
    }
  else if (TREE_CODE (*tp) == SSA_NAME)
    {
      *tp = remap_ssa_name (*tp, id);
      *walk_subtrees = 0;
      return NULL;
    }

  /* Local variables and labels need to be replaced by equivalent
     variables.  We don't want to copy static variables; there's only
     one of those, no matter how many times we inline the containing
     function.  Similarly for globals from an outer function.  */
  else if (auto_var_in_fn_p (*tp, fn))
    {
      tree new_decl;

      /* Remap the declaration.  */
      new_decl = remap_decl (*tp, id);
      gcc_assert (new_decl);
      /* Replace this variable with the copy.  */
      STRIP_TYPE_NOPS (new_decl);
      *tp = new_decl;
      *walk_subtrees = 0;
    }
  else if (TREE_CODE (*tp) == STATEMENT_LIST)
    copy_statement_list (tp);
  else if (TREE_CODE (*tp) == SAVE_EXPR
           || TREE_CODE (*tp) == TARGET_EXPR)
    remap_save_expr (tp, id->decl_map, walk_subtrees);
  else if (TREE_CODE (*tp) == LABEL_DECL
           && (! DECL_CONTEXT (*tp)
               || decl_function_context (*tp) == id->src_fn))
    /* These may need to be remapped for EH handling.  */
    *tp = remap_decl (*tp, id);
  else if (TREE_CODE (*tp) == BIND_EXPR)
    copy_bind_expr (tp, walk_subtrees, id);
  /* Types may need remapping as well.  */
  else if (TYPE_P (*tp))
    *tp = remap_type (*tp, id);

  /* If this is a constant, we have to copy the node iff the type will be
     remapped.  copy_tree_r will not copy a constant.  */
  else if (CONSTANT_CLASS_P (*tp))
    {
      tree new_type = remap_type (TREE_TYPE (*tp), id);

      if (new_type == TREE_TYPE (*tp))
        *walk_subtrees = 0;

      else if (TREE_CODE (*tp) == INTEGER_CST)
        *tp = build_int_cst_wide (new_type, TREE_INT_CST_LOW (*tp),
                                  TREE_INT_CST_HIGH (*tp));
      else
        {
          *tp = copy_node (*tp);
          TREE_TYPE (*tp) = new_type;
        }
    }

  /* Otherwise, just copy the node.  Note that copy_tree_r already
     knows not to copy VAR_DECLs, etc., so this is safe.  */
  else
    {
      /* Here we handle trees that are not completely rewritten.
         First we detect some inlining-induced bogosities for
         discarding.  */
      if (TREE_CODE (*tp) == MODIFY_EXPR
          && TREE_OPERAND (*tp, 0) == TREE_OPERAND (*tp, 1)
          && (auto_var_in_fn_p (TREE_OPERAND (*tp, 0), fn)))
        {
          /* Some assignments VAR = VAR; don't generate any rtl code
             and thus don't count as variable modification.  Avoid
             keeping bogosities like 0 = 0.  */
          tree decl = TREE_OPERAND (*tp, 0), value;
          tree *n;

          n = (tree *) pointer_map_contains (id->decl_map, decl);
          if (n)
            {
              value = *n;
              STRIP_TYPE_NOPS (value);
              if (TREE_CONSTANT (value) || TREE_READONLY (value))
                {
                  *tp = build_empty_stmt (EXPR_LOCATION (*tp));
                  return copy_tree_body_r (tp, walk_subtrees, data);
                }
            }
        }
      else if (TREE_CODE (*tp) == INDIRECT_REF)
        {
          /* Get rid of *& from inline substitutions that can happen when a
             pointer argument is an ADDR_EXPR.  */
          tree decl = TREE_OPERAND (*tp, 0);
          tree *n = (tree *) pointer_map_contains (id->decl_map, decl);
          if (n)
            {
              /* If we happen to get an ADDR_EXPR in n->value, strip
                 it manually here as we'll eventually get ADDR_EXPRs
                 which lie about their types pointed to.  In this case
                 build_fold_indirect_ref wouldn't strip the INDIRECT_REF,
                 but we absolutely rely on that.  As fold_indirect_ref
                 does other useful transformations, try that first, though.  */
              tree type = TREE_TYPE (*tp);
              tree ptr = id->do_not_unshare ? *n : unshare_expr (*n);
              tree old = *tp;
              *tp = gimple_fold_indirect_ref (ptr);
              if (! *tp)
                {
                  if (TREE_CODE (ptr) == ADDR_EXPR)
                    {
                      *tp
                        = fold_indirect_ref_1 (EXPR_LOCATION (ptr), type, ptr);
                      /* ???  We should either assert here or build
                         a VIEW_CONVERT_EXPR instead of blindly leaking
                         incompatible types to our IL.  */
                      if (! *tp)
                        *tp = TREE_OPERAND (ptr, 0);
                    }
                  else
                    {
                      *tp = build1 (INDIRECT_REF, type, ptr);
                      TREE_THIS_VOLATILE (*tp) = TREE_THIS_VOLATILE (old);
                      TREE_SIDE_EFFECTS (*tp) = TREE_SIDE_EFFECTS (old);
                      TREE_READONLY (*tp) = TREE_READONLY (old);
                      /* We cannot propagate the TREE_THIS_NOTRAP flag if we
                         have remapped a parameter as the property might be
                         valid only for the parameter itself.  */
                      if (TREE_THIS_NOTRAP (old)
                          && (!is_parm (TREE_OPERAND (old, 0))
                              || (!id->transform_parameter && is_parm (ptr))))
                        TREE_THIS_NOTRAP (*tp) = 1;
                    }
                }
              *walk_subtrees = 0;
              return NULL;
            }
        }
      else if (TREE_CODE (*tp) == MEM_REF)
        {
          /* We need to re-canonicalize MEM_REFs from inline substitutions
             that can happen when a pointer argument is an ADDR_EXPR.
             Recurse here manually to allow that.  */
          tree ptr = TREE_OPERAND (*tp, 0);
          tree type = remap_type (TREE_TYPE (*tp), id);
          tree old = *tp;
          walk_tree (&ptr, copy_tree_body_r, data, NULL);
          *tp = fold_build2 (MEM_REF, type, ptr, TREE_OPERAND (*tp, 1));
          TREE_THIS_VOLATILE (*tp) = TREE_THIS_VOLATILE (old);
          TREE_SIDE_EFFECTS (*tp) = TREE_SIDE_EFFECTS (old);
          TREE_NO_WARNING (*tp) = TREE_NO_WARNING (old);
          /* We cannot propagate the TREE_THIS_NOTRAP flag if we have
             remapped a parameter as the property might be valid only
             for the parameter itself.  */
          if (TREE_THIS_NOTRAP (old)
              && (!is_parm (TREE_OPERAND (old, 0))
                  || (!id->transform_parameter && is_parm (ptr))))
            TREE_THIS_NOTRAP (*tp) = 1;
          *walk_subtrees = 0;
          return NULL;
        }

      /* Here is the "usual case".  Copy this tree node, and then
         tweak some special cases.  */
      copy_tree_r (tp, walk_subtrees, NULL);

      /* If EXPR has block defined, map it to newly constructed block.
         When inlining we want EXPRs without block to appear in the block
         of the function call if we are not remapping a type.  */
      if (EXPR_P (*tp))
        {
          new_block = id->remapping_type_depth == 0 ? id->block : NULL;
          if (TREE_BLOCK (*tp))
            {
              tree *n;
              n = (tree *) pointer_map_contains (id->decl_map,
                                                 TREE_BLOCK (*tp));
              if (n)
                new_block = *n;
            }
          TREE_SET_BLOCK (*tp, new_block);
        }

      if (TREE_CODE (*tp) != OMP_CLAUSE)
        TREE_TYPE (*tp) = remap_type (TREE_TYPE (*tp), id);

      /* The copied TARGET_EXPR has never been expanded, even if the
         original node was expanded already.  */
      if (TREE_CODE (*tp) == TARGET_EXPR && TREE_OPERAND (*tp, 3))
        {
          TREE_OPERAND (*tp, 1) = TREE_OPERAND (*tp, 3);
          TREE_OPERAND (*tp, 3) = NULL_TREE;
        }

      /* Variable substitution need not be simple.  In particular, the
         INDIRECT_REF substitution above.  Make sure that TREE_CONSTANT
         and friends are up-to-date.  */
      else if (TREE_CODE (*tp) == ADDR_EXPR)
        {
          int invariant = is_gimple_min_invariant (*tp);
          walk_tree (&TREE_OPERAND (*tp, 0), copy_tree_body_r, id, NULL);

          /* Handle the case where we substituted an INDIRECT_REF
             into the operand of the ADDR_EXPR.  */
          if (TREE_CODE (TREE_OPERAND (*tp, 0)) == INDIRECT_REF)
            *tp = TREE_OPERAND (TREE_OPERAND (*tp, 0), 0);
          else
            recompute_tree_invariant_for_addr_expr (*tp);

          /* If this used to be invariant, but is not any longer,
             then regimplification is probably needed.  */
          if (invariant && !is_gimple_min_invariant (*tp))
            id->regimplify = true;

          *walk_subtrees = 0;
        }
    }

  /* Keep iterating.  */
  return NULL_TREE;
}
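
/* For illustration (not from this file): when the caller passes &a to a
   pointer parameter P, a use of *P in the inlined body first becomes
   *&a; the INDIRECT_REF and MEM_REF handling above folds this back to
   plain A, which is the re-canonicalization the comments refer to.  */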
/* Helper for remap_gimple_stmt.  Given an EH region number for the
   source function, map that to the duplicate EH region number in
   the destination function.  */

static int
remap_eh_region_nr (int old_nr, copy_body_data *id)
{
  eh_region old_r, new_r;
  void **slot;

  old_r = get_eh_region_from_number_fn (id->src_cfun, old_nr);
  slot = pointer_map_contains (id->eh_map, old_r);
  new_r = (eh_region) *slot;

  return new_r->index;
}
/* Similar, but operate on INTEGER_CSTs.  */

static tree
remap_eh_region_tree_nr (tree old_t_nr, copy_body_data *id)
{
  int old_nr, new_nr;

  old_nr = tree_low_cst (old_t_nr, 0);
  new_nr = remap_eh_region_nr (old_nr, id);

  return build_int_cst (integer_type_node, new_nr);
}
/* Helper for copy_bb.  Remap statement STMT using the inlining
   information in ID.  Return the new statement copy.  */

static gimple
remap_gimple_stmt (gimple stmt, copy_body_data *id)
{
  gimple copy = NULL;
  struct walk_stmt_info wi;
  bool skip_first = false;

  /* Begin by recognizing trees that we'll completely rewrite for the
     inlining context.  Our output for these trees is completely
     different from our input (e.g. RETURN_EXPR is deleted, and morphs
     into an edge).  Further down, we'll handle trees that get
     duplicated and/or tweaked.  */

  /* When requested, GIMPLE_RETURNs should be transformed to just the
     contained GIMPLE_ASSIGN.  The branch semantics of the return will
     be handled elsewhere by manipulating the CFG rather than the
     statement.  */
  if (gimple_code (stmt) == GIMPLE_RETURN && id->transform_return_to_modify)
    {
      tree retval = gimple_return_retval (stmt);

      /* If we're returning something, just turn that into an
         assignment into the equivalent of the original RESULT_DECL.
         If RETVAL is just the result decl, the result decl has
         already been set (e.g. a recent "foo (&result_decl, ...)");
         just toss the entire GIMPLE_RETURN.  */
      if (retval
          && (TREE_CODE (retval) != RESULT_DECL
              && (TREE_CODE (retval) != SSA_NAME
                  || ! SSA_NAME_VAR (retval)
                  || TREE_CODE (SSA_NAME_VAR (retval)) != RESULT_DECL)))
        {
          copy = gimple_build_assign (id->retvar, retval);
          /* id->retvar is already substituted.  Skip it on later remapping.  */
          skip_first = true;
        }
      else
        return gimple_build_nop ();
    }
  else if (gimple_has_substatements (stmt))
    {
      gimple_seq s1, s2;

      /* When cloning bodies from the C++ front end, we will be handed bodies
         in High GIMPLE form.  Handle here all the High GIMPLE statements that
         have embedded statements.  */
      switch (gimple_code (stmt))
        {
        case GIMPLE_BIND:
          copy = copy_gimple_bind (stmt, id);
          break;

        case GIMPLE_CATCH:
          s1 = remap_gimple_seq (gimple_catch_handler (stmt), id);
          copy = gimple_build_catch (gimple_catch_types (stmt), s1);
          break;

        case GIMPLE_EH_FILTER:
          s1 = remap_gimple_seq (gimple_eh_filter_failure (stmt), id);
          copy = gimple_build_eh_filter (gimple_eh_filter_types (stmt), s1);
          break;

        case GIMPLE_TRY:
          s1 = remap_gimple_seq (gimple_try_eval (stmt), id);
          s2 = remap_gimple_seq (gimple_try_cleanup (stmt), id);
          copy = gimple_build_try (s1, s2, gimple_try_kind (stmt));
          break;

        case GIMPLE_WITH_CLEANUP_EXPR:
          s1 = remap_gimple_seq (gimple_wce_cleanup (stmt), id);
          copy = gimple_build_wce (s1);
          break;

        case GIMPLE_OMP_PARALLEL:
          s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
          copy = gimple_build_omp_parallel
                   (s1,
                    gimple_omp_parallel_clauses (stmt),
                    gimple_omp_parallel_child_fn (stmt),
                    gimple_omp_parallel_data_arg (stmt));
          break;

        case GIMPLE_OMP_TASK:
          s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
          copy = gimple_build_omp_task
                   (s1,
                    gimple_omp_task_clauses (stmt),
                    gimple_omp_task_child_fn (stmt),
                    gimple_omp_task_data_arg (stmt),
                    gimple_omp_task_copy_fn (stmt),
                    gimple_omp_task_arg_size (stmt),
                    gimple_omp_task_arg_align (stmt));
          break;

        case GIMPLE_OMP_FOR:
          s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
          s2 = remap_gimple_seq (gimple_omp_for_pre_body (stmt), id);
          copy = gimple_build_omp_for (s1, gimple_omp_for_kind (stmt),
                                       gimple_omp_for_clauses (stmt),
                                       gimple_omp_for_collapse (stmt), s2);
          {
            size_t i;
            for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
              {
                gimple_omp_for_set_index (copy, i,
                                          gimple_omp_for_index (stmt, i));
                gimple_omp_for_set_initial (copy, i,
                                            gimple_omp_for_initial (stmt, i));
                gimple_omp_for_set_final (copy, i,
                                          gimple_omp_for_final (stmt, i));
                gimple_omp_for_set_incr (copy, i,
                                         gimple_omp_for_incr (stmt, i));
                gimple_omp_for_set_cond (copy, i,
                                         gimple_omp_for_cond (stmt, i));
              }
          }
          break;

        case GIMPLE_OMP_MASTER:
          s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
          copy = gimple_build_omp_master (s1);
          break;

        case GIMPLE_OMP_TASKGROUP:
          s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
          copy = gimple_build_omp_taskgroup (s1);
          break;

        case GIMPLE_OMP_ORDERED:
          s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
          copy = gimple_build_omp_ordered (s1);
          break;

        case GIMPLE_OMP_SECTION:
          s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
          copy = gimple_build_omp_section (s1);
          break;

        case GIMPLE_OMP_SECTIONS:
          s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
          copy = gimple_build_omp_sections
                   (s1, gimple_omp_sections_clauses (stmt));
          break;

        case GIMPLE_OMP_SINGLE:
          s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
          copy = gimple_build_omp_single
                   (s1, gimple_omp_single_clauses (stmt));
          break;

        case GIMPLE_OMP_TARGET:
          s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
          copy = gimple_build_omp_target
                   (s1, gimple_omp_target_kind (stmt),
                    gimple_omp_target_clauses (stmt));
          break;

        case GIMPLE_OMP_TEAMS:
          s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
          copy = gimple_build_omp_teams
                   (s1, gimple_omp_teams_clauses (stmt));
          break;

        case GIMPLE_OMP_CRITICAL:
          s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
          copy
            = gimple_build_omp_critical (s1, gimple_omp_critical_name (stmt));
          break;

        case GIMPLE_TRANSACTION:
          s1 = remap_gimple_seq (gimple_transaction_body (stmt), id);
          copy = gimple_build_transaction (s1, gimple_transaction_label (stmt));
          gimple_transaction_set_subcode (copy, gimple_transaction_subcode (stmt));
          break;

        default:
          gcc_unreachable ();
        }
    }
  else
    {
      if (gimple_assign_copy_p (stmt)
          && gimple_assign_lhs (stmt) == gimple_assign_rhs1 (stmt)
          && auto_var_in_fn_p (gimple_assign_lhs (stmt), id->src_fn))
        {
          /* Here we handle statements that are not completely rewritten.
             First we detect some inlining-induced bogosities for
             discarding.  */

          /* Some assignments VAR = VAR; don't generate any rtl code
             and thus don't count as variable modification.  Avoid
             keeping bogosities like 0 = 0.  */
          tree decl = gimple_assign_lhs (stmt), value;
          tree *n;

          n = (tree *) pointer_map_contains (id->decl_map, decl);
          if (n)
            {
              value = *n;
              STRIP_TYPE_NOPS (value);
              if (TREE_CONSTANT (value) || TREE_READONLY (value))
                return gimple_build_nop ();
            }
        }

      /* For *ptr_N ={v} {CLOBBER}, if ptr_N is an SSA_NAME defined
         in a block that we aren't copying during tree_function_versioning,
         just drop the clobber stmt.  */
      if (id->blocks_to_copy && gimple_clobber_p (stmt))
        {
          tree lhs = gimple_assign_lhs (stmt);
          if (TREE_CODE (lhs) == MEM_REF
              && TREE_CODE (TREE_OPERAND (lhs, 0)) == SSA_NAME)
            {
              gimple def_stmt = SSA_NAME_DEF_STMT (TREE_OPERAND (lhs, 0));
              if (gimple_bb (def_stmt)
                  && !bitmap_bit_p (id->blocks_to_copy,
                                    gimple_bb (def_stmt)->index))
                return gimple_build_nop ();
            }
        }

      if (gimple_debug_bind_p (stmt))
        {
          copy = gimple_build_debug_bind (gimple_debug_bind_get_var (stmt),
                                          gimple_debug_bind_get_value (stmt),
                                          stmt);
          id->debug_stmts.safe_push (copy);
          return copy;
        }
      if (gimple_debug_source_bind_p (stmt))
        {
          copy = gimple_build_debug_source_bind
                   (gimple_debug_source_bind_get_var (stmt),
                    gimple_debug_source_bind_get_value (stmt), stmt);
          id->debug_stmts.safe_push (copy);
          return copy;
        }

      /* Create a new deep copy of the statement.  */
      copy = gimple_copy (stmt);

      /* Remap the region numbers for __builtin_eh_{pointer,filter},
         RESX and EH_DISPATCH.  */
      if (id->eh_map)
        switch (gimple_code (copy))
          {
          case GIMPLE_CALL:
            {
              tree r, fndecl = gimple_call_fndecl (copy);
              if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
                switch (DECL_FUNCTION_CODE (fndecl))
                  {
                  case BUILT_IN_EH_COPY_VALUES:
                    r = gimple_call_arg (copy, 1);
                    r = remap_eh_region_tree_nr (r, id);
                    gimple_call_set_arg (copy, 1, r);
                    /* FALLTHRU */

                  case BUILT_IN_EH_POINTER:
                  case BUILT_IN_EH_FILTER:
                    r = gimple_call_arg (copy, 0);
                    r = remap_eh_region_tree_nr (r, id);
                    gimple_call_set_arg (copy, 0, r);
                    break;

                  default:
                    break;
                  }

              /* Reset alias info if we didn't apply measures to
                 keep it valid over inlining by setting DECL_PT_UID.  */
              if (!id->src_cfun->gimple_df
                  || !id->src_cfun->gimple_df->ipa_pta)
                gimple_call_reset_alias_info (copy);
            }
            break;

          case GIMPLE_RESX:
            {
              int r = gimple_resx_region (copy);
              r = remap_eh_region_nr (r, id);
              gimple_resx_set_region (copy, r);
            }
            break;

          case GIMPLE_EH_DISPATCH:
            {
              int r = gimple_eh_dispatch_region (copy);
              r = remap_eh_region_nr (r, id);
              gimple_eh_dispatch_set_region (copy, r);
            }
            break;

          default:
            break;
          }
    }

  /* If STMT has a block defined, map it to the newly constructed
     block.  */
  if (gimple_block (copy))
    {
      tree *n;
      n = (tree *) pointer_map_contains (id->decl_map, gimple_block (copy));
      gcc_assert (n);
      gimple_set_block (copy, *n);
    }

  if (gimple_debug_bind_p (copy) || gimple_debug_source_bind_p (copy))
    return copy;

  /* Remap all the operands in COPY.  */
  memset (&wi, 0, sizeof (wi));
  wi.info = id;
  if (skip_first)
    walk_tree (gimple_op_ptr (copy, 1), remap_gimple_op_r, &wi, NULL);
  else
    walk_gimple_op (copy, remap_gimple_op_r, &wi);

  /* Clear the copied virtual operands.  We are not remapping them here
     but are going to recreate them from scratch.  */
  if (gimple_has_mem_ops (copy))
    {
      gimple_set_vdef (copy, NULL_TREE);
      gimple_set_vuse (copy, NULL_TREE);
    }

  return copy;
}
/* Copy basic block, scale profile accordingly.  Edges will be taken care of
   later.  */

static basic_block
copy_bb (copy_body_data *id, basic_block bb, int frequency_scale,
         gcov_type count_scale)
{
  gimple_stmt_iterator gsi, copy_gsi, seq_gsi;
  basic_block copy_basic_block;
  tree decl;
  gcov_type freq;
  basic_block prev;

  /* Search for previous copied basic block.  */
  prev = bb->prev_bb;
  while (!prev->aux)
    prev = prev->prev_bb;

  /* create_basic_block () will append every new block to
     basic_block_info automatically.  */
  copy_basic_block = create_basic_block (NULL, (void *) 0,
                                         (basic_block) prev->aux);
  copy_basic_block->count = apply_scale (bb->count, count_scale);

  /* We are going to rebuild frequencies from scratch.  These values
     have just minor importance for driving canonicalize_loop_headers.  */
  freq = apply_scale ((gcov_type)bb->frequency, frequency_scale);

  /* We recompute frequencies after inlining, so this is quite safe.  */
  if (freq > BB_FREQ_MAX)
    freq = BB_FREQ_MAX;
  copy_basic_block->frequency = freq;

  copy_gsi = gsi_start_bb (copy_basic_block);

  for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      gimple stmt = gsi_stmt (gsi);
      gimple orig_stmt = stmt;

      id->regimplify = false;
      stmt = remap_gimple_stmt (stmt, id);
      if (gimple_nop_p (stmt))
        continue;

      gimple_duplicate_stmt_histograms (cfun, stmt, id->src_cfun, orig_stmt);
      seq_gsi = copy_gsi;

      /* With return slot optimization we can end up with
         non-gimple (foo *)&this->m, fix that here.  */
      if (is_gimple_assign (stmt)
          && gimple_assign_rhs_code (stmt) == NOP_EXPR
          && !is_gimple_val (gimple_assign_rhs1 (stmt)))
        {
          tree new_rhs;
          new_rhs = force_gimple_operand_gsi (&seq_gsi,
                                              gimple_assign_rhs1 (stmt),
                                              true, NULL, false,
                                              GSI_CONTINUE_LINKING);
          gimple_assign_set_rhs1 (stmt, new_rhs);
          id->regimplify = false;
        }

      gsi_insert_after (&seq_gsi, stmt, GSI_NEW_STMT);

      if (id->regimplify)
        gimple_regimplify_operands (stmt, &seq_gsi);

      /* If copy_basic_block was empty at the start of this iteration,
         call gsi_start_bb again to get at the newly added statements.  */
      if (gsi_end_p (copy_gsi))
        copy_gsi = gsi_start_bb (copy_basic_block);
      else
        gsi_next (&copy_gsi);

      /* Process the new statement.  The call to gimple_regimplify_operands
         possibly turned the statement into multiple statements; we
         need to process all of them.  */
      do
        {
          tree fn;

          stmt = gsi_stmt (copy_gsi);
          if (is_gimple_call (stmt)
              && gimple_call_va_arg_pack_p (stmt)
              && id->gimple_call)
            {
              /* __builtin_va_arg_pack () should be replaced by
                 all arguments corresponding to ... in the caller.  */
              tree p;
              gimple new_call;
              vec<tree> argarray;
              size_t nargs = gimple_call_num_args (id->gimple_call);
              size_t n;

              for (p = DECL_ARGUMENTS (id->src_fn); p; p = DECL_CHAIN (p))
                nargs--;

              /* Create the new array of arguments.  */
              n = nargs + gimple_call_num_args (stmt);
              argarray.create (n);
              argarray.safe_grow_cleared (n);

              /* Copy all the arguments before '...'  */
              memcpy (argarray.address (),
                      gimple_call_arg_ptr (stmt, 0),
                      gimple_call_num_args (stmt) * sizeof (tree));

              /* Append the arguments passed in '...'  */
              memcpy (argarray.address () + gimple_call_num_args (stmt),
                      gimple_call_arg_ptr (id->gimple_call, 0)
                      + (gimple_call_num_args (id->gimple_call) - nargs),
                      nargs * sizeof (tree));

              new_call = gimple_build_call_vec (gimple_call_fn (stmt),
                                                argarray);

              argarray.release ();

              /* Copy all GIMPLE_CALL flags, location and block, except
                 GF_CALL_VA_ARG_PACK.  */
              gimple_call_copy_flags (new_call, stmt);
              gimple_call_set_va_arg_pack (new_call, false);
              gimple_set_location (new_call, gimple_location (stmt));
              gimple_set_block (new_call, gimple_block (stmt));
              gimple_call_set_lhs (new_call, gimple_call_lhs (stmt));

              gsi_replace (&copy_gsi, new_call, false);
              stmt = new_call;
            }
          else if (is_gimple_call (stmt)
                   && id->gimple_call
                   && (decl = gimple_call_fndecl (stmt))
                   && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
                   && DECL_FUNCTION_CODE (decl) == BUILT_IN_VA_ARG_PACK_LEN)
            {
              /* __builtin_va_arg_pack_len () should be replaced by
                 the number of anonymous arguments.  */
              size_t nargs = gimple_call_num_args (id->gimple_call);
              tree count, p;
              gimple new_stmt;

              for (p = DECL_ARGUMENTS (id->src_fn); p; p = DECL_CHAIN (p))
                nargs--;

              count = build_int_cst (integer_type_node, nargs);
              new_stmt = gimple_build_assign (gimple_call_lhs (stmt), count);
              gsi_replace (&copy_gsi, new_stmt, false);
              stmt = new_stmt;
            }
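
          /* Illustrative sketch (not from this file): inlining

               int f (int x, ...) { return g (x, __builtin_va_arg_pack ()); }

             into the call f (1, 2, 3) rewrites the inner call to
             g (1, 2, 3), and __builtin_va_arg_pack_len () would be
             replaced by the constant 2.  (In practice such functions
             must be always_inline for this to apply.)  */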
          /* Statements produced by inlining can be unfolded, especially
             when we constant propagated some operands.  We can't fold
             them right now for two reasons:
             1) folding requires SSA_NAME_DEF_STMTs to be correct
             2) we can't change function calls to builtins.
             So we just mark statements for later folding.  We mark
             all new statements, instead of just statements that have
             changed by some nontrivial substitution, so that even
             statements made foldable indirectly are updated.  If this
             turns out to be expensive, copy_body can be told to watch
             for nontrivial changes.  */
          if (id->statements_to_fold)
            pointer_set_insert (id->statements_to_fold, stmt);

          /* We're duplicating a CALL_EXPR.  Find any corresponding
             callgraph edges and update or duplicate them.  */
          if (is_gimple_call (stmt))
            {
              struct cgraph_edge *edge;
              int flags;

              switch (id->transform_call_graph_edges)
                {
                case CB_CGE_DUPLICATE:
                  edge = cgraph_edge (id->src_node, orig_stmt);
                  if (edge)
                    {
                      int edge_freq = edge->frequency;
                      int new_freq;
                      struct cgraph_edge *old_edge = edge;
                      edge = cgraph_clone_edge (edge, id->dst_node, stmt,
                                                gimple_uid (stmt),
                                                REG_BR_PROB_BASE, CGRAPH_FREQ_BASE,
                                                true);
                      /* We could also just rescale the frequency, but
                         doing so would introduce roundoff errors and make
                         the verifier unhappy.  */
                      new_freq = compute_call_stmt_bb_frequency (id->dst_node->decl,
                                                                 copy_basic_block);

                      /* Speculative calls consist of two edges - direct and
                         indirect.  Duplicate the whole thing and distribute
                         frequencies accordingly.  */
                      if (edge->speculative)
                        {
                          struct cgraph_edge *direct, *indirect;
                          struct ipa_ref *ref;

                          gcc_assert (!edge->indirect_unknown_callee);
                          cgraph_speculative_call_info (old_edge, direct, indirect, ref);
                          indirect = cgraph_clone_edge (indirect, id->dst_node, stmt,
                                                        gimple_uid (stmt),
                                                        REG_BR_PROB_BASE, CGRAPH_FREQ_BASE,
                                                        true);
                          if (old_edge->frequency + indirect->frequency)
                            {
                              edge->frequency = MIN (RDIV ((gcov_type)new_freq * old_edge->frequency,
                                                           (old_edge->frequency + indirect->frequency)),
                                                     CGRAPH_FREQ_MAX);
                              indirect->frequency = MIN (RDIV ((gcov_type)new_freq * indirect->frequency,
                                                               (old_edge->frequency + indirect->frequency)),
                                                         CGRAPH_FREQ_MAX);
                            }
                          ipa_clone_ref (ref, id->dst_node, stmt);
                        }
                      else
                        {
                          edge->frequency = new_freq;
                          if (dump_file
                              && profile_status_for_function (cfun) != PROFILE_ABSENT
                              && (edge_freq > edge->frequency + 10
                                  || edge_freq < edge->frequency - 10))
                            {
                              fprintf (dump_file, "Edge frequency estimated by "
                                       "cgraph %i diverge from inliner's estimate %i\n",
                                       edge_freq,
                                       edge->frequency);
                              fprintf (dump_file,
                                       "Orig bb: %i, orig bb freq %i, new bb freq %i\n",
                                       bb->index,
                                       bb->frequency,
                                       copy_basic_block->frequency);
                            }
                        }
                    }
                  break;

                case CB_CGE_MOVE_CLONES:
                  cgraph_set_call_stmt_including_clones (id->dst_node,
                                                         orig_stmt, stmt);
                  edge = cgraph_edge (id->dst_node, stmt);
                  break;

                case CB_CGE_MOVE:
                  edge = cgraph_edge (id->dst_node, orig_stmt);
                  if (edge)
                    cgraph_set_call_stmt (edge, stmt);
                  break;

                default:
                  gcc_unreachable ();
                }

              /* Constant propagation on arguments done during inlining
                 may create new direct calls.  Produce an edge for it.  */
              if ((!edge
                   || (edge->indirect_inlining_edge
                       && id->transform_call_graph_edges == CB_CGE_MOVE_CLONES))
                  && id->dst_node->definition
                  && (fn = gimple_call_fndecl (stmt)) != NULL)
                {
                  struct cgraph_node *dest = cgraph_get_node (fn);

                  /* We have a missing edge in the callgraph.  This can happen
                     when previous inlining turned an indirect call into a
                     direct call by constant propagating arguments or we are
                     producing a dead clone (for further cloning).  In all
                     other cases we hit a bug (incorrect node sharing is the
                     most common reason for missing edges).  */
                  gcc_assert (!dest->definition
                              || dest->address_taken
                              || !id->src_node->definition
                              || !id->dst_node->definition);
                  if (id->transform_call_graph_edges == CB_CGE_MOVE_CLONES)
                    cgraph_create_edge_including_clones
                      (id->dst_node, dest, orig_stmt, stmt, bb->count,
                       compute_call_stmt_bb_frequency (id->dst_node->decl,
                                                       copy_basic_block),
                       CIF_ORIGINALLY_INDIRECT_CALL);
                  else
                    cgraph_create_edge (id->dst_node, dest, stmt,
                                        bb->count,
                                        compute_call_stmt_bb_frequency
                                          (id->dst_node->decl,
                                           copy_basic_block))->inline_failed
                      = CIF_ORIGINALLY_INDIRECT_CALL;
                  if (dump_file)
                    {
                      fprintf (dump_file, "Created new direct edge to %s\n",
                               cgraph_node_name (dest));
                    }
                }

              flags = gimple_call_flags (stmt);
              if (flags & ECF_MAY_BE_ALLOCA)
                cfun->calls_alloca = true;
              if (flags & ECF_RETURNS_TWICE)
                cfun->calls_setjmp = true;
            }

          maybe_duplicate_eh_stmt_fn (cfun, stmt, id->src_cfun, orig_stmt,
                                      id->eh_map, id->eh_lp_nr);

          if (gimple_in_ssa_p (cfun) && !is_gimple_debug (stmt))
            {
              ssa_op_iter i;
              tree def;

              FOR_EACH_SSA_TREE_OPERAND (def, stmt, i, SSA_OP_DEF)
                if (TREE_CODE (def) == SSA_NAME)
                  SSA_NAME_DEF_STMT (def) = stmt;
            }

          gsi_next (&copy_gsi);
        }
      while (!gsi_end_p (copy_gsi));

      copy_gsi = gsi_last_bb (copy_basic_block);
    }

  return copy_basic_block;
}
1884 /* Inserting a Single Entry Multiple Exit region in SSA form into code in SSA
1885 form is quite easy, since the dominator relationship for the old basic blocks
1886 does not change.
1888 There is however an exception: inlining might change the dominator relation
1889 across EH edges from basic blocks within the inlined function that lead
1890 to landing pads in the function we inline into.
1892 The function fills in PHI_RESULTs of such PHI nodes if they refer
1893 to gimple regs. Otherwise, the function marks the PHI_RESULT of such
1894 PHI nodes for renaming. For non-gimple regs, renaming is safe: the
1895 EH edges are abnormal and SSA_NAME_OCCURS_IN_ABNORMAL_PHI must be
1896 set, and this means that there will be no overlapping live ranges
1897 for the underlying symbol.
1899 This might change in the future if we allow redirecting of EH edges and
1900 we might then want to change the way we build the CFG pre-inlining to
1901 include all the possible edges. */
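/* A minimal sketch of the situation handled here (hypothetical CFG):

     copied_bb --EH--> landing_pad    # its PHI needs an arg for this edge

   The argument for the new edge E is simply copied from RE, the
   already-existing edge from RET_BB to the same landing pad; the code
   below asserts that both edges agree in their EH/ABNORMAL flags.  */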
1902 static void
1903 update_ssa_across_abnormal_edges (basic_block bb, basic_block ret_bb,
1904 bool can_throw, bool nonlocal_goto)
1906 edge e;
1907 edge_iterator ei;
1909 FOR_EACH_EDGE (e, ei, bb->succs)
1910 if (!e->dest->aux
1911 || ((basic_block)e->dest->aux)->index == ENTRY_BLOCK)
1913 gimple phi;
1914 gimple_stmt_iterator si;
1916 if (!nonlocal_goto)
1917 gcc_assert (e->flags & EDGE_EH);
1919 if (!can_throw)
1920 gcc_assert (!(e->flags & EDGE_EH));
1922 for (si = gsi_start_phis (e->dest); !gsi_end_p (si); gsi_next (&si))
1924 edge re;
1926 phi = gsi_stmt (si);
1928 /* For abnormal goto/call edges the receiver can be the
1929 ENTRY_BLOCK. Do not assert this cannot happen. */
1931 gcc_assert ((e->flags & EDGE_EH)
1932 || SSA_NAME_OCCURS_IN_ABNORMAL_PHI (PHI_RESULT (phi)));
1934 re = find_edge (ret_bb, e->dest);
1935 gcc_checking_assert (re);
1936 gcc_assert ((re->flags & (EDGE_EH | EDGE_ABNORMAL))
1937 == (e->flags & (EDGE_EH | EDGE_ABNORMAL)));
1939 SET_USE (PHI_ARG_DEF_PTR_FROM_EDGE (phi, e),
1940 USE_FROM_PTR (PHI_ARG_DEF_PTR_FROM_EDGE (phi, re)));
1946 /* Copy edges from BB into its copy constructed earlier, scale profile
1947 accordingly. Edges will be taken care of later. Assume aux
1948 pointers point to the copies of each BB. Return true if any
1949 debug stmts are left after a statement that must end the basic block. */
1951 static bool
1952 copy_edges_for_bb (basic_block bb, gcov_type count_scale, basic_block ret_bb,
1953 bool can_make_abnormal_goto)
1955 basic_block new_bb = (basic_block) bb->aux;
1956 edge_iterator ei;
1957 edge old_edge;
1958 gimple_stmt_iterator si;
1959 int flags;
1960 bool need_debug_cleanup = false;
1962 /* Use the indices from the original blocks to create edges for the
1963 new ones. */
1964 FOR_EACH_EDGE (old_edge, ei, bb->succs)
1965 if (!(old_edge->flags & EDGE_EH))
1967 edge new_edge;
1969 flags = old_edge->flags;
1971 /* Return edges do get a FALLTHRU flag when they get inlined. */
1972 if (old_edge->dest->index == EXIT_BLOCK && !old_edge->flags
1973 && old_edge->dest->aux != EXIT_BLOCK_PTR)
1974 flags |= EDGE_FALLTHRU;
1975 new_edge = make_edge (new_bb, (basic_block) old_edge->dest->aux, flags);
1976 new_edge->count = apply_scale (old_edge->count, count_scale);
1977 new_edge->probability = old_edge->probability;
1980 if (bb->index == ENTRY_BLOCK || bb->index == EXIT_BLOCK)
1981 return false;
1983 for (si = gsi_start_bb (new_bb); !gsi_end_p (si);)
1985 gimple copy_stmt;
1986 bool can_throw, nonlocal_goto;
1988 copy_stmt = gsi_stmt (si);
1989 if (!is_gimple_debug (copy_stmt))
1990 update_stmt (copy_stmt);
1992 /* Do this before the possible split_block. */
1993 gsi_next (&si);
1995 /* If this tree could throw an exception, there are two
1996 cases where we need to add abnormal edge(s): the
1997 tree wasn't in a region and there is a "current
1998 region" in the caller; or the original tree had
1999 EH edges. In both cases split the block after the tree,
2000 and add abnormal edge(s) as needed; we need both
2001 those from the callee and the caller.
2002 We check whether the copy can throw, because the const
2003 propagation can change an INDIRECT_REF which throws
2004 into a COMPONENT_REF which doesn't. If the copy
2005 can throw, the original could also throw. */
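/* Hypothetical example: a throwing load "*p_1" (an INDIRECT_REF) may
   become a non-throwing "s.f" (a COMPONENT_REF) after constant
   propagation, so the copy may need fewer EH edges than the original;
   the reverse cannot happen.  */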
2006 can_throw = stmt_can_throw_internal (copy_stmt);
2007 nonlocal_goto = stmt_can_make_abnormal_goto (copy_stmt);
2009 if (can_throw || nonlocal_goto)
2011 if (!gsi_end_p (si))
2013 while (!gsi_end_p (si) && is_gimple_debug (gsi_stmt (si)))
2014 gsi_next (&si);
2015 if (gsi_end_p (si))
2016 need_debug_cleanup = true;
2018 if (!gsi_end_p (si))
2019 /* Note that bb's predecessor edges aren't necessarily
2020 right at this point; split_block doesn't care. */
2022 edge e = split_block (new_bb, copy_stmt);
2024 new_bb = e->dest;
2025 new_bb->aux = e->src->aux;
2026 si = gsi_start_bb (new_bb);
2030 if (gimple_code (copy_stmt) == GIMPLE_EH_DISPATCH)
2031 make_eh_dispatch_edges (copy_stmt);
2032 else if (can_throw)
2033 make_eh_edges (copy_stmt);
2035 /* If the call we inline cannot make an abnormal goto, do not add
2036 additional abnormal edges but only retain those already present
2037 in the original function body. */
2038 nonlocal_goto &= can_make_abnormal_goto;
2039 if (nonlocal_goto)
2040 make_abnormal_goto_edges (gimple_bb (copy_stmt), true);
2042 if ((can_throw || nonlocal_goto)
2043 && gimple_in_ssa_p (cfun))
2044 update_ssa_across_abnormal_edges (gimple_bb (copy_stmt), ret_bb,
2045 can_throw, nonlocal_goto);
2047 return need_debug_cleanup;
2050 /* Copy the PHIs. All blocks and edges are copied, some blocks
2051 were possibly split and new outgoing EH edges inserted.
2052 BB points to the block of the original function and AUX pointers link
2053 the original and newly copied blocks. */
2055 static void
2056 copy_phis_for_bb (basic_block bb, copy_body_data *id)
2058 basic_block const new_bb = (basic_block) bb->aux;
2059 edge_iterator ei;
2060 gimple phi;
2061 gimple_stmt_iterator si;
2062 edge new_edge;
2063 bool inserted = false;
2065 for (si = gsi_start_phis (bb); !gsi_end_p (si); gsi_next (&si))
2067 tree res, new_res;
2068 gimple new_phi;
2070 phi = gsi_stmt (si);
2071 res = PHI_RESULT (phi);
2072 new_res = res;
2073 if (!virtual_operand_p (res))
2075 walk_tree (&new_res, copy_tree_body_r, id, NULL);
2076 new_phi = create_phi_node (new_res, new_bb);
2077 FOR_EACH_EDGE (new_edge, ei, new_bb->preds)
2079 edge old_edge = find_edge ((basic_block) new_edge->src->aux, bb);
2080 tree arg;
2081 tree new_arg;
2082 edge_iterator ei2;
2083 location_t locus;
2085 /* When doing partial cloning, we allow PHIs on the entry block
2086 as long as all the arguments are the same. Find any input
2087 edge to find the argument to copy. */
2088 if (!old_edge)
2089 FOR_EACH_EDGE (old_edge, ei2, bb->preds)
2090 if (!old_edge->src->aux)
2091 break;
2093 arg = PHI_ARG_DEF_FROM_EDGE (phi, old_edge);
2094 new_arg = arg;
2095 walk_tree (&new_arg, copy_tree_body_r, id, NULL);
2096 gcc_assert (new_arg);
2097 /* With return slot optimization we can end up with
2098 non-gimple (foo *)&this->m, fix that here. */
2099 if (TREE_CODE (new_arg) != SSA_NAME
2100 && TREE_CODE (new_arg) != FUNCTION_DECL
2101 && !is_gimple_val (new_arg))
2103 gimple_seq stmts = NULL;
2104 new_arg = force_gimple_operand (new_arg, &stmts, true, NULL);
2105 gsi_insert_seq_on_edge (new_edge, stmts);
2106 inserted = true;
2108 locus = gimple_phi_arg_location_from_edge (phi, old_edge);
2109 if (LOCATION_BLOCK (locus))
2111 tree *n;
2112 n = (tree *) pointer_map_contains (id->decl_map,
2113 LOCATION_BLOCK (locus));
2114 gcc_assert (n);
2115 if (*n)
2116 locus = COMBINE_LOCATION_DATA (line_table, locus, *n);
2117 else
2118 locus = LOCATION_LOCUS (locus);
2120 else
2121 locus = LOCATION_LOCUS (locus);
2123 add_phi_arg (new_phi, new_arg, new_edge, locus);
2128 /* Commit the delayed edge insertions. */
2129 if (inserted)
2130 FOR_EACH_EDGE (new_edge, ei, new_bb->preds)
2131 gsi_commit_one_edge_insert (new_edge, NULL);
2135 /* Wrapper for remap_decl so it can be used as a callback. */
2137 static tree
2138 remap_decl_1 (tree decl, void *data)
2140 return remap_decl (decl, (copy_body_data *) data);
2143 /* Build struct function and associated datastructures for the new clone
2144 NEW_FNDECL to be built. CALLEE_FNDECL is the original. The function changes
2145 cfun to the function of NEW_FNDECL (and current_function_decl too). */
2147 static void
2148 initialize_cfun (tree new_fndecl, tree callee_fndecl, gcov_type count)
2150 struct function *src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
2151 gcov_type count_scale;
2153 if (!DECL_ARGUMENTS (new_fndecl))
2154 DECL_ARGUMENTS (new_fndecl) = DECL_ARGUMENTS (callee_fndecl);
2155 if (!DECL_RESULT (new_fndecl))
2156 DECL_RESULT (new_fndecl) = DECL_RESULT (callee_fndecl);
2158 if (ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count)
2159 count_scale
2160 = GCOV_COMPUTE_SCALE (count,
2161 ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count);
2162 else
2163 count_scale = REG_BR_PROB_BASE;
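/* Illustrative numbers only: with an original entry count of 1000 and
   COUNT == 250, count_scale becomes 0.25 expressed in units of
   REG_BR_PROB_BASE, so the entry/exit counts set up below are scaled
   to roughly a quarter of the original profile.  */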
2165 /* Register specific tree functions. */
2166 gimple_register_cfg_hooks ();
2168 /* Get clean struct function. */
2169 push_struct_function (new_fndecl);
2171 /* We will rebuild these, so just sanity check that they are empty. */
2172 gcc_assert (VALUE_HISTOGRAMS (cfun) == NULL);
2173 gcc_assert (cfun->local_decls == NULL);
2174 gcc_assert (cfun->cfg == NULL);
2175 gcc_assert (cfun->decl == new_fndecl);
2177 /* Copy items we preserve during cloning. */
2178 cfun->static_chain_decl = src_cfun->static_chain_decl;
2179 cfun->nonlocal_goto_save_area = src_cfun->nonlocal_goto_save_area;
2180 cfun->function_end_locus = src_cfun->function_end_locus;
2181 cfun->curr_properties = src_cfun->curr_properties;
2182 cfun->last_verified = src_cfun->last_verified;
2183 cfun->va_list_gpr_size = src_cfun->va_list_gpr_size;
2184 cfun->va_list_fpr_size = src_cfun->va_list_fpr_size;
2185 cfun->has_nonlocal_label = src_cfun->has_nonlocal_label;
2186 cfun->stdarg = src_cfun->stdarg;
2187 cfun->after_inlining = src_cfun->after_inlining;
2188 cfun->can_throw_non_call_exceptions
2189 = src_cfun->can_throw_non_call_exceptions;
2190 cfun->can_delete_dead_exceptions = src_cfun->can_delete_dead_exceptions;
2191 cfun->returns_struct = src_cfun->returns_struct;
2192 cfun->returns_pcc_struct = src_cfun->returns_pcc_struct;
2194 init_empty_tree_cfg ();
2196 profile_status_for_function (cfun) = profile_status_for_function (src_cfun);
2197 ENTRY_BLOCK_PTR->count =
2198 (ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count * count_scale /
2199 REG_BR_PROB_BASE);
2200 ENTRY_BLOCK_PTR->frequency
2201 = ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->frequency;
2202 EXIT_BLOCK_PTR->count =
2203 (EXIT_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count * count_scale /
2204 REG_BR_PROB_BASE);
2205 EXIT_BLOCK_PTR->frequency =
2206 EXIT_BLOCK_PTR_FOR_FUNCTION (src_cfun)->frequency;
2207 if (src_cfun->eh)
2208 init_eh_for_function ();
2210 if (src_cfun->gimple_df)
2212 init_tree_ssa (cfun);
2213 cfun->gimple_df->in_ssa_p = true;
2214 init_ssa_operands (cfun);
2218 /* Helper function for copy_cfg_body. Move debug stmts from the end
2219 of NEW_BB to the beginning of successor basic blocks when needed. If the
2220 successor has multiple predecessors, reset the values of the moved
2221 debug stmts, otherwise keep them. */
2223 static void
2224 maybe_move_debug_stmts_to_successors (copy_body_data *id, basic_block new_bb)
2226 edge e;
2227 edge_iterator ei;
2228 gimple_stmt_iterator si = gsi_last_nondebug_bb (new_bb);
2230 if (gsi_end_p (si)
2231 || gsi_one_before_end_p (si)
2232 || !(stmt_can_throw_internal (gsi_stmt (si))
2233 || stmt_can_make_abnormal_goto (gsi_stmt (si))))
2234 return;
2236 FOR_EACH_EDGE (e, ei, new_bb->succs)
2238 gimple_stmt_iterator ssi = gsi_last_bb (new_bb);
2239 gimple_stmt_iterator dsi = gsi_after_labels (e->dest);
2240 while (is_gimple_debug (gsi_stmt (ssi)))
2242 gimple stmt = gsi_stmt (ssi), new_stmt;
2243 tree var;
2244 tree value;
2246 /* For the last edge move the debug stmts instead of copying
2247 them. */
2248 if (ei_one_before_end_p (ei))
2250 si = ssi;
2251 gsi_prev (&ssi);
2252 if (!single_pred_p (e->dest) && gimple_debug_bind_p (stmt))
2253 gimple_debug_bind_reset_value (stmt);
2254 gsi_remove (&si, false);
2255 gsi_insert_before (&dsi, stmt, GSI_SAME_STMT);
2256 continue;
2259 if (gimple_debug_bind_p (stmt))
2261 var = gimple_debug_bind_get_var (stmt);
2262 if (single_pred_p (e->dest))
2264 value = gimple_debug_bind_get_value (stmt);
2265 value = unshare_expr (value);
2267 else
2268 value = NULL_TREE;
2269 new_stmt = gimple_build_debug_bind (var, value, stmt);
2271 else if (gimple_debug_source_bind_p (stmt))
2273 var = gimple_debug_source_bind_get_var (stmt);
2274 value = gimple_debug_source_bind_get_value (stmt);
2275 new_stmt = gimple_build_debug_source_bind (var, value, stmt);
2277 else
2278 gcc_unreachable ();
2279 gsi_insert_before (&dsi, new_stmt, GSI_SAME_STMT);
2280 id->debug_stmts.safe_push (new_stmt);
2281 gsi_prev (&ssi);
2286 /* Make a copy of the sub-loops of SRC_PARENT and place them
2287 as siblings of DEST_PARENT. */
2289 static void
2290 copy_loops (copy_body_data *id,
2291 struct loop *dest_parent, struct loop *src_parent)
2293 struct loop *src_loop = src_parent->inner;
2294 while (src_loop)
2296 if (!id->blocks_to_copy
2297 || bitmap_bit_p (id->blocks_to_copy, src_loop->header->index))
2299 struct loop *dest_loop = alloc_loop ();
2301 /* Assign the new loop its header and latch and associate
2302 those with the new loop. */
2303 if (src_loop->header != NULL)
2305 dest_loop->header = (basic_block)src_loop->header->aux;
2306 dest_loop->header->loop_father = dest_loop;
2308 if (src_loop->latch != NULL)
2310 dest_loop->latch = (basic_block)src_loop->latch->aux;
2311 dest_loop->latch->loop_father = dest_loop;
2314 /* Copy loop meta-data. */
2315 copy_loop_info (src_loop, dest_loop);
2317 /* Finally place it into the loop array and the loop tree. */
2318 place_new_loop (cfun, dest_loop);
2319 flow_loop_tree_node_add (dest_parent, dest_loop);
2321 if (src_loop->simduid)
2323 dest_loop->simduid = remap_decl (src_loop->simduid, id);
2324 cfun->has_simduid_loops = true;
2326 if (src_loop->force_vect)
2328 dest_loop->force_vect = true;
2329 cfun->has_force_vect_loops = true;
2332 /* Recurse. */
2333 copy_loops (id, dest_loop, src_loop);
2335 src_loop = src_loop->next;
2339 /* Call cgraph_redirect_edge_call_stmt_to_callee on all calls in BB. */
2341 void
2342 redirect_all_calls (copy_body_data * id, basic_block bb)
2344 gimple_stmt_iterator si;
2345 for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
2347 if (is_gimple_call (gsi_stmt (si)))
2349 struct cgraph_edge *edge = cgraph_edge (id->dst_node, gsi_stmt (si));
2350 if (edge)
2351 cgraph_redirect_edge_call_stmt_to_callee (edge);
2356 /* Make a copy of the body of FN so that it can be inserted inline in
2357 another function. Walks FN via CFG, returns new fndecl. */
2359 static tree
2360 copy_cfg_body (copy_body_data * id, gcov_type count, int frequency_scale,
2361 basic_block entry_block_map, basic_block exit_block_map,
2362 basic_block new_entry)
2364 tree callee_fndecl = id->src_fn;
2365 /* Original cfun for the callee, doesn't change. */
2366 struct function *src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
2367 struct function *cfun_to_copy;
2368 basic_block bb;
2369 tree new_fndecl = NULL;
2370 bool need_debug_cleanup = false;
2371 gcov_type count_scale;
2372 int last;
2373 int incoming_frequency = 0;
2374 gcov_type incoming_count = 0;
2376 if (ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count)
2377 count_scale
2378 = GCOV_COMPUTE_SCALE (count,
2379 ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count);
2380 else
2381 count_scale = REG_BR_PROB_BASE;
2383 /* Register specific tree functions. */
2384 gimple_register_cfg_hooks ();
2386 /* If we are inlining just a region of the function, make sure to connect the
2387 new entry to ENTRY_BLOCK_PTR. Since the new entry can be part of a loop, we
2388 must compute the frequency and probability of ENTRY_BLOCK_PTR based on the
2389 frequencies and probabilities of edges incoming from the nonduplicated region. */
2390 if (new_entry)
2392 edge e;
2393 edge_iterator ei;
2395 FOR_EACH_EDGE (e, ei, new_entry->preds)
2396 if (!e->src->aux)
2398 incoming_frequency += EDGE_FREQUENCY (e);
2399 incoming_count += e->count;
2401 incoming_count = apply_scale (incoming_count, count_scale);
2402 incoming_frequency
2403 = apply_scale ((gcov_type)incoming_frequency, frequency_scale);
2404 ENTRY_BLOCK_PTR->count = incoming_count;
2405 ENTRY_BLOCK_PTR->frequency = incoming_frequency;
2408 /* Must have a CFG here at this point. */
2409 gcc_assert (ENTRY_BLOCK_PTR_FOR_FUNCTION
2410 (DECL_STRUCT_FUNCTION (callee_fndecl)));
2412 cfun_to_copy = id->src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
2414 ENTRY_BLOCK_PTR_FOR_FUNCTION (cfun_to_copy)->aux = entry_block_map;
2415 EXIT_BLOCK_PTR_FOR_FUNCTION (cfun_to_copy)->aux = exit_block_map;
2416 entry_block_map->aux = ENTRY_BLOCK_PTR_FOR_FUNCTION (cfun_to_copy);
2417 exit_block_map->aux = EXIT_BLOCK_PTR_FOR_FUNCTION (cfun_to_copy);
2419 /* Duplicate any exception-handling regions. */
2420 if (cfun->eh)
2421 id->eh_map = duplicate_eh_regions (cfun_to_copy, NULL, id->eh_lp_nr,
2422 remap_decl_1, id);
2424 /* Use aux pointers to map the original blocks to copy. */
2425 FOR_EACH_BB_FN (bb, cfun_to_copy)
2426 if (!id->blocks_to_copy || bitmap_bit_p (id->blocks_to_copy, bb->index))
2428 basic_block new_bb = copy_bb (id, bb, frequency_scale, count_scale);
2429 bb->aux = new_bb;
2430 new_bb->aux = bb;
2431 new_bb->loop_father = entry_block_map->loop_father;
2434 last = last_basic_block;
2436 /* Now that we've duplicated the blocks, duplicate their edges. */
2437 bool can_make_abnormal_goto
2438 = id->gimple_call && stmt_can_make_abnormal_goto (id->gimple_call);
2439 FOR_ALL_BB_FN (bb, cfun_to_copy)
2440 if (!id->blocks_to_copy
2441 || (bb->index > 0 && bitmap_bit_p (id->blocks_to_copy, bb->index)))
2442 need_debug_cleanup |= copy_edges_for_bb (bb, count_scale, exit_block_map,
2443 can_make_abnormal_goto);
2445 if (new_entry)
2447 edge e = make_edge (entry_block_map, (basic_block)new_entry->aux, EDGE_FALLTHRU);
2448 e->probability = REG_BR_PROB_BASE;
2449 e->count = incoming_count;
2452 /* Duplicate the loop tree, if available and wanted. */
2453 if (loops_for_fn (src_cfun) != NULL
2454 && current_loops != NULL)
2456 copy_loops (id, entry_block_map->loop_father,
2457 get_loop (src_cfun, 0));
2458 /* Defer to cfgcleanup to update loop-father fields of basic-blocks. */
2459 loops_state_set (LOOPS_NEED_FIXUP);
2462 /* If the loop tree in the source function needed fixup, mark the
2463 destination loop tree for fixup, too. */
2464 if (loops_for_fn (src_cfun)->state & LOOPS_NEED_FIXUP)
2465 loops_state_set (LOOPS_NEED_FIXUP);
2467 if (gimple_in_ssa_p (cfun))
2468 FOR_ALL_BB_FN (bb, cfun_to_copy)
2469 if (!id->blocks_to_copy
2470 || (bb->index > 0 && bitmap_bit_p (id->blocks_to_copy, bb->index)))
2471 copy_phis_for_bb (bb, id);
2473 FOR_ALL_BB_FN (bb, cfun_to_copy)
2474 if (bb->aux)
2476 if (need_debug_cleanup
2477 && bb->index != ENTRY_BLOCK
2478 && bb->index != EXIT_BLOCK)
2479 maybe_move_debug_stmts_to_successors (id, (basic_block) bb->aux);
2480 /* Update call edge destinations. This cannot be done before loop
2481 info is updated, because we may split basic blocks. */
2482 if (id->transform_call_graph_edges == CB_CGE_DUPLICATE)
2483 redirect_all_calls (id, (basic_block)bb->aux);
2484 ((basic_block)bb->aux)->aux = NULL;
2485 bb->aux = NULL;
2488 /* Zero out AUX fields of newly created blocks during EH edge
2489 insertion. */
2490 for (; last < last_basic_block; last++)
2492 if (need_debug_cleanup)
2493 maybe_move_debug_stmts_to_successors (id, BASIC_BLOCK (last));
2494 BASIC_BLOCK (last)->aux = NULL;
2495 /* Update call edge destinations. This cannot be done before loop
2496 info is updated, because we may split basic blocks. */
2497 if (id->transform_call_graph_edges == CB_CGE_DUPLICATE)
2498 redirect_all_calls (id, BASIC_BLOCK (last));
2500 entry_block_map->aux = NULL;
2501 exit_block_map->aux = NULL;
2503 if (id->eh_map)
2505 pointer_map_destroy (id->eh_map);
2506 id->eh_map = NULL;
2509 return new_fndecl;
2512 /* Copy the debug STMT using ID. We deal with these statements in a
2513 special way: if any variable in their VALUE expression wasn't
2514 remapped yet, we won't remap it, because that would get decl uids
2515 out of sync, causing codegen differences between -g and -g0. If
2516 this arises, we drop the VALUE expression altogether. */
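/* For instance (hypothetical stmt): a bind "# DEBUG x => y_1 + 1" whose
   "y_1" was never remapped would require creating a new decl (and thus
   a new decl uid) only because debug stmts exist, so -g and -g0 builds
   would diverge; the bind is reset to "# DEBUG x => NULL" instead.  */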
2518 static void
2519 copy_debug_stmt (gimple stmt, copy_body_data *id)
2521 tree t, *n;
2522 struct walk_stmt_info wi;
2524 if (gimple_block (stmt))
2526 n = (tree *) pointer_map_contains (id->decl_map, gimple_block (stmt));
2527 gimple_set_block (stmt, n ? *n : id->block);
2530 /* Remap all the operands in COPY. */
2531 memset (&wi, 0, sizeof (wi));
2532 wi.info = id;
2534 processing_debug_stmt = 1;
2536 if (gimple_debug_source_bind_p (stmt))
2537 t = gimple_debug_source_bind_get_var (stmt);
2538 else
2539 t = gimple_debug_bind_get_var (stmt);
2541 if (TREE_CODE (t) == PARM_DECL && id->debug_map
2542 && (n = (tree *) pointer_map_contains (id->debug_map, t)))
2544 gcc_assert (TREE_CODE (*n) == VAR_DECL);
2545 t = *n;
2547 else if (TREE_CODE (t) == VAR_DECL
2548 && !is_global_var (t)
2549 && !pointer_map_contains (id->decl_map, t))
2550 /* T is a non-localized variable. */;
2551 else
2552 walk_tree (&t, remap_gimple_op_r, &wi, NULL);
2554 if (gimple_debug_bind_p (stmt))
2556 gimple_debug_bind_set_var (stmt, t);
2558 if (gimple_debug_bind_has_value_p (stmt))
2559 walk_tree (gimple_debug_bind_get_value_ptr (stmt),
2560 remap_gimple_op_r, &wi, NULL);
2562 /* Punt if any decl couldn't be remapped. */
2563 if (processing_debug_stmt < 0)
2564 gimple_debug_bind_reset_value (stmt);
2566 else if (gimple_debug_source_bind_p (stmt))
2568 gimple_debug_source_bind_set_var (stmt, t);
2569 walk_tree (gimple_debug_source_bind_get_value_ptr (stmt),
2570 remap_gimple_op_r, &wi, NULL);
2571 /* When inlining and source bind refers to one of the optimized
2572 away parameters, change the source bind into a normal debug bind
2573 referring to the corresponding DEBUG_EXPR_DECL that should have
2574 been bound before the call stmt. */
2575 t = gimple_debug_source_bind_get_value (stmt);
2576 if (t != NULL_TREE
2577 && TREE_CODE (t) == PARM_DECL
2578 && id->gimple_call)
2580 vec<tree, va_gc> **debug_args = decl_debug_args_lookup (id->src_fn);
2581 unsigned int i;
2582 if (debug_args != NULL)
2584 for (i = 0; i < vec_safe_length (*debug_args); i += 2)
2585 if ((**debug_args)[i] == DECL_ORIGIN (t)
2586 && TREE_CODE ((**debug_args)[i + 1]) == DEBUG_EXPR_DECL)
2588 t = (**debug_args)[i + 1];
2589 stmt->gsbase.subcode = GIMPLE_DEBUG_BIND;
2590 gimple_debug_bind_set_value (stmt, t);
2591 break;
2597 processing_debug_stmt = 0;
2599 update_stmt (stmt);
2602 /* Process deferred debug stmts. In order to give values better odds
2603 of being successfully remapped, we delay the processing of debug
2604 stmts until all other stmts that might require remapping are
2605 processed. */
2607 static void
2608 copy_debug_stmts (copy_body_data *id)
2610 size_t i;
2611 gimple stmt;
2613 if (!id->debug_stmts.exists ())
2614 return;
2616 FOR_EACH_VEC_ELT (id->debug_stmts, i, stmt)
2617 copy_debug_stmt (stmt, id);
2619 id->debug_stmts.release ();
2622 /* Make a copy of the body of SRC_FN so that it can be inserted inline in
2623 another function. */
2625 static tree
2626 copy_tree_body (copy_body_data *id)
2628 tree fndecl = id->src_fn;
2629 tree body = DECL_SAVED_TREE (fndecl);
2631 walk_tree (&body, copy_tree_body_r, id, NULL);
2633 return body;
2636 /* Make a copy of the body of FN so that it can be inserted inline in
2637 another function. */
2639 static tree
2640 copy_body (copy_body_data *id, gcov_type count, int frequency_scale,
2641 basic_block entry_block_map, basic_block exit_block_map,
2642 basic_block new_entry)
2644 tree fndecl = id->src_fn;
2645 tree body;
2647 /* If this body has a CFG, walk CFG and copy. */
2648 gcc_assert (ENTRY_BLOCK_PTR_FOR_FUNCTION (DECL_STRUCT_FUNCTION (fndecl)));
2649 body = copy_cfg_body (id, count, frequency_scale, entry_block_map, exit_block_map,
2650 new_entry);
2651 copy_debug_stmts (id);
2653 return body;
2656 /* Return true if VALUE is an ADDR_EXPR of an automatic variable
2657 defined in function FN, or of a data member thereof. */
2659 static bool
2660 self_inlining_addr_expr (tree value, tree fn)
2662 tree var;
2664 if (TREE_CODE (value) != ADDR_EXPR)
2665 return false;
2667 var = get_base_address (TREE_OPERAND (value, 0));
2669 return var && auto_var_in_fn_p (var, fn);
2672 /* Append to BB a debug annotation that binds VAR to VALUE, inheriting
2673 lexical block and line number information from base_stmt, if given,
2674 or from the last stmt of the block otherwise. */
2676 static gimple
2677 insert_init_debug_bind (copy_body_data *id,
2678 basic_block bb, tree var, tree value,
2679 gimple base_stmt)
2681 gimple note;
2682 gimple_stmt_iterator gsi;
2683 tree tracked_var;
2685 if (!gimple_in_ssa_p (id->src_cfun))
2686 return NULL;
2688 if (!MAY_HAVE_DEBUG_STMTS)
2689 return NULL;
2691 tracked_var = target_for_debug_bind (var);
2692 if (!tracked_var)
2693 return NULL;
2695 if (bb)
2697 gsi = gsi_last_bb (bb);
2698 if (!base_stmt && !gsi_end_p (gsi))
2699 base_stmt = gsi_stmt (gsi);
2702 note = gimple_build_debug_bind (tracked_var, value, base_stmt);
2704 if (bb)
2706 if (!gsi_end_p (gsi))
2707 gsi_insert_after (&gsi, note, GSI_SAME_STMT);
2708 else
2709 gsi_insert_before (&gsi, note, GSI_SAME_STMT);
2712 return note;
2715 static void
2716 insert_init_stmt (copy_body_data *id, basic_block bb, gimple init_stmt)
2718 /* If VAR represents a zero-sized variable, it's possible that the
2719 assignment statement may result in no gimple statements. */
2720 if (init_stmt)
2722 gimple_stmt_iterator si = gsi_last_bb (bb);
2724 /* We can end up with init statements that store to a non-register
2725 from a rhs with a conversion. Handle that here by forcing the
2726 rhs into a temporary. gimple_regimplify_operands is not
2727 prepared to do this for us. */
2728 if (!is_gimple_debug (init_stmt)
2729 && !is_gimple_reg (gimple_assign_lhs (init_stmt))
2730 && is_gimple_reg_type (TREE_TYPE (gimple_assign_lhs (init_stmt)))
2731 && gimple_assign_rhs_class (init_stmt) == GIMPLE_UNARY_RHS)
2733 tree rhs = build1 (gimple_assign_rhs_code (init_stmt),
2734 gimple_expr_type (init_stmt),
2735 gimple_assign_rhs1 (init_stmt));
2736 rhs = force_gimple_operand_gsi (&si, rhs, true, NULL_TREE, false,
2737 GSI_NEW_STMT);
2738 gimple_assign_set_rhs_code (init_stmt, TREE_CODE (rhs));
2739 gimple_assign_set_rhs1 (init_stmt, rhs);
2741 gsi_insert_after (&si, init_stmt, GSI_NEW_STMT);
2742 gimple_regimplify_operands (init_stmt, &si);
2744 if (!is_gimple_debug (init_stmt) && MAY_HAVE_DEBUG_STMTS)
2746 tree def = gimple_assign_lhs (init_stmt);
2747 insert_init_debug_bind (id, bb, def, def, init_stmt);
2752 /* Initialize parameter P with VALUE. If needed, produce init statement
2753 at the end of BB. When BB is NULL, we return the init statement to be
2754 output later. */
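/* A hedged sketch of the fast paths below (hypothetical names): when
   inlining "foo (5)" with "int foo (const int p)", P can be mapped
   straight to the constant 5; when inlining "foo (a_2)" in SSA form,
   the default definition of P can be mapped directly to a_2, so no
   initialization statement is needed at all.  */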
2755 static gimple
2756 setup_one_parameter (copy_body_data *id, tree p, tree value, tree fn,
2757 basic_block bb, tree *vars)
2759 gimple init_stmt = NULL;
2760 tree var;
2761 tree rhs = value;
2762 tree def = (gimple_in_ssa_p (cfun)
2763 ? ssa_default_def (id->src_cfun, p) : NULL);
2765 if (value
2766 && value != error_mark_node
2767 && !useless_type_conversion_p (TREE_TYPE (p), TREE_TYPE (value)))
2769 /* If we can match up types by promotion/demotion do so. */
2770 if (fold_convertible_p (TREE_TYPE (p), value))
2771 rhs = fold_convert (TREE_TYPE (p), value);
2772 else
2774 /* ??? For valid programs we should not end up here.
2775 Still if we end up with truly mismatched types here, fall back
2776 to using a VIEW_CONVERT_EXPR or a literal zero to not leak invalid
2777 GIMPLE to the following passes. */
2778 if (!is_gimple_reg_type (TREE_TYPE (value))
2779 || TYPE_SIZE (TREE_TYPE (p)) == TYPE_SIZE (TREE_TYPE (value)))
2780 rhs = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (p), value);
2781 else
2782 rhs = build_zero_cst (TREE_TYPE (p));
2786 /* Make an equivalent VAR_DECL. Note that we must NOT remap the type
2787 here since the type of this decl must be visible to the calling
2788 function. */
2789 var = copy_decl_to_var (p, id);
2791 /* Declare this new variable. */
2792 DECL_CHAIN (var) = *vars;
2793 *vars = var;
2795 /* Make gimplifier happy about this variable. */
2796 DECL_SEEN_IN_BIND_EXPR_P (var) = 1;
2798 /* If the parameter is never assigned to and has no SSA_NAMEs created,
2799 we would not need to create a new variable here at all, if it
2800 weren't for debug info. Still, we can just use the argument
2801 value. */
2802 if (TREE_READONLY (p)
2803 && !TREE_ADDRESSABLE (p)
2804 && value && !TREE_SIDE_EFFECTS (value)
2805 && !def)
2807 /* We may produce non-gimple trees by adding NOPs or introduce
2808 invalid sharing when the operand is not really constant.
2809 It is no big deal to prohibit constant propagation here, as
2810 we will constant propagate in the DOM1 pass anyway. */
2811 if (is_gimple_min_invariant (value)
2812 && useless_type_conversion_p (TREE_TYPE (p),
2813 TREE_TYPE (value))
2814 /* We have to be very careful about ADDR_EXPR. Make sure
2815 the base variable isn't a local variable of the inlined
2816 function, e.g., when doing recursive inlining, direct or
2817 mutually-recursive or whatever, which is why we don't
2818 just test whether fn == current_function_decl. */
2819 && ! self_inlining_addr_expr (value, fn))
2821 insert_decl_map (id, p, value);
2822 insert_debug_decl_map (id, p, var);
2823 return insert_init_debug_bind (id, bb, var, value, NULL);
2827 /* Register the VAR_DECL as the equivalent for the PARM_DECL;
2828 that way, when the PARM_DECL is encountered, it will be
2829 automatically replaced by the VAR_DECL. */
2830 insert_decl_map (id, p, var);
2832 /* Even if P was TREE_READONLY, the new VAR should not be.
2833 In the original code, we would have constructed a
2834 temporary, and then the function body would have never
2835 changed the value of P. However, now, we will be
2836 constructing VAR directly. The constructor body may
2837 change its value multiple times as it is being
2838 constructed. Therefore, it must not be TREE_READONLY;
2839 the back-end assumes that a TREE_READONLY variable is
2840 assigned to only once. */
2841 if (TYPE_NEEDS_CONSTRUCTING (TREE_TYPE (p)))
2842 TREE_READONLY (var) = 0;
2844 /* If there is no setup required and we are in SSA, take the easy route
2845 replacing all SSA names representing the function parameter by the
2846 SSA name passed to the function.
2848 We need to construct the map for the variable anyway, as it might be
2849 used in different SSA names when the parameter is set in the function.
2851 Do the replacement at -O0 for const arguments replaced by a constant.
2852 This is important for builtin_constant_p and other constructs requiring
2853 a constant argument to be visible in the inlined function body. */
2854 if (gimple_in_ssa_p (cfun) && rhs && def && is_gimple_reg (p)
2855 && (optimize
2856 || (TREE_READONLY (p)
2857 && is_gimple_min_invariant (rhs)))
2858 && (TREE_CODE (rhs) == SSA_NAME
2859 || is_gimple_min_invariant (rhs))
2860 && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (def))
2862 insert_decl_map (id, def, rhs);
2863 return insert_init_debug_bind (id, bb, var, rhs, NULL);
2866 /* If the value of the argument is never used, don't bother initializing
2867 it. */
2868 if (optimize && gimple_in_ssa_p (cfun) && !def && is_gimple_reg (p))
2870 gcc_assert (!value || !TREE_SIDE_EFFECTS (value));
2871 return insert_init_debug_bind (id, bb, var, rhs, NULL);
2874 /* Initialize this VAR_DECL from the equivalent argument. Convert
2875 the argument to the proper type in case it was promoted. */
2876 if (value)
2878 if (rhs == error_mark_node)
2880 insert_decl_map (id, p, var);
2881 return insert_init_debug_bind (id, bb, var, rhs, NULL);
2884 STRIP_USELESS_TYPE_CONVERSION (rhs);
2886 /* If we are in SSA form properly remap the default definition
2887 or assign to a dummy SSA name if the parameter is unused and
2888 we are not optimizing. */
2889 if (gimple_in_ssa_p (cfun) && is_gimple_reg (p))
2891 if (def)
2893 def = remap_ssa_name (def, id);
2894 init_stmt = gimple_build_assign (def, rhs);
2895 SSA_NAME_IS_DEFAULT_DEF (def) = 0;
2896 set_ssa_default_def (cfun, var, NULL);
2898 else if (!optimize)
2900 def = make_ssa_name (var, NULL);
2901 init_stmt = gimple_build_assign (def, rhs);
2904 else
2905 init_stmt = gimple_build_assign (var, rhs);
2907 if (bb && init_stmt)
2908 insert_init_stmt (id, bb, init_stmt);
2910 return init_stmt;
2913 /* Generate code to initialize the parameters of the function at the
2914 top of the stack in ID from the GIMPLE_CALL STMT. */
2916 static void
2917 initialize_inlined_parameters (copy_body_data *id, gimple stmt,
2918 tree fn, basic_block bb)
2920 tree parms;
2921 size_t i;
2922 tree p;
2923 tree vars = NULL_TREE;
2924 tree static_chain = gimple_call_chain (stmt);
2926 /* Figure out what the parameters are. */
2927 parms = DECL_ARGUMENTS (fn);
2929 /* Loop through the parameter declarations, replacing each with an
2930 equivalent VAR_DECL, appropriately initialized. */
2931 for (p = parms, i = 0; p; p = DECL_CHAIN (p), i++)
2933 tree val;
2934 val = i < gimple_call_num_args (stmt) ? gimple_call_arg (stmt, i) : NULL;
2935 setup_one_parameter (id, p, val, fn, bb, &vars);
2937 /* After remapping parameters remap their types. This has to be done
2938 in a second loop over all parameters to appropriately remap
2939 variable sized arrays when the size is specified in a
2940 parameter following the array. */
2941 for (p = parms, i = 0; p; p = DECL_CHAIN (p), i++)
2943 tree *varp = (tree *) pointer_map_contains (id->decl_map, p);
2944 if (varp
2945 && TREE_CODE (*varp) == VAR_DECL)
2947 tree def = (gimple_in_ssa_p (cfun) && is_gimple_reg (p)
2948 ? ssa_default_def (id->src_cfun, p) : NULL);
2949 tree var = *varp;
2950 TREE_TYPE (var) = remap_type (TREE_TYPE (var), id);
2951 /* Also remap the default definition if it was remapped
2952 to the default definition of the parameter replacement
2953 by the parameter setup. */
2954 if (def)
2956 tree *defp = (tree *) pointer_map_contains (id->decl_map, def);
2957 if (defp
2958 && TREE_CODE (*defp) == SSA_NAME
2959 && SSA_NAME_VAR (*defp) == var)
2960 TREE_TYPE (*defp) = TREE_TYPE (var);
2965 /* Initialize the static chain. */
2966 p = DECL_STRUCT_FUNCTION (fn)->static_chain_decl;
2967 gcc_assert (fn != current_function_decl);
2968 if (p)
2970 /* No static chain? Seems like a bug in tree-nested.c. */
2971 gcc_assert (static_chain);
2973 setup_one_parameter (id, p, static_chain, fn, bb, &vars);
2976 declare_inline_vars (id->block, vars);
2980 /* Declare a return variable to replace the RESULT_DECL for the
2981 function we are calling. An appropriate DECL_STMT is returned.
2982 The USE_STMT is filled to contain a use of the declaration to
2983 indicate the return value of the function.
2985 RETURN_SLOT, if non-null, is the place where to store the result. It
2986 is set only for CALL_EXPR_RETURN_SLOT_OPT. MODIFY_DEST, if non-null,
2987 was the LHS of the MODIFY_EXPR to which this call is the RHS.
2989 The return value is a (possibly null) value that holds the result
2990 as seen by the caller. */
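/* Sketch of the common cases (hypothetical caller code): for
   "d = foo ();", MODIFY_DEST is "d" and may be reused directly as the
   result variable when the callee cannot modify it; with
   CALL_EXPR_RETURN_SLOT_OPT, RETURN_SLOT names the caller-provided
   object that already holds the result.  */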
2992 static tree
2993 declare_return_variable (copy_body_data *id, tree return_slot, tree modify_dest,
2994 basic_block entry_bb)
2996 tree callee = id->src_fn;
2997 tree result = DECL_RESULT (callee);
2998 tree callee_type = TREE_TYPE (result);
2999 tree caller_type;
3000 tree var, use;
3002 /* Handle type-mismatches in the function declaration return type
3003 vs. the call expression. */
3004 if (modify_dest)
3005 caller_type = TREE_TYPE (modify_dest);
3006 else
3007 caller_type = TREE_TYPE (TREE_TYPE (callee));
3009 /* We don't need to do anything for functions that don't return anything. */
3010 if (VOID_TYPE_P (callee_type))
3011 return NULL_TREE;
3013 /* If there was a return slot, then the return value is the
3014 dereferenced address of that object. */
3015 if (return_slot)
3017 /* The front end shouldn't have used both return_slot and
3018 a modify expression. */
3019 gcc_assert (!modify_dest);
3020 if (DECL_BY_REFERENCE (result))
3022 tree return_slot_addr = build_fold_addr_expr (return_slot);
3023 STRIP_USELESS_TYPE_CONVERSION (return_slot_addr);
3025 /* We are going to construct *&return_slot and we can't do that
3026 for variables believed not to be addressable.
3028 FIXME: This check can possibly match, because values returned
3029 via return slot optimization are not believed to have their address
3030 taken by alias analysis. */
3031 gcc_assert (TREE_CODE (return_slot) != SSA_NAME);
3032 var = return_slot_addr;
3034 else
3036 var = return_slot;
3037 gcc_assert (TREE_CODE (var) != SSA_NAME);
3038 TREE_ADDRESSABLE (var) |= TREE_ADDRESSABLE (result);
3040 if ((TREE_CODE (TREE_TYPE (result)) == COMPLEX_TYPE
3041 || TREE_CODE (TREE_TYPE (result)) == VECTOR_TYPE)
3042 && !DECL_GIMPLE_REG_P (result)
3043 && DECL_P (var))
3044 DECL_GIMPLE_REG_P (var) = 0;
3045 use = NULL;
3046 goto done;
3049 /* All types requiring non-trivial constructors should have been handled. */
3050 gcc_assert (!TREE_ADDRESSABLE (callee_type));
3052 /* Attempt to avoid creating a new temporary variable. */
3053 if (modify_dest
3054 && TREE_CODE (modify_dest) != SSA_NAME)
3056 bool use_it = false;
3058 /* We can't use MODIFY_DEST if there's type promotion involved. */
3059 if (!useless_type_conversion_p (callee_type, caller_type))
3060 use_it = false;
3062 /* ??? If we're assigning to a variable sized type, then we must
3063 reuse the destination variable, because we've no good way to
3064 create variable sized temporaries at this point. */
3065 else if (TREE_CODE (TYPE_SIZE_UNIT (caller_type)) != INTEGER_CST)
3066 use_it = true;
3068 /* If the callee cannot possibly modify MODIFY_DEST, then we can
3069 reuse it as the result of the call directly. Don't do this if
3070 it would promote MODIFY_DEST to addressable. */
3071 else if (TREE_ADDRESSABLE (result))
3072 use_it = false;
3073 else
3075 tree base_m = get_base_address (modify_dest);
3077 /* If the base isn't a decl, then it's a pointer, and we don't
3078 know where that's going to go. */
3079 if (!DECL_P (base_m))
3080 use_it = false;
3081 else if (is_global_var (base_m))
3082 use_it = false;
3083 else if ((TREE_CODE (TREE_TYPE (result)) == COMPLEX_TYPE
3084 || TREE_CODE (TREE_TYPE (result)) == VECTOR_TYPE)
3085 && !DECL_GIMPLE_REG_P (result)
3086 && DECL_GIMPLE_REG_P (base_m))
3087 use_it = false;
3088 else if (!TREE_ADDRESSABLE (base_m))
3089 use_it = true;
3092 if (use_it)
3094 var = modify_dest;
3095 use = NULL;
3096 goto done;
3100 gcc_assert (TREE_CODE (TYPE_SIZE_UNIT (callee_type)) == INTEGER_CST);
3102 var = copy_result_decl_to_var (result, id);
3103 DECL_SEEN_IN_BIND_EXPR_P (var) = 1;
3105 /* Do not have the rest of GCC warn about this variable as it should
3106 not be visible to the user. */
3107 TREE_NO_WARNING (var) = 1;
3109 declare_inline_vars (id->block, var);
3111 /* Build the use expr. If the return type of the function was
3112 promoted, convert it back to the expected type. */
3113 use = var;
3114 if (!useless_type_conversion_p (caller_type, TREE_TYPE (var)))
3116 /* If we can match up types by promotion/demotion do so. */
3117 if (fold_convertible_p (caller_type, var))
3118 use = fold_convert (caller_type, var);
3119 else
3121 /* ??? For valid programs we should not end up here.
3122 Still if we end up with truly mismatched types here, fall back
3123 to using a MEM_REF to not leak invalid GIMPLE to the following
3124 passes. */
3125 /* Prevent var from being written into SSA form. */
3126 if (TREE_CODE (TREE_TYPE (var)) == VECTOR_TYPE
3127 || TREE_CODE (TREE_TYPE (var)) == COMPLEX_TYPE)
3128 DECL_GIMPLE_REG_P (var) = false;
3129 else if (is_gimple_reg_type (TREE_TYPE (var)))
3130 TREE_ADDRESSABLE (var) = true;
3131 use = fold_build2 (MEM_REF, caller_type,
3132 build_fold_addr_expr (var),
3133 build_int_cst (ptr_type_node, 0));
3137 STRIP_USELESS_TYPE_CONVERSION (use);
3139 if (DECL_BY_REFERENCE (result))
3141 TREE_ADDRESSABLE (var) = 1;
3142 var = build_fold_addr_expr (var);
3145 done:
3146 /* Register the VAR_DECL as the equivalent for the RESULT_DECL; that
3147 way, when the RESULT_DECL is encountered, it will be
3148 automatically replaced by the VAR_DECL.
3150 When returning by reference, ensure that RESULT_DECL remaps to
3151 gimple_val. */
3152 if (DECL_BY_REFERENCE (result)
3153 && !is_gimple_val (var))
3155 tree temp = create_tmp_var (TREE_TYPE (result), "retvalptr");
3156 insert_decl_map (id, result, temp);
3157 /* When RESULT_DECL is in SSA form, we need to remap and initialize
3158 its default_def SSA_NAME. */
3159 if (gimple_in_ssa_p (id->src_cfun)
3160 && is_gimple_reg (result))
3162 temp = make_ssa_name (temp, NULL);
3163 insert_decl_map (id, ssa_default_def (id->src_cfun, result), temp);
3165 insert_init_stmt (id, entry_bb, gimple_build_assign (temp, var));
3167 else
3168 insert_decl_map (id, result, var);
3170 /* Remember this so we can ignore it in remap_decls. */
3171 id->retvar = var;
3173 return use;
3176 /* Callback through walk_tree. Determine if a DECL_INITIAL makes reference
3177 to a local label. */
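/* The pattern being detected looks like (hypothetical user code):

     void f (void) { lab: ; static void *p = &&lab; ... }

   i.e. the initializer of a function-local static captures the
   address of a label local to FN.  */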
3179 static tree
3180 has_label_address_in_static_1 (tree *nodep, int *walk_subtrees, void *fnp)
3182 tree node = *nodep;
3183 tree fn = (tree) fnp;
3185 if (TREE_CODE (node) == LABEL_DECL && DECL_CONTEXT (node) == fn)
3186 return node;
3188 if (TYPE_P (node))
3189 *walk_subtrees = 0;
3191 return NULL_TREE;
3194 /* Determine if the function can be copied. If so return NULL. If
3195 not return a string describing the reason for failure. */
3197 static const char *
3198 copy_forbidden (struct function *fun, tree fndecl)
3200 const char *reason = fun->cannot_be_copied_reason;
3201 tree decl;
3202 unsigned ix;
3204 /* Only examine the function once. */
3205 if (fun->cannot_be_copied_set)
3206 return reason;
3208 /* We cannot copy a function that receives a non-local goto
3209 because we cannot remap the destination label used in the
3210 function that is performing the non-local goto. */
3211 /* ??? Actually, this should be possible, if we work at it.
3212 No doubt there's just a handful of places that simply
3213 assume it doesn't happen and don't substitute properly. */
3214 if (fun->has_nonlocal_label)
3216 reason = G_("function %q+F can never be copied "
3217 "because it receives a non-local goto");
3218 goto fail;
3221 FOR_EACH_LOCAL_DECL (fun, ix, decl)
3222 if (TREE_CODE (decl) == VAR_DECL
3223 && TREE_STATIC (decl)
3224 && !DECL_EXTERNAL (decl)
3225 && DECL_INITIAL (decl)
3226 && walk_tree_without_duplicates (&DECL_INITIAL (decl),
3227 has_label_address_in_static_1,
3228 fndecl))
3230 reason = G_("function %q+F can never be copied because it saves "
3231 "address of local label in a static variable");
3232 goto fail;
3235 fail:
3236 fun->cannot_be_copied_reason = reason;
3237 fun->cannot_be_copied_set = true;
3238 return reason;
3242 static const char *inline_forbidden_reason;
3244 /* A callback for walk_gimple_seq to handle statements. Returns non-null
3245 iff a function cannot be inlined. Also sets the reason why. */
3247 static tree
3248 inline_forbidden_p_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
3249 struct walk_stmt_info *wip)
3251 tree fn = (tree) wip->info;
3252 tree t;
3253 gimple stmt = gsi_stmt (*gsi);
3255 switch (gimple_code (stmt))
3257 case GIMPLE_CALL:
3258 /* Refuse to inline an alloca call unless the user explicitly forced it, as
3259 this may change the program's memory overhead drastically when the
3260 function using alloca is called in a loop. In the GCC present in
3261 SPEC2000, inlining into schedule_block caused it to require 2GB of
3262 RAM instead of 256MB. Don't do so for alloca calls emitted for
3263 VLA objects, as those can't cause unbounded growth (they're always
3264 wrapped inside stack_save/stack_restore regions). */
3265 if (gimple_alloca_call_p (stmt)
3266 && !gimple_call_alloca_for_var_p (stmt)
3267 && !lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn)))
3269 inline_forbidden_reason
3270 = G_("function %q+F can never be inlined because it uses "
3271 "alloca (override using the always_inline attribute)");
3272 *handled_ops_p = true;
3273 return fn;
3276 t = gimple_call_fndecl (stmt);
3277 if (t == NULL_TREE)
3278 break;
3280 /* We cannot inline functions that call setjmp. */
3281 if (setjmp_call_p (t))
3283 inline_forbidden_reason
3284 = G_("function %q+F can never be inlined because it uses setjmp");
3285 *handled_ops_p = true;
3286 return t;
3289 if (DECL_BUILT_IN_CLASS (t) == BUILT_IN_NORMAL)
3290 switch (DECL_FUNCTION_CODE (t))
3292 /* We cannot inline functions that take a variable number of
3293 arguments. */
3294 case BUILT_IN_VA_START:
3295 case BUILT_IN_NEXT_ARG:
3296 case BUILT_IN_VA_END:
3297 inline_forbidden_reason
3298 = G_("function %q+F can never be inlined because it "
3299 "uses variable argument lists");
3300 *handled_ops_p = true;
3301 return t;
3303 case BUILT_IN_LONGJMP:
3304 /* We can't inline functions that call __builtin_longjmp at
3305 all. The non-local goto machinery really requires the
3306 destination be in a different function. If we allow the
3307 function calling __builtin_longjmp to be inlined into the
3308 function calling __builtin_setjmp, Things will Go Awry. */
3309 inline_forbidden_reason
3310 = G_("function %q+F can never be inlined because "
3311 "it uses setjmp-longjmp exception handling");
3312 *handled_ops_p = true;
3313 return t;
3315 case BUILT_IN_NONLOCAL_GOTO:
3316 /* Similarly. */
3317 inline_forbidden_reason
3318 = G_("function %q+F can never be inlined because "
3319 "it uses non-local goto");
3320 *handled_ops_p = true;
3321 return t;
3323 case BUILT_IN_RETURN:
3324 case BUILT_IN_APPLY_ARGS:
3325 /* If a __builtin_apply_args caller would be inlined,
3326 it would be saving arguments of the function it has
3327 been inlined into. Similarly, __builtin_return would
3328 return from the function the inlined body has been inlined into. */
3329 inline_forbidden_reason
3330 = G_("function %q+F can never be inlined because "
3331 "it uses __builtin_return or __builtin_apply_args");
3332 *handled_ops_p = true;
3333 return t;
3335 default:
3336 break;
3338 break;
3340 case GIMPLE_GOTO:
3341 t = gimple_goto_dest (stmt);
3343 /* We will not inline a function which uses computed goto. The
3344 addresses of its local labels, which may be tucked into
3345 global storage, are of course not constant across
3346 instantiations, which causes unexpected behavior. */
3347 if (TREE_CODE (t) != LABEL_DECL)
3349 inline_forbidden_reason
3350 = G_("function %q+F can never be inlined "
3351 "because it contains a computed goto");
3352 *handled_ops_p = true;
3353 return t;
3355 break;
3357 default:
3358 break;
3361 *handled_ops_p = false;
3362 return NULL_TREE;
3365 /* Return true if FNDECL is a function that cannot be inlined into
3366 another one. */
3368 static bool
3369 inline_forbidden_p (tree fndecl)
3371 struct function *fun = DECL_STRUCT_FUNCTION (fndecl);
3372 struct walk_stmt_info wi;
3373 struct pointer_set_t *visited_nodes;
3374 basic_block bb;
3375 bool forbidden_p = false;
3377 /* First check for shared reasons not to copy the code. */
3378 inline_forbidden_reason = copy_forbidden (fun, fndecl);
3379 if (inline_forbidden_reason != NULL)
3380 return true;
3382 /* Next, walk the statements of the function looking for
3383 constructs we can't handle, or that are non-optimal for inlining. */
3384 visited_nodes = pointer_set_create ();
3385 memset (&wi, 0, sizeof (wi));
3386 wi.info = (void *) fndecl;
3387 wi.pset = visited_nodes;
3389 FOR_EACH_BB_FN (bb, fun)
3391 gimple ret;
3392 gimple_seq seq = bb_seq (bb);
3393 ret = walk_gimple_seq (seq, inline_forbidden_p_stmt, NULL, &wi);
3394 forbidden_p = (ret != NULL);
3395 if (forbidden_p)
3396 break;
3399 pointer_set_destroy (visited_nodes);
3400 return forbidden_p;
3403 /* Return false if the function FNDECL cannot be inlined on account of its
3404 attributes, true otherwise. */
3405 static bool
3406 function_attribute_inlinable_p (const_tree fndecl)
3408 if (targetm.attribute_table)
3410 const_tree a;
3412 for (a = DECL_ATTRIBUTES (fndecl); a; a = TREE_CHAIN (a))
3414 const_tree name = TREE_PURPOSE (a);
3415 int i;
3417 for (i = 0; targetm.attribute_table[i].name != NULL; i++)
3418 if (is_attribute_p (targetm.attribute_table[i].name, name))
3419 return targetm.function_attribute_inlinable_p (fndecl);
3423 return true;
3426 /* Returns nonzero if FN is a function that does not have any
3427 fundamental inline blocking properties. */
3429 bool
3430 tree_inlinable_function_p (tree fn)
3432 bool inlinable = true;
3433 bool do_warning;
3434 tree always_inline;
3436 /* If we've already decided this function shouldn't be inlined,
3437 there's no need to check again. */
3438 if (DECL_UNINLINABLE (fn))
3439 return false;
3441 /* We only warn for functions declared `inline' by the user. */
3442 do_warning = (warn_inline
3443 && DECL_DECLARED_INLINE_P (fn)
3444 && !DECL_NO_INLINE_WARNING_P (fn)
3445 && !DECL_IN_SYSTEM_HEADER (fn));
3447 always_inline = lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn));
3449 if (flag_no_inline
3450 && always_inline == NULL)
3452 if (do_warning)
3453 warning (OPT_Winline, "function %q+F can never be inlined because it "
3454 "is suppressed using -fno-inline", fn);
3455 inlinable = false;
3458 else if (!function_attribute_inlinable_p (fn))
3460 if (do_warning)
3461 warning (OPT_Winline, "function %q+F can never be inlined because it "
3462 "uses attributes conflicting with inlining", fn);
3463 inlinable = false;
3466 else if (inline_forbidden_p (fn))
3468 /* See if we should warn about uninlinable functions. Previously,
3469 some of these warnings would be issued while trying to expand
3470 the function inline, but that would cause multiple warnings
3471 about functions that would for example call alloca. But since
3472 this is a property of the function, just one warning is enough.
3473 As a bonus we can now give more details about the reason why a
3474 function is not inlinable. */
3475 if (always_inline)
3476 error (inline_forbidden_reason, fn);
3477 else if (do_warning)
3478 warning (OPT_Winline, inline_forbidden_reason, fn);
3480 inlinable = false;
3483 /* Squirrel away the result so that we don't have to check again. */
3484 DECL_UNINLINABLE (fn) = !inlinable;
3486 return inlinable;
3489 /* Estimate the cost of a memory move. Use machine dependent
3490 word size and take possible memcpy call into account. */
3492 int
3493 estimate_move_cost (tree type)
3495 HOST_WIDE_INT size;
3497 gcc_assert (!VOID_TYPE_P (type));
3499 if (TREE_CODE (type) == VECTOR_TYPE)
3501 enum machine_mode inner = TYPE_MODE (TREE_TYPE (type));
3502 enum machine_mode simd
3503 = targetm.vectorize.preferred_simd_mode (inner);
3504 int simd_mode_size = GET_MODE_SIZE (simd);
3505 return ((GET_MODE_SIZE (TYPE_MODE (type)) + simd_mode_size - 1)
3506 / simd_mode_size);
3509 size = int_size_in_bytes (type);
3511 if (size < 0 || size > MOVE_MAX_PIECES * MOVE_RATIO (!optimize_size))
3512 /* Cost of a memcpy call, 3 arguments and the call. */
3513 return 4;
3514 else
3515 return ((size + MOVE_MAX_PIECES - 1) / MOVE_MAX_PIECES);
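/* Illustrative numbers only: on a target where MOVE_MAX_PIECES is 8 and
   MOVE_RATIO is 4, a 16-byte structure costs (16 + 8 - 1) / 8 == 2,
   while a 64-byte structure exceeds the 8 * 4 == 32 byte limit and is
   charged a flat 4 (three memcpy arguments plus the call itself).  */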
3518 /* Returns the cost of operation CODE, according to WEIGHTS. */
3520 static int
3521 estimate_operator_cost (enum tree_code code, eni_weights *weights,
3522 tree op1 ATTRIBUTE_UNUSED, tree op2)
3524 switch (code)
3526 /* These are "free" conversions, or their presumed cost
3527 is folded into other operations. */
3528 case RANGE_EXPR:
3529 CASE_CONVERT:
3530 case COMPLEX_EXPR:
3531 case PAREN_EXPR:
3532 case VIEW_CONVERT_EXPR:
3533 return 0;
3535 /* Assign cost of 1 to usual operations.
3536 ??? We may consider mapping RTL costs to this. */
3537 case COND_EXPR:
3538 case VEC_COND_EXPR:
3539 case VEC_PERM_EXPR:
3541 case PLUS_EXPR:
3542 case POINTER_PLUS_EXPR:
3543 case MINUS_EXPR:
3544 case MULT_EXPR:
3545 case MULT_HIGHPART_EXPR:
3546 case FMA_EXPR:
3548 case ADDR_SPACE_CONVERT_EXPR:
3549 case FIXED_CONVERT_EXPR:
3550 case FIX_TRUNC_EXPR:
3552 case NEGATE_EXPR:
3553 case FLOAT_EXPR:
3554 case MIN_EXPR:
3555 case MAX_EXPR:
3556 case ABS_EXPR:
3558 case LSHIFT_EXPR:
3559 case RSHIFT_EXPR:
3560 case LROTATE_EXPR:
3561 case RROTATE_EXPR:
3562 case VEC_LSHIFT_EXPR:
3563 case VEC_RSHIFT_EXPR:
3565 case BIT_IOR_EXPR:
3566 case BIT_XOR_EXPR:
3567 case BIT_AND_EXPR:
3568 case BIT_NOT_EXPR:
3570 case TRUTH_ANDIF_EXPR:
3571 case TRUTH_ORIF_EXPR:
3572 case TRUTH_AND_EXPR:
3573 case TRUTH_OR_EXPR:
3574 case TRUTH_XOR_EXPR:
3575 case TRUTH_NOT_EXPR:
3577 case LT_EXPR:
3578 case LE_EXPR:
3579 case GT_EXPR:
3580 case GE_EXPR:
3581 case EQ_EXPR:
3582 case NE_EXPR:
3583 case ORDERED_EXPR:
3584 case UNORDERED_EXPR:
3586 case UNLT_EXPR:
3587 case UNLE_EXPR:
3588 case UNGT_EXPR:
3589 case UNGE_EXPR:
3590 case UNEQ_EXPR:
3591 case LTGT_EXPR:
3593 case CONJ_EXPR:
3595 case PREDECREMENT_EXPR:
3596 case PREINCREMENT_EXPR:
3597 case POSTDECREMENT_EXPR:
3598 case POSTINCREMENT_EXPR:
3600 case REALIGN_LOAD_EXPR:
3602 case REDUC_MAX_EXPR:
3603 case REDUC_MIN_EXPR:
3604 case REDUC_PLUS_EXPR:
3605 case WIDEN_SUM_EXPR:
3606 case WIDEN_MULT_EXPR:
3607 case DOT_PROD_EXPR:
3608 case WIDEN_MULT_PLUS_EXPR:
3609 case WIDEN_MULT_MINUS_EXPR:
3610 case WIDEN_LSHIFT_EXPR:
3612 case VEC_WIDEN_MULT_HI_EXPR:
3613 case VEC_WIDEN_MULT_LO_EXPR:
3614 case VEC_WIDEN_MULT_EVEN_EXPR:
3615 case VEC_WIDEN_MULT_ODD_EXPR:
3616 case VEC_UNPACK_HI_EXPR:
3617 case VEC_UNPACK_LO_EXPR:
3618 case VEC_UNPACK_FLOAT_HI_EXPR:
3619 case VEC_UNPACK_FLOAT_LO_EXPR:
3620 case VEC_PACK_TRUNC_EXPR:
3621 case VEC_PACK_SAT_EXPR:
3622 case VEC_PACK_FIX_TRUNC_EXPR:
3623 case VEC_WIDEN_LSHIFT_HI_EXPR:
3624 case VEC_WIDEN_LSHIFT_LO_EXPR:
3626 return 1;
3628 /* A few special cases of expensive operations. This is useful
3629 to avoid inlining functions having too many of these. */
3630 case TRUNC_DIV_EXPR:
3631 case CEIL_DIV_EXPR:
3632 case FLOOR_DIV_EXPR:
3633 case ROUND_DIV_EXPR:
3634 case EXACT_DIV_EXPR:
3635 case TRUNC_MOD_EXPR:
3636 case CEIL_MOD_EXPR:
3637 case FLOOR_MOD_EXPR:
3638 case ROUND_MOD_EXPR:
3639 case RDIV_EXPR:
3640 if (TREE_CODE (op2) != INTEGER_CST)
3641 return weights->div_mod_cost;
3642 return 1;
3644 default:
3645 /* We expect a copy assignment with no operator. */
3646 gcc_assert (get_gimple_rhs_class (code) == GIMPLE_SINGLE_RHS);
3647 return 0;
3652 /* Estimate number of instructions that will be created by expanding
3653 the statements in the statement sequence STMTS.
3654 WEIGHTS contains weights attributed to various constructs. */
3656 static
3657 int estimate_num_insns_seq (gimple_seq stmts, eni_weights *weights)
3659 int cost;
3660 gimple_stmt_iterator gsi;
3662 cost = 0;
3663 for (gsi = gsi_start (stmts); !gsi_end_p (gsi); gsi_next (&gsi))
3664 cost += estimate_num_insns (gsi_stmt (gsi), weights);
3666 return cost;
3670 /* Estimate number of instructions that will be created by expanding STMT.
3671 WEIGHTS contains weights attributed to various constructs. */
3673 int
3674 estimate_num_insns (gimple stmt, eni_weights *weights)
3676 unsigned cost, i;
3677 enum gimple_code code = gimple_code (stmt);
3678 tree lhs;
3679 tree rhs;
3681 switch (code)
3683 case GIMPLE_ASSIGN:
3684 /* Try to estimate the cost of assignments. We have two cases to
3685 deal with:
3686 1) Simple assignments to registers;
3687 2) Stores to things that must live in memory. This includes
3688 "normal" stores to scalars, but also assignments of large
3689 structures, or constructors of big arrays;
3691 Let us look at these two cases, assuming we have "a = b + C":
3692 <GIMPLE_ASSIGN <var_decl "a">
3693 <plus_expr <var_decl "b"> <constant C>>
3694 If "a" is a GIMPLE register, the assignment to it is free on almost
3695 any target, because "a" usually ends up in a real register. Hence
3696 the only cost of this expression comes from the PLUS_EXPR, and we
3697 can ignore the GIMPLE_ASSIGN.
3698 If "a" is not a GIMPLE register, the assignment to "a" will most
3699 likely be a real store, so the cost of the GIMPLE_ASSIGN is the cost
3700 of moving something into "a", which we compute using the function
3701 estimate_move_cost. */
3702 if (gimple_clobber_p (stmt))
3703 return 0; /* ={v} {CLOBBER} stmt expands to nothing. */
3705 lhs = gimple_assign_lhs (stmt);
3706 rhs = gimple_assign_rhs1 (stmt);
3708 cost = 0;
3710 /* Account for the cost of moving to / from memory. */
3711 if (gimple_store_p (stmt))
3712 cost += estimate_move_cost (TREE_TYPE (lhs));
3713 if (gimple_assign_load_p (stmt))
3714 cost += estimate_move_cost (TREE_TYPE (rhs));
3716 cost += estimate_operator_cost (gimple_assign_rhs_code (stmt), weights,
3717 gimple_assign_rhs1 (stmt),
3718 get_gimple_rhs_class (gimple_assign_rhs_code (stmt))
3719 == GIMPLE_BINARY_RHS
3720 ? gimple_assign_rhs2 (stmt) : NULL);
3721 break;
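/* Worked example (editorial): for "a = b + c" where A is a GIMPLE
   register, neither the store nor the load test above fires, so the
   statement costs just the PLUS_EXPR, i.e. 1.  For "*p = b + c" the
   store adds estimate_move_cost (TREE_TYPE (*p)) on top of that.  */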
3723 case GIMPLE_COND:
3724 cost = 1 + estimate_operator_cost (gimple_cond_code (stmt), weights,
3725 gimple_op (stmt, 0),
3726 gimple_op (stmt, 1));
3727 break;
3729 case GIMPLE_SWITCH:
3730 /* Take into account cost of the switch + guess 2 conditional jumps for
3731 each case label.
3733 TODO: once the switch expansion logic is sufficiently separated, we can
3734 do a better job of estimating the cost of the switch. */
3735 if (weights->time_based)
3736 cost = floor_log2 (gimple_switch_num_labels (stmt)) * 2;
3737 else
3738 cost = gimple_switch_num_labels (stmt) * 2;
3739 break;
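/* Worked example (editorial): a switch with 16 case labels is
   charged floor_log2 (16) * 2 == 8 when estimating time (a balanced
   decision tree), but 16 * 2 == 32 when estimating size, since each
   label may expand to a compare and a jump.  */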
3741 case GIMPLE_CALL:
3743 tree decl = gimple_call_fndecl (stmt);
3744 struct cgraph_node *node = NULL;
3746 /* Do not special-case builtins where we see the body.
3747 That just confuses the inliner. */
3748 if (!decl || !(node = cgraph_get_node (decl)) || node->definition)
3750 /* For builtins that are likely expanded to nothing or
3751 inlined, do not account for operand costs. */
3752 else if (is_simple_builtin (decl))
3753 return 0;
3754 else if (is_inexpensive_builtin (decl))
3755 return weights->target_builtin_call_cost;
3756 else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
3758 /* We canonicalize x * x to pow (x, 2.0) with -ffast-math, so
3759 specialize the cheap expansion we do here.
3760 ??? This asks for a more general solution. */
3761 switch (DECL_FUNCTION_CODE (decl))
3763 case BUILT_IN_POW:
3764 case BUILT_IN_POWF:
3765 case BUILT_IN_POWL:
3766 if (TREE_CODE (gimple_call_arg (stmt, 1)) == REAL_CST
3767 && REAL_VALUES_EQUAL
3768 (TREE_REAL_CST (gimple_call_arg (stmt, 1)), dconst2))
3769 return estimate_operator_cost (MULT_EXPR, weights,
3770 gimple_call_arg (stmt, 0),
3771 gimple_call_arg (stmt, 0));
3772 break;
3774 default:
3775 break;
3779 cost = node ? weights->call_cost : weights->indirect_call_cost;
3780 if (gimple_call_lhs (stmt))
3781 cost += estimate_move_cost (TREE_TYPE (gimple_call_lhs (stmt)));
3782 for (i = 0; i < gimple_call_num_args (stmt); i++)
3784 tree arg = gimple_call_arg (stmt, i);
3785 cost += estimate_move_cost (TREE_TYPE (arg));
3787 break;
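/* Editorial note: the POW special case above means that under
   -ffast-math a call such as "pow (x, 2.0)" is charged like the
   multiplication "x * x" it expands to (cost 1), instead of
   weights->call_cost plus the argument move costs.  */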
3790 case GIMPLE_RETURN:
3791 return weights->return_cost;
3793 case GIMPLE_GOTO:
3794 case GIMPLE_LABEL:
3795 case GIMPLE_NOP:
3796 case GIMPLE_PHI:
3797 case GIMPLE_PREDICT:
3798 case GIMPLE_DEBUG:
3799 return 0;
3801 case GIMPLE_ASM:
3803 int count = asm_str_count (gimple_asm_string (stmt));
3804 /* 1000 means infinity. This avoids overflows later
3805 with very long asm statements. */
3806 if (count > 1000)
3807 count = 1000;
3808 return count;
3811 case GIMPLE_RESX:
3812 /* This is either going to be an external function call with one
3813 argument, or two register copy statements plus a goto. */
3814 return 2;
3816 case GIMPLE_EH_DISPATCH:
3817 /* ??? This is going to turn into a switch statement. Ideally
3818 we'd have a look at the eh region and estimate the number of
3819 edges involved. */
3820 return 10;
3822 case GIMPLE_BIND:
3823 return estimate_num_insns_seq (gimple_bind_body (stmt), weights);
3825 case GIMPLE_EH_FILTER:
3826 return estimate_num_insns_seq (gimple_eh_filter_failure (stmt), weights);
3828 case GIMPLE_CATCH:
3829 return estimate_num_insns_seq (gimple_catch_handler (stmt), weights);
3831 case GIMPLE_TRY:
3832 return (estimate_num_insns_seq (gimple_try_eval (stmt), weights)
3833 + estimate_num_insns_seq (gimple_try_cleanup (stmt), weights));
3835 /* OpenMP directives are generally very expensive. */
3837 case GIMPLE_OMP_RETURN:
3838 case GIMPLE_OMP_SECTIONS_SWITCH:
3839 case GIMPLE_OMP_ATOMIC_STORE:
3840 case GIMPLE_OMP_CONTINUE:
3841 /* ...except these, which are cheap. */
3842 return 0;
3844 case GIMPLE_OMP_ATOMIC_LOAD:
3845 return weights->omp_cost;
3847 case GIMPLE_OMP_FOR:
3848 return (weights->omp_cost
3849 + estimate_num_insns_seq (gimple_omp_body (stmt), weights)
3850 + estimate_num_insns_seq (gimple_omp_for_pre_body (stmt), weights));
3852 case GIMPLE_OMP_PARALLEL:
3853 case GIMPLE_OMP_TASK:
3854 case GIMPLE_OMP_CRITICAL:
3855 case GIMPLE_OMP_MASTER:
3856 case GIMPLE_OMP_TASKGROUP:
3857 case GIMPLE_OMP_ORDERED:
3858 case GIMPLE_OMP_SECTION:
3859 case GIMPLE_OMP_SECTIONS:
3860 case GIMPLE_OMP_SINGLE:
3861 case GIMPLE_OMP_TARGET:
3862 case GIMPLE_OMP_TEAMS:
3863 return (weights->omp_cost
3864 + estimate_num_insns_seq (gimple_omp_body (stmt), weights));
3866 case GIMPLE_TRANSACTION:
3867 return (weights->tm_cost
3868 + estimate_num_insns_seq (gimple_transaction_body (stmt),
3869 weights));
3871 default:
3872 gcc_unreachable ();
3875 return cost;
3878 /* Estimate number of instructions that will be created by expanding
3879 function FNDECL. WEIGHTS contains weights attributed to various
3880 constructs. */
3882 int
3883 estimate_num_insns_fn (tree fndecl, eni_weights *weights)
3885 struct function *my_function = DECL_STRUCT_FUNCTION (fndecl);
3886 gimple_stmt_iterator bsi;
3887 basic_block bb;
3888 int n = 0;
3890 gcc_assert (my_function && my_function->cfg);
3891 FOR_EACH_BB_FN (bb, my_function)
3893 for (bsi = gsi_start_bb (bb); !gsi_end_p (bsi); gsi_next (&bsi))
3894 n += estimate_num_insns (gsi_stmt (bsi), weights);
3897 return n;
3901 /* Initializes weights used by estimate_num_insns. */
3903 void
3904 init_inline_once (void)
3906 eni_size_weights.call_cost = 1;
3907 eni_size_weights.indirect_call_cost = 3;
3908 eni_size_weights.target_builtin_call_cost = 1;
3909 eni_size_weights.div_mod_cost = 1;
3910 eni_size_weights.omp_cost = 40;
3911 eni_size_weights.tm_cost = 10;
3912 eni_size_weights.time_based = false;
3913 eni_size_weights.return_cost = 1;
3915 /* Estimating time for a call is difficult, since we have no idea what the
3916 called function does. In the current uses of eni_time_weights,
3917 underestimating the cost does less harm than overestimating it, so
3918 we choose a rather small value here. */
3919 eni_time_weights.call_cost = 10;
3920 eni_time_weights.indirect_call_cost = 15;
3921 eni_time_weights.target_builtin_call_cost = 1;
3922 eni_time_weights.div_mod_cost = 10;
3923 eni_time_weights.omp_cost = 40;
3924 eni_time_weights.tm_cost = 40;
3925 eni_time_weights.time_based = true;
3926 eni_time_weights.return_cost = 2;
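/* Illustrative sketch (editorial, not part of GCC; the helper name
   and the use of stderr are hypothetical): how the two weight sets
   above can be used to report both estimates for one statement
   sequence.  */
#if 0
static void
debug_seq_costs (gimple_seq seq)
{
  /* Walk SEQ once with size-based weights...  */
  int size = estimate_num_insns_seq (seq, &eni_size_weights);
  /* ...and once with time-based weights.  */
  int time = estimate_num_insns_seq (seq, &eni_time_weights);
  fprintf (stderr, "estimated size %d, estimated time %d\n", size, time);
}
#endif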
3929 /* Estimate the number of instructions in a gimple_seq. */
3931 int
3932 count_insns_seq (gimple_seq seq, eni_weights *weights)
3934 gimple_stmt_iterator gsi;
3935 int n = 0;
3936 for (gsi = gsi_start (seq); !gsi_end_p (gsi); gsi_next (&gsi))
3937 n += estimate_num_insns (gsi_stmt (gsi), weights);
3939 return n;
3943 /* Install new lexical TREE_BLOCK underneath 'current_block'. */
3945 static void
3946 prepend_lexical_block (tree current_block, tree new_block)
3948 BLOCK_CHAIN (new_block) = BLOCK_SUBBLOCKS (current_block);
3949 BLOCK_SUBBLOCKS (current_block) = new_block;
3950 BLOCK_SUPERCONTEXT (new_block) = current_block;
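/* Illustrative diagram (editorial): if CURRENT_BLOCK already has
   subblocks B1 -> B2, the function above yields

     CURRENT_BLOCK
       NEW_BLOCK -> B1 -> B2

   i.e. NEW_BLOCK becomes the first subblock and the old chain is
   reattached behind it.  */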
3953 /* Add local variables from CALLEE to CALLER. */
3955 static inline void
3956 add_local_variables (struct function *callee, struct function *caller,
3957 copy_body_data *id)
3959 tree var;
3960 unsigned ix;
3962 FOR_EACH_LOCAL_DECL (callee, ix, var)
3963 if (!can_be_nonlocal (var, id))
3965 tree new_var = remap_decl (var, id);
3967 /* Remap debug-expressions. */
3968 if (TREE_CODE (new_var) == VAR_DECL
3969 && DECL_HAS_DEBUG_EXPR_P (var)
3970 && new_var != var)
3972 tree tem = DECL_DEBUG_EXPR (var);
3973 bool old_regimplify = id->regimplify;
3974 id->remapping_type_depth++;
3975 walk_tree (&tem, copy_tree_body_r, id, NULL);
3976 id->remapping_type_depth--;
3977 id->regimplify = old_regimplify;
3978 SET_DECL_DEBUG_EXPR (new_var, tem);
3979 DECL_HAS_DEBUG_EXPR_P (new_var) = 1;
3981 add_local_decl (caller, new_var);
3985 /* If STMT is a GIMPLE_CALL, replace it with its inline expansion. */
3987 static bool
3988 expand_call_inline (basic_block bb, gimple stmt, copy_body_data *id)
3990 tree use_retvar;
3991 tree fn;
3992 struct pointer_map_t *st, *dst;
3993 tree return_slot;
3994 tree modify_dest;
3995 location_t saved_location;
3996 struct cgraph_edge *cg_edge;
3997 cgraph_inline_failed_t reason;
3998 basic_block return_block;
3999 edge e;
4000 gimple_stmt_iterator gsi, stmt_gsi;
4001 bool successfully_inlined = FALSE;
4002 bool purge_dead_abnormal_edges;
4004 /* Set input_location here so we get the right instantiation context
4005 if we call instantiate_decl from inlinable_function_p. */
4006 /* FIXME: instantiate_decl isn't called by inlinable_function_p. */
4007 saved_location = input_location;
4008 input_location = gimple_location (stmt);
4010 /* From here on, we're only interested in CALL_EXPRs. */
4011 if (gimple_code (stmt) != GIMPLE_CALL)
4012 goto egress;
4014 cg_edge = cgraph_edge (id->dst_node, stmt);
4015 gcc_checking_assert (cg_edge);
4016 /* First, see if we can figure out what function is being called.
4017 If we cannot, then there is no hope of inlining the function. */
4018 if (cg_edge->indirect_unknown_callee)
4019 goto egress;
4020 fn = cg_edge->callee->decl;
4021 gcc_checking_assert (fn);
4023 /* If FN is a declaration of a function in a nested scope that was
4024 globally declared inline, we don't set its DECL_INITIAL.
4025 However, we can't blindly follow DECL_ABSTRACT_ORIGIN because the
4026 C++ front-end uses it for cdtors to refer to their internal
4027 declarations, which are not real functions. Fortunately those
4028 don't have trees to be saved, so we can tell by checking their
4029 gimple_body. */
4030 if (!DECL_INITIAL (fn)
4031 && DECL_ABSTRACT_ORIGIN (fn)
4032 && gimple_has_body_p (DECL_ABSTRACT_ORIGIN (fn)))
4033 fn = DECL_ABSTRACT_ORIGIN (fn);
4035 /* Don't try to inline functions that are not well-suited to inlining. */
4036 if (cg_edge->inline_failed)
4038 reason = cg_edge->inline_failed;
4039 /* If this call was originally indirect, we do not want to emit any
4040 inlining related warnings or sorry messages because there are no
4041 guarantees regarding those. */
4042 if (cg_edge->indirect_inlining_edge)
4043 goto egress;
4045 if (lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn))
4046 /* For extern inline functions that get redefined we have always
4047 silently ignored the always_inline flag. Better behaviour would
4048 be to keep both bodies and use the extern inline body
4049 for inlining, but we can't do that because front ends overwrite
4050 the body. */
4051 && !cg_edge->callee->local.redefined_extern_inline
4052 /* During early inline pass, report only when optimization is
4053 not turned on. */
4054 && (cgraph_global_info_ready
4055 || !optimize)
4056 /* PR 20090218-1_0.c. Body can be provided by another module. */
4057 && (reason != CIF_BODY_NOT_AVAILABLE || !flag_generate_lto))
4059 error ("inlining failed in call to always_inline %q+F: %s", fn,
4060 cgraph_inline_failed_string (reason));
4061 error ("called from here");
4063 else if (warn_inline
4064 && DECL_DECLARED_INLINE_P (fn)
4065 && !DECL_NO_INLINE_WARNING_P (fn)
4066 && !DECL_IN_SYSTEM_HEADER (fn)
4067 && reason != CIF_UNSPECIFIED
4068 && !lookup_attribute ("noinline", DECL_ATTRIBUTES (fn))
4069 /* Do not warn about not inlined recursive calls. */
4070 && !cgraph_edge_recursive_p (cg_edge)
4071 /* Avoid warnings during early inline pass. */
4072 && cgraph_global_info_ready)
4074 warning (OPT_Winline, "inlining failed in call to %q+F: %s",
4075 fn, _(cgraph_inline_failed_string (reason)));
4076 warning (OPT_Winline, "called from here");
4078 goto egress;
4080 fn = cg_edge->callee->decl;
4081 cgraph_get_body (cg_edge->callee);
4083 #ifdef ENABLE_CHECKING
4084 if (cg_edge->callee->decl != id->dst_node->decl)
4085 verify_cgraph_node (cg_edge->callee);
4086 #endif
4088 /* We will be inlining this callee. */
4089 id->eh_lp_nr = lookup_stmt_eh_lp (stmt);
4091 /* Update the caller's EH personality. */
4092 if (DECL_FUNCTION_PERSONALITY (cg_edge->callee->decl))
4093 DECL_FUNCTION_PERSONALITY (cg_edge->caller->decl)
4094 = DECL_FUNCTION_PERSONALITY (cg_edge->callee->decl);
4096 /* Split the block holding the GIMPLE_CALL. */
4097 e = split_block (bb, stmt);
4098 bb = e->src;
4099 return_block = e->dest;
4100 remove_edge (e);
4102 /* split_block splits after the statement; work around this by
4103 moving the call into the second block manually. Not pretty,
4104 but seems easier than doing the CFG manipulation by hand
4105 when the GIMPLE_CALL is in the last statement of BB. */
4106 stmt_gsi = gsi_last_bb (bb);
4107 gsi_remove (&stmt_gsi, false);
4109 /* If the GIMPLE_CALL was in the last statement of BB, it may have
4110 been the source of abnormal edges. In this case, schedule
4111 the removal of dead abnormal edges. */
4112 gsi = gsi_start_bb (return_block);
4113 if (gsi_end_p (gsi))
4115 gsi_insert_after (&gsi, stmt, GSI_NEW_STMT);
4116 purge_dead_abnormal_edges = true;
4118 else
4120 gsi_insert_before (&gsi, stmt, GSI_NEW_STMT);
4121 purge_dead_abnormal_edges = false;
4124 stmt_gsi = gsi_start_bb (return_block);
4126 /* Build a block containing code to initialize the arguments, the
4127 actual inline expansion of the body, and a label for the return
4128 statements within the function to jump to. The type of the
4129 statement expression is the return type of the function call.
4130 ??? If the call does not have an associated block then we will
4131 remap all callee blocks to NULL, effectively dropping most of
4132 its debug information. This should only happen for calls to
4133 artificial decls inserted by the compiler itself. We need to
4134 either link the inlined blocks into the caller block tree or
4135 not refer to them in any way to not break GC for locations. */
4136 if (gimple_block (stmt))
4138 id->block = make_node (BLOCK);
4139 BLOCK_ABSTRACT_ORIGIN (id->block) = fn;
4140 BLOCK_SOURCE_LOCATION (id->block) = LOCATION_LOCUS (input_location);
4141 prepend_lexical_block (gimple_block (stmt), id->block);
4144 /* Local declarations will be replaced by their equivalents in this
4145 map. */
4146 st = id->decl_map;
4147 id->decl_map = pointer_map_create ();
4148 dst = id->debug_map;
4149 id->debug_map = NULL;
4151 /* Record the function we are about to inline. */
4152 id->src_fn = fn;
4153 id->src_node = cg_edge->callee;
4154 id->src_cfun = DECL_STRUCT_FUNCTION (fn);
4155 id->gimple_call = stmt;
4157 gcc_assert (!id->src_cfun->after_inlining);
4159 id->entry_bb = bb;
4160 if (lookup_attribute ("cold", DECL_ATTRIBUTES (fn)))
4162 gimple_stmt_iterator si = gsi_last_bb (bb);
4163 gsi_insert_after (&si, gimple_build_predict (PRED_COLD_FUNCTION,
4164 NOT_TAKEN),
4165 GSI_NEW_STMT);
4167 initialize_inlined_parameters (id, stmt, fn, bb);
4169 if (DECL_INITIAL (fn))
4171 if (gimple_block (stmt))
4173 tree *var;
4175 prepend_lexical_block (id->block,
4176 remap_blocks (DECL_INITIAL (fn), id));
4177 gcc_checking_assert (BLOCK_SUBBLOCKS (id->block)
4178 && (BLOCK_CHAIN (BLOCK_SUBBLOCKS (id->block))
4179 == NULL_TREE));
4180 /* Move vars for PARM_DECLs from DECL_INITIAL block to id->block,
4181 otherwise for DWARF DW_TAG_formal_parameter will not be children of
4182 DW_TAG_inlined_subroutine, but of a DW_TAG_lexical_block
4183 under it. The parameters can be then evaluated in the debugger,
4184 but don't show in backtraces. */
4185 for (var = &BLOCK_VARS (BLOCK_SUBBLOCKS (id->block)); *var; )
4186 if (TREE_CODE (DECL_ORIGIN (*var)) == PARM_DECL)
4188 tree v = *var;
4189 *var = TREE_CHAIN (v);
4190 TREE_CHAIN (v) = BLOCK_VARS (id->block);
4191 BLOCK_VARS (id->block) = v;
4193 else
4194 var = &TREE_CHAIN (*var);
4196 else
4197 remap_blocks_to_null (DECL_INITIAL (fn), id);
4200 /* Return statements in the function body will be replaced by jumps
4201 to the RET_LABEL. */
4202 gcc_assert (DECL_INITIAL (fn));
4203 gcc_assert (TREE_CODE (DECL_INITIAL (fn)) == BLOCK);
4205 /* Find the LHS to which the result of this call is assigned. */
4206 return_slot = NULL;
4207 if (gimple_call_lhs (stmt))
4209 modify_dest = gimple_call_lhs (stmt);
4211 /* The function we are inlining might not return a value, in
4212 which case the front end will already have issued a warning
4213 that the function does not return a value. The optimizers
4214 would then see that the variable to which the value is
4215 assigned was not initialized; we do not want to issue a
4216 second warning about that uninitialized variable. */
4217 if (DECL_P (modify_dest))
4218 TREE_NO_WARNING (modify_dest) = 1;
4220 if (gimple_call_return_slot_opt_p (stmt))
4222 return_slot = modify_dest;
4223 modify_dest = NULL;
4226 else
4227 modify_dest = NULL;
4229 /* If we are inlining a call to the C++ operator new, we don't want
4230 to use type based alias analysis on the return value. Otherwise
4231 we may get confused if the compiler sees that the inlined new
4232 function returns a pointer which was just deleted. See bug
4233 33407. */
4234 if (DECL_IS_OPERATOR_NEW (fn))
4236 return_slot = NULL;
4237 modify_dest = NULL;
4240 /* Declare the return variable for the function. */
4241 use_retvar = declare_return_variable (id, return_slot, modify_dest, bb);
4243 /* Add local vars in this inlined callee to caller. */
4244 add_local_variables (id->src_cfun, cfun, id);
4246 if (dump_file && (dump_flags & TDF_DETAILS))
4248 fprintf (dump_file, "Inlining ");
4249 print_generic_expr (dump_file, id->src_fn, 0);
4250 fprintf (dump_file, " to ");
4251 print_generic_expr (dump_file, id->dst_fn, 0);
4252 fprintf (dump_file, " with frequency %i\n", cg_edge->frequency);
4255 /* This is it. Duplicate the callee body. Assume callee is
4256 pre-gimplified. Note that we must not alter the caller
4257 function in any way before this point, as this CALL_EXPR may be
4258 a self-referential call; if we're calling ourselves, we need to
4259 duplicate our body before altering anything. */
4260 copy_body (id, bb->count,
4261 GCOV_COMPUTE_SCALE (cg_edge->frequency, CGRAPH_FREQ_BASE),
4262 bb, return_block, NULL);
4264 /* Reset the escaped solution. */
4265 if (cfun->gimple_df)
4266 pt_solution_reset (&cfun->gimple_df->escaped);
4268 /* Clean up. */
4269 if (id->debug_map)
4271 pointer_map_destroy (id->debug_map);
4272 id->debug_map = dst;
4274 pointer_map_destroy (id->decl_map);
4275 id->decl_map = st;
4277 /* Unlink the call's virtual operands before replacing it. */
4278 unlink_stmt_vdef (stmt);
4280 /* If the inlined function returns a result that we care about,
4281 substitute the GIMPLE_CALL with an assignment of the return
4282 variable to the LHS of the call. That is, if STMT was
4283 'a = foo (...)', substitute the call with 'a = USE_RETVAR'. */
4284 if (use_retvar && gimple_call_lhs (stmt))
4286 gimple old_stmt = stmt;
4287 stmt = gimple_build_assign (gimple_call_lhs (stmt), use_retvar);
4288 gsi_replace (&stmt_gsi, stmt, false);
4289 maybe_clean_or_replace_eh_stmt (old_stmt, stmt);
4291 else
4293 /* Handle the case of inlining a function with no return
4294 statement, which causes the return value to become undefined. */
4295 if (gimple_call_lhs (stmt)
4296 && TREE_CODE (gimple_call_lhs (stmt)) == SSA_NAME)
4298 tree name = gimple_call_lhs (stmt);
4299 tree var = SSA_NAME_VAR (name);
4300 tree def = ssa_default_def (cfun, var);
4302 if (def)
4304 /* If the variable is used undefined, make this name
4305 undefined via a move. */
4306 stmt = gimple_build_assign (gimple_call_lhs (stmt), def);
4307 gsi_replace (&stmt_gsi, stmt, true);
4309 else
4311 /* Otherwise make this variable undefined. */
4312 gsi_remove (&stmt_gsi, true);
4313 set_ssa_default_def (cfun, var, name);
4314 SSA_NAME_DEF_STMT (name) = gimple_build_nop ();
4317 else
4318 gsi_remove (&stmt_gsi, true);
4321 if (purge_dead_abnormal_edges)
4323 gimple_purge_dead_eh_edges (return_block);
4324 gimple_purge_dead_abnormal_call_edges (return_block);
4327 /* If the value of the new expression is ignored, that's OK. We
4328 don't warn about this for CALL_EXPRs, so we shouldn't warn about
4329 the equivalent inlined version either. */
4330 if (is_gimple_assign (stmt))
4332 gcc_assert (gimple_assign_single_p (stmt)
4333 || CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt)));
4334 TREE_USED (gimple_assign_rhs1 (stmt)) = 1;
4337 /* Output the inlining info for this abstract function, since it has been
4338 inlined. If we don't do this now, we can lose the information about the
4339 variables in the function when the blocks get blown away as soon as we
4340 remove the cgraph node. */
4341 if (gimple_block (stmt))
4342 (*debug_hooks->outlining_inline_function) (cg_edge->callee->decl);
4344 /* Update callgraph if needed. */
4345 cgraph_remove_node (cg_edge->callee);
4347 id->block = NULL_TREE;
4348 successfully_inlined = TRUE;
4350 egress:
4351 input_location = saved_location;
4352 return successfully_inlined;
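/* Illustrative sketch (editorial, not from the GCC sources) of the
   transformation performed above for a statement "a = foo (b);":

     before:                     after:
       bb:                         bb:
         ...                         ...
         a = foo (b);                <initialized copies of foo's
         rest                         parameters, then a copy of
                                      foo's body producing RETVAL>
                                   return_block:
                                     a = RETVAL;
                                     rest

   The GIMPLE_CALL itself is replaced by an assignment from the
   declared return variable, or removed entirely when the result is
   unused or the callee never returns a value.  */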
4355 /* Expand call statements found in basic block BB.
4356 We can only have CALL_EXPRs as the "toplevel" tree code or nested
4357 in a MODIFY_EXPR. */
4359 static bool
4360 gimple_expand_calls_inline (basic_block bb, copy_body_data *id)
4362 gimple_stmt_iterator gsi;
4364 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
4366 gimple stmt = gsi_stmt (gsi);
4368 if (is_gimple_call (stmt)
4369 && expand_call_inline (bb, stmt, id))
4370 return true;
4373 return false;
4377 /* Walk all basic blocks created after FIRST and try to fold every statement
4378 in the STATEMENTS pointer set. */
4380 static void
4381 fold_marked_statements (int first, struct pointer_set_t *statements)
4383 for (; first < n_basic_blocks; first++)
4384 if (BASIC_BLOCK (first))
4386 gimple_stmt_iterator gsi;
4388 for (gsi = gsi_start_bb (BASIC_BLOCK (first));
4389 !gsi_end_p (gsi);
4390 gsi_next (&gsi))
4391 if (pointer_set_contains (statements, gsi_stmt (gsi)))
4393 gimple old_stmt = gsi_stmt (gsi);
4394 tree old_decl = is_gimple_call (old_stmt) ? gimple_call_fndecl (old_stmt) : 0;
4396 if (old_decl && DECL_BUILT_IN (old_decl))
4398 /* Folding builtins can create multiple instructions;
4399 we need to look at all of them. */
4400 gimple_stmt_iterator i2 = gsi;
4401 gsi_prev (&i2);
4402 if (fold_stmt (&gsi))
4404 gimple new_stmt;
4405 /* If a builtin at the end of a bb folded into nothing,
4406 the following loop won't work. */
4407 if (gsi_end_p (gsi))
4409 cgraph_update_edges_for_call_stmt (old_stmt,
4410 old_decl, NULL);
4411 break;
4413 if (gsi_end_p (i2))
4414 i2 = gsi_start_bb (BASIC_BLOCK (first));
4415 else
4416 gsi_next (&i2);
4417 while (1)
4419 new_stmt = gsi_stmt (i2);
4420 update_stmt (new_stmt);
4421 cgraph_update_edges_for_call_stmt (old_stmt, old_decl,
4422 new_stmt);
4424 if (new_stmt == gsi_stmt (gsi))
4426 /* It is okay to check only the very last
4427 of these statements. If it is a throwing
4428 statement nothing will change. If it isn't,
4429 this can remove EH edges. The only way this
4430 could be wrong is if some intermediate stmts
4431 throw but not the last one; that would mean
4432 we'd have to split the block, which we can't
4433 do here and we'd lose anyway. And as builtins
4434 probably never throw, this all
4435 is moot anyway. */
4436 if (maybe_clean_or_replace_eh_stmt (old_stmt,
4437 new_stmt))
4438 gimple_purge_dead_eh_edges (BASIC_BLOCK (first));
4439 break;
4441 gsi_next (&i2);
4445 else if (fold_stmt (&gsi))
4447 /* Re-read the statement from GSI as fold_stmt() may
4448 have changed it. */
4449 gimple new_stmt = gsi_stmt (gsi);
4450 update_stmt (new_stmt);
4452 if (is_gimple_call (old_stmt)
4453 || is_gimple_call (new_stmt))
4454 cgraph_update_edges_for_call_stmt (old_stmt, old_decl,
4455 new_stmt);
4457 if (maybe_clean_or_replace_eh_stmt (old_stmt, new_stmt))
4458 gimple_purge_dead_eh_edges (BASIC_BLOCK (first));
4464 /* Return true if BB has at least one abnormal outgoing edge. */
4466 static inline bool
4467 has_abnormal_outgoing_edge_p (basic_block bb)
4469 edge e;
4470 edge_iterator ei;
4472 FOR_EACH_EDGE (e, ei, bb->succs)
4473 if (e->flags & EDGE_ABNORMAL)
4474 return true;
4476 return false;
4479 /* Expand calls to inline functions in the body of FN. */
4481 unsigned int
4482 optimize_inline_calls (tree fn)
4484 copy_body_data id;
4485 basic_block bb;
4486 int last = n_basic_blocks;
4487 struct gimplify_ctx gctx;
4488 bool inlined_p = false;
4490 /* Clear out ID. */
4491 memset (&id, 0, sizeof (id));
4493 id.src_node = id.dst_node = cgraph_get_node (fn);
4494 gcc_assert (id.dst_node->definition);
4495 id.dst_fn = fn;
4496 /* Or any functions that aren't finished yet. */
4497 if (current_function_decl)
4498 id.dst_fn = current_function_decl;
4500 id.copy_decl = copy_decl_maybe_to_var;
4501 id.transform_call_graph_edges = CB_CGE_DUPLICATE;
4502 id.transform_new_cfg = false;
4503 id.transform_return_to_modify = true;
4504 id.transform_parameter = true;
4505 id.transform_lang_insert_block = NULL;
4506 id.statements_to_fold = pointer_set_create ();
4508 push_gimplify_context (&gctx);
4510 /* We make no attempts to keep dominance info up-to-date. */
4511 free_dominance_info (CDI_DOMINATORS);
4512 free_dominance_info (CDI_POST_DOMINATORS);
4514 /* Register specific gimple functions. */
4515 gimple_register_cfg_hooks ();
4517 /* Reach the trees by walking over the CFG, and note the
4518 enclosing basic-blocks in the call edges. */
4519 /* We walk the blocks going forward, because inlined function bodies
4520 will split id->current_basic_block, and the new blocks will
4521 follow it; we'll trudge through them, processing their CALL_EXPRs
4522 along the way. */
4523 FOR_EACH_BB (bb)
4524 inlined_p |= gimple_expand_calls_inline (bb, &id);
4526 pop_gimplify_context (NULL);
4528 #ifdef ENABLE_CHECKING
4530 struct cgraph_edge *e;
4532 verify_cgraph_node (id.dst_node);
4534 /* Double check that we inlined everything we are supposed to inline. */
4535 for (e = id.dst_node->callees; e; e = e->next_callee)
4536 gcc_assert (e->inline_failed);
4538 #endif
4540 /* Fold queued statements. */
4541 fold_marked_statements (last, id.statements_to_fold);
4542 pointer_set_destroy (id.statements_to_fold);
4544 gcc_assert (!id.debug_stmts.exists ());
4546 /* If we didn't inline into the function there is nothing to do. */
4547 if (!inlined_p)
4548 return 0;
4550 /* Renumber the lexical scoping (non-code) blocks consecutively. */
4551 number_blocks (fn);
4553 delete_unreachable_blocks_update_callgraph (&id);
4554 #ifdef ENABLE_CHECKING
4555 verify_cgraph_node (id.dst_node);
4556 #endif
4558 /* It would be nice to check SSA/CFG/statement consistency here, but it is
4559 not possible yet - the IPA passes might make various functions not
4560 throw, and they don't care to proactively update local EH info. This is
4561 done later in the fixup_cfg pass, which also executes the verification. */
4562 return (TODO_update_ssa
4563 | TODO_cleanup_cfg
4564 | (gimple_in_ssa_p (cfun) ? TODO_remove_unused_locals : 0)
4565 | (gimple_in_ssa_p (cfun) ? TODO_update_address_taken : 0)
4566 | (profile_status != PROFILE_ABSENT ? TODO_rebuild_frequencies : 0));
4569 /* Passed to walk_tree. Copies the node pointed to, if appropriate. */
4571 tree
4572 copy_tree_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
4574 enum tree_code code = TREE_CODE (*tp);
4575 enum tree_code_class cl = TREE_CODE_CLASS (code);
4577 /* We make copies of most nodes. */
4578 if (IS_EXPR_CODE_CLASS (cl)
4579 || code == TREE_LIST
4580 || code == TREE_VEC
4581 || code == TYPE_DECL
4582 || code == OMP_CLAUSE)
4584 /* Because the chain gets clobbered when we make a copy, we save it
4585 here. */
4586 tree chain = NULL_TREE, new_tree;
4588 if (CODE_CONTAINS_STRUCT (code, TS_COMMON))
4589 chain = TREE_CHAIN (*tp);
4591 /* Copy the node. */
4592 new_tree = copy_node (*tp);
4594 *tp = new_tree;
4596 /* Now, restore the chain, if appropriate. That will cause
4597 walk_tree to walk into the chain as well. */
4598 if (code == PARM_DECL
4599 || code == TREE_LIST
4600 || code == OMP_CLAUSE)
4601 TREE_CHAIN (*tp) = chain;
4603 /* For now, we don't update BLOCKs when we make copies. So, we
4604 have to nullify all BIND_EXPRs. */
4605 if (TREE_CODE (*tp) == BIND_EXPR)
4606 BIND_EXPR_BLOCK (*tp) = NULL_TREE;
4608 else if (code == CONSTRUCTOR)
4610 /* CONSTRUCTOR nodes need special handling because
4611 we need to duplicate the vector of elements. */
4612 tree new_tree;
4614 new_tree = copy_node (*tp);
4615 CONSTRUCTOR_ELTS (new_tree) = vec_safe_copy (CONSTRUCTOR_ELTS (*tp));
4616 *tp = new_tree;
4618 else if (code == STATEMENT_LIST)
4619 /* We used to just abort on STATEMENT_LIST, but we can run into them
4620 with statement-expressions (c++/40975). */
4621 copy_statement_list (tp);
4622 else if (TREE_CODE_CLASS (code) == tcc_type)
4623 *walk_subtrees = 0;
4624 else if (TREE_CODE_CLASS (code) == tcc_declaration)
4625 *walk_subtrees = 0;
4626 else if (TREE_CODE_CLASS (code) == tcc_constant)
4627 *walk_subtrees = 0;
4628 return NULL_TREE;
4631 /* The SAVE_EXPR pointed to by TP is being copied. If ST contains
4632 information indicating to what new SAVE_EXPR this one should be mapped,
4633 use that one. Otherwise, create a new node and enter it in ST. FN is
4634 the function into which the copy will be placed. */
4636 static void
4637 remap_save_expr (tree *tp, void *st_, int *walk_subtrees)
4639 struct pointer_map_t *st = (struct pointer_map_t *) st_;
4640 tree *n;
4641 tree t;
4643 /* See if we already encountered this SAVE_EXPR. */
4644 n = (tree *) pointer_map_contains (st, *tp);
4646 /* If we didn't already remap this SAVE_EXPR, do so now. */
4647 if (!n)
4649 t = copy_node (*tp);
4651 /* Remember this SAVE_EXPR. */
4652 *pointer_map_insert (st, *tp) = t;
4653 /* Make sure we don't remap an already-remapped SAVE_EXPR. */
4654 *pointer_map_insert (st, t) = t;
4656 else
4658 /* We've already walked into this SAVE_EXPR; don't do it again. */
4659 *walk_subtrees = 0;
4660 t = *n;
4663 /* Replace this SAVE_EXPR with the copy. */
4664 *tp = t;
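/* Illustrative note (editorial): the two insertions above make the
   mapping idempotent.  After the first encounter, both the original
   SAVE_EXPR and its copy T map to T, so walking into the copy later
   cannot trigger a second copy.  */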
4667 /* Called via walk_gimple_seq. If *GSIP points to a GIMPLE_LABEL for a local
4668 label, copies the declaration and enters it in the decl map of the
4669 copy_body_data passed in WI->INFO. */
4671 static tree
4672 mark_local_labels_stmt (gimple_stmt_iterator *gsip,
4673 bool *handled_ops_p ATTRIBUTE_UNUSED,
4674 struct walk_stmt_info *wi)
4676 copy_body_data *id = (copy_body_data *) wi->info;
4677 gimple stmt = gsi_stmt (*gsip);
4679 if (gimple_code (stmt) == GIMPLE_LABEL)
4681 tree decl = gimple_label_label (stmt);
4683 /* Copy the decl and remember the copy. */
4684 insert_decl_map (id, decl, id->copy_decl (decl, id));
4687 return NULL_TREE;
4691 /* Called via walk_gimple_seq by copy_gimple_seq_and_replace_local.
4692 Using the decl map in the copy_body_data passed through WI->INFO,
4693 remaps all local declarations to appropriate replacements in gimple
4694 operands. */
4696 static tree
4697 replace_locals_op (tree *tp, int *walk_subtrees, void *data)
4699 struct walk_stmt_info *wi = (struct walk_stmt_info*) data;
4700 copy_body_data *id = (copy_body_data *) wi->info;
4701 struct pointer_map_t *st = id->decl_map;
4702 tree *n;
4703 tree expr = *tp;
4705 /* Only a local declaration (variable or label). */
4706 if ((TREE_CODE (expr) == VAR_DECL
4707 && !TREE_STATIC (expr))
4708 || TREE_CODE (expr) == LABEL_DECL)
4710 /* Lookup the declaration. */
4711 n = (tree *) pointer_map_contains (st, expr);
4713 /* If it's there, remap it. */
4714 if (n)
4715 *tp = *n;
4716 *walk_subtrees = 0;
4718 else if (TREE_CODE (expr) == STATEMENT_LIST
4719 || TREE_CODE (expr) == BIND_EXPR
4720 || TREE_CODE (expr) == SAVE_EXPR)
4721 gcc_unreachable ();
4722 else if (TREE_CODE (expr) == TARGET_EXPR)
4724 /* Don't mess with a TARGET_EXPR that hasn't been expanded.
4725 It's OK for this to happen if it was part of a subtree that
4726 isn't immediately expanded, such as operand 2 of another
4727 TARGET_EXPR. */
4728 if (!TREE_OPERAND (expr, 1))
4730 TREE_OPERAND (expr, 1) = TREE_OPERAND (expr, 3);
4731 TREE_OPERAND (expr, 3) = NULL_TREE;
4735 /* Keep iterating. */
4736 return NULL_TREE;
4740 /* Called via walk_gimple_seq by copy_gimple_seq_and_replace_local.
4741 Using the decl map in the copy_body_data passed through WI->INFO,
4742 remaps all local declarations to appropriate replacements in gimple
4743 statements. */
4745 static tree
4746 replace_locals_stmt (gimple_stmt_iterator *gsip,
4747 bool *handled_ops_p ATTRIBUTE_UNUSED,
4748 struct walk_stmt_info *wi)
4750 copy_body_data *id = (copy_body_data *) wi->info;
4751 gimple stmt = gsi_stmt (*gsip);
4753 if (gimple_code (stmt) == GIMPLE_BIND)
4755 tree block = gimple_bind_block (stmt);
4757 if (block)
4759 remap_block (&block, id);
4760 gimple_bind_set_block (stmt, block);
4763 /* This will remap a lot of the same decls again, but this should be
4764 harmless. */
4765 if (gimple_bind_vars (stmt))
4766 gimple_bind_set_vars (stmt, remap_decls (gimple_bind_vars (stmt),
4767 NULL, id));
4770 /* Keep iterating. */
4771 return NULL_TREE;
4775 /* Copies everything in SEQ and replaces variables and labels local to
4776 current_function_decl. */
4778 gimple_seq
4779 copy_gimple_seq_and_replace_locals (gimple_seq seq)
4781 copy_body_data id;
4782 struct walk_stmt_info wi;
4783 struct pointer_set_t *visited;
4784 gimple_seq copy;
4786 /* There's nothing to do for NULL_TREE. */
4787 if (seq == NULL)
4788 return seq;
4790 /* Set up ID. */
4791 memset (&id, 0, sizeof (id));
4792 id.src_fn = current_function_decl;
4793 id.dst_fn = current_function_decl;
4794 id.decl_map = pointer_map_create ();
4795 id.debug_map = NULL;
4797 id.copy_decl = copy_decl_no_change;
4798 id.transform_call_graph_edges = CB_CGE_DUPLICATE;
4799 id.transform_new_cfg = false;
4800 id.transform_return_to_modify = false;
4801 id.transform_parameter = false;
4802 id.transform_lang_insert_block = NULL;
4804 /* Walk the tree once to find local labels. */
4805 memset (&wi, 0, sizeof (wi));
4806 visited = pointer_set_create ();
4807 wi.info = &id;
4808 wi.pset = visited;
4809 walk_gimple_seq (seq, mark_local_labels_stmt, NULL, &wi);
4810 pointer_set_destroy (visited);
4812 copy = gimple_seq_copy (seq);
4814 /* Walk the copy, remapping decls. */
4815 memset (&wi, 0, sizeof (wi));
4816 wi.info = &id;
4817 walk_gimple_seq (copy, replace_locals_stmt, replace_locals_op, &wi);
4819 /* Clean up. */
4820 pointer_map_destroy (id.decl_map);
4821 if (id.debug_map)
4822 pointer_map_destroy (id.debug_map);
4824 return copy;
4828 /* Allow someone to determine if SEARCH is a child of TOP from gdb. */
4830 static tree
4831 debug_find_tree_1 (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED, void *data)
4833 if (*tp == data)
4834 return (tree) data;
4835 else
4836 return NULL;
4839 DEBUG_FUNCTION bool
4840 debug_find_tree (tree top, tree search)
4842 return walk_tree_without_duplicates (&top, debug_find_tree_1, search) != 0;
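/* Illustrative usage (editorial; the session is hypothetical): with
   cc1 stopped under gdb, something like

     (gdb) print debug_find_tree (some_expr, some_subtree)

   prints nonzero iff SOME_SUBTREE occurs somewhere within the tree
   SOME_EXPR.  */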
4846 /* Declare the variables created by the inliner. Add all the variables in
4847 VARS to BLOCK. */
4849 static void
4850 declare_inline_vars (tree block, tree vars)
4852 tree t;
4853 for (t = vars; t; t = DECL_CHAIN (t))
4855 DECL_SEEN_IN_BIND_EXPR_P (t) = 1;
4856 gcc_assert (!TREE_STATIC (t) && !TREE_ASM_WRITTEN (t));
4857 add_local_decl (cfun, t);
4860 if (block)
4861 BLOCK_VARS (block) = chainon (BLOCK_VARS (block), vars);
4864 /* Finish up the copy COPY of DECL (which must be a DECL). The DECL
4865 originally lived in ID->src_fn, but the copy will live in ID->dst_fn.
4866 Shared by the copy_decl_* routines below. */
4868 static tree
4869 copy_decl_for_dup_finish (copy_body_data *id, tree decl, tree copy)
4871 /* Don't generate debug information for the copy if we wouldn't have
4872 generated it for the original either. */
4873 DECL_ARTIFICIAL (copy) = DECL_ARTIFICIAL (decl);
4874 DECL_IGNORED_P (copy) = DECL_IGNORED_P (decl);
4876 /* Set the DECL_ABSTRACT_ORIGIN so the debugging routines know what
4877 declaration inspired this copy. */
4878 DECL_ABSTRACT_ORIGIN (copy) = DECL_ORIGIN (decl);
4880 /* The new variable/label has no RTL yet. */
4881 if (CODE_CONTAINS_STRUCT (TREE_CODE (copy), TS_DECL_WRTL)
4882 && !TREE_STATIC (copy) && !DECL_EXTERNAL (copy))
4883 SET_DECL_RTL (copy, 0);
4885 /* These args would always appear unused, if not for this. */
4886 TREE_USED (copy) = 1;
4888 /* Set the context for the new declaration. */
4889 if (!DECL_CONTEXT (decl))
4890 /* Globals stay global. */
4892 else if (DECL_CONTEXT (decl) != id->src_fn)
4893 /* Things that weren't in the scope of the function we're inlining
4894 from aren't in the scope we're inlining to, either. */
4896 else if (TREE_STATIC (decl))
4897 /* Function-scoped static variables should stay in the original
4898 function. */
4900 else
4901 /* Ordinary automatic local variables are now in the scope of the
4902 new function. */
4903 DECL_CONTEXT (copy) = id->dst_fn;
4905 return copy;
4908 static tree
4909 copy_decl_to_var (tree decl, copy_body_data *id)
4911 tree copy, type;
4913 gcc_assert (TREE_CODE (decl) == PARM_DECL
4914 || TREE_CODE (decl) == RESULT_DECL);
4916 type = TREE_TYPE (decl);
4918 copy = build_decl (DECL_SOURCE_LOCATION (id->dst_fn),
4919 VAR_DECL, DECL_NAME (decl), type);
4920 if (DECL_PT_UID_SET_P (decl))
4921 SET_DECL_PT_UID (copy, DECL_PT_UID (decl));
4922 TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (decl);
4923 TREE_READONLY (copy) = TREE_READONLY (decl);
4924 TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (decl);
4925 DECL_GIMPLE_REG_P (copy) = DECL_GIMPLE_REG_P (decl);
4927 return copy_decl_for_dup_finish (id, decl, copy);
4930 /* Like copy_decl_to_var, but create a return slot object instead of a
4931 pointer variable for return by invisible reference. */
4933 static tree
4934 copy_result_decl_to_var (tree decl, copy_body_data *id)
4936 tree copy, type;
4938 gcc_assert (TREE_CODE (decl) == PARM_DECL
4939 || TREE_CODE (decl) == RESULT_DECL);
4941 type = TREE_TYPE (decl);
4942 if (DECL_BY_REFERENCE (decl))
4943 type = TREE_TYPE (type);
4945 copy = build_decl (DECL_SOURCE_LOCATION (id->dst_fn),
4946 VAR_DECL, DECL_NAME (decl), type);
4947 if (DECL_PT_UID_SET_P (decl))
4948 SET_DECL_PT_UID (copy, DECL_PT_UID (decl));
4949 TREE_READONLY (copy) = TREE_READONLY (decl);
4950 TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (decl);
4951 if (!DECL_BY_REFERENCE (decl))
4953 TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (decl);
4954 DECL_GIMPLE_REG_P (copy) = DECL_GIMPLE_REG_P (decl);
4957 return copy_decl_for_dup_finish (id, decl, copy);
4960 tree
4961 copy_decl_no_change (tree decl, copy_body_data *id)
4963 tree copy;
4965 copy = copy_node (decl);
4967 /* The COPY is not abstract; it will be generated in DST_FN. */
4968 DECL_ABSTRACT (copy) = 0;
4969 lang_hooks.dup_lang_specific_decl (copy);
4971 /* TREE_ADDRESSABLE isn't used to indicate that a label's address has
4972 been taken; it's for internal bookkeeping in expand_goto_internal. */
4973 if (TREE_CODE (copy) == LABEL_DECL)
4975 TREE_ADDRESSABLE (copy) = 0;
4976 LABEL_DECL_UID (copy) = -1;
4979 return copy_decl_for_dup_finish (id, decl, copy);
4982 static tree
4983 copy_decl_maybe_to_var (tree decl, copy_body_data *id)
4985 if (TREE_CODE (decl) == PARM_DECL || TREE_CODE (decl) == RESULT_DECL)
4986 return copy_decl_to_var (decl, id);
4987 else
4988 return copy_decl_no_change (decl, id);
4991 /* Return a copy of the function's argument tree. */
4992 static tree
4993 copy_arguments_for_versioning (tree orig_parm, copy_body_data * id,
4994 bitmap args_to_skip, tree *vars)
4996 tree arg, *parg;
4997 tree new_parm = NULL;
4998 int i = 0;
5000 parg = &new_parm;
5002 for (arg = orig_parm; arg; arg = DECL_CHAIN (arg), i++)
5003 if (!args_to_skip || !bitmap_bit_p (args_to_skip, i))
5005 tree new_tree = remap_decl (arg, id);
5006 if (TREE_CODE (new_tree) != PARM_DECL)
5007 new_tree = id->copy_decl (arg, id);
5008 lang_hooks.dup_lang_specific_decl (new_tree);
5009 *parg = new_tree;
5010 parg = &DECL_CHAIN (new_tree);
5012 else if (!pointer_map_contains (id->decl_map, arg))
5014 /* Make an equivalent VAR_DECL. If the argument is used
5015 as a temporary variable later in the function, the uses will be
5016 replaced by the local variable. */
5017 tree var = copy_decl_to_var (arg, id);
5018 insert_decl_map (id, arg, var);
5019 /* Declare this new variable. */
5020 DECL_CHAIN (var) = *vars;
5021 *vars = var;
5023 return new_parm;
5026 /* Return a copy of the function's static chain. */
5027 static tree
5028 copy_static_chain (tree static_chain, copy_body_data * id)
5030 tree *chain_copy, *pvar;
5032 chain_copy = &static_chain;
5033 for (pvar = chain_copy; *pvar; pvar = &DECL_CHAIN (*pvar))
5035 tree new_tree = remap_decl (*pvar, id);
5036 lang_hooks.dup_lang_specific_decl (new_tree);
5037 DECL_CHAIN (new_tree) = DECL_CHAIN (*pvar);
5038 *pvar = new_tree;
5040 return static_chain;
5043 /* Return true if the function is allowed to be versioned.
5044 This is a guard for the versioning functionality. */
5046 bool
5047 tree_versionable_function_p (tree fndecl)
5049 return (!lookup_attribute ("noclone", DECL_ATTRIBUTES (fndecl))
5050 && copy_forbidden (DECL_STRUCT_FUNCTION (fndecl), fndecl) == NULL);
5053 /* Delete all unreachable basic blocks and update callgraph.
5054 Doing so is somewhat nontrivial because we need to update all clones and
5055 remove inline functions that become unreachable. */
5057 static bool
5058 delete_unreachable_blocks_update_callgraph (copy_body_data *id)
5060 bool changed = false;
5061 basic_block b, next_bb;
5063 find_unreachable_blocks ();
5065 /* Delete all unreachable basic blocks. */
5067 for (b = ENTRY_BLOCK_PTR->next_bb; b != EXIT_BLOCK_PTR; b = next_bb)
5069 next_bb = b->next_bb;
5071 if (!(b->flags & BB_REACHABLE))
5073 gimple_stmt_iterator bsi;
5075 for (bsi = gsi_start_bb (b); !gsi_end_p (bsi); gsi_next (&bsi))
5077 struct cgraph_edge *e;
5078 struct cgraph_node *node;
5080 ipa_remove_stmt_references (id->dst_node, gsi_stmt (bsi));
5082 if (gimple_code (gsi_stmt (bsi)) == GIMPLE_CALL
5083 && (e = cgraph_edge (id->dst_node, gsi_stmt (bsi))) != NULL)
5085 if (!e->inline_failed)
5086 cgraph_remove_node_and_inline_clones (e->callee, id->dst_node);
5087 else
5088 cgraph_remove_edge (e);
5090 if (id->transform_call_graph_edges == CB_CGE_MOVE_CLONES
5091 && id->dst_node->clones)
5092 for (node = id->dst_node->clones; node != id->dst_node;)
5094 ipa_remove_stmt_references (node, gsi_stmt (bsi));
5095 if (gimple_code (gsi_stmt (bsi)) == GIMPLE_CALL
5096 && (e = cgraph_edge (node, gsi_stmt (bsi))) != NULL)
5098 if (!e->inline_failed)
5099 cgraph_remove_node_and_inline_clones (e->callee, id->dst_node);
5100 else
5101 cgraph_remove_edge (e);
5104 if (node->clones)
5105 node = node->clones;
5106 else if (node->next_sibling_clone)
5107 node = node->next_sibling_clone;
5108 else
5110 while (node != id->dst_node && !node->next_sibling_clone)
5111 node = node->clone_of;
5112 if (node != id->dst_node)
5113 node = node->next_sibling_clone;
5117 delete_basic_block (b);
5118 changed = true;
5122 return changed;
5125 /* Update clone info after duplication. */
5127 static void
5128 update_clone_info (copy_body_data * id)
5130 struct cgraph_node *node;
5131 if (!id->dst_node->clones)
5132 return;
5133 for (node = id->dst_node->clones; node != id->dst_node;)
5135 /* First update replace maps to match the new body. */
5136 if (node->clone.tree_map)
5138 unsigned int i;
5139 for (i = 0; i < vec_safe_length (node->clone.tree_map); i++)
5141 struct ipa_replace_map *replace_info;
5142 replace_info = (*node->clone.tree_map)[i];
5143 walk_tree (&replace_info->old_tree, copy_tree_body_r, id, NULL);
5144 walk_tree (&replace_info->new_tree, copy_tree_body_r, id, NULL);
5147 if (node->clones)
5148 node = node->clones;
5149 else if (node->next_sibling_clone)
5150 node = node->next_sibling_clone;
5151 else
5153 while (node != id->dst_node && !node->next_sibling_clone)
5154 node = node->clone_of;
5155 if (node != id->dst_node)
5156 node = node->next_sibling_clone;
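/* Illustrative note (editorial): the pointer dance at the end of the
   loop above (also used in delete_unreachable_blocks_update_callgraph)
   is a non-recursive preorder walk over the clone tree: descend into
   node->clones first, then advance to node->next_sibling_clone, and
   when a subtree is exhausted climb back via node->clone_of until a
   sibling exists or the walk returns to id->dst_node.  */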
5161 /* Create a copy of a function's tree.
5162 OLD_DECL and NEW_DECL are FUNCTION_DECL tree nodes
5163 of the original function and the new copied function
5164 respectively. In case we want to replace a DECL
5165 tree with another tree while duplicating the function's
5166 body, TREE_MAP represents the mapping between these
5167 trees. If UPDATE_CLONES is set, the call_stmt fields
5168 of edges of clones of the function will be updated.
5170 If non-NULL, ARGS_TO_SKIP determines which function parameters to remove
5171 from the new version.
5172 If SKIP_RETURN is true, the new version will return void.
5173 If non-NULL, BLOCKS_TO_COPY determines which basic blocks to copy.
5174 If non-NULL, NEW_ENTRY determines the new entry BB of the clone. */
5176 void
5177 tree_function_versioning (tree old_decl, tree new_decl,
5178 vec<ipa_replace_map_p, va_gc> *tree_map,
5179 bool update_clones, bitmap args_to_skip,
5180 bool skip_return, bitmap blocks_to_copy,
5181 basic_block new_entry)
5183 struct cgraph_node *old_version_node;
5184 struct cgraph_node *new_version_node;
5185 copy_body_data id;
5186 tree p;
5187 unsigned i;
5188 struct ipa_replace_map *replace_info;
5189 basic_block old_entry_block, bb;
5190 vec<gimple> init_stmts;
5191 init_stmts.create (10);
5192 tree vars = NULL_TREE;
5194 gcc_assert (TREE_CODE (old_decl) == FUNCTION_DECL
5195 && TREE_CODE (new_decl) == FUNCTION_DECL);
5196 DECL_POSSIBLY_INLINED (old_decl) = 1;
5198 old_version_node = cgraph_get_node (old_decl);
5199 gcc_checking_assert (old_version_node);
5200 new_version_node = cgraph_get_node (new_decl);
5201 gcc_checking_assert (new_version_node);
5203 /* Copy over debug args. */
5204 if (DECL_HAS_DEBUG_ARGS_P (old_decl))
5206 vec<tree, va_gc> **new_debug_args, **old_debug_args;
5207 gcc_checking_assert (decl_debug_args_lookup (new_decl) == NULL);
5208 DECL_HAS_DEBUG_ARGS_P (new_decl) = 0;
5209 old_debug_args = decl_debug_args_lookup (old_decl);
5210 if (old_debug_args)
5212 new_debug_args = decl_debug_args_insert (new_decl);
5213 *new_debug_args = vec_safe_copy (*old_debug_args);
5217 /* Output the inlining info for this abstract function, since it has been
5218 inlined. If we don't do this now, we can lose the information about the
5219 variables in the function when the blocks get blown away as soon as we
5220 remove the cgraph node. */
5221 (*debug_hooks->outlining_inline_function) (old_decl);
5223 DECL_ARTIFICIAL (new_decl) = 1;
5224 DECL_ABSTRACT_ORIGIN (new_decl) = DECL_ORIGIN (old_decl);
5225 if (DECL_ORIGIN (old_decl) == old_decl)
5226 old_version_node->used_as_abstract_origin = true;
5227 DECL_FUNCTION_PERSONALITY (new_decl) = DECL_FUNCTION_PERSONALITY (old_decl);
5229 /* Prepare the data structures for the tree copy. */
5230 memset (&id, 0, sizeof (id));
5232 /* Generate a new name for the new version. */
5233 id.statements_to_fold = pointer_set_create ();
5235 id.decl_map = pointer_map_create ();
5236 id.debug_map = NULL;
5237 id.src_fn = old_decl;
5238 id.dst_fn = new_decl;
5239 id.src_node = old_version_node;
5240 id.dst_node = new_version_node;
5241 id.src_cfun = DECL_STRUCT_FUNCTION (old_decl);
5242 id.blocks_to_copy = blocks_to_copy;
5243 if (id.src_node->ipa_transforms_to_apply.exists ())
5245 vec<ipa_opt_pass> old_transforms_to_apply
5246 = id.dst_node->ipa_transforms_to_apply;
5247 unsigned int i;
5249 id.dst_node->ipa_transforms_to_apply
5250 = id.src_node->ipa_transforms_to_apply.copy ();
5251 for (i = 0; i < old_transforms_to_apply.length (); i++)
5252 id.dst_node->ipa_transforms_to_apply.safe_push (old_transforms_to_apply[i]);
5253 old_transforms_to_apply.release ();
5256 id.copy_decl = copy_decl_no_change;
5257 id.transform_call_graph_edges
5258 = update_clones ? CB_CGE_MOVE_CLONES : CB_CGE_MOVE;
5259 id.transform_new_cfg = true;
5260 id.transform_return_to_modify = false;
5261 id.transform_parameter = false;
5262 id.transform_lang_insert_block = NULL;
5264 old_entry_block = ENTRY_BLOCK_PTR_FOR_FUNCTION
5265 (DECL_STRUCT_FUNCTION (old_decl));
5266 DECL_RESULT (new_decl) = DECL_RESULT (old_decl);
5267 DECL_ARGUMENTS (new_decl) = DECL_ARGUMENTS (old_decl);
5268 initialize_cfun (new_decl, old_decl,
5269 old_entry_block->count);
5270 DECL_STRUCT_FUNCTION (new_decl)->gimple_df->ipa_pta
5271 = id.src_cfun->gimple_df->ipa_pta;
5273 /* Copy the function's static chain. */
5274 p = DECL_STRUCT_FUNCTION (old_decl)->static_chain_decl;
5275 if (p)
5276 DECL_STRUCT_FUNCTION (new_decl)->static_chain_decl =
5277 copy_static_chain (DECL_STRUCT_FUNCTION (old_decl)->static_chain_decl,
5278 &id);
5280 /* If there's a tree_map, prepare for substitution. */
5281 if (tree_map)
5282 for (i = 0; i < tree_map->length (); i++)
5284 gimple init;
5285 replace_info = (*tree_map)[i];
5286 if (replace_info->replace_p)
5288 if (!replace_info->old_tree)
5290 int i = replace_info->parm_num;
5291 tree parm;
5292 tree req_type;
5294 for (parm = DECL_ARGUMENTS (old_decl); i; parm = DECL_CHAIN (parm))
5295 i --;
5296 replace_info->old_tree = parm;
5297 req_type = TREE_TYPE (parm);
5298 if (!useless_type_conversion_p (req_type, TREE_TYPE (replace_info->new_tree)))
5300 if (fold_convertible_p (req_type, replace_info->new_tree))
5301 replace_info->new_tree = fold_build1 (NOP_EXPR, req_type, replace_info->new_tree);
5302 else if (TYPE_SIZE (req_type) == TYPE_SIZE (TREE_TYPE (replace_info->new_tree)))
5303 replace_info->new_tree = fold_build1 (VIEW_CONVERT_EXPR, req_type, replace_info->new_tree);
5304 else
5306 if (dump_file)
5308 fprintf (dump_file, " const ");
5309 print_generic_expr (dump_file, replace_info->new_tree, 0);
5310 fprintf (dump_file, " can't be converted to param ");
5311 print_generic_expr (dump_file, parm, 0);
5312 fprintf (dump_file, "\n");
5314 replace_info->old_tree = NULL;
5318 else
5319 gcc_assert (TREE_CODE (replace_info->old_tree) == PARM_DECL);
5320 if (replace_info->old_tree)
5322 init = setup_one_parameter (&id, replace_info->old_tree,
5323 replace_info->new_tree, id.src_fn,
5324 NULL,
5325 &vars);
5326 if (init)
5327 init_stmts.safe_push (init);
5331 /* Copy the function's arguments. */
5332 if (DECL_ARGUMENTS (old_decl) != NULL_TREE)
5333 DECL_ARGUMENTS (new_decl) =
5334 copy_arguments_for_versioning (DECL_ARGUMENTS (old_decl), &id,
5335 args_to_skip, &vars);
5337 DECL_INITIAL (new_decl) = remap_blocks (DECL_INITIAL (id.src_fn), &id);
5338 BLOCK_SUPERCONTEXT (DECL_INITIAL (new_decl)) = new_decl;
5340 declare_inline_vars (DECL_INITIAL (new_decl), vars);
5342 if (!vec_safe_is_empty (DECL_STRUCT_FUNCTION (old_decl)->local_decls))
5343 /* Add local vars. */
5344 add_local_variables (DECL_STRUCT_FUNCTION (old_decl), cfun, &id);
5346 if (DECL_RESULT (old_decl) == NULL_TREE)
5348 else if (skip_return && !VOID_TYPE_P (TREE_TYPE (DECL_RESULT (old_decl))))
5350 DECL_RESULT (new_decl)
5351 = build_decl (DECL_SOURCE_LOCATION (DECL_RESULT (old_decl)),
5352 RESULT_DECL, NULL_TREE, void_type_node);
5353 DECL_CONTEXT (DECL_RESULT (new_decl)) = new_decl;
5354 cfun->returns_struct = 0;
5355 cfun->returns_pcc_struct = 0;
5357 else
5359 tree old_name;
5360 DECL_RESULT (new_decl) = remap_decl (DECL_RESULT (old_decl), &id);
5361 lang_hooks.dup_lang_specific_decl (DECL_RESULT (new_decl));
5362 if (gimple_in_ssa_p (id.src_cfun)
5363 && DECL_BY_REFERENCE (DECL_RESULT (old_decl))
5364 && (old_name = ssa_default_def (id.src_cfun, DECL_RESULT (old_decl))))
5366 tree new_name = make_ssa_name (DECL_RESULT (new_decl), NULL);
5367 insert_decl_map (&id, old_name, new_name);
5368 SSA_NAME_DEF_STMT (new_name) = gimple_build_nop ();
5369 set_ssa_default_def (cfun, DECL_RESULT (new_decl), new_name);
5373 /* Set up the destination function's loop tree. */
5374 if (loops_for_fn (DECL_STRUCT_FUNCTION (old_decl)) != NULL)
5376 cfun->curr_properties &= ~PROP_loops;
5377 loop_optimizer_init (AVOID_CFG_MODIFICATIONS);
5378 cfun->curr_properties |= PROP_loops;
5381 /* Copy the function's body. */
5382 copy_body (&id, old_entry_block->count, REG_BR_PROB_BASE,
5383 ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR, new_entry);
5385 /* Renumber the lexical scoping (non-code) blocks consecutively. */
5386 number_blocks (new_decl);
5388 /* We want to create the BB unconditionally, so that the addition of
5389 debug stmts doesn't affect BB count, which may in the end cause
5390 codegen differences. */
5391 bb = split_edge (single_succ_edge (ENTRY_BLOCK_PTR));
5392 while (init_stmts.length ())
5393 insert_init_stmt (&id, bb, init_stmts.pop ());
5394 update_clone_info (&id);
5396 /* Remap the nonlocal_goto_save_area, if any. */
5397 if (cfun->nonlocal_goto_save_area)
5399 struct walk_stmt_info wi;
5401 memset (&wi, 0, sizeof (wi));
5402 wi.info = &id;
5403 walk_tree (&cfun->nonlocal_goto_save_area, remap_gimple_op_r, &wi, NULL);
5406 /* Clean up. */
5407 pointer_map_destroy (id.decl_map);
5408 if (id.debug_map)
5409 pointer_map_destroy (id.debug_map);
5410 free_dominance_info (CDI_DOMINATORS);
5411 free_dominance_info (CDI_POST_DOMINATORS);
5413 fold_marked_statements (0, id.statements_to_fold);
5414 pointer_set_destroy (id.statements_to_fold);
5415 fold_cond_expr_cond ();
5416 delete_unreachable_blocks_update_callgraph (&id);
5417 if (id.dst_node->definition)
5418 cgraph_rebuild_references ();
5419 update_ssa (TODO_update_ssa);
5421 /* After partial cloning we need to rescale frequencies, so they are
5422 within proper range in the cloned function. */
5423 if (new_entry)
5425 struct cgraph_edge *e;
5426 rebuild_frequencies ();
5428 new_version_node->count = ENTRY_BLOCK_PTR->count;
5429 for (e = new_version_node->callees; e; e = e->next_callee)
5431 basic_block bb = gimple_bb (e->call_stmt);
5432 e->frequency = compute_call_stmt_bb_frequency (current_function_decl,
5433 bb);
5434 e->count = bb->count;
5436 for (e = new_version_node->indirect_calls; e; e = e->next_callee)
5438 basic_block bb = gimple_bb (e->call_stmt);
5439 e->frequency = compute_call_stmt_bb_frequency (current_function_decl,
5440 bb);
5441 e->count = bb->count;
5445 free_dominance_info (CDI_DOMINATORS);
5446 free_dominance_info (CDI_POST_DOMINATORS);
5448 gcc_assert (!id.debug_stmts.exists ());
5449 init_stmts.release ();
5450 pop_cfun ();
5451 return;
5454 /* EXP is a CALL_EXPR present in a GENERIC expression tree. Try to integrate
5455 the callee and return the inlined body on success. */
5457 tree
5458 maybe_inline_call_in_expr (tree exp)
5460 tree fn = get_callee_fndecl (exp);
5462 /* We can only try to inline "const" functions. */
5463 if (fn && TREE_READONLY (fn) && DECL_SAVED_TREE (fn))
5465 struct pointer_map_t *decl_map = pointer_map_create ();
5466 call_expr_arg_iterator iter;
5467 copy_body_data id;
5468 tree param, arg, t;
5470 /* Remap the parameters. */
5471 for (param = DECL_ARGUMENTS (fn), arg = first_call_expr_arg (exp, &iter);
5472 param;
5473 param = DECL_CHAIN (param), arg = next_call_expr_arg (&iter))
5474 *pointer_map_insert (decl_map, param) = arg;
5476 memset (&id, 0, sizeof (id));
5477 id.src_fn = fn;
5478 id.dst_fn = current_function_decl;
5479 id.src_cfun = DECL_STRUCT_FUNCTION (fn);
5480 id.decl_map = decl_map;
5482 id.copy_decl = copy_decl_no_change;
5483 id.transform_call_graph_edges = CB_CGE_DUPLICATE;
5484 id.transform_new_cfg = false;
5485 id.transform_return_to_modify = true;
5486 id.transform_parameter = true;
5487 id.transform_lang_insert_block = NULL;
5489 /* Make sure not to unshare trees behind the front-end's back
5490 since front-end specific mechanisms may rely on sharing. */
5491 id.regimplify = false;
5492 id.do_not_unshare = true;
5494 /* We're not inside any EH region. */
5495 id.eh_lp_nr = 0;
5497 t = copy_tree_body (&id);
5498 pointer_map_destroy (decl_map);
5500 /* We can only return something suitable for use in a GENERIC
5501 expression tree. */
5502 if (TREE_CODE (t) == MODIFY_EXPR)
5503 return TREE_OPERAND (t, 1);
5506 return NULL_TREE;
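/* Illustrative example (editorial; the function "square" is
   hypothetical): for a const function whose GENERIC body is still
   available,

     int square (int x) { return x * x; }

   a CALL_EXPR "square (a + 1)" can be integrated by the function
   above: each PARM_DECL is mapped directly to its argument tree, the
   body is copied, and the RHS of the resulting MODIFY_EXPR ("x * x"
   with X remapped to "a + 1") is returned.  */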
5509 /* Duplicate a type, fields and all. */
5511 tree
5512 build_duplicate_type (tree type)
5514 struct copy_body_data id;
5516 memset (&id, 0, sizeof (id));
5517 id.src_fn = current_function_decl;
5518 id.dst_fn = current_function_decl;
5519 id.src_cfun = cfun;
5520 id.decl_map = pointer_map_create ();
5521 id.debug_map = NULL;
5522 id.copy_decl = copy_decl_no_change;
5524 type = remap_type_1 (type, &id);
5526 pointer_map_destroy (id.decl_map);
5527 if (id.debug_map)
5528 pointer_map_destroy (id.debug_map);
5530 TYPE_CANONICAL (type) = type;
5532 return type;