/* Tree inlining.
   Copyright 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, 2011,
   2012 Free Software Foundation, Inc.
   Contributed by Alexandre Oliva <aoliva@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "diagnostic-core.h"
#include "tree.h"
#include "tree-inline.h"
#include "flags.h"
#include "params.h"
#include "input.h"
#include "insn-config.h"
#include "hashtab.h"
#include "langhooks.h"
#include "basic-block.h"
#include "tree-iterator.h"
#include "cgraph.h"
#include "intl.h"
#include "tree-mudflap.h"
#include "tree-flow.h"
#include "function.h"
#include "tree-pretty-print.h"
#include "except.h"
#include "debug.h"
#include "pointer-set.h"
#include "ipa-prop.h"
#include "value-prof.h"
#include "tree-pass.h"
#include "target.h"

#include "rtl.h"	/* FIXME: For asm_str_count.  */

/* I'm not really happy about this, but we need to handle gimple and
   non-gimple trees.  */
#include "gimple.h"
/* Inlining, Cloning, Versioning, Parallelization

   Inlining: a function body is duplicated, but the PARM_DECLs are
   remapped into VAR_DECLs, and non-void RETURN_EXPRs become
   MODIFY_EXPRs that store to a dedicated returned-value variable.
   The duplicated eh_region info of the copy will later be appended
   to the info for the caller; the eh_region info in copied throwing
   statements and RESX statements is adjusted accordingly.

   Cloning: (only in C++) We have one body for a con/de/structor, and
   multiple function decls, each with a unique parameter list.
   Duplicate the body, using the given splay tree; some parameters
   will become constants (like 0 or 1).

   Versioning: a function body is duplicated and the result is a new
   function, rather than being inlined into the blocks of an existing
   function as with inlining.  Some parameters will become constants.

   Parallelization: a region of a function is duplicated resulting in
   a new function.  Variables may be replaced with complex expressions
   to enable shared variable semantics.

   All of these will simultaneously look up any callgraph edges.  If
   we're going to inline the duplicated function body, and the given
   function has some cloned callgraph nodes (one for each place this
   function will be inlined) those callgraph edges will be duplicated.
   If we're cloning the body, those callgraph edges will be
   updated to point into the new body.  (Note that the original
   callgraph node and edge list will not be altered.)

   See the CALL_EXPR handling case in copy_tree_body_r ().  */
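
/* As an illustrative sketch (not output of this pass), inlining

     int inc (int x) { return x + 1; }

   into a caller "y = inc (3);" conceptually remaps the PARM_DECL X
   to a fresh local and rewrites the RETURN_EXPR as an assignment to
   a dedicated return variable:

     x.1 = 3;
     retval.2 = x.1 + 1;
     y = retval.2;

   with the branch semantics of the return handled by the CFG rather
   than an explicit return statement.  */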
/* To Do:

   o In order to make inlining-on-trees work, we pessimized
     function-local static constants.  In particular, they are now
     always output, even when not addressed.  Fix this by treating
     function-local static constants just like global static
     constants; the back-end already knows not to output them if they
     are not needed.

   o Provide heuristics to clamp inlining of recursive template
     calls?  */


/* Weights that estimate_num_insns uses to estimate the size of the
   produced code.  */

eni_weights eni_size_weights;

/* Weights that estimate_num_insns uses to estimate the time necessary
   to execute the produced code.  */

eni_weights eni_time_weights;
/* Prototypes.  */

static tree declare_return_variable (copy_body_data *, tree, tree, basic_block);
static void remap_block (tree *, copy_body_data *);
static void copy_bind_expr (tree *, int *, copy_body_data *);
static tree mark_local_for_remap_r (tree *, int *, void *);
static void unsave_expr_1 (tree);
static tree unsave_r (tree *, int *, void *);
static void declare_inline_vars (tree, tree);
static void remap_save_expr (tree *, void *, int *);
static void prepend_lexical_block (tree current_block, tree new_block);
static tree copy_decl_to_var (tree, copy_body_data *);
static tree copy_result_decl_to_var (tree, copy_body_data *);
static tree copy_decl_maybe_to_var (tree, copy_body_data *);
static gimple remap_gimple_stmt (gimple, copy_body_data *);
static bool delete_unreachable_blocks_update_callgraph (copy_body_data *id);
/* Insert a tree->tree mapping for ID.  Although the name suggests
   that the trees should be variables, it is used for more than that.  */

void
insert_decl_map (copy_body_data *id, tree key, tree value)
{
  *pointer_map_insert (id->decl_map, key) = value;

  /* Always insert an identity map as well.  If we see this same new
     node again, we won't want to duplicate it a second time.  */
  if (key != value)
    *pointer_map_insert (id->decl_map, value) = value;
}
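
/* For instance, once OLD -> NEW is recorded, a later walk over the
   copied body may encounter NEW itself; the identity entry NEW -> NEW
   added above makes that lookup return NEW unchanged instead of
   copying it a second time.  */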
/* Insert a tree->tree mapping for ID.  This is only used for
   variables.  */

static void
insert_debug_decl_map (copy_body_data *id, tree key, tree value)
{
  if (!gimple_in_ssa_p (id->src_cfun))
    return;

  if (!MAY_HAVE_DEBUG_STMTS)
    return;

  if (!target_for_debug_bind (key))
    return;

  gcc_assert (TREE_CODE (key) == PARM_DECL);
  gcc_assert (TREE_CODE (value) == VAR_DECL);

  if (!id->debug_map)
    id->debug_map = pointer_map_create ();

  *pointer_map_insert (id->debug_map, key) = value;
}

/* If nonzero, we're remapping the contents of inlined debug
   statements.  If negative, an error has occurred, such as a
   reference to a variable that isn't available in the inlined
   context.  */
static int processing_debug_stmt = 0;
/* Construct a new SSA name for old NAME.  ID is the inline context.  */

static tree
remap_ssa_name (tree name, copy_body_data *id)
{
  tree new_tree, var;
  tree *n;

  gcc_assert (TREE_CODE (name) == SSA_NAME);

  n = (tree *) pointer_map_contains (id->decl_map, name);
  if (n)
    return unshare_expr (*n);

  if (processing_debug_stmt)
    {
      if (SSA_NAME_IS_DEFAULT_DEF (name)
	  && TREE_CODE (SSA_NAME_VAR (name)) == PARM_DECL
	  && id->entry_bb == NULL
	  && single_succ_p (ENTRY_BLOCK_PTR))
	{
	  tree vexpr = make_node (DEBUG_EXPR_DECL);
	  gimple def_temp;
	  gimple_stmt_iterator gsi;
	  tree val = SSA_NAME_VAR (name);

	  n = (tree *) pointer_map_contains (id->decl_map, val);
	  if (n != NULL)
	    val = *n;
	  if (TREE_CODE (val) != PARM_DECL)
	    {
	      processing_debug_stmt = -1;
	      return name;
	    }
	  def_temp = gimple_build_debug_source_bind (vexpr, val, NULL);
	  DECL_ARTIFICIAL (vexpr) = 1;
	  TREE_TYPE (vexpr) = TREE_TYPE (name);
	  DECL_MODE (vexpr) = DECL_MODE (SSA_NAME_VAR (name));
	  gsi = gsi_after_labels (single_succ (ENTRY_BLOCK_PTR));
	  gsi_insert_before (&gsi, def_temp, GSI_SAME_STMT);
	  return vexpr;
	}

      processing_debug_stmt = -1;
      return name;
    }

  /* Remap anonymous SSA names or SSA names of anonymous decls.  */
  var = SSA_NAME_VAR (name);
  if (!var
      || (!SSA_NAME_IS_DEFAULT_DEF (name)
	  && TREE_CODE (var) == VAR_DECL
	  && !VAR_DECL_IS_VIRTUAL_OPERAND (var)
	  && DECL_ARTIFICIAL (var)
	  && DECL_IGNORED_P (var)
	  && !DECL_NAME (var)))
    {
      struct ptr_info_def *pi;
      new_tree = make_ssa_name (remap_type (TREE_TYPE (name), id), NULL);
      if (!var && SSA_NAME_IDENTIFIER (name))
	SET_SSA_NAME_VAR_OR_IDENTIFIER (new_tree, SSA_NAME_IDENTIFIER (name));
      insert_decl_map (id, name, new_tree);
      SSA_NAME_OCCURS_IN_ABNORMAL_PHI (new_tree)
	= SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name);
      /* At least IPA points-to info can be directly transferred.  */
      if (id->src_cfun->gimple_df
	  && id->src_cfun->gimple_df->ipa_pta
	  && (pi = SSA_NAME_PTR_INFO (name))
	  && !pi->pt.anything)
	{
	  struct ptr_info_def *new_pi = get_ptr_info (new_tree);
	  new_pi->pt = pi->pt;
	}
      return new_tree;
    }

  /* Do not set DEF_STMT yet as the statement is not copied yet.  We do that
     in copy_bb.  */
  new_tree = remap_decl (var, id);

  /* We might've substituted a constant or another SSA_NAME for
     the variable.

     Replace the SSA name representing the RESULT_DECL by the variable
     during inlining: this saves us from needing to introduce a PHI node
     in case the return value is only partly initialized.  */
  if ((TREE_CODE (new_tree) == VAR_DECL || TREE_CODE (new_tree) == PARM_DECL)
      && (!SSA_NAME_VAR (name)
	  || TREE_CODE (SSA_NAME_VAR (name)) != RESULT_DECL
	  || !id->transform_return_to_modify))
    {
      struct ptr_info_def *pi;
      new_tree = make_ssa_name (new_tree, NULL);
      insert_decl_map (id, name, new_tree);
      SSA_NAME_OCCURS_IN_ABNORMAL_PHI (new_tree)
	= SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name);
      /* At least IPA points-to info can be directly transferred.  */
      if (id->src_cfun->gimple_df
	  && id->src_cfun->gimple_df->ipa_pta
	  && (pi = SSA_NAME_PTR_INFO (name))
	  && !pi->pt.anything)
	{
	  struct ptr_info_def *new_pi = get_ptr_info (new_tree);
	  new_pi->pt = pi->pt;
	}
      if (SSA_NAME_IS_DEFAULT_DEF (name))
	{
	  /* By inlining a function having an uninitialized variable, we
	     might extend its lifetime (the variable might get reused).
	     This causes an ICE if we end up extending the lifetime of an
	     SSA name across an abnormal edge, and it also increases
	     register pressure.

	     We simply initialize all uninitialized vars to 0, except
	     when we are inlining into the very first BB.  We can avoid
	     this for all BBs that are not inside strongly connected
	     regions of the CFG, but this is expensive to test.  */
	  if (id->entry_bb
	      && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name)
	      && (!SSA_NAME_VAR (name)
		  || TREE_CODE (SSA_NAME_VAR (name)) != PARM_DECL)
	      && (id->entry_bb != EDGE_SUCC (ENTRY_BLOCK_PTR, 0)->dest
		  || EDGE_COUNT (id->entry_bb->preds) != 1))
	    {
	      gimple_stmt_iterator gsi = gsi_last_bb (id->entry_bb);
	      gimple init_stmt;
	      tree zero = build_zero_cst (TREE_TYPE (new_tree));

	      init_stmt = gimple_build_assign (new_tree, zero);
	      gsi_insert_after (&gsi, init_stmt, GSI_NEW_STMT);
	      SSA_NAME_IS_DEFAULT_DEF (new_tree) = 0;
	    }
	  else
	    {
	      SSA_NAME_DEF_STMT (new_tree) = gimple_build_nop ();
	      set_ssa_default_def (cfun, SSA_NAME_VAR (new_tree), new_tree);
	    }
	}
    }
  else
    insert_decl_map (id, name, new_tree);
  return new_tree;
}
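
/* Example of the zero-initialization above (a sketch): inlining

     int f (void) { int x; return x; }

   would otherwise import the default definition x_1(D).  Unless the
   inlined body starts the function, we instead emit "x = 0" in
   ID->entry_bb so the copied SSA name has a real definition and its
   live range cannot leak across abnormal edges.  */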
/* Remap DECL during the copying of the BLOCK tree for the function.  */

tree
remap_decl (tree decl, copy_body_data *id)
{
  tree *n;

  /* We only remap local variables in the current function.  */

  /* See if we have remapped this declaration.  */

  n = (tree *) pointer_map_contains (id->decl_map, decl);

  if (!n && processing_debug_stmt)
    {
      processing_debug_stmt = -1;
      return decl;
    }

  /* If we didn't already have an equivalent for this declaration,
     create one now.  */
  if (!n)
    {
      /* Make a copy of the variable or label.  */
      tree t = id->copy_decl (decl, id);

      /* Remember it, so that if we encounter this local entity again
	 we can reuse this copy.  Do this early because remap_type may
	 need this decl for TYPE_STUB_DECL.  */
      insert_decl_map (id, decl, t);

      if (!DECL_P (t))
	return t;

      /* Remap types, if necessary.  */
      TREE_TYPE (t) = remap_type (TREE_TYPE (t), id);
      if (TREE_CODE (t) == TYPE_DECL)
	DECL_ORIGINAL_TYPE (t) = remap_type (DECL_ORIGINAL_TYPE (t), id);

      /* Remap sizes as necessary.  */
      walk_tree (&DECL_SIZE (t), copy_tree_body_r, id, NULL);
      walk_tree (&DECL_SIZE_UNIT (t), copy_tree_body_r, id, NULL);

      /* If fields, do likewise for offset and qualifier.  */
      if (TREE_CODE (t) == FIELD_DECL)
	{
	  walk_tree (&DECL_FIELD_OFFSET (t), copy_tree_body_r, id, NULL);
	  if (TREE_CODE (DECL_CONTEXT (t)) == QUAL_UNION_TYPE)
	    walk_tree (&DECL_QUALIFIER (t), copy_tree_body_r, id, NULL);
	}

      return t;
    }

  if (id->do_not_unshare)
    return *n;
  else
    return unshare_expr (*n);
}
static tree
remap_type_1 (tree type, copy_body_data *id)
{
  tree new_tree, t;

  /* We do need a copy.  Build and register it now.  If this is a pointer or
     reference type, remap the designated type and make a new pointer or
     reference type.  */
  if (TREE_CODE (type) == POINTER_TYPE)
    {
      new_tree = build_pointer_type_for_mode (remap_type (TREE_TYPE (type), id),
					      TYPE_MODE (type),
					      TYPE_REF_CAN_ALIAS_ALL (type));
      if (TYPE_ATTRIBUTES (type) || TYPE_QUALS (type))
	new_tree = build_type_attribute_qual_variant (new_tree,
						      TYPE_ATTRIBUTES (type),
						      TYPE_QUALS (type));
      insert_decl_map (id, type, new_tree);
      return new_tree;
    }
  else if (TREE_CODE (type) == REFERENCE_TYPE)
    {
      new_tree = build_reference_type_for_mode (remap_type (TREE_TYPE (type), id),
						TYPE_MODE (type),
						TYPE_REF_CAN_ALIAS_ALL (type));
      if (TYPE_ATTRIBUTES (type) || TYPE_QUALS (type))
	new_tree = build_type_attribute_qual_variant (new_tree,
						      TYPE_ATTRIBUTES (type),
						      TYPE_QUALS (type));
      insert_decl_map (id, type, new_tree);
      return new_tree;
    }
  else
    new_tree = copy_node (type);

  insert_decl_map (id, type, new_tree);

  /* This is a new type, not a copy of an old type.  Need to reassociate
     variants.  We can handle everything except the main variant lazily.  */
  t = TYPE_MAIN_VARIANT (type);
  if (type != t)
    {
      t = remap_type (t, id);
      TYPE_MAIN_VARIANT (new_tree) = t;
      TYPE_NEXT_VARIANT (new_tree) = TYPE_NEXT_VARIANT (t);
      TYPE_NEXT_VARIANT (t) = new_tree;
    }
  else
    {
      TYPE_MAIN_VARIANT (new_tree) = new_tree;
      TYPE_NEXT_VARIANT (new_tree) = NULL;
    }

  if (TYPE_STUB_DECL (type))
    TYPE_STUB_DECL (new_tree) = remap_decl (TYPE_STUB_DECL (type), id);

  /* Lazily create pointer and reference types.  */
  TYPE_POINTER_TO (new_tree) = NULL;
  TYPE_REFERENCE_TO (new_tree) = NULL;

  switch (TREE_CODE (new_tree))
    {
    case INTEGER_TYPE:
    case REAL_TYPE:
    case FIXED_POINT_TYPE:
    case ENUMERAL_TYPE:
    case BOOLEAN_TYPE:
      t = TYPE_MIN_VALUE (new_tree);
      if (t && TREE_CODE (t) != INTEGER_CST)
	walk_tree (&TYPE_MIN_VALUE (new_tree), copy_tree_body_r, id, NULL);

      t = TYPE_MAX_VALUE (new_tree);
      if (t && TREE_CODE (t) != INTEGER_CST)
	walk_tree (&TYPE_MAX_VALUE (new_tree), copy_tree_body_r, id, NULL);
      return new_tree;

    case FUNCTION_TYPE:
      TREE_TYPE (new_tree) = remap_type (TREE_TYPE (new_tree), id);
      walk_tree (&TYPE_ARG_TYPES (new_tree), copy_tree_body_r, id, NULL);
      return new_tree;

    case ARRAY_TYPE:
      TREE_TYPE (new_tree) = remap_type (TREE_TYPE (new_tree), id);
      TYPE_DOMAIN (new_tree) = remap_type (TYPE_DOMAIN (new_tree), id);
      break;

    case RECORD_TYPE:
    case UNION_TYPE:
    case QUAL_UNION_TYPE:
      {
	tree f, nf = NULL;

	for (f = TYPE_FIELDS (new_tree); f ; f = DECL_CHAIN (f))
	  {
	    t = remap_decl (f, id);
	    DECL_CONTEXT (t) = new_tree;
	    DECL_CHAIN (t) = nf;
	    nf = t;
	  }
	TYPE_FIELDS (new_tree) = nreverse (nf);
      }
      break;

    case OFFSET_TYPE:
    default:
      /* Shouldn't have been thought variable sized.  */
      gcc_unreachable ();
    }

  walk_tree (&TYPE_SIZE (new_tree), copy_tree_body_r, id, NULL);
  walk_tree (&TYPE_SIZE_UNIT (new_tree), copy_tree_body_r, id, NULL);

  return new_tree;
}
tree
remap_type (tree type, copy_body_data *id)
{
  tree *node;
  tree tmp;

  if (type == NULL)
    return type;

  /* See if we have remapped this type.  */
  node = (tree *) pointer_map_contains (id->decl_map, type);
  if (node)
    return *node;

  /* The type only needs remapping if it's variably modified.  */
  if (! variably_modified_type_p (type, id->src_fn))
    {
      insert_decl_map (id, type, type);
      return type;
    }

  id->remapping_type_depth++;
  tmp = remap_type_1 (type, id);
  id->remapping_type_depth--;

  return tmp;
}
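
/* For example (a sketch), in

     void f (int n) { int a[n]; ... }

   the type of A is variably modified: its TYPE_SIZE refers to N.
   When the body is duplicated, N is remapped, so the array type and
   its size expressions must be remapped as well; a type like "int"
   whose layout is fixed is simply mapped to itself above.  */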
/* Decide if DECL can be put into BLOCK_NONLOCAL_VARs.  */

static bool
can_be_nonlocal (tree decl, copy_body_data *id)
{
  /* We cannot duplicate function decls.  */
  if (TREE_CODE (decl) == FUNCTION_DECL)
    return true;

  /* Local static vars must be non-local or we get multiple declaration
     problems.  */
  if (TREE_CODE (decl) == VAR_DECL
      && !auto_var_in_fn_p (decl, id->src_fn))
    return true;

  return false;
}
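
/* E.g. a function-local "static int counter;" must not be duplicated
   into each inlined copy of the body; keeping it nonlocal preserves a
   single object shared by every copy.  */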
static tree
remap_decls (tree decls, VEC(tree,gc) **nonlocalized_list, copy_body_data *id)
{
  tree old_var;
  tree new_decls = NULL_TREE;

  /* Remap its variables.  */
  for (old_var = decls; old_var; old_var = DECL_CHAIN (old_var))
    {
      tree new_var;

      if (can_be_nonlocal (old_var, id))
	{
	  /* We need to add this variable to the local decls as otherwise
	     nothing else will do so.  */
	  if (TREE_CODE (old_var) == VAR_DECL
	      && ! DECL_EXTERNAL (old_var))
	    add_local_decl (cfun, old_var);
	  if ((!optimize || debug_info_level > DINFO_LEVEL_TERSE)
	      && !DECL_IGNORED_P (old_var)
	      && nonlocalized_list)
	    VEC_safe_push (tree, gc, *nonlocalized_list, old_var);
	  continue;
	}

      /* Remap the variable.  */
      new_var = remap_decl (old_var, id);

      /* If we didn't remap this variable, we can't mess with its
	 TREE_CHAIN.  If we remapped this variable to the return slot, it's
	 already declared somewhere else, so don't declare it here.  */

      if (new_var == id->retvar)
	;
      else if (!new_var)
	{
	  if ((!optimize || debug_info_level > DINFO_LEVEL_TERSE)
	      && !DECL_IGNORED_P (old_var)
	      && nonlocalized_list)
	    VEC_safe_push (tree, gc, *nonlocalized_list, old_var);
	}
      else
	{
	  gcc_assert (DECL_P (new_var));
	  DECL_CHAIN (new_var) = new_decls;
	  new_decls = new_var;

	  /* Also copy value-expressions.  */
	  if (TREE_CODE (new_var) == VAR_DECL
	      && DECL_HAS_VALUE_EXPR_P (new_var))
	    {
	      tree tem = DECL_VALUE_EXPR (new_var);
	      bool old_regimplify = id->regimplify;
	      id->remapping_type_depth++;
	      walk_tree (&tem, copy_tree_body_r, id, NULL);
	      id->remapping_type_depth--;
	      id->regimplify = old_regimplify;
	      SET_DECL_VALUE_EXPR (new_var, tem);
	    }
	}
    }

  return nreverse (new_decls);
}
/* Copy the BLOCK to contain remapped versions of the variables
   therein.  And hook the new block into the block-tree.  */

static void
remap_block (tree *block, copy_body_data *id)
{
  tree old_block;
  tree new_block;

  /* Make the new block.  */
  old_block = *block;
  new_block = make_node (BLOCK);
  TREE_USED (new_block) = TREE_USED (old_block);
  BLOCK_ABSTRACT_ORIGIN (new_block) = old_block;
  BLOCK_SOURCE_LOCATION (new_block) = BLOCK_SOURCE_LOCATION (old_block);
  BLOCK_NONLOCALIZED_VARS (new_block)
    = VEC_copy (tree, gc, BLOCK_NONLOCALIZED_VARS (old_block));
  *block = new_block;

  /* Remap its variables.  */
  BLOCK_VARS (new_block) = remap_decls (BLOCK_VARS (old_block),
					&BLOCK_NONLOCALIZED_VARS (new_block),
					id);

  if (id->transform_lang_insert_block)
    id->transform_lang_insert_block (new_block);

  /* Remember the remapped block.  */
  insert_decl_map (id, old_block, new_block);
}

/* Copy the whole block tree and root it in id->block.  */

static tree
remap_blocks (tree block, copy_body_data *id)
{
  tree t;
  tree new_tree = block;

  if (!block)
    return NULL;

  remap_block (&new_tree, id);
  gcc_assert (new_tree != block);
  for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
    prepend_lexical_block (new_tree, remap_blocks (t, id));
  /* Blocks are in arbitrary order, but make things slightly prettier
     by not swapping their order when producing the copy.  */
  BLOCK_SUBBLOCKS (new_tree) = blocks_nreverse (BLOCK_SUBBLOCKS (new_tree));
  return new_tree;
}
static void
copy_statement_list (tree *tp)
{
  tree_stmt_iterator oi, ni;
  tree new_tree;

  new_tree = alloc_stmt_list ();
  ni = tsi_start (new_tree);
  oi = tsi_start (*tp);
  TREE_TYPE (new_tree) = TREE_TYPE (*tp);
  *tp = new_tree;

  for (; !tsi_end_p (oi); tsi_next (&oi))
    {
      tree stmt = tsi_stmt (oi);
      if (TREE_CODE (stmt) == STATEMENT_LIST)
	/* This copy is not redundant; tsi_link_after will smash this
	   STATEMENT_LIST into the end of the one we're building, and we
	   don't want to do that with the original.  */
	copy_statement_list (&stmt);
      tsi_link_after (&ni, stmt, TSI_CONTINUE_LINKING);
    }
}

static void
copy_bind_expr (tree *tp, int *walk_subtrees, copy_body_data *id)
{
  tree block = BIND_EXPR_BLOCK (*tp);
  /* Copy (and replace) the statement.  */
  copy_tree_r (tp, walk_subtrees, NULL);
  if (block)
    {
      remap_block (&block, id);
      BIND_EXPR_BLOCK (*tp) = block;
    }

  if (BIND_EXPR_VARS (*tp))
    /* This will remap a lot of the same decls again, but this should be
       harmless.  */
    BIND_EXPR_VARS (*tp) = remap_decls (BIND_EXPR_VARS (*tp), NULL, id);
}


/* Create a new gimple_seq by remapping all the statements in BODY
   using the inlining information in ID.  */

static gimple_seq
remap_gimple_seq (gimple_seq body, copy_body_data *id)
{
  gimple_stmt_iterator si;
  gimple_seq new_body = NULL;

  for (si = gsi_start (body); !gsi_end_p (si); gsi_next (&si))
    {
      gimple new_stmt = remap_gimple_stmt (gsi_stmt (si), id);
      gimple_seq_add_stmt (&new_body, new_stmt);
    }

  return new_body;
}
/* Copy a GIMPLE_BIND statement STMT, remapping all the symbols in its
   block using the mapping information in ID.  */

static gimple
copy_gimple_bind (gimple stmt, copy_body_data *id)
{
  gimple new_bind;
  tree new_block, new_vars;
  gimple_seq body, new_body;

  /* Copy the statement.  Note that we purposely don't use copy_stmt
     here because we need to remap statements as we copy.  */
  body = gimple_bind_body (stmt);
  new_body = remap_gimple_seq (body, id);

  new_block = gimple_bind_block (stmt);
  if (new_block)
    remap_block (&new_block, id);

  /* This will remap a lot of the same decls again, but this should be
     harmless.  */
  new_vars = gimple_bind_vars (stmt);
  if (new_vars)
    new_vars = remap_decls (new_vars, NULL, id);

  new_bind = gimple_build_bind (new_vars, new_body, new_block);

  return new_bind;
}
/* Remap the GIMPLE operand pointed to by *TP.  DATA is really a
   'struct walk_stmt_info *'.  DATA->INFO is a 'copy_body_data *'.
   WALK_SUBTREES is used to indicate to walk_gimple_op whether to keep
   recursing into the child nodes of *TP.  */

static tree
remap_gimple_op_r (tree *tp, int *walk_subtrees, void *data)
{
  struct walk_stmt_info *wi_p = (struct walk_stmt_info *) data;
  copy_body_data *id = (copy_body_data *) wi_p->info;
  tree fn = id->src_fn;

  if (TREE_CODE (*tp) == SSA_NAME)
    {
      *tp = remap_ssa_name (*tp, id);
      *walk_subtrees = 0;
      return NULL;
    }
  else if (auto_var_in_fn_p (*tp, fn))
    {
      /* Local variables and labels need to be replaced by equivalent
	 variables.  We don't want to copy static variables; there's
	 only one of those, no matter how many times we inline the
	 containing function.  Similarly for globals from an outer
	 function.  */
      tree new_decl;

      /* Remap the declaration.  */
      new_decl = remap_decl (*tp, id);
      gcc_assert (new_decl);
      /* Replace this variable with the copy.  */
      STRIP_TYPE_NOPS (new_decl);
      /* ??? The C++ front end uses a zero void * pointer to initialize
	 any other type.  This confuses the middle-end type verification.
	 As cloned bodies do not go through gimplification again the fixup
	 there doesn't trigger.  */
      if (TREE_CODE (new_decl) == INTEGER_CST
	  && !useless_type_conversion_p (TREE_TYPE (*tp), TREE_TYPE (new_decl)))
	new_decl = fold_convert (TREE_TYPE (*tp), new_decl);
      *tp = new_decl;
      *walk_subtrees = 0;
    }
  else if (TREE_CODE (*tp) == STATEMENT_LIST)
    gcc_unreachable ();
  else if (TREE_CODE (*tp) == SAVE_EXPR)
    gcc_unreachable ();
  else if (TREE_CODE (*tp) == LABEL_DECL
	   && (!DECL_CONTEXT (*tp)
	       || decl_function_context (*tp) == id->src_fn))
    /* These may need to be remapped for EH handling.  */
    *tp = remap_decl (*tp, id);
  else if (TREE_CODE (*tp) == FIELD_DECL)
    {
      /* If the enclosing record type is variably_modified_type_p, the field
	 has already been remapped.  Otherwise, it need not be.  */
      tree *n = (tree *) pointer_map_contains (id->decl_map, *tp);
      if (n)
	*tp = *n;
      *walk_subtrees = 0;
    }
  else if (TYPE_P (*tp))
    /* Types may need remapping as well.  */
    *tp = remap_type (*tp, id);
  else if (CONSTANT_CLASS_P (*tp))
    {
      /* If this is a constant, we have to copy the node iff the type
	 will be remapped.  copy_tree_r will not copy a constant.  */
      tree new_type = remap_type (TREE_TYPE (*tp), id);

      if (new_type == TREE_TYPE (*tp))
	*walk_subtrees = 0;

      else if (TREE_CODE (*tp) == INTEGER_CST)
	*tp = build_int_cst_wide (new_type, TREE_INT_CST_LOW (*tp),
				  TREE_INT_CST_HIGH (*tp));
      else
	{
	  *tp = copy_node (*tp);
	  TREE_TYPE (*tp) = new_type;
	}
    }
  else
    {
      /* Otherwise, just copy the node.  Note that copy_tree_r already
	 knows not to copy VAR_DECLs, etc., so this is safe.  */

      /* We should never have TREE_BLOCK set on non-statements.  */
      if (EXPR_P (*tp))
	gcc_assert (!TREE_BLOCK (*tp));

      if (TREE_CODE (*tp) == MEM_REF)
	{
	  tree ptr = TREE_OPERAND (*tp, 0);
	  tree type = remap_type (TREE_TYPE (*tp), id);
	  tree old = *tp;

	  /* We need to re-canonicalize MEM_REFs from inline substitutions
	     that can happen when a pointer argument is an ADDR_EXPR.
	     Recurse here manually to allow that.  */
	  walk_tree (&ptr, remap_gimple_op_r, data, NULL);
	  *tp = fold_build2 (MEM_REF, type,
			     ptr, TREE_OPERAND (*tp, 1));
	  TREE_THIS_NOTRAP (*tp) = TREE_THIS_NOTRAP (old);
	  TREE_THIS_VOLATILE (*tp) = TREE_THIS_VOLATILE (old);
	  TREE_NO_WARNING (*tp) = TREE_NO_WARNING (old);
	  *walk_subtrees = 0;
	  return NULL;
	}

      /* Here is the "usual case".  Copy this tree node, and then
	 tweak some special cases.  */
      copy_tree_r (tp, walk_subtrees, NULL);

      if (TREE_CODE (*tp) != OMP_CLAUSE)
	TREE_TYPE (*tp) = remap_type (TREE_TYPE (*tp), id);

      if (TREE_CODE (*tp) == TARGET_EXPR && TREE_OPERAND (*tp, 3))
	{
	  /* The copied TARGET_EXPR has never been expanded, even if the
	     original node was expanded already.  */
	  TREE_OPERAND (*tp, 1) = TREE_OPERAND (*tp, 3);
	  TREE_OPERAND (*tp, 3) = NULL_TREE;
	}
      else if (TREE_CODE (*tp) == ADDR_EXPR)
	{
	  /* Variable substitution need not be simple.  In particular,
	     the MEM_REF substitution above.  Make sure that
	     TREE_CONSTANT and friends are up-to-date.  But make sure
	     to not improperly set TREE_BLOCK on some sub-expressions.  */
	  int invariant = is_gimple_min_invariant (*tp);
	  tree block = id->block;
	  id->block = NULL_TREE;
	  walk_tree (&TREE_OPERAND (*tp, 0), remap_gimple_op_r, data, NULL);
	  id->block = block;
	  recompute_tree_invariant_for_addr_expr (*tp);

	  /* If this used to be invariant, but is not any longer,
	     then regimplification is probably needed.  */
	  if (invariant && !is_gimple_min_invariant (*tp))
	    id->regimplify = true;

	  *walk_subtrees = 0;
	}
    }

  /* Keep iterating.  */
  return NULL_TREE;
}
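
/* Example of the MEM_REF re-canonicalization above (a sketch): if the
   caller passes &a for a pointer parameter P, the copied body may
   contain MEM[p, off] with P already remapped to &a.  Rebuilding the
   MEM_REF with fold_build2 lets it fold back to a direct reference
   into A instead of leaving a dereference of a taken address.  */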
/* Called from copy_body via walk_tree.  DATA is really a
   `copy_body_data *'.  */

tree
copy_tree_body_r (tree *tp, int *walk_subtrees, void *data)
{
  copy_body_data *id = (copy_body_data *) data;
  tree fn = id->src_fn;
  tree new_block;

  /* Begin by recognizing trees that we'll completely rewrite for the
     inlining context.  Our output for these trees is completely
     different from our input (e.g. RETURN_EXPR is deleted and morphs
     into an edge).  Further down, we'll handle trees that get
     duplicated and/or tweaked.  */

  /* When requested, RETURN_EXPRs should be transformed to just the
     contained MODIFY_EXPR.  The branch semantics of the return will
     be handled elsewhere by manipulating the CFG rather than a statement.  */
  if (TREE_CODE (*tp) == RETURN_EXPR && id->transform_return_to_modify)
    {
      tree assignment = TREE_OPERAND (*tp, 0);

      /* If we're returning something, just turn that into an
	 assignment to the equivalent of the original RESULT_DECL.
	 If the "assignment" is just the result decl, the result
	 decl has already been set (e.g. a recent "foo (&result_decl,
	 ...)"); just toss the entire RETURN_EXPR.  */
      if (assignment && TREE_CODE (assignment) == MODIFY_EXPR)
	{
	  /* Replace the RETURN_EXPR with (a copy of) the
	     MODIFY_EXPR hanging underneath.  */
	  *tp = copy_node (assignment);
	}
      else /* Else the RETURN_EXPR returns no value.  */
	{
	  *tp = NULL;
	  return (tree) (void *)1;
	}
    }
  else if (TREE_CODE (*tp) == SSA_NAME)
    {
      *tp = remap_ssa_name (*tp, id);
      *walk_subtrees = 0;
      return NULL;
    }

  /* Local variables and labels need to be replaced by equivalent
     variables.  We don't want to copy static variables; there's only
     one of those, no matter how many times we inline the containing
     function.  Similarly for globals from an outer function.  */
  else if (auto_var_in_fn_p (*tp, fn))
    {
      tree new_decl;

      /* Remap the declaration.  */
      new_decl = remap_decl (*tp, id);
      gcc_assert (new_decl);
      /* Replace this variable with the copy.  */
      STRIP_TYPE_NOPS (new_decl);
      *tp = new_decl;
      *walk_subtrees = 0;
    }
  else if (TREE_CODE (*tp) == STATEMENT_LIST)
    copy_statement_list (tp);
  else if (TREE_CODE (*tp) == SAVE_EXPR
	   || TREE_CODE (*tp) == TARGET_EXPR)
    remap_save_expr (tp, id->decl_map, walk_subtrees);
  else if (TREE_CODE (*tp) == LABEL_DECL
	   && (! DECL_CONTEXT (*tp)
	       || decl_function_context (*tp) == id->src_fn))
    /* These may need to be remapped for EH handling.  */
    *tp = remap_decl (*tp, id);
  else if (TREE_CODE (*tp) == BIND_EXPR)
    copy_bind_expr (tp, walk_subtrees, id);
  /* Types may need remapping as well.  */
  else if (TYPE_P (*tp))
    *tp = remap_type (*tp, id);

  /* If this is a constant, we have to copy the node iff the type will be
     remapped.  copy_tree_r will not copy a constant.  */
  else if (CONSTANT_CLASS_P (*tp))
    {
      tree new_type = remap_type (TREE_TYPE (*tp), id);

      if (new_type == TREE_TYPE (*tp))
	*walk_subtrees = 0;

      else if (TREE_CODE (*tp) == INTEGER_CST)
	*tp = build_int_cst_wide (new_type, TREE_INT_CST_LOW (*tp),
				  TREE_INT_CST_HIGH (*tp));
      else
	{
	  *tp = copy_node (*tp);
	  TREE_TYPE (*tp) = new_type;
	}
    }

  /* Otherwise, just copy the node.  Note that copy_tree_r already
     knows not to copy VAR_DECLs, etc., so this is safe.  */
  else
    {
      /* Here we handle trees that are not completely rewritten.
	 First we detect some inlining-induced bogosities for
	 discarding.  */
      if (TREE_CODE (*tp) == MODIFY_EXPR
	  && TREE_OPERAND (*tp, 0) == TREE_OPERAND (*tp, 1)
	  && (auto_var_in_fn_p (TREE_OPERAND (*tp, 0), fn)))
	{
	  /* Some assignments VAR = VAR; don't generate any rtl code
	     and thus don't count as variable modification.  Avoid
	     keeping bogosities like 0 = 0.  */
	  tree decl = TREE_OPERAND (*tp, 0), value;
	  tree *n;

	  n = (tree *) pointer_map_contains (id->decl_map, decl);
	  if (n)
	    {
	      value = *n;
	      STRIP_TYPE_NOPS (value);
	      if (TREE_CONSTANT (value) || TREE_READONLY (value))
		{
		  *tp = build_empty_stmt (EXPR_LOCATION (*tp));
		  return copy_tree_body_r (tp, walk_subtrees, data);
		}
	    }
	}
      else if (TREE_CODE (*tp) == INDIRECT_REF)
	{
	  /* Get rid of *& from inline substitutions that can happen when a
	     pointer argument is an ADDR_EXPR.  */
	  tree decl = TREE_OPERAND (*tp, 0);
	  tree *n;

	  n = (tree *) pointer_map_contains (id->decl_map, decl);
	  if (n)
	    {
	      tree new_tree;
	      tree old;
	      /* If we happen to get an ADDR_EXPR in *N, strip
		 it manually here as we'll eventually get ADDR_EXPRs
		 which lie about their types pointed to.  In this case
		 build_fold_indirect_ref wouldn't strip the INDIRECT_REF,
		 but we absolutely rely on that.  As fold_indirect_ref
		 does other useful transformations, try that first, though.  */
	      tree type = TREE_TYPE (TREE_TYPE (*n));
	      if (id->do_not_unshare)
		new_tree = *n;
	      else
		new_tree = unshare_expr (*n);
	      old = *tp;
	      *tp = gimple_fold_indirect_ref (new_tree);
	      if (! *tp)
		{
		  if (TREE_CODE (new_tree) == ADDR_EXPR)
		    {
		      *tp = fold_indirect_ref_1 (EXPR_LOCATION (new_tree),
						 type, new_tree);
		      /* ??? We should either assert here or build
			 a VIEW_CONVERT_EXPR instead of blindly leaking
			 incompatible types to our IL.  */
		      if (! *tp)
			*tp = TREE_OPERAND (new_tree, 0);
		    }
		  else
		    {
		      *tp = build1 (INDIRECT_REF, type, new_tree);
		      TREE_THIS_VOLATILE (*tp) = TREE_THIS_VOLATILE (old);
		      TREE_SIDE_EFFECTS (*tp) = TREE_SIDE_EFFECTS (old);
		      TREE_READONLY (*tp) = TREE_READONLY (old);
		      TREE_THIS_NOTRAP (*tp) = TREE_THIS_NOTRAP (old);
		    }
		}
	      *walk_subtrees = 0;
	      return NULL;
	    }
	}
      else if (TREE_CODE (*tp) == MEM_REF)
	{
	  /* We need to re-canonicalize MEM_REFs from inline substitutions
	     that can happen when a pointer argument is an ADDR_EXPR.  */
	  tree decl = TREE_OPERAND (*tp, 0);
	  tree *n;

	  n = (tree *) pointer_map_contains (id->decl_map, decl);
	  if (n)
	    {
	      tree old = *tp;
	      *tp = fold_build2 (MEM_REF, TREE_TYPE (*tp),
				 unshare_expr (*n), TREE_OPERAND (*tp, 1));
	      TREE_THIS_VOLATILE (*tp) = TREE_THIS_VOLATILE (old);
	      TREE_NO_WARNING (*tp) = TREE_NO_WARNING (old);
	      *walk_subtrees = 0;
	      return NULL;
	    }
	}

      /* Here is the "usual case".  Copy this tree node, and then
	 tweak some special cases.  */
      copy_tree_r (tp, walk_subtrees, NULL);

      /* If EXPR has a block defined, map it to the newly constructed block.
	 When inlining we want EXPRs without a block to appear in the block
	 of the function call if we are not remapping a type.  */
      if (EXPR_P (*tp))
	{
	  new_block = id->remapping_type_depth == 0 ? id->block : NULL;
	  if (TREE_BLOCK (*tp))
	    {
	      tree *n;
	      n = (tree *) pointer_map_contains (id->decl_map,
						 TREE_BLOCK (*tp));
	      gcc_assert (n || id->remapping_type_depth != 0);
	      if (n)
		new_block = *n;
	    }
	  TREE_BLOCK (*tp) = new_block;
	}

      if (TREE_CODE (*tp) != OMP_CLAUSE)
	TREE_TYPE (*tp) = remap_type (TREE_TYPE (*tp), id);

      /* The copied TARGET_EXPR has never been expanded, even if the
	 original node was expanded already.  */
      if (TREE_CODE (*tp) == TARGET_EXPR && TREE_OPERAND (*tp, 3))
	{
	  TREE_OPERAND (*tp, 1) = TREE_OPERAND (*tp, 3);
	  TREE_OPERAND (*tp, 3) = NULL_TREE;
	}

      /* Variable substitution need not be simple.  In particular, the
	 INDIRECT_REF substitution above.  Make sure that TREE_CONSTANT
	 and friends are up-to-date.  */
      else if (TREE_CODE (*tp) == ADDR_EXPR)
	{
	  int invariant = is_gimple_min_invariant (*tp);
	  walk_tree (&TREE_OPERAND (*tp, 0), copy_tree_body_r, id, NULL);

	  /* Handle the case where we substituted an INDIRECT_REF
	     into the operand of the ADDR_EXPR.  */
	  if (TREE_CODE (TREE_OPERAND (*tp, 0)) == INDIRECT_REF)
	    *tp = TREE_OPERAND (TREE_OPERAND (*tp, 0), 0);
	  else
	    recompute_tree_invariant_for_addr_expr (*tp);

	  /* If this used to be invariant, but is not any longer,
	     then regimplification is probably needed.  */
	  if (invariant && !is_gimple_min_invariant (*tp))
	    id->regimplify = true;

	  *walk_subtrees = 0;
	}
    }

  /* Keep iterating.  */
  return NULL_TREE;
}
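
/* Similarly for GENERIC: when a pointer argument P was bound to &x,
   an INDIRECT_REF *p in the copied body becomes *&x, which the code
   above folds back to plain x (a sketch of the common case; the type
   games around ADDR_EXPRs are handled explicitly there).  */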
/* Helper for remap_gimple_stmt.  Given an EH region number for the
   source function, map that to the duplicate EH region number in
   the destination function.  */

static int
remap_eh_region_nr (int old_nr, copy_body_data *id)
{
  eh_region old_r, new_r;
  void **slot;

  old_r = get_eh_region_from_number_fn (id->src_cfun, old_nr);
  slot = pointer_map_contains (id->eh_map, old_r);
  new_r = (eh_region) *slot;

  return new_r->index;
}

/* Similar, but operate on INTEGER_CSTs.  */

static tree
remap_eh_region_tree_nr (tree old_t_nr, copy_body_data *id)
{
  int old_nr, new_nr;

  old_nr = tree_low_cst (old_t_nr, 0);
  new_nr = remap_eh_region_nr (old_nr, id);

  return build_int_cst (integer_type_node, new_nr);
}

/* Helper for copy_bb.  Remap statement STMT using the inlining
   information in ID.  Return the new statement copy.  */

static gimple
remap_gimple_stmt (gimple stmt, copy_body_data *id)
{
  gimple copy = NULL;
  struct walk_stmt_info wi;
  tree new_block;
  bool skip_first = false;

  /* Begin by recognizing trees that we'll completely rewrite for the
     inlining context.  Our output for these trees is completely
     different from our input (e.g. RETURN_EXPR is deleted and morphs
     into an edge).  Further down, we'll handle trees that get
     duplicated and/or tweaked.  */

  /* When requested, GIMPLE_RETURNs should be transformed to just the
     contained GIMPLE_ASSIGN.  The branch semantics of the return will
     be handled elsewhere by manipulating the CFG rather than the
     statement.  */
  if (gimple_code (stmt) == GIMPLE_RETURN && id->transform_return_to_modify)
    {
      tree retval = gimple_return_retval (stmt);

      /* If we're returning something, just turn that into an
	 assignment to the equivalent of the original RESULT_DECL.
	 If RETVAL is just the result decl, the result decl has
	 already been set (e.g. a recent "foo (&result_decl, ...)");
	 just toss the entire GIMPLE_RETURN.  */
      if (retval
	  && (TREE_CODE (retval) != RESULT_DECL
	      && (TREE_CODE (retval) != SSA_NAME
		  || ! SSA_NAME_VAR (retval)
		  || TREE_CODE (SSA_NAME_VAR (retval)) != RESULT_DECL)))
	{
	  copy = gimple_build_assign (id->retvar, retval);
	  /* id->retvar is already substituted.  Skip it on later remapping.  */
	  skip_first = true;
	}
      else
	return gimple_build_nop ();
    }
  else if (gimple_has_substatements (stmt))
    {
      gimple_seq s1, s2;

      /* When cloning bodies from the C++ front end, we will be handed bodies
	 in High GIMPLE form.  Handle here all the High GIMPLE statements that
	 have embedded statements.  */
      switch (gimple_code (stmt))
	{
	case GIMPLE_BIND:
	  copy = copy_gimple_bind (stmt, id);
	  break;

	case GIMPLE_CATCH:
	  s1 = remap_gimple_seq (gimple_catch_handler (stmt), id);
	  copy = gimple_build_catch (gimple_catch_types (stmt), s1);
	  break;

	case GIMPLE_EH_FILTER:
	  s1 = remap_gimple_seq (gimple_eh_filter_failure (stmt), id);
	  copy = gimple_build_eh_filter (gimple_eh_filter_types (stmt), s1);
	  break;

	case GIMPLE_TRY:
	  s1 = remap_gimple_seq (gimple_try_eval (stmt), id);
	  s2 = remap_gimple_seq (gimple_try_cleanup (stmt), id);
	  copy = gimple_build_try (s1, s2, gimple_try_kind (stmt));
	  break;

	case GIMPLE_WITH_CLEANUP_EXPR:
	  s1 = remap_gimple_seq (gimple_wce_cleanup (stmt), id);
	  copy = gimple_build_wce (s1);
	  break;

	case GIMPLE_OMP_PARALLEL:
	  s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
	  copy = gimple_build_omp_parallel
		   (s1,
		    gimple_omp_parallel_clauses (stmt),
		    gimple_omp_parallel_child_fn (stmt),
		    gimple_omp_parallel_data_arg (stmt));
	  break;

	case GIMPLE_OMP_TASK:
	  s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
	  copy = gimple_build_omp_task
		   (s1,
		    gimple_omp_task_clauses (stmt),
		    gimple_omp_task_child_fn (stmt),
		    gimple_omp_task_data_arg (stmt),
		    gimple_omp_task_copy_fn (stmt),
		    gimple_omp_task_arg_size (stmt),
		    gimple_omp_task_arg_align (stmt));
	  break;

	case GIMPLE_OMP_FOR:
	  s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
	  s2 = remap_gimple_seq (gimple_omp_for_pre_body (stmt), id);
	  copy = gimple_build_omp_for (s1, gimple_omp_for_clauses (stmt),
				       gimple_omp_for_collapse (stmt), s2);
	  {
	    size_t i;
	    for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
	      {
		gimple_omp_for_set_index (copy, i,
					  gimple_omp_for_index (stmt, i));
		gimple_omp_for_set_initial (copy, i,
					    gimple_omp_for_initial (stmt, i));
		gimple_omp_for_set_final (copy, i,
					  gimple_omp_for_final (stmt, i));
		gimple_omp_for_set_incr (copy, i,
					 gimple_omp_for_incr (stmt, i));
		gimple_omp_for_set_cond (copy, i,
					 gimple_omp_for_cond (stmt, i));
	      }
	  }
	  break;

	case GIMPLE_OMP_MASTER:
	  s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
	  copy = gimple_build_omp_master (s1);
	  break;

	case GIMPLE_OMP_ORDERED:
	  s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
	  copy = gimple_build_omp_ordered (s1);
	  break;

	case GIMPLE_OMP_SECTION:
	  s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
	  copy = gimple_build_omp_section (s1);
	  break;

	case GIMPLE_OMP_SECTIONS:
	  s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
	  copy = gimple_build_omp_sections
		   (s1, gimple_omp_sections_clauses (stmt));
	  break;

	case GIMPLE_OMP_SINGLE:
	  s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
	  copy = gimple_build_omp_single
		   (s1, gimple_omp_single_clauses (stmt));
	  break;

	case GIMPLE_OMP_CRITICAL:
	  s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
	  copy
	    = gimple_build_omp_critical (s1, gimple_omp_critical_name (stmt));
	  break;

	case GIMPLE_TRANSACTION:
	  s1 = remap_gimple_seq (gimple_transaction_body (stmt), id);
	  copy = gimple_build_transaction (s1, gimple_transaction_label (stmt));
	  gimple_transaction_set_subcode (copy, gimple_transaction_subcode (stmt));
	  break;

	default:
	  gcc_unreachable ();
	}
    }
  else
    {
      if (gimple_assign_copy_p (stmt)
	  && gimple_assign_lhs (stmt) == gimple_assign_rhs1 (stmt)
	  && auto_var_in_fn_p (gimple_assign_lhs (stmt), id->src_fn))
	{
	  /* Here we handle statements that are not completely rewritten.
	     First we detect some inlining-induced bogosities for
	     discarding.  */

	  /* Some assignments VAR = VAR; don't generate any rtl code
	     and thus don't count as variable modification.  Avoid
	     keeping bogosities like 0 = 0.  */
	  tree decl = gimple_assign_lhs (stmt), value;
	  tree *n;

	  n = (tree *) pointer_map_contains (id->decl_map, decl);
	  if (n)
	    {
	      value = *n;
	      STRIP_TYPE_NOPS (value);
	      if (TREE_CONSTANT (value) || TREE_READONLY (value))
		return gimple_build_nop ();
	    }
	}

      if (gimple_debug_bind_p (stmt))
	{
	  copy = gimple_build_debug_bind (gimple_debug_bind_get_var (stmt),
					  gimple_debug_bind_get_value (stmt),
					  stmt);
	  VEC_safe_push (gimple, heap, id->debug_stmts, copy);
	  return copy;
	}
      if (gimple_debug_source_bind_p (stmt))
	{
	  copy = gimple_build_debug_source_bind
		   (gimple_debug_source_bind_get_var (stmt),
		    gimple_debug_source_bind_get_value (stmt), stmt);
	  VEC_safe_push (gimple, heap, id->debug_stmts, copy);
	  return copy;
	}

      /* Create a new deep copy of the statement.  */
      copy = gimple_copy (stmt);

      /* Remap the region numbers for __builtin_eh_{pointer,filter},
	 RESX and EH_DISPATCH.  */
      if (id->eh_map)
	switch (gimple_code (copy))
	  {
	  case GIMPLE_CALL:
	    {
	      tree r, fndecl = gimple_call_fndecl (copy);
	      if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
		switch (DECL_FUNCTION_CODE (fndecl))
		  {
		  case BUILT_IN_EH_COPY_VALUES:
		    r = gimple_call_arg (copy, 1);
		    r = remap_eh_region_tree_nr (r, id);
		    gimple_call_set_arg (copy, 1, r);
		    /* FALLTHRU */

		  case BUILT_IN_EH_POINTER:
		  case BUILT_IN_EH_FILTER:
		    r = gimple_call_arg (copy, 0);
		    r = remap_eh_region_tree_nr (r, id);
		    gimple_call_set_arg (copy, 0, r);
		    break;

		  default:
		    break;
		  }

	      /* Reset alias info if we didn't apply measures to
		 keep it valid over inlining by setting DECL_PT_UID.  */
	      if (!id->src_cfun->gimple_df
		  || !id->src_cfun->gimple_df->ipa_pta)
		gimple_call_reset_alias_info (copy);
	    }
	    break;

	  case GIMPLE_RESX:
	    {
	      int r = gimple_resx_region (copy);
	      r = remap_eh_region_nr (r, id);
	      gimple_resx_set_region (copy, r);
	    }
	    break;

	  case GIMPLE_EH_DISPATCH:
	    {
	      int r = gimple_eh_dispatch_region (copy);
	      r = remap_eh_region_nr (r, id);
	      gimple_eh_dispatch_set_region (copy, r);
	    }
	    break;

	  default:
	    break;
	  }
    }

  /* If STMT has a block defined, map it to the newly constructed
     block.  When inlining we want statements without a block to
     appear in the block of the function call.  */
  new_block = id->block;
  if (gimple_block (copy))
    {
      tree *n;
      n = (tree *) pointer_map_contains (id->decl_map, gimple_block (copy));
      gcc_assert (n);
      new_block = *n;
    }

  gimple_set_block (copy, new_block);

  if (gimple_debug_bind_p (copy) || gimple_debug_source_bind_p (copy))
    return copy;

  /* Remap all the operands in COPY.  */
  memset (&wi, 0, sizeof (wi));
  wi.info = id;
  if (skip_first)
    walk_tree (gimple_op_ptr (copy, 1), remap_gimple_op_r, &wi, NULL);
  else
    walk_gimple_op (copy, remap_gimple_op_r, &wi);

  /* Clear the copied virtual operands.  We are not remapping them here
     but are going to recreate them from scratch.  */
  if (gimple_has_mem_ops (copy))
    {
      gimple_set_vdef (copy, NULL_TREE);
      gimple_set_vuse (copy, NULL_TREE);
    }

  return copy;
}
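
/* EH renumbering example (a sketch): if the callee's EH region 2 was
   duplicated into the caller as region 5, a call such as
   __builtin_eh_pointer (2) in the copied body is rewritten above to
   __builtin_eh_pointer (5), and GIMPLE_RESX / GIMPLE_EH_DISPATCH
   statements get their region numbers updated the same way.  */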
/* Copy a basic block, scaling the profile accordingly.  Edges will be
   taken care of later.  */

static basic_block
copy_bb (copy_body_data *id, basic_block bb, int frequency_scale,
	 gcov_type count_scale)
{
  gimple_stmt_iterator gsi, copy_gsi, seq_gsi;
  basic_block copy_basic_block;
  tree decl;
  gcov_type freq;
  basic_block prev;

  /* Search for a previously copied basic block.  */
  prev = bb->prev_bb;
  while (!prev->aux)
    prev = prev->prev_bb;

  /* create_basic_block() will append every new block to
     basic_block_info automatically.  */
  copy_basic_block = create_basic_block (NULL, (void *) 0,
					 (basic_block) prev->aux);
  copy_basic_block->count = bb->count * count_scale / REG_BR_PROB_BASE;

  /* We are going to rebuild frequencies from scratch.  These values
     are of only minor importance for driving canonicalize_loop_headers.  */
  freq = ((gcov_type)bb->frequency * frequency_scale / REG_BR_PROB_BASE);

  /* We recompute frequencies after inlining, so this is quite safe.  */
  if (freq > BB_FREQ_MAX)
    freq = BB_FREQ_MAX;
  copy_basic_block->frequency = freq;

  copy_gsi = gsi_start_bb (copy_basic_block);

  for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      gimple stmt = gsi_stmt (gsi);
      gimple orig_stmt = stmt;

      id->regimplify = false;
      stmt = remap_gimple_stmt (stmt, id);
      if (gimple_nop_p (stmt))
	continue;

      gimple_duplicate_stmt_histograms (cfun, stmt, id->src_cfun, orig_stmt);
      seq_gsi = copy_gsi;

      /* With return slot optimization we can end up with
	 non-gimple (foo *)&this->m; fix that here.  */
      if (is_gimple_assign (stmt)
	  && gimple_assign_rhs_code (stmt) == NOP_EXPR
	  && !is_gimple_val (gimple_assign_rhs1 (stmt)))
	{
	  tree new_rhs;
	  new_rhs = force_gimple_operand_gsi (&seq_gsi,
					      gimple_assign_rhs1 (stmt),
					      true, NULL, false,
					      GSI_CONTINUE_LINKING);
	  gimple_assign_set_rhs1 (stmt, new_rhs);
	  id->regimplify = false;
	}

      gsi_insert_after (&seq_gsi, stmt, GSI_NEW_STMT);

      if (id->regimplify)
	gimple_regimplify_operands (stmt, &seq_gsi);

      /* If copy_basic_block was empty at the start of this iteration,
	 call gsi_start_bb again to get at the newly added statements.  */
      if (gsi_end_p (copy_gsi))
	copy_gsi = gsi_start_bb (copy_basic_block);
      else
	gsi_next (&copy_gsi);

      /* Process the new statement.  The call to gimple_regimplify_operands
	 possibly turned the statement into multiple statements, so we
	 need to process all of them.  */
      do
	{
	  tree fn;

	  stmt = gsi_stmt (copy_gsi);
	  if (is_gimple_call (stmt)
	      && gimple_call_va_arg_pack_p (stmt)
	      && id->gimple_call)
	    {
	      /* __builtin_va_arg_pack () should be replaced by
		 all arguments corresponding to ... in the caller.  */
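	      /* E.g. for an always_inline wrapper

		   int f (int x, ...) { return g (x, __builtin_va_arg_pack ()); }

		 called as f (1, 2, 3), the call to G below is rebuilt
		 as g (1, 2, 3): the named arguments are copied first
		 and the caller's anonymous arguments are appended
		 (a sketch of the documented builtin semantics).  */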
	      tree p;
	      gimple new_call;
	      VEC(tree, heap) *argarray;
	      size_t nargs = gimple_call_num_args (id->gimple_call);
	      size_t n;

	      for (p = DECL_ARGUMENTS (id->src_fn); p; p = DECL_CHAIN (p))
		nargs--;

	      /* Create the new array of arguments.  */
	      n = nargs + gimple_call_num_args (stmt);
	      argarray = VEC_alloc (tree, heap, n);
	      VEC_safe_grow (tree, heap, argarray, n);

	      /* Copy all the arguments before '...'  */
	      memcpy (VEC_address (tree, argarray),
		      gimple_call_arg_ptr (stmt, 0),
		      gimple_call_num_args (stmt) * sizeof (tree));

	      /* Append the arguments passed in '...'  */
	      memcpy (VEC_address (tree, argarray) + gimple_call_num_args (stmt),
		      gimple_call_arg_ptr (id->gimple_call, 0)
		      + (gimple_call_num_args (id->gimple_call) - nargs),
		      nargs * sizeof (tree));

	      new_call = gimple_build_call_vec (gimple_call_fn (stmt),
						argarray);

	      VEC_free (tree, heap, argarray);

	      /* Copy all GIMPLE_CALL flags, location and block, except
		 GF_CALL_VA_ARG_PACK.  */
	      gimple_call_copy_flags (new_call, stmt);
	      gimple_call_set_va_arg_pack (new_call, false);
	      gimple_set_location (new_call, gimple_location (stmt));
	      gimple_set_block (new_call, gimple_block (stmt));
	      gimple_call_set_lhs (new_call, gimple_call_lhs (stmt));

	      gsi_replace (&copy_gsi, new_call, false);
	      stmt = new_call;
	    }
	  else if (is_gimple_call (stmt)
		   && id->gimple_call
		   && (decl = gimple_call_fndecl (stmt))
		   && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
		   && DECL_FUNCTION_CODE (decl) == BUILT_IN_VA_ARG_PACK_LEN)
	    {
	      /* __builtin_va_arg_pack_len () should be replaced by
		 the number of anonymous arguments.  */
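	      /* E.g. with f (int x, ...) called as f (1, 2, 3), the
		 builtin folds to 2 here: NARGS below starts as the
		 caller's argument count and is decremented once per
		 named parameter of the callee (a sketch).  */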
	      size_t nargs = gimple_call_num_args (id->gimple_call);
	      tree count, p;
	      gimple new_stmt;

	      for (p = DECL_ARGUMENTS (id->src_fn); p; p = DECL_CHAIN (p))
		nargs--;

	      count = build_int_cst (integer_type_node, nargs);
	      new_stmt = gimple_build_assign (gimple_call_lhs (stmt), count);
	      gsi_replace (&copy_gsi, new_stmt, false);
	      stmt = new_stmt;
	    }

	  /* Statements produced by inlining can be unfolded, especially
	     when we constant propagated some operands.  We can't fold
	     them right now for two reasons:
	     1) folding requires SSA_NAME_DEF_STMTs to be correct
	     2) we can't change function calls to builtins.
	     So we just mark the statement for later folding.  We mark
	     all new statements, instead of just the statements that have
	     changed by some nontrivial substitution, so that even
	     statements made foldable indirectly are updated.  If this
	     turns out to be expensive, copy_body can be told to watch
	     for nontrivial changes.  */
	  if (id->statements_to_fold)
	    pointer_set_insert (id->statements_to_fold, stmt);

	  /* We're duplicating a CALL_EXPR.  Find any corresponding
	     callgraph edges and update or duplicate them.  */
	  if (is_gimple_call (stmt))
	    {
	      struct cgraph_edge *edge;
	      int flags;

	      switch (id->transform_call_graph_edges)
		{
		case CB_CGE_DUPLICATE:
		  edge = cgraph_edge (id->src_node, orig_stmt);
		  if (edge)
		    {
		      int edge_freq = edge->frequency;
		      edge = cgraph_clone_edge (edge, id->dst_node, stmt,
						gimple_uid (stmt),
						REG_BR_PROB_BASE,
						CGRAPH_FREQ_BASE,
						true);
		      /* We could also just rescale the frequency, but
			 doing so would introduce roundoff errors and make
			 the verifier unhappy.  */
		      edge->frequency
			= compute_call_stmt_bb_frequency (id->dst_node->symbol.decl,
							  copy_basic_block);
		      if (dump_file
			  && profile_status_for_function (cfun) != PROFILE_ABSENT
			  && (edge_freq > edge->frequency + 10
			      || edge_freq < edge->frequency - 10))
			{
			  fprintf (dump_file, "Edge frequency estimated by "
				   "cgraph %i diverges from inliner's estimate %i\n",
				   edge_freq,
				   edge->frequency);
			  fprintf (dump_file,
				   "Orig bb: %i, orig bb freq %i, new bb freq %i\n",
				   bb->index,
				   bb->frequency,
				   copy_basic_block->frequency);
			}
		      stmt = cgraph_redirect_edge_call_stmt_to_callee (edge);
		    }
		  break;

		case CB_CGE_MOVE_CLONES:
		  cgraph_set_call_stmt_including_clones (id->dst_node,
							 orig_stmt, stmt);
		  edge = cgraph_edge (id->dst_node, stmt);
		  break;

		case CB_CGE_MOVE:
		  edge = cgraph_edge (id->dst_node, orig_stmt);
		  if (edge)
		    cgraph_set_call_stmt (edge, stmt);
		  break;

		default:
		  gcc_unreachable ();
		}

	      /* Constant propagation on arguments done during inlining
		 may create new direct calls.  Produce an edge for them.  */
	      if ((!edge
		   || (edge->indirect_inlining_edge
		       && id->transform_call_graph_edges == CB_CGE_MOVE_CLONES))
		  && id->dst_node->analyzed
		  && (fn = gimple_call_fndecl (stmt)) != NULL)
		{
		  struct cgraph_node *dest = cgraph_get_node (fn);

		  /* We have a missing edge in the callgraph.  This can happen
		     when previous inlining turned an indirect call into a
		     direct call by constant propagating arguments or we are
		     producing a dead clone (for further cloning).  In all
		     other cases we hit a bug (incorrect node sharing is the
		     most common reason for missing edges).  */
		  gcc_assert (!dest->analyzed
			      || dest->symbol.address_taken
			      || !id->src_node->analyzed
			      || !id->dst_node->analyzed);
		  if (id->transform_call_graph_edges == CB_CGE_MOVE_CLONES)
		    cgraph_create_edge_including_clones
		      (id->dst_node, dest, orig_stmt, stmt, bb->count,
		       compute_call_stmt_bb_frequency (id->dst_node->symbol.decl,
						       copy_basic_block),
		       CIF_ORIGINALLY_INDIRECT_CALL);
		  else
		    cgraph_create_edge (id->dst_node, dest, stmt,
					bb->count,
					compute_call_stmt_bb_frequency
					  (id->dst_node->symbol.decl,
					   copy_basic_block))->inline_failed
		      = CIF_ORIGINALLY_INDIRECT_CALL;
		  if (dump_file)
		    {
		      fprintf (dump_file, "Created new direct edge to %s\n",
			       cgraph_node_name (dest));
		    }
		}

	      flags = gimple_call_flags (stmt);
	      if (flags & ECF_MAY_BE_ALLOCA)
		cfun->calls_alloca = true;
	      if (flags & ECF_RETURNS_TWICE)
		cfun->calls_setjmp = true;
	    }

	  maybe_duplicate_eh_stmt_fn (cfun, stmt, id->src_cfun, orig_stmt,
				      id->eh_map, id->eh_lp_nr);

	  if (gimple_in_ssa_p (cfun) && !is_gimple_debug (stmt))
	    {
	      ssa_op_iter i;
	      tree def;

	      FOR_EACH_SSA_TREE_OPERAND (def, stmt, i, SSA_OP_DEF)
		if (TREE_CODE (def) == SSA_NAME)
		  SSA_NAME_DEF_STMT (def) = stmt;
	    }

	  gsi_next (&copy_gsi);
	}
      while (!gsi_end_p (copy_gsi));

      copy_gsi = gsi_last_bb (copy_basic_block);
    }

  return copy_basic_block;
}
/* Inserting a Single Entry Multiple Exit region in SSA form into code in
   SSA form is quite easy, since the dominator relationship for the old
   basic blocks does not change.

   There is however an exception: inlining might change the dominator
   relation across EH edges from basic blocks within the inlined function
   to landing pads in the function we inline into.

   The function fills in PHI_RESULTs of such PHI nodes if they refer
   to gimple regs.  Otherwise, the function marks the PHI_RESULT of such
   PHI nodes for renaming.  For non-gimple regs, renaming is safe: the
   EH edges are abnormal and SSA_NAME_OCCURS_IN_ABNORMAL_PHI must be
   set, and this means that there will be no overlapping live ranges
   for the underlying symbol.

   This might change in the future if we allow redirecting of EH edges;
   we might then want to change the way we build the CFG pre-inlining
   to include all the possible edges.  */

static void
update_ssa_across_abnormal_edges (basic_block bb, basic_block ret_bb,
				  bool can_throw, bool nonlocal_goto)
{
  edge e;
  edge_iterator ei;

  FOR_EACH_EDGE (e, ei, bb->succs)
    if (!e->dest->aux
	|| ((basic_block)e->dest->aux)->index == ENTRY_BLOCK)
      {
	gimple phi;
	gimple_stmt_iterator si;

	if (!nonlocal_goto)
	  gcc_assert (e->flags & EDGE_EH);

	if (!can_throw)
	  gcc_assert (!(e->flags & EDGE_EH));

	for (si = gsi_start_phis (e->dest); !gsi_end_p (si); gsi_next (&si))
	  {
	    edge re;

	    phi = gsi_stmt (si);

	    /* There shouldn't be any PHI nodes in the ENTRY_BLOCK.  */
	    gcc_assert (!e->dest->aux);

	    gcc_assert ((e->flags & EDGE_EH)
			|| SSA_NAME_OCCURS_IN_ABNORMAL_PHI (PHI_RESULT (phi)));

	    if (virtual_operand_p (PHI_RESULT (phi)))
	      {
		mark_virtual_operands_for_renaming (cfun);
		continue;
	      }

	    re = find_edge (ret_bb, e->dest);
	    gcc_assert (re);
	    gcc_assert ((re->flags & (EDGE_EH | EDGE_ABNORMAL))
			== (e->flags & (EDGE_EH | EDGE_ABNORMAL)));

	    SET_USE (PHI_ARG_DEF_PTR_FROM_EDGE (phi, e),
		     USE_FROM_PTR (PHI_ARG_DEF_PTR_FROM_EDGE (phi, re)));
	  }
      }
}
1851 /* Copy edges from BB into its copy constructed earlier, scale the profile
1852 accordingly. EH edges will be taken care of later. Assume the aux
1853 pointers point to the copies of each BB. Return true if any
1854 debug stmts are left after a statement that must end the basic block. */
1856 static bool
1857 copy_edges_for_bb (basic_block bb, gcov_type count_scale, basic_block ret_bb)
1859 basic_block new_bb = (basic_block) bb->aux;
1860 edge_iterator ei;
1861 edge old_edge;
1862 gimple_stmt_iterator si;
1863 int flags;
1864 bool need_debug_cleanup = false;
1866 /* Use the indices from the original blocks to create edges for the
1867 new ones. */
1868 FOR_EACH_EDGE (old_edge, ei, bb->succs)
1869 if (!(old_edge->flags & EDGE_EH))
1871 edge new_edge;
1873 flags = old_edge->flags;
1875 /* Return edges do get a FALLTHRU flag when they get inlined. */
1876 if (old_edge->dest->index == EXIT_BLOCK && !old_edge->flags
1877 && old_edge->dest->aux != EXIT_BLOCK_PTR)
1878 flags |= EDGE_FALLTHRU;
1879 new_edge = make_edge (new_bb, (basic_block) old_edge->dest->aux, flags);
1880 new_edge->count = old_edge->count * count_scale / REG_BR_PROB_BASE;
1881 new_edge->probability = old_edge->probability;
1884 if (bb->index == ENTRY_BLOCK || bb->index == EXIT_BLOCK)
1885 return false;
1887 for (si = gsi_start_bb (new_bb); !gsi_end_p (si);)
1889 gimple copy_stmt;
1890 bool can_throw, nonlocal_goto;
1892 copy_stmt = gsi_stmt (si);
1893 if (!is_gimple_debug (copy_stmt))
1894 update_stmt (copy_stmt);
1896 /* Do this before the possible split_block. */
1897 gsi_next (&si);
1899 /* If this tree could throw an exception, there are two
1900 cases where we need to add abnormal edge(s): the
1901 tree wasn't in a region and there is a "current
1902 region" in the caller; or the original tree had
1903 EH edges. In both cases split the block after the tree,
1904 and add abnormal edge(s) as needed; we need both
1905 those from the callee and the caller.
1906 We check whether the copy can throw, because the const
1907 propagation can change an INDIRECT_REF which throws
1908 into a COMPONENT_REF which doesn't. If the copy
1909 can throw, the original could also throw. */
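	  /* Illustrative example (editorial): with -fnon-call-exceptions a
	     copied load such as
	       tmp_1 = *ptr_2;
	     may throw internally.  If further statements follow it in the
	     copied block, the block is split right after the load so that
	     the new EH edge originates from a block ending in the
	     throwing statement.  */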
1910 can_throw = stmt_can_throw_internal (copy_stmt);
1911 nonlocal_goto = stmt_can_make_abnormal_goto (copy_stmt);
1913 if (can_throw || nonlocal_goto)
1915 if (!gsi_end_p (si))
1917 while (!gsi_end_p (si) && is_gimple_debug (gsi_stmt (si)))
1918 gsi_next (&si);
1919 if (gsi_end_p (si))
1920 need_debug_cleanup = true;
1922 if (!gsi_end_p (si))
1923 /* Note that bb's predecessor edges aren't necessarily
1924 right at this point; split_block doesn't care. */
1926 edge e = split_block (new_bb, copy_stmt);
1928 new_bb = e->dest;
1929 new_bb->aux = e->src->aux;
1930 si = gsi_start_bb (new_bb);
1934 if (gimple_code (copy_stmt) == GIMPLE_EH_DISPATCH)
1935 make_eh_dispatch_edges (copy_stmt);
1936 else if (can_throw)
1937 make_eh_edges (copy_stmt);
1939 if (nonlocal_goto)
1940 make_abnormal_goto_edges (gimple_bb (copy_stmt), true);
1942 if ((can_throw || nonlocal_goto)
1943 && gimple_in_ssa_p (cfun))
1944 update_ssa_across_abnormal_edges (gimple_bb (copy_stmt), ret_bb,
1945 can_throw, nonlocal_goto);
1947 return need_debug_cleanup;
1950 /* Copy the PHIs. All blocks and edges have been copied, some blocks
1951 were possibly split and new outgoing EH edges inserted.
1952 BB points to the block of the original function and AUX pointers link
1953 the original and newly copied blocks. */
1955 static void
1956 copy_phis_for_bb (basic_block bb, copy_body_data *id)
1958 basic_block const new_bb = (basic_block) bb->aux;
1959 edge_iterator ei;
1960 gimple phi;
1961 gimple_stmt_iterator si;
1962 edge new_edge;
1963 bool inserted = false;
1965 for (si = gsi_start_phis (bb); !gsi_end_p (si); gsi_next (&si))
1967 tree res, new_res;
1968 gimple new_phi;
1970 phi = gsi_stmt (si);
1971 res = PHI_RESULT (phi);
1972 new_res = res;
1973 if (!virtual_operand_p (res))
1975 walk_tree (&new_res, copy_tree_body_r, id, NULL);
1976 new_phi = create_phi_node (new_res, new_bb);
1977 FOR_EACH_EDGE (new_edge, ei, new_bb->preds)
1979 edge old_edge = find_edge ((basic_block) new_edge->src->aux, bb);
1980 tree arg;
1981 tree new_arg;
1982 tree block = id->block;
1983 edge_iterator ei2;
1985 /* When doing partial cloning, we allow PHIs on the entry block
1986 as long as all the arguments are the same. Find any input
1987 edge to see which argument to copy. */
1988 if (!old_edge)
1989 FOR_EACH_EDGE (old_edge, ei2, bb->preds)
1990 if (!old_edge->src->aux)
1991 break;
1993 arg = PHI_ARG_DEF_FROM_EDGE (phi, old_edge);
1994 new_arg = arg;
1995 id->block = NULL_TREE;
1996 walk_tree (&new_arg, copy_tree_body_r, id, NULL);
1997 id->block = block;
1998 gcc_assert (new_arg);
1999 /* With return slot optimization we can end up with
2000 non-gimple (foo *)&this->m, fix that here. */
2001 if (TREE_CODE (new_arg) != SSA_NAME
2002 && TREE_CODE (new_arg) != FUNCTION_DECL
2003 && !is_gimple_val (new_arg))
2005 gimple_seq stmts = NULL;
2006 new_arg = force_gimple_operand (new_arg, &stmts, true, NULL);
2007 gsi_insert_seq_on_edge (new_edge, stmts);
2008 inserted = true;
2010 add_phi_arg (new_phi, new_arg, new_edge,
2011 gimple_phi_arg_location_from_edge (phi, old_edge));
2016 /* Commit the delayed edge insertions. */
2017 if (inserted)
2018 FOR_EACH_EDGE (new_edge, ei, new_bb->preds)
2019 gsi_commit_one_edge_insert (new_edge, NULL);
2023 /* Wrapper for remap_decl so it can be used as a callback. */
2025 static tree
2026 remap_decl_1 (tree decl, void *data)
2028 return remap_decl (decl, (copy_body_data *) data);
2031 /* Build the struct function and associated data structures for the new
2032 clone NEW_FNDECL to be built. CALLEE_FNDECL is the original. */
2034 static void
2035 initialize_cfun (tree new_fndecl, tree callee_fndecl, gcov_type count)
2037 struct function *src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
2038 gcov_type count_scale;
2040 if (ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count)
2041 count_scale = (REG_BR_PROB_BASE * count
2042 / ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count);
2043 else
2044 count_scale = REG_BR_PROB_BASE;
2046 /* Register specific tree functions. */
2047 gimple_register_cfg_hooks ();
2049 /* Get clean struct function. */
2050 push_struct_function (new_fndecl);
2052 /* We will rebuild these, so just sanity check that they are empty. */
2053 gcc_assert (VALUE_HISTOGRAMS (cfun) == NULL);
2054 gcc_assert (cfun->local_decls == NULL);
2055 gcc_assert (cfun->cfg == NULL);
2056 gcc_assert (cfun->decl == new_fndecl);
2058 /* Copy items we preserve during cloning. */
2059 cfun->static_chain_decl = src_cfun->static_chain_decl;
2060 cfun->nonlocal_goto_save_area = src_cfun->nonlocal_goto_save_area;
2061 cfun->function_end_locus = src_cfun->function_end_locus;
2062 cfun->curr_properties = src_cfun->curr_properties & ~PROP_loops;
2063 cfun->last_verified = src_cfun->last_verified;
2064 cfun->va_list_gpr_size = src_cfun->va_list_gpr_size;
2065 cfun->va_list_fpr_size = src_cfun->va_list_fpr_size;
2066 cfun->has_nonlocal_label = src_cfun->has_nonlocal_label;
2067 cfun->stdarg = src_cfun->stdarg;
2068 cfun->after_inlining = src_cfun->after_inlining;
2069 cfun->can_throw_non_call_exceptions
2070 = src_cfun->can_throw_non_call_exceptions;
2071 cfun->can_delete_dead_exceptions = src_cfun->can_delete_dead_exceptions;
2072 cfun->returns_struct = src_cfun->returns_struct;
2073 cfun->returns_pcc_struct = src_cfun->returns_pcc_struct;
2075 init_empty_tree_cfg ();
2077 profile_status_for_function (cfun) = profile_status_for_function (src_cfun);
2078 ENTRY_BLOCK_PTR->count =
2079 (ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count * count_scale /
2080 REG_BR_PROB_BASE);
2081 ENTRY_BLOCK_PTR->frequency
2082 = ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->frequency;
2083 EXIT_BLOCK_PTR->count =
2084 (EXIT_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count * count_scale /
2085 REG_BR_PROB_BASE);
2086 EXIT_BLOCK_PTR->frequency =
2087 EXIT_BLOCK_PTR_FOR_FUNCTION (src_cfun)->frequency;
2088 if (src_cfun->eh)
2089 init_eh_for_function ();
2091 if (src_cfun->gimple_df)
2093 init_tree_ssa (cfun);
2094 cfun->gimple_df->in_ssa_p = true;
2095 init_ssa_operands (cfun);
2097 pop_cfun ();
2100 /* Helper function for copy_cfg_body. Move debug stmts from the end
2101 of NEW_BB to the beginning of successor basic blocks when needed. If
2102 the successor has multiple predecessors, reset the debug stmts' bound
2103 values; otherwise keep them. */
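/* For example (editorial sketch): if NEW_BB ends in
     foo ();             <-- may throw
     # DEBUG x => x_1
   the debug bind cannot stay after the statement that must end the
   block, so it is moved (or copied) into each successor; for a
   successor with several predecessors the bound value is reset, since
   it would be valid on only one incoming path.  */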
2105 static void
2106 maybe_move_debug_stmts_to_successors (copy_body_data *id, basic_block new_bb)
2108 edge e;
2109 edge_iterator ei;
2110 gimple_stmt_iterator si = gsi_last_nondebug_bb (new_bb);
2112 if (gsi_end_p (si)
2113 || gsi_one_before_end_p (si)
2114 || !(stmt_can_throw_internal (gsi_stmt (si))
2115 || stmt_can_make_abnormal_goto (gsi_stmt (si))))
2116 return;
2118 FOR_EACH_EDGE (e, ei, new_bb->succs)
2120 gimple_stmt_iterator ssi = gsi_last_bb (new_bb);
2121 gimple_stmt_iterator dsi = gsi_after_labels (e->dest);
2122 while (is_gimple_debug (gsi_stmt (ssi)))
2124 gimple stmt = gsi_stmt (ssi), new_stmt;
2125 tree var;
2126 tree value;
2128 /* For the last edge move the debug stmts instead of copying
2129 them. */
2130 if (ei_one_before_end_p (ei))
2132 si = ssi;
2133 gsi_prev (&ssi);
2134 if (!single_pred_p (e->dest) && gimple_debug_bind_p (stmt))
2135 gimple_debug_bind_reset_value (stmt);
2136 gsi_remove (&si, false);
2137 gsi_insert_before (&dsi, stmt, GSI_SAME_STMT);
2138 continue;
2141 if (gimple_debug_bind_p (stmt))
2143 var = gimple_debug_bind_get_var (stmt);
2144 if (single_pred_p (e->dest))
2146 value = gimple_debug_bind_get_value (stmt);
2147 value = unshare_expr (value);
2149 else
2150 value = NULL_TREE;
2151 new_stmt = gimple_build_debug_bind (var, value, stmt);
2153 else if (gimple_debug_source_bind_p (stmt))
2155 var = gimple_debug_source_bind_get_var (stmt);
2156 value = gimple_debug_source_bind_get_value (stmt);
2157 new_stmt = gimple_build_debug_source_bind (var, value, stmt);
2159 else
2160 gcc_unreachable ();
2161 gsi_insert_before (&dsi, new_stmt, GSI_SAME_STMT);
2162 VEC_safe_push (gimple, heap, id->debug_stmts, new_stmt);
2163 gsi_prev (&ssi);
2168 /* Make a copy of the body of FN so that it can be inserted inline in
2169 another function. Walks FN via CFG, returns new fndecl. */
2171 static tree
2172 copy_cfg_body (copy_body_data * id, gcov_type count, int frequency_scale,
2173 basic_block entry_block_map, basic_block exit_block_map,
2174 bitmap blocks_to_copy, basic_block new_entry)
2176 tree callee_fndecl = id->src_fn;
2177 /* Original cfun for the callee, doesn't change. */
2178 struct function *src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
2179 struct function *cfun_to_copy;
2180 basic_block bb;
2181 tree new_fndecl = NULL;
2182 bool need_debug_cleanup = false;
2183 gcov_type count_scale;
2184 int last;
2185 int incoming_frequency = 0;
2186 gcov_type incoming_count = 0;
2188 if (ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count)
2189 count_scale = (REG_BR_PROB_BASE * count
2190 / ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count);
2191 else
2192 count_scale = REG_BR_PROB_BASE;
2194 /* Register specific tree functions. */
2195 gimple_register_cfg_hooks ();
2197 /* If we are inlining just a region of the function, make sure to connect
2198 the new entry to ENTRY_BLOCK_PTR. Since the new entry can be part of a
2199 loop, we must compute the frequency and probability of ENTRY_BLOCK_PTR
2200 based on the frequencies and probabilities of edges incoming from the nonduplicated region. */
2201 if (new_entry)
2203 edge e;
2204 edge_iterator ei;
2206 FOR_EACH_EDGE (e, ei, new_entry->preds)
2207 if (!e->src->aux)
2209 incoming_frequency += EDGE_FREQUENCY (e);
2210 incoming_count += e->count;
2212 incoming_count = incoming_count * count_scale / REG_BR_PROB_BASE;
2213 incoming_frequency
2214 = incoming_frequency * frequency_scale / REG_BR_PROB_BASE;
2215 ENTRY_BLOCK_PTR->count = incoming_count;
2216 ENTRY_BLOCK_PTR->frequency = incoming_frequency;
2219 /* Must have a CFG here at this point. */
2220 gcc_assert (ENTRY_BLOCK_PTR_FOR_FUNCTION
2221 (DECL_STRUCT_FUNCTION (callee_fndecl)));
2223 cfun_to_copy = id->src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
2225 ENTRY_BLOCK_PTR_FOR_FUNCTION (cfun_to_copy)->aux = entry_block_map;
2226 EXIT_BLOCK_PTR_FOR_FUNCTION (cfun_to_copy)->aux = exit_block_map;
2227 entry_block_map->aux = ENTRY_BLOCK_PTR_FOR_FUNCTION (cfun_to_copy);
2228 exit_block_map->aux = EXIT_BLOCK_PTR_FOR_FUNCTION (cfun_to_copy);
2230 /* Duplicate any exception-handling regions. */
2231 if (cfun->eh)
2232 id->eh_map = duplicate_eh_regions (cfun_to_copy, NULL, id->eh_lp_nr,
2233 remap_decl_1, id);
2235 /* Use aux pointers to map the original blocks to their copies. */
2236 FOR_EACH_BB_FN (bb, cfun_to_copy)
2237 if (!blocks_to_copy || bitmap_bit_p (blocks_to_copy, bb->index))
2239 basic_block new_bb = copy_bb (id, bb, frequency_scale, count_scale);
2240 bb->aux = new_bb;
2241 new_bb->aux = bb;
2244 last = last_basic_block;
2246 /* Now that we've duplicated the blocks, duplicate their edges. */
2247 FOR_ALL_BB_FN (bb, cfun_to_copy)
2248 if (!blocks_to_copy
2249 || (bb->index > 0 && bitmap_bit_p (blocks_to_copy, bb->index)))
2250 need_debug_cleanup |= copy_edges_for_bb (bb, count_scale, exit_block_map);
2252 if (new_entry)
2254 edge e = make_edge (entry_block_map, (basic_block)new_entry->aux, EDGE_FALLTHRU);
2255 e->probability = REG_BR_PROB_BASE;
2256 e->count = incoming_count;
2259 if (gimple_in_ssa_p (cfun))
2260 FOR_ALL_BB_FN (bb, cfun_to_copy)
2261 if (!blocks_to_copy
2262 || (bb->index > 0 && bitmap_bit_p (blocks_to_copy, bb->index)))
2263 copy_phis_for_bb (bb, id);
2265 FOR_ALL_BB_FN (bb, cfun_to_copy)
2266 if (bb->aux)
2268 if (need_debug_cleanup
2269 && bb->index != ENTRY_BLOCK
2270 && bb->index != EXIT_BLOCK)
2271 maybe_move_debug_stmts_to_successors (id, (basic_block) bb->aux);
2272 ((basic_block)bb->aux)->aux = NULL;
2273 bb->aux = NULL;
2276 /* Zero out AUX fields of blocks newly created during EH edge
2277 insertion. */
2278 for (; last < last_basic_block; last++)
2280 if (need_debug_cleanup)
2281 maybe_move_debug_stmts_to_successors (id, BASIC_BLOCK (last));
2282 BASIC_BLOCK (last)->aux = NULL;
2284 entry_block_map->aux = NULL;
2285 exit_block_map->aux = NULL;
2287 if (id->eh_map)
2289 pointer_map_destroy (id->eh_map);
2290 id->eh_map = NULL;
2293 return new_fndecl;
2296 /* Copy the debug STMT using ID. We deal with these statements in a
2297 special way: if any variable in their VALUE expression wasn't
2298 remapped yet, we won't remap it, because that would get decl uids
2299 out of sync, causing codegen differences between -g and -g0. If
2300 this arises, we drop the VALUE expression altogether. */
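/* For instance (editorial note): remapping a variable first seen in a
   debug bind VALUE would allocate a fresh DECL_UID that exists only
   when compiling with -g; dropping the value instead keeps the decl
   uids, and therefore the generated code, identical to a -g0 build.  */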
2302 static void
2303 copy_debug_stmt (gimple stmt, copy_body_data *id)
2305 tree t, *n;
2306 struct walk_stmt_info wi;
2308 t = id->block;
2309 if (gimple_block (stmt))
2311 n = (tree *) pointer_map_contains (id->decl_map, gimple_block (stmt));
2312 if (n)
2313 t = *n;
2315 gimple_set_block (stmt, t);
2317 /* Remap all the operands in COPY. */
2318 memset (&wi, 0, sizeof (wi));
2319 wi.info = id;
2321 processing_debug_stmt = 1;
2323 if (gimple_debug_source_bind_p (stmt))
2324 t = gimple_debug_source_bind_get_var (stmt);
2325 else
2326 t = gimple_debug_bind_get_var (stmt);
2328 if (TREE_CODE (t) == PARM_DECL && id->debug_map
2329 && (n = (tree *) pointer_map_contains (id->debug_map, t)))
2331 gcc_assert (TREE_CODE (*n) == VAR_DECL);
2332 t = *n;
2334 else if (TREE_CODE (t) == VAR_DECL
2335 && !is_global_var (t)
2336 && !pointer_map_contains (id->decl_map, t))
2337 /* T is a non-localized variable. */;
2338 else
2339 walk_tree (&t, remap_gimple_op_r, &wi, NULL);
2341 if (gimple_debug_bind_p (stmt))
2343 gimple_debug_bind_set_var (stmt, t);
2345 if (gimple_debug_bind_has_value_p (stmt))
2346 walk_tree (gimple_debug_bind_get_value_ptr (stmt),
2347 remap_gimple_op_r, &wi, NULL);
2349 /* Punt if any decl couldn't be remapped. */
2350 if (processing_debug_stmt < 0)
2351 gimple_debug_bind_reset_value (stmt);
2353 else if (gimple_debug_source_bind_p (stmt))
2355 gimple_debug_source_bind_set_var (stmt, t);
2356 walk_tree (gimple_debug_source_bind_get_value_ptr (stmt),
2357 remap_gimple_op_r, &wi, NULL);
2360 processing_debug_stmt = 0;
2362 update_stmt (stmt);
2365 /* Process deferred debug stmts. In order to give values better odds
2366 of being successfully remapped, we delay the processing of debug
2367 stmts until all other stmts that might require remapping are
2368 processed. */
2370 static void
2371 copy_debug_stmts (copy_body_data *id)
2373 size_t i;
2374 gimple stmt;
2376 if (!id->debug_stmts)
2377 return;
2379 FOR_EACH_VEC_ELT (gimple, id->debug_stmts, i, stmt)
2380 copy_debug_stmt (stmt, id);
2382 VEC_free (gimple, heap, id->debug_stmts);
2385 /* Make a copy of the body of SRC_FN so that it can be inserted inline in
2386 another function. */
2388 static tree
2389 copy_tree_body (copy_body_data *id)
2391 tree fndecl = id->src_fn;
2392 tree body = DECL_SAVED_TREE (fndecl);
2394 walk_tree (&body, copy_tree_body_r, id, NULL);
2396 return body;
2399 /* Make a copy of the body of FN so that it can be inserted inline in
2400 another function. */
2402 static tree
2403 copy_body (copy_body_data *id, gcov_type count, int frequency_scale,
2404 basic_block entry_block_map, basic_block exit_block_map,
2405 bitmap blocks_to_copy, basic_block new_entry)
2407 tree fndecl = id->src_fn;
2408 tree body;
2410 /* If this body has a CFG, walk CFG and copy. */
2411 gcc_assert (ENTRY_BLOCK_PTR_FOR_FUNCTION (DECL_STRUCT_FUNCTION (fndecl)));
2412 body = copy_cfg_body (id, count, frequency_scale, entry_block_map, exit_block_map,
2413 blocks_to_copy, new_entry);
2414 copy_debug_stmts (id);
2416 return body;
2419 /* Return true if VALUE is an ADDR_EXPR of an automatic variable
2420 defined in function FN, or of a data member thereof. */
2422 static bool
2423 self_inlining_addr_expr (tree value, tree fn)
2425 tree var;
2427 if (TREE_CODE (value) != ADDR_EXPR)
2428 return false;
2430 var = get_base_address (TREE_OPERAND (value, 0));
2432 return var && auto_var_in_fn_p (var, fn);
2435 /* Append to BB a debug annotation that binds VAR to VALUE, inheriting
2436 lexical block and line number information from BASE_STMT, if given,
2437 or from the last stmt of the block otherwise. */
2439 static gimple
2440 insert_init_debug_bind (copy_body_data *id,
2441 basic_block bb, tree var, tree value,
2442 gimple base_stmt)
2444 gimple note;
2445 gimple_stmt_iterator gsi;
2446 tree tracked_var;
2448 if (!gimple_in_ssa_p (id->src_cfun))
2449 return NULL;
2451 if (!MAY_HAVE_DEBUG_STMTS)
2452 return NULL;
2454 tracked_var = target_for_debug_bind (var);
2455 if (!tracked_var)
2456 return NULL;
2458 if (bb)
2460 gsi = gsi_last_bb (bb);
2461 if (!base_stmt && !gsi_end_p (gsi))
2462 base_stmt = gsi_stmt (gsi);
2465 note = gimple_build_debug_bind (tracked_var, value, base_stmt);
2467 if (bb)
2469 if (!gsi_end_p (gsi))
2470 gsi_insert_after (&gsi, note, GSI_SAME_STMT);
2471 else
2472 gsi_insert_before (&gsi, note, GSI_SAME_STMT);
2475 return note;
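/* Insert the parameter-initialization statement INIT_STMT, if any, at
   the end of basic block BB, regimplifying its operands as needed and
   emitting a matching debug bind for the initialized value.  */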
2478 static void
2479 insert_init_stmt (copy_body_data *id, basic_block bb, gimple init_stmt)
2481 /* If VAR represents a zero-sized variable, it's possible that the
2482 assignment statement may result in no gimple statements. */
2483 if (init_stmt)
2485 gimple_stmt_iterator si = gsi_last_bb (bb);
2487 /* We can end up with init statements that store to a non-register
2488 from a rhs with a conversion. Handle that here by forcing the
2489 rhs into a temporary. gimple_regimplify_operands is not
2490 prepared to do this for us. */
2491 if (!is_gimple_debug (init_stmt)
2492 && !is_gimple_reg (gimple_assign_lhs (init_stmt))
2493 && is_gimple_reg_type (TREE_TYPE (gimple_assign_lhs (init_stmt)))
2494 && gimple_assign_rhs_class (init_stmt) == GIMPLE_UNARY_RHS)
2496 tree rhs = build1 (gimple_assign_rhs_code (init_stmt),
2497 gimple_expr_type (init_stmt),
2498 gimple_assign_rhs1 (init_stmt));
2499 rhs = force_gimple_operand_gsi (&si, rhs, true, NULL_TREE, false,
2500 GSI_NEW_STMT);
2501 gimple_assign_set_rhs_code (init_stmt, TREE_CODE (rhs));
2502 gimple_assign_set_rhs1 (init_stmt, rhs);
2504 gsi_insert_after (&si, init_stmt, GSI_NEW_STMT);
2505 gimple_regimplify_operands (init_stmt, &si);
2507 if (!is_gimple_debug (init_stmt) && MAY_HAVE_DEBUG_STMTS)
2509 tree def = gimple_assign_lhs (init_stmt);
2510 insert_init_debug_bind (id, bb, def, def, init_stmt);
2515 /* Initialize parameter P with VALUE. If needed, produce an init statement
2516 at the end of BB. When BB is NULL, we return the init statement to be
2517 output later. */
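/* For illustration (editorial sketch): when inlining
     int f (int p) { return p + 1; }
   at a call f (x_3), P is either mapped directly to x_3 when no setup
   is needed, or to a fresh local variable initialized by a statement
   such as p.1 = x_3 emitted at the end of BB.  */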
2518 static gimple
2519 setup_one_parameter (copy_body_data *id, tree p, tree value, tree fn,
2520 basic_block bb, tree *vars)
2522 gimple init_stmt = NULL;
2523 tree var;
2524 tree rhs = value;
2525 tree def = (gimple_in_ssa_p (cfun)
2526 ? ssa_default_def (id->src_cfun, p) : NULL);
2528 if (value
2529 && value != error_mark_node
2530 && !useless_type_conversion_p (TREE_TYPE (p), TREE_TYPE (value)))
2532 /* If we can match up types by promotion/demotion do so. */
2533 if (fold_convertible_p (TREE_TYPE (p), value))
2534 rhs = fold_convert (TREE_TYPE (p), value);
2535 else
2537 /* ??? For valid programs we should not end up here.
2538 Still if we end up with truly mismatched types here, fall back
2539 to using a VIEW_CONVERT_EXPR or a literal zero to not leak invalid
2540 GIMPLE to the following passes. */
2541 if (!is_gimple_reg_type (TREE_TYPE (value))
2542 || TYPE_SIZE (TREE_TYPE (p)) == TYPE_SIZE (TREE_TYPE (value)))
2543 rhs = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (p), value);
2544 else
2545 rhs = build_zero_cst (TREE_TYPE (p));
2549 /* Make an equivalent VAR_DECL. Note that we must NOT remap the type
2550 here since the type of this decl must be visible to the calling
2551 function. */
2552 var = copy_decl_to_var (p, id);
2554 /* Declare this new variable. */
2555 DECL_CHAIN (var) = *vars;
2556 *vars = var;
2558 /* Make gimplifier happy about this variable. */
2559 DECL_SEEN_IN_BIND_EXPR_P (var) = 1;
2561 /* If the parameter is never assigned to and has no SSA_NAMEs created,
2562 we would not need to create a new variable here at all, if it
2563 weren't for debug info. Still, we can just use the argument
2564 value. */
2565 if (TREE_READONLY (p)
2566 && !TREE_ADDRESSABLE (p)
2567 && value && !TREE_SIDE_EFFECTS (value)
2568 && !def)
2570 /* We may produce non-gimple trees by adding NOPs or introduce
2571 invalid sharing when the operand is not really constant.
2572 It is not a big deal to prohibit constant propagation here, as
2573 we will constant propagate in the DOM1 pass anyway. */
2574 if (is_gimple_min_invariant (value)
2575 && useless_type_conversion_p (TREE_TYPE (p),
2576 TREE_TYPE (value))
2577 /* We have to be very careful about ADDR_EXPR. Make sure
2578 the base variable isn't a local variable of the inlined
2579 function, e.g., when doing recursive inlining, direct or
2580 mutually-recursive or whatever, which is why we don't
2581 just test whether fn == current_function_decl. */
2582 && ! self_inlining_addr_expr (value, fn))
2584 insert_decl_map (id, p, value);
2585 insert_debug_decl_map (id, p, var);
2586 return insert_init_debug_bind (id, bb, var, value, NULL);
2590 /* Register the VAR_DECL as the equivalent for the PARM_DECL;
2591 that way, when the PARM_DECL is encountered, it will be
2592 automatically replaced by the VAR_DECL. */
2593 insert_decl_map (id, p, var);
2595 /* Even if P was TREE_READONLY, the new VAR should not be.
2596 In the original code, we would have constructed a
2597 temporary, and then the function body would have never
2598 changed the value of P. However, now, we will be
2599 constructing VAR directly. The constructor body may
2600 change its value multiple times as it is being
2601 constructed. Therefore, it must not be TREE_READONLY;
2602 the back-end assumes that a TREE_READONLY variable is
2603 assigned to only once. */
2604 if (TYPE_NEEDS_CONSTRUCTING (TREE_TYPE (p)))
2605 TREE_READONLY (var) = 0;
2607 /* If there is no setup required and we are in SSA, take the easy route
2608 replacing all SSA names representing the function parameter by the
2609 SSA name passed to the function.
2611 We need to construct a map for the variable anyway, as it might be
2612 used in different SSA names when the parameter is set in the function.
2614 Do the replacement at -O0 for const arguments replaced by a constant.
2615 This is important for builtin_constant_p and other constructs that
2616 require a constant argument to be visible in the inlined function body. */
2617 if (gimple_in_ssa_p (cfun) && rhs && def && is_gimple_reg (p)
2618 && (optimize
2619 || (TREE_READONLY (p)
2620 && is_gimple_min_invariant (rhs)))
2621 && (TREE_CODE (rhs) == SSA_NAME
2622 || is_gimple_min_invariant (rhs))
2623 && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (def))
2625 insert_decl_map (id, def, rhs);
2626 return insert_init_debug_bind (id, bb, var, rhs, NULL);
2629 /* If the value of the argument is never used, don't bother initializing
2630 it. */
2631 if (optimize && gimple_in_ssa_p (cfun) && !def && is_gimple_reg (p))
2633 gcc_assert (!value || !TREE_SIDE_EFFECTS (value));
2634 return insert_init_debug_bind (id, bb, var, rhs, NULL);
2637 /* Initialize this VAR_DECL from the equivalent argument. Convert
2638 the argument to the proper type in case it was promoted. */
2639 if (value)
2641 if (rhs == error_mark_node)
2643 insert_decl_map (id, p, var);
2644 return insert_init_debug_bind (id, bb, var, rhs, NULL);
2647 STRIP_USELESS_TYPE_CONVERSION (rhs);
2649 /* If we are in SSA form properly remap the default definition
2650 or assign to a dummy SSA name if the parameter is unused and
2651 we are not optimizing. */
2652 if (gimple_in_ssa_p (cfun) && is_gimple_reg (p))
2654 if (def)
2656 def = remap_ssa_name (def, id);
2657 init_stmt = gimple_build_assign (def, rhs);
2658 SSA_NAME_IS_DEFAULT_DEF (def) = 0;
2659 set_ssa_default_def (cfun, var, NULL);
2661 else if (!optimize)
2663 def = make_ssa_name (var, NULL);
2664 init_stmt = gimple_build_assign (def, rhs);
2667 else
2668 init_stmt = gimple_build_assign (var, rhs);
2670 if (bb && init_stmt)
2671 insert_init_stmt (id, bb, init_stmt);
2673 return init_stmt;
2676 /* Generate code to initialize the parameters of the function at the
2677 top of the stack in ID from the GIMPLE_CALL STMT. */
2679 static void
2680 initialize_inlined_parameters (copy_body_data *id, gimple stmt,
2681 tree fn, basic_block bb)
2683 tree parms;
2684 size_t i;
2685 tree p;
2686 tree vars = NULL_TREE;
2687 tree static_chain = gimple_call_chain (stmt);
2689 /* Figure out what the parameters are. */
2690 parms = DECL_ARGUMENTS (fn);
2692 /* Loop through the parameter declarations, replacing each with an
2693 equivalent VAR_DECL, appropriately initialized. */
2694 for (p = parms, i = 0; p; p = DECL_CHAIN (p), i++)
2696 tree val;
2697 val = i < gimple_call_num_args (stmt) ? gimple_call_arg (stmt, i) : NULL;
2698 setup_one_parameter (id, p, val, fn, bb, &vars);
2700 /* After remapping the parameters, remap their types. This has to be done
2701 in a second loop over all parameters to appropriately remap
2702 variable sized arrays when the size is specified in a
2703 parameter following the array. */
2704 for (p = parms, i = 0; p; p = DECL_CHAIN (p), i++)
2706 tree *varp = (tree *) pointer_map_contains (id->decl_map, p);
2707 if (varp
2708 && TREE_CODE (*varp) == VAR_DECL)
2710 tree def = (gimple_in_ssa_p (cfun) && is_gimple_reg (p)
2711 ? ssa_default_def (id->src_cfun, p) : NULL);
2712 tree var = *varp;
2713 TREE_TYPE (var) = remap_type (TREE_TYPE (var), id);
2714 /* Also remap the default definition if it was remapped
2715 to the default definition of the parameter replacement
2716 by the parameter setup. */
2717 if (def)
2719 tree *defp = (tree *) pointer_map_contains (id->decl_map, def);
2720 if (defp
2721 && TREE_CODE (*defp) == SSA_NAME
2722 && SSA_NAME_VAR (*defp) == var)
2723 TREE_TYPE (*defp) = TREE_TYPE (var);
2728 /* Initialize the static chain. */
2729 p = DECL_STRUCT_FUNCTION (fn)->static_chain_decl;
2730 gcc_assert (fn != current_function_decl);
2731 if (p)
2733 /* No static chain? Seems like a bug in tree-nested.c. */
2734 gcc_assert (static_chain);
2736 setup_one_parameter (id, p, static_chain, fn, bb, &vars);
2739 declare_inline_vars (id->block, vars);
2743 /* Declare a return variable to replace the RESULT_DECL for the
2744 function we are calling. An appropriate DECL_STMT is returned.
2745 The USE_STMT is filled to contain a use of the declaration to
2746 indicate the return value of the function.
2748 RETURN_SLOT, if non-null, is the place where to store the result. It
2749 is set only for CALL_EXPR_RETURN_SLOT_OPT. MODIFY_DEST, if non-null,
2750 was the LHS of the MODIFY_EXPR to which this call is the RHS.
2752 The return value is a (possibly null) value that holds the result
2753 as seen by the caller. */
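/* For illustration (editorial sketch): given a call statement
     y = f ();
   MODIFY_DEST is y, and when the callee cannot possibly clobber it,
   y itself is reused for the callee's RESULT_DECL; otherwise a fresh
   temporary is declared and returned as the use seen by the caller.  */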
2755 static tree
2756 declare_return_variable (copy_body_data *id, tree return_slot, tree modify_dest,
2757 basic_block entry_bb)
2759 tree callee = id->src_fn;
2760 tree result = DECL_RESULT (callee);
2761 tree callee_type = TREE_TYPE (result);
2762 tree caller_type;
2763 tree var, use;
2765 /* Handle type-mismatches in the function declaration return type
2766 vs. the call expression. */
2767 if (modify_dest)
2768 caller_type = TREE_TYPE (modify_dest);
2769 else
2770 caller_type = TREE_TYPE (TREE_TYPE (callee));
2772 /* We don't need to do anything for functions that don't return anything. */
2773 if (VOID_TYPE_P (callee_type))
2774 return NULL_TREE;
2776 /* If there was a return slot, then the return value is the
2777 dereferenced address of that object. */
2778 if (return_slot)
2780 /* The front end shouldn't have used both return_slot and
2781 a modify expression. */
2782 gcc_assert (!modify_dest);
2783 if (DECL_BY_REFERENCE (result))
2785 tree return_slot_addr = build_fold_addr_expr (return_slot);
2786 STRIP_USELESS_TYPE_CONVERSION (return_slot_addr);
2788 /* We are going to construct *&return_slot and we can't do that
2789 for variables believed not to be addressable.
2791 FIXME: This check can possibly trigger, because values returned
2792 via the return slot optimization are not believed to have their
2793 address taken by alias analysis. */
2794 gcc_assert (TREE_CODE (return_slot) != SSA_NAME);
2795 var = return_slot_addr;
2797 else
2799 var = return_slot;
2800 gcc_assert (TREE_CODE (var) != SSA_NAME);
2801 TREE_ADDRESSABLE (var) |= TREE_ADDRESSABLE (result);
2803 if ((TREE_CODE (TREE_TYPE (result)) == COMPLEX_TYPE
2804 || TREE_CODE (TREE_TYPE (result)) == VECTOR_TYPE)
2805 && !DECL_GIMPLE_REG_P (result)
2806 && DECL_P (var))
2807 DECL_GIMPLE_REG_P (var) = 0;
2808 use = NULL;
2809 goto done;
2812 /* All types requiring non-trivial constructors should have been handled. */
2813 gcc_assert (!TREE_ADDRESSABLE (callee_type));
2815 /* Attempt to avoid creating a new temporary variable. */
2816 if (modify_dest
2817 && TREE_CODE (modify_dest) != SSA_NAME)
2819 bool use_it = false;
2821 /* We can't use MODIFY_DEST if there's type promotion involved. */
2822 if (!useless_type_conversion_p (callee_type, caller_type))
2823 use_it = false;
2825 /* ??? If we're assigning to a variable sized type, then we must
2826 reuse the destination variable, because we've no good way to
2827 create variable sized temporaries at this point. */
2828 else if (TREE_CODE (TYPE_SIZE_UNIT (caller_type)) != INTEGER_CST)
2829 use_it = true;
2831 /* If the callee cannot possibly modify MODIFY_DEST, then we can
2832 reuse it as the result of the call directly. Don't do this if
2833 it would promote MODIFY_DEST to addressable. */
2834 else if (TREE_ADDRESSABLE (result))
2835 use_it = false;
2836 else
2838 tree base_m = get_base_address (modify_dest);
2840 /* If the base isn't a decl, then it's a pointer, and we don't
2841 know where that's going to go. */
2842 if (!DECL_P (base_m))
2843 use_it = false;
2844 else if (is_global_var (base_m))
2845 use_it = false;
2846 else if ((TREE_CODE (TREE_TYPE (result)) == COMPLEX_TYPE
2847 || TREE_CODE (TREE_TYPE (result)) == VECTOR_TYPE)
2848 && !DECL_GIMPLE_REG_P (result)
2849 && DECL_GIMPLE_REG_P (base_m))
2850 use_it = false;
2851 else if (!TREE_ADDRESSABLE (base_m))
2852 use_it = true;
2855 if (use_it)
2857 var = modify_dest;
2858 use = NULL;
2859 goto done;
2863 gcc_assert (TREE_CODE (TYPE_SIZE_UNIT (callee_type)) == INTEGER_CST);
2865 var = copy_result_decl_to_var (result, id);
2866 DECL_SEEN_IN_BIND_EXPR_P (var) = 1;
2868 /* Do not have the rest of GCC warn about this variable as it should
2869 not be visible to the user. */
2870 TREE_NO_WARNING (var) = 1;
2872 declare_inline_vars (id->block, var);
2874 /* Build the use expr. If the return type of the function was
2875 promoted, convert it back to the expected type. */
2876 use = var;
2877 if (!useless_type_conversion_p (caller_type, TREE_TYPE (var)))
2879 /* If we can match up types by promotion/demotion do so. */
2880 if (fold_convertible_p (caller_type, var))
2881 use = fold_convert (caller_type, var);
2882 else
2884 /* ??? For valid programs we should not end up here.
2885 Still if we end up with truly mismatched types here, fall back
2886 to using a MEM_REF to not leak invalid GIMPLE to the following
2887 passes. */
2888 /* Prevent var from being written into SSA form. */
2889 if (TREE_CODE (TREE_TYPE (var)) == VECTOR_TYPE
2890 || TREE_CODE (TREE_TYPE (var)) == COMPLEX_TYPE)
2891 DECL_GIMPLE_REG_P (var) = false;
2892 else if (is_gimple_reg_type (TREE_TYPE (var)))
2893 TREE_ADDRESSABLE (var) = true;
2894 use = fold_build2 (MEM_REF, caller_type,
2895 build_fold_addr_expr (var),
2896 build_int_cst (ptr_type_node, 0));
2900 STRIP_USELESS_TYPE_CONVERSION (use);
2902 if (DECL_BY_REFERENCE (result))
2904 TREE_ADDRESSABLE (var) = 1;
2905 var = build_fold_addr_expr (var);
2908 done:
2909 /* Register the VAR_DECL as the equivalent for the RESULT_DECL; that
2910 way, when the RESULT_DECL is encountered, it will be
2911 automatically replaced by the VAR_DECL.
2913 When returning by reference, ensure that RESULT_DECL remaps to
2914 gimple_val. */
2915 if (DECL_BY_REFERENCE (result)
2916 && !is_gimple_val (var))
2918 tree temp = create_tmp_var (TREE_TYPE (result), "retvalptr");
2919 insert_decl_map (id, result, temp);
2920 /* When RESULT_DECL is in SSA form, we need to remap and initialize
2921 its default_def SSA_NAME. */
2922 if (gimple_in_ssa_p (id->src_cfun)
2923 && is_gimple_reg (result))
2925 temp = make_ssa_name (temp, NULL);
2926 insert_decl_map (id, ssa_default_def (id->src_cfun, result), temp);
2928 insert_init_stmt (id, entry_bb, gimple_build_assign (temp, var));
2930 else
2931 insert_decl_map (id, result, var);
2933 /* Remember this so we can ignore it in remap_decls. */
2934 id->retvar = var;
2936 return use;
2939 /* Callback through walk_tree. Determine if a DECL_INITIAL makes reference
2940 to a local label. */
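/* E.g. (illustrative): a function containing
     lab:;
     static void *p = &&lab;
   can never be copied, because the label address captured in the
   static initializer cannot be remapped into the copy.  */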
2942 static tree
2943 has_label_address_in_static_1 (tree *nodep, int *walk_subtrees, void *fnp)
2945 tree node = *nodep;
2946 tree fn = (tree) fnp;
2948 if (TREE_CODE (node) == LABEL_DECL && DECL_CONTEXT (node) == fn)
2949 return node;
2951 if (TYPE_P (node))
2952 *walk_subtrees = 0;
2954 return NULL_TREE;
2957 /* Determine if the function can be copied. If so, return NULL. If
2958 not, return a string describing the reason for failure. */
2960 static const char *
2961 copy_forbidden (struct function *fun, tree fndecl)
2963 const char *reason = fun->cannot_be_copied_reason;
2964 tree decl;
2965 unsigned ix;
2967 /* Only examine the function once. */
2968 if (fun->cannot_be_copied_set)
2969 return reason;
2971 /* We cannot copy a function that receives a non-local goto
2972 because we cannot remap the destination label used in the
2973 function that is performing the non-local goto. */
2974 /* ??? Actually, this should be possible, if we work at it.
2975 No doubt there's just a handful of places that simply
2976 assume it doesn't happen and don't substitute properly. */
2977 if (fun->has_nonlocal_label)
2979 reason = G_("function %q+F can never be copied "
2980 "because it receives a non-local goto");
2981 goto fail;
2984 FOR_EACH_LOCAL_DECL (fun, ix, decl)
2985 if (TREE_CODE (decl) == VAR_DECL
2986 && TREE_STATIC (decl)
2987 && !DECL_EXTERNAL (decl)
2988 && DECL_INITIAL (decl)
2989 && walk_tree_without_duplicates (&DECL_INITIAL (decl),
2990 has_label_address_in_static_1,
2991 fndecl))
2993 reason = G_("function %q+F can never be copied because it saves "
2994 "address of local label in a static variable");
2995 goto fail;
2998 fail:
2999 fun->cannot_be_copied_reason = reason;
3000 fun->cannot_be_copied_set = true;
3001 return reason;
3005 static const char *inline_forbidden_reason;
3007 /* A callback for walk_gimple_seq to handle statements. Returns non-null
3008 iff the function cannot be inlined. Also sets the reason why. */
3010 static tree
3011 inline_forbidden_p_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
3012 struct walk_stmt_info *wip)
3014 tree fn = (tree) wip->info;
3015 tree t;
3016 gimple stmt = gsi_stmt (*gsi);
3018 switch (gimple_code (stmt))
3020 case GIMPLE_CALL:
3021 /* Refuse to inline an alloca call unless the user explicitly forced it,
3022 as this may drastically increase the program's memory overhead when the
3023 function using alloca is called in a loop. For the version of GCC in
3024 SPEC2000, inlining into schedule_block caused it to require 2GB of
3025 RAM instead of 256MB. Don't do so for alloca calls emitted for
3026 VLA objects, as those can't cause unbounded growth (they're always
3027 wrapped inside stack_save/stack_restore regions). */
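      /* E.g. (illustrative): inlining
	   void g (void) { char *p = alloca (n); use (p); }
	 into a loop body turns one short-lived allocation into an
	 allocation per iteration, none of which is released until the
	 caller returns.  */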
3028 if (gimple_alloca_call_p (stmt)
3029 && !gimple_call_alloca_for_var_p (stmt)
3030 && !lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn)))
3032 inline_forbidden_reason
3033 = G_("function %q+F can never be inlined because it uses "
3034 "alloca (override using the always_inline attribute)");
3035 *handled_ops_p = true;
3036 return fn;
3039 t = gimple_call_fndecl (stmt);
3040 if (t == NULL_TREE)
3041 break;
3043 /* We cannot inline functions that call setjmp. */
3044 if (setjmp_call_p (t))
3046 inline_forbidden_reason
3047 = G_("function %q+F can never be inlined because it uses setjmp");
3048 *handled_ops_p = true;
3049 return t;
3052 if (DECL_BUILT_IN_CLASS (t) == BUILT_IN_NORMAL)
3053 switch (DECL_FUNCTION_CODE (t))
3055 /* We cannot inline functions that take a variable number of
3056 arguments. */
3057 case BUILT_IN_VA_START:
3058 case BUILT_IN_NEXT_ARG:
3059 case BUILT_IN_VA_END:
3060 inline_forbidden_reason
3061 = G_("function %q+F can never be inlined because it "
3062 "uses variable argument lists");
3063 *handled_ops_p = true;
3064 return t;
3066 case BUILT_IN_LONGJMP:
3067 /* We can't inline functions that call __builtin_longjmp at
3068 all. The non-local goto machinery really requires the
3069 destination be in a different function. If we allow the
3070 function calling __builtin_longjmp to be inlined into the
3071 function calling __builtin_setjmp, Things will Go Awry. */
3072 inline_forbidden_reason
3073 = G_("function %q+F can never be inlined because "
3074 "it uses setjmp-longjmp exception handling");
3075 *handled_ops_p = true;
3076 return t;
3078 case BUILT_IN_NONLOCAL_GOTO:
3079 /* Similarly. */
3080 inline_forbidden_reason
3081 = G_("function %q+F can never be inlined because "
3082 "it uses non-local goto");
3083 *handled_ops_p = true;
3084 return t;
3086 case BUILT_IN_RETURN:
3087 case BUILT_IN_APPLY_ARGS:
3088 /* If a __builtin_apply_args caller would be inlined,
3089 it would be saving arguments of the function it has
3090 been inlined into. Similarly __builtin_return would
3091 return from the function it has been inlined into. */
3092 inline_forbidden_reason
3093 = G_("function %q+F can never be inlined because "
3094 "it uses __builtin_return or __builtin_apply_args");
3095 *handled_ops_p = true;
3096 return t;
3098 default:
3099 break;
3101 break;
3103 case GIMPLE_GOTO:
3104 t = gimple_goto_dest (stmt);
3106 /* We will not inline a function which uses computed goto. The
3107 addresses of its local labels, which may be tucked into
3108 global storage, are of course not constant across
3109 instantiations, which causes unexpected behavior. */
3110 if (TREE_CODE (t) != LABEL_DECL)
3112 inline_forbidden_reason
3113 = G_("function %q+F can never be inlined "
3114 "because it contains a computed goto");
3115 *handled_ops_p = true;
3116 return t;
3118 break;
3120 default:
3121 break;
3124 *handled_ops_p = false;
3125 return NULL_TREE;
3128 /* Return true if FNDECL is a function that cannot be inlined into
3129 another one. */
3131 static bool
3132 inline_forbidden_p (tree fndecl)
3134 struct function *fun = DECL_STRUCT_FUNCTION (fndecl);
3135 struct walk_stmt_info wi;
3136 struct pointer_set_t *visited_nodes;
3137 basic_block bb;
3138 bool forbidden_p = false;
3140 /* First check for shared reasons not to copy the code. */
3141 inline_forbidden_reason = copy_forbidden (fun, fndecl);
3142 if (inline_forbidden_reason != NULL)
3143 return true;
3145 /* Next, walk the statements of the function looking for
3146 constructs we can't handle or that are non-optimal for inlining. */
3147 visited_nodes = pointer_set_create ();
3148 memset (&wi, 0, sizeof (wi));
3149 wi.info = (void *) fndecl;
3150 wi.pset = visited_nodes;
3152 FOR_EACH_BB_FN (bb, fun)
3154 gimple ret;
3155 gimple_seq seq = bb_seq (bb);
3156 ret = walk_gimple_seq (seq, inline_forbidden_p_stmt, NULL, &wi);
3157 forbidden_p = (ret != NULL);
3158 if (forbidden_p)
3159 break;
3162 pointer_set_destroy (visited_nodes);
3163 return forbidden_p;
3166 /* Return false if the function FNDECL cannot be inlined on account of its
3167 attributes, true otherwise. */
3168 static bool
3169 function_attribute_inlinable_p (const_tree fndecl)
3171 if (targetm.attribute_table)
3173 const_tree a;
3175 for (a = DECL_ATTRIBUTES (fndecl); a; a = TREE_CHAIN (a))
3177 const_tree name = TREE_PURPOSE (a);
3178 int i;
3180 for (i = 0; targetm.attribute_table[i].name != NULL; i++)
3181 if (is_attribute_p (targetm.attribute_table[i].name, name))
3182 return targetm.function_attribute_inlinable_p (fndecl);
3186 return true;
3189 /* Returns nonzero if FN is a function that does not have any
3190 fundamental inline blocking properties. */
3192 bool
3193 tree_inlinable_function_p (tree fn)
3195 bool inlinable = true;
3196 bool do_warning;
3197 tree always_inline;
3199 /* If we've already decided this function shouldn't be inlined,
3200 there's no need to check again. */
3201 if (DECL_UNINLINABLE (fn))
3202 return false;
3204 /* We only warn for functions declared `inline' by the user. */
3205 do_warning = (warn_inline
3206 && DECL_DECLARED_INLINE_P (fn)
3207 && !DECL_NO_INLINE_WARNING_P (fn)
3208 && !DECL_IN_SYSTEM_HEADER (fn));
3210 always_inline = lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn));
3212 if (flag_no_inline
3213 && always_inline == NULL)
3215 if (do_warning)
3216 warning (OPT_Winline, "function %q+F can never be inlined because it "
3217 "is suppressed using -fno-inline", fn);
3218 inlinable = false;
3221 else if (!function_attribute_inlinable_p (fn))
3223 if (do_warning)
3224 warning (OPT_Winline, "function %q+F can never be inlined because it "
3225 "uses attributes conflicting with inlining", fn);
3226 inlinable = false;
3229 else if (inline_forbidden_p (fn))
3231 /* See if we should warn about uninlinable functions. Previously,
3232 some of these warnings would be issued while trying to expand
3233 the function inline, but that would cause multiple warnings
3234 about functions that would for example call alloca. But since
3235 this is a property of the function, just one warning is enough.
3236 As a bonus we can now give more details about the reason why a
3237 function is not inlinable. */
3238 if (always_inline)
3239 error (inline_forbidden_reason, fn);
3240 else if (do_warning)
3241 warning (OPT_Winline, inline_forbidden_reason, fn);
3243 inlinable = false;
3246 /* Squirrel away the result so that we don't have to check again. */
3247 DECL_UNINLINABLE (fn) = !inlinable;
3249 return inlinable;
3252 /* Estimate the cost of a memory move. Use machine dependent
3253 word size and take possible memcpy call into account. */
3255 int
3256 estimate_move_cost (tree type)
3258 HOST_WIDE_INT size;
3260 gcc_assert (!VOID_TYPE_P (type));
3262 if (TREE_CODE (type) == VECTOR_TYPE)
3264 enum machine_mode inner = TYPE_MODE (TREE_TYPE (type));
3265 enum machine_mode simd
3266 = targetm.vectorize.preferred_simd_mode (inner);
3267 int simd_mode_size = GET_MODE_SIZE (simd);
3268 return ((GET_MODE_SIZE (TYPE_MODE (type)) + simd_mode_size - 1)
3269 / simd_mode_size);
3272 size = int_size_in_bytes (type);
3274 if (size < 0 || size > MOVE_MAX_PIECES * MOVE_RATIO (!optimize_size))
3275 /* Cost of a memcpy call, 3 arguments and the call. */
3276 return 4;
3277 else
3278 return ((size + MOVE_MAX_PIECES - 1) / MOVE_MAX_PIECES);
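/* Worked example (editorial, target-dependent): with a MOVE_MAX_PIECES
   of 8 and a sufficiently large MOVE_RATIO, a 32-byte structure costs
   (32 + 8 - 1) / 8 = 4, the same as the assumed cost of a memcpy call
   (3 arguments plus the call itself); larger moves are costed as the
   memcpy call.  */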
3281 /* Returns the cost of operation CODE, according to WEIGHTS. */
3283 static int
3284 estimate_operator_cost (enum tree_code code, eni_weights *weights,
3285 tree op1 ATTRIBUTE_UNUSED, tree op2)
3287 switch (code)
3289 /* These are "free" conversions, or their presumed cost
3290 is folded into other operations. */
3291 case RANGE_EXPR:
3292 CASE_CONVERT:
3293 case COMPLEX_EXPR:
3294 case PAREN_EXPR:
3295 case VIEW_CONVERT_EXPR:
3296 return 0;
3298 /* Assign cost of 1 to usual operations.
3299 ??? We may consider mapping RTL costs to this. */
3300 case COND_EXPR:
3301 case VEC_COND_EXPR:
3302 case VEC_PERM_EXPR:
3304 case PLUS_EXPR:
3305 case POINTER_PLUS_EXPR:
3306 case MINUS_EXPR:
3307 case MULT_EXPR:
3308 case MULT_HIGHPART_EXPR:
3309 case FMA_EXPR:
3311 case ADDR_SPACE_CONVERT_EXPR:
3312 case FIXED_CONVERT_EXPR:
3313 case FIX_TRUNC_EXPR:
3315 case NEGATE_EXPR:
3316 case FLOAT_EXPR:
3317 case MIN_EXPR:
3318 case MAX_EXPR:
3319 case ABS_EXPR:
3321 case LSHIFT_EXPR:
3322 case RSHIFT_EXPR:
3323 case LROTATE_EXPR:
3324 case RROTATE_EXPR:
3325 case VEC_LSHIFT_EXPR:
3326 case VEC_RSHIFT_EXPR:
3328 case BIT_IOR_EXPR:
3329 case BIT_XOR_EXPR:
3330 case BIT_AND_EXPR:
3331 case BIT_NOT_EXPR:
3333 case TRUTH_ANDIF_EXPR:
3334 case TRUTH_ORIF_EXPR:
3335 case TRUTH_AND_EXPR:
3336 case TRUTH_OR_EXPR:
3337 case TRUTH_XOR_EXPR:
3338 case TRUTH_NOT_EXPR:
3340 case LT_EXPR:
3341 case LE_EXPR:
3342 case GT_EXPR:
3343 case GE_EXPR:
3344 case EQ_EXPR:
3345 case NE_EXPR:
3346 case ORDERED_EXPR:
3347 case UNORDERED_EXPR:
3349 case UNLT_EXPR:
3350 case UNLE_EXPR:
3351 case UNGT_EXPR:
3352 case UNGE_EXPR:
3353 case UNEQ_EXPR:
3354 case LTGT_EXPR:
3356 case CONJ_EXPR:
3358 case PREDECREMENT_EXPR:
3359 case PREINCREMENT_EXPR:
3360 case POSTDECREMENT_EXPR:
3361 case POSTINCREMENT_EXPR:
3363 case REALIGN_LOAD_EXPR:
3365 case REDUC_MAX_EXPR:
3366 case REDUC_MIN_EXPR:
3367 case REDUC_PLUS_EXPR:
3368 case WIDEN_SUM_EXPR:
3369 case WIDEN_MULT_EXPR:
3370 case DOT_PROD_EXPR:
3371 case WIDEN_MULT_PLUS_EXPR:
3372 case WIDEN_MULT_MINUS_EXPR:
3373 case WIDEN_LSHIFT_EXPR:
3375 case VEC_WIDEN_MULT_HI_EXPR:
3376 case VEC_WIDEN_MULT_LO_EXPR:
3377 case VEC_WIDEN_MULT_EVEN_EXPR:
3378 case VEC_WIDEN_MULT_ODD_EXPR:
3379 case VEC_UNPACK_HI_EXPR:
3380 case VEC_UNPACK_LO_EXPR:
3381 case VEC_UNPACK_FLOAT_HI_EXPR:
3382 case VEC_UNPACK_FLOAT_LO_EXPR:
3383 case VEC_PACK_TRUNC_EXPR:
3384 case VEC_PACK_SAT_EXPR:
3385 case VEC_PACK_FIX_TRUNC_EXPR:
3386 case VEC_WIDEN_LSHIFT_HI_EXPR:
3387 case VEC_WIDEN_LSHIFT_LO_EXPR:
3389 return 1;
3391 /* A few special cases of expensive operations. This is useful
3392 to avoid inlining functions having too many of these. */
3393 case TRUNC_DIV_EXPR:
3394 case CEIL_DIV_EXPR:
3395 case FLOOR_DIV_EXPR:
3396 case ROUND_DIV_EXPR:
3397 case EXACT_DIV_EXPR:
3398 case TRUNC_MOD_EXPR:
3399 case CEIL_MOD_EXPR:
3400 case FLOOR_MOD_EXPR:
3401 case ROUND_MOD_EXPR:
3402 case RDIV_EXPR:
3403 if (TREE_CODE (op2) != INTEGER_CST)
3404 return weights->div_mod_cost;
3405 return 1;
3407 default:
3408 /* We expect a copy assignment with no operator. */
3409 gcc_assert (get_gimple_rhs_class (code) == GIMPLE_SINGLE_RHS);
3410 return 0;
3415 /* Estimate number of instructions that will be created by expanding
3416 the statements in the statement sequence STMTS.
3417 WEIGHTS contains weights attributed to various constructs. */
3419 static
3420 int estimate_num_insns_seq (gimple_seq stmts, eni_weights *weights)
3422 int cost;
3423 gimple_stmt_iterator gsi;
3425 cost = 0;
3426 for (gsi = gsi_start (stmts); !gsi_end_p (gsi); gsi_next (&gsi))
3427 cost += estimate_num_insns (gsi_stmt (gsi), weights);
3429 return cost;
3433 /* Estimate number of instructions that will be created by expanding STMT.
3434 WEIGHTS contains weights attributed to various constructs. */
3436 int
3437 estimate_num_insns (gimple stmt, eni_weights *weights)
3439 unsigned cost, i;
3440 enum gimple_code code = gimple_code (stmt);
3441 tree lhs;
3442 tree rhs;
3444 switch (code)
3446 case GIMPLE_ASSIGN:
3447 /* Try to estimate the cost of assignments. We have two cases to
3448 deal with:
3449 1) Simple assignments to registers;
3450 2) Stores to things that must live in memory. This includes
3451 "normal" stores to scalars, but also assignments of large
3452 structures, or constructors of big arrays;
3454 Let us look at these two cases, assuming we have "a = b + C":
3455 <GIMPLE_ASSIGN <var_decl "a">
3456 <plus_expr <var_decl "b"> <constant C>>
3457 If "a" is a GIMPLE register, the assignment to it is free on almost
3458 any target, because "a" usually ends up in a real register. Hence
3459 the only cost of this expression comes from the PLUS_EXPR, and we
3460 can ignore the GIMPLE_ASSIGN.
3461 If "a" is not a GIMPLE register, the assignment to "a" will most
3462 likely be a real store, so the cost of the GIMPLE_ASSIGN is the cost
3463 of moving something into "a", which we compute using the function
3464 estimate_move_cost. */
3465 if (gimple_clobber_p (stmt))
3466 return 0; /* ={v} {CLOBBER} stmt expands to nothing. */
3468 lhs = gimple_assign_lhs (stmt);
3469 rhs = gimple_assign_rhs1 (stmt);
3471 if (is_gimple_reg (lhs))
3472 cost = 0;
3473 else
3474 cost = estimate_move_cost (TREE_TYPE (lhs));
3476 if (!is_gimple_reg (rhs) && !is_gimple_min_invariant (rhs))
3477 cost += estimate_move_cost (TREE_TYPE (rhs));
3479 cost += estimate_operator_cost (gimple_assign_rhs_code (stmt), weights,
3480 gimple_assign_rhs1 (stmt),
3481 get_gimple_rhs_class (gimple_assign_rhs_code (stmt))
3482 == GIMPLE_BINARY_RHS
3483 ? gimple_assign_rhs2 (stmt) : NULL);
3484 break;
3486 case GIMPLE_COND:
3487 cost = 1 + estimate_operator_cost (gimple_cond_code (stmt), weights,
3488 gimple_op (stmt, 0),
3489 gimple_op (stmt, 1));
3490 break;
3492 case GIMPLE_SWITCH:
3493 /* Take into account the cost of the switch + guess 2 conditional jumps
3494 for each case label.
3496 TODO: once the switch expansion logic is sufficiently separated, we can
3497 do a better job of estimating the cost of the switch. */
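      /* E.g. (illustrative): a switch with 8 case labels is costed as
	 8 * 2 = 16 for size, but only floor_log2 (8) * 2 = 6 for the
	 time-based estimate, matching a balanced decision tree.  */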
3498 if (weights->time_based)
3499 cost = floor_log2 (gimple_switch_num_labels (stmt)) * 2;
3500 else
3501 cost = gimple_switch_num_labels (stmt) * 2;
3502 break;
3504 case GIMPLE_CALL:
3506 tree decl = gimple_call_fndecl (stmt);
3507 struct cgraph_node *node = NULL;
3509 /* Do not special case builtins where we see the body.
3510 This just confuses the inliner. */
3511 if (!decl || !(node = cgraph_get_node (decl)) || node->analyzed)
3513 /* For builtins that are likely expanded to nothing or
3514 inlined, do not account for operand costs. */
3515 else if (is_simple_builtin (decl))
3516 return 0;
3517 else if (is_inexpensive_builtin (decl))
3518 return weights->target_builtin_call_cost;
3519 else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
3521 /* We canonicalize x * x to pow (x, 2.0) with -ffast-math, so
3522 specialize the cheap expansion we do here.
3523 ??? This asks for a more general solution. */
3524 switch (DECL_FUNCTION_CODE (decl))
3526 case BUILT_IN_POW:
3527 case BUILT_IN_POWF:
3528 case BUILT_IN_POWL:
3529 if (TREE_CODE (gimple_call_arg (stmt, 1)) == REAL_CST
3530 && REAL_VALUES_EQUAL
3531 (TREE_REAL_CST (gimple_call_arg (stmt, 1)), dconst2))
3532 return estimate_operator_cost (MULT_EXPR, weights,
3533 gimple_call_arg (stmt, 0),
3534 gimple_call_arg (stmt, 0));
3535 break;
3537 default:
3538 break;
3542 cost = node ? weights->call_cost : weights->indirect_call_cost;
3543 if (gimple_call_lhs (stmt))
3544 cost += estimate_move_cost (TREE_TYPE (gimple_call_lhs (stmt)));
3545 for (i = 0; i < gimple_call_num_args (stmt); i++)
3547 tree arg = gimple_call_arg (stmt, i);
3548 cost += estimate_move_cost (TREE_TYPE (arg));
3550 break;
3553 case GIMPLE_RETURN:
3554 return weights->return_cost;
3556 case GIMPLE_GOTO:
3557 case GIMPLE_LABEL:
3558 case GIMPLE_NOP:
3559 case GIMPLE_PHI:
3560 case GIMPLE_PREDICT:
3561 case GIMPLE_DEBUG:
3562 return 0;
3564 case GIMPLE_ASM:
3565 return asm_str_count (gimple_asm_string (stmt));
3567 case GIMPLE_RESX:
3568 /* This is either going to be an external function call with one
3569 argument, or two register copy statements plus a goto. */
3570 return 2;
3572 case GIMPLE_EH_DISPATCH:
3573 /* ??? This is going to turn into a switch statement. Ideally
3574 we'd have a look at the eh region and estimate the number of
3575 edges involved. */
3576 return 10;
3578 case GIMPLE_BIND:
3579 return estimate_num_insns_seq (gimple_bind_body (stmt), weights);
3581 case GIMPLE_EH_FILTER:
3582 return estimate_num_insns_seq (gimple_eh_filter_failure (stmt), weights);
3584 case GIMPLE_CATCH:
3585 return estimate_num_insns_seq (gimple_catch_handler (stmt), weights);
3587 case GIMPLE_TRY:
3588 return (estimate_num_insns_seq (gimple_try_eval (stmt), weights)
3589 + estimate_num_insns_seq (gimple_try_cleanup (stmt), weights));
3591 /* OpenMP directives are generally very expensive. */
3593 case GIMPLE_OMP_RETURN:
3594 case GIMPLE_OMP_SECTIONS_SWITCH:
3595 case GIMPLE_OMP_ATOMIC_STORE:
3596 case GIMPLE_OMP_CONTINUE:
3597 /* ...except these, which are cheap. */
3598 return 0;
3600 case GIMPLE_OMP_ATOMIC_LOAD:
3601 return weights->omp_cost;
3603 case GIMPLE_OMP_FOR:
3604 return (weights->omp_cost
3605 + estimate_num_insns_seq (gimple_omp_body (stmt), weights)
3606 + estimate_num_insns_seq (gimple_omp_for_pre_body (stmt), weights));
3608 case GIMPLE_OMP_PARALLEL:
3609 case GIMPLE_OMP_TASK:
3610 case GIMPLE_OMP_CRITICAL:
3611 case GIMPLE_OMP_MASTER:
3612 case GIMPLE_OMP_ORDERED:
3613 case GIMPLE_OMP_SECTION:
3614 case GIMPLE_OMP_SECTIONS:
3615 case GIMPLE_OMP_SINGLE:
3616 return (weights->omp_cost
3617 + estimate_num_insns_seq (gimple_omp_body (stmt), weights));
3619 case GIMPLE_TRANSACTION:
3620 return (weights->tm_cost
3621 + estimate_num_insns_seq (gimple_transaction_body (stmt),
3622 weights));
3624 default:
3625 gcc_unreachable ();
3628 return cost;
3631 /* Estimate number of instructions that will be created by expanding
3632 function FNDECL. WEIGHTS contains weights attributed to various
3633 constructs. */
3635 int
3636 estimate_num_insns_fn (tree fndecl, eni_weights *weights)
3638 struct function *my_function = DECL_STRUCT_FUNCTION (fndecl);
3639 gimple_stmt_iterator bsi;
3640 basic_block bb;
3641 int n = 0;
3643 gcc_assert (my_function && my_function->cfg);
3644 FOR_EACH_BB_FN (bb, my_function)
3646 for (bsi = gsi_start_bb (bb); !gsi_end_p (bsi); gsi_next (&bsi))
3647 n += estimate_num_insns (gsi_stmt (bsi), weights);
3650 return n;
3654 /* Initializes weights used by estimate_num_insns. */
3656 void
3657 init_inline_once (void)
3659 eni_size_weights.call_cost = 1;
3660 eni_size_weights.indirect_call_cost = 3;
3661 eni_size_weights.target_builtin_call_cost = 1;
3662 eni_size_weights.div_mod_cost = 1;
3663 eni_size_weights.omp_cost = 40;
3664 eni_size_weights.tm_cost = 10;
3665 eni_size_weights.time_based = false;
3666 eni_size_weights.return_cost = 1;
3668 /* Estimating the time for a call is difficult, since we have no idea what the
3669 called function does. In the current uses of eni_time_weights,
3670 underestimating the cost does less harm than overestimating it, so
3671 we choose a rather small value here. */
3672 eni_time_weights.call_cost = 10;
3673 eni_time_weights.indirect_call_cost = 15;
3674 eni_time_weights.target_builtin_call_cost = 1;
3675 eni_time_weights.div_mod_cost = 10;
3676 eni_time_weights.omp_cost = 40;
3677 eni_time_weights.tm_cost = 40;
3678 eni_time_weights.time_based = true;
3679 eni_time_weights.return_cost = 2;
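/* For illustration (editorial note): with the defaults above, a
   division by a non-constant operand contributes div_mod_cost = 1 to
   the size estimate but 10 to the time estimate, whereas a simple
   addition contributes 1 to either.  */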
3682 /* Estimate the number of instructions in a gimple_seq. */
3684 int
3685 count_insns_seq (gimple_seq seq, eni_weights *weights)
3687 gimple_stmt_iterator gsi;
3688 int n = 0;
3689 for (gsi = gsi_start (seq); !gsi_end_p (gsi); gsi_next (&gsi))
3690 n += estimate_num_insns (gsi_stmt (gsi), weights);
3692 return n;
3696 /* Install new lexical TREE_BLOCK underneath 'current_block'. */
3698 static void
3699 prepend_lexical_block (tree current_block, tree new_block)
3701 BLOCK_CHAIN (new_block) = BLOCK_SUBBLOCKS (current_block);
3702 BLOCK_SUBBLOCKS (current_block) = new_block;
3703 BLOCK_SUPERCONTEXT (new_block) = current_block;
3706 /* Add local variables from CALLEE to CALLER. */
3708 static inline void
3709 add_local_variables (struct function *callee, struct function *caller,
3710 copy_body_data *id)
3712 tree var;
3713 unsigned ix;
3715 FOR_EACH_LOCAL_DECL (callee, ix, var)
3716 if (!can_be_nonlocal (var, id))
3718 tree new_var = remap_decl (var, id);
3720 /* Remap debug-expressions. */
3721 if (TREE_CODE (new_var) == VAR_DECL
3722 && DECL_DEBUG_EXPR_IS_FROM (new_var)
3723 && new_var != var)
3725 tree tem = DECL_DEBUG_EXPR (var);
3726 bool old_regimplify = id->regimplify;
3727 id->remapping_type_depth++;
3728 walk_tree (&tem, copy_tree_body_r, id, NULL);
3729 id->remapping_type_depth--;
3730 id->regimplify = old_regimplify;
3731 SET_DECL_DEBUG_EXPR (new_var, tem);
3733 add_local_decl (caller, new_var);
3737 /* If STMT is a GIMPLE_CALL, replace it with its inline expansion. */
3739 static bool
3740 expand_call_inline (basic_block bb, gimple stmt, copy_body_data *id)
3742 tree use_retvar;
3743 tree fn;
3744 struct pointer_map_t *st, *dst;
3745 tree return_slot;
3746 tree modify_dest;
3747 location_t saved_location;
3748 struct cgraph_edge *cg_edge;
3749 cgraph_inline_failed_t reason;
3750 basic_block return_block;
3751 edge e;
3752 gimple_stmt_iterator gsi, stmt_gsi;
3753 bool successfully_inlined = FALSE;
3754 bool purge_dead_abnormal_edges;
3756 /* Set input_location here so we get the right instantiation context
3757 if we call instantiate_decl from inlinable_function_p. */
3758 /* FIXME: instantiate_decl isn't called by inlinable_function_p. */
3759 saved_location = input_location;
3760 input_location = gimple_location (stmt);
3762 /* From here on, we're only interested in CALL_EXPRs. */
3763 if (gimple_code (stmt) != GIMPLE_CALL)
3764 goto egress;
3766 cg_edge = cgraph_edge (id->dst_node, stmt);
3767 gcc_checking_assert (cg_edge);
3768 /* First, see if we can figure out what function is being called.
3769 If we cannot, then there is no hope of inlining the function. */
3770 if (cg_edge->indirect_unknown_callee)
3771 goto egress;
3772 fn = cg_edge->callee->symbol.decl;
3773 gcc_checking_assert (fn);
3775 /* If FN is a declaration of a function in a nested scope that was
3776 globally declared inline, we don't set its DECL_INITIAL.
3777 However, we can't blindly follow DECL_ABSTRACT_ORIGIN because the
3778 C++ front-end uses it for cdtors to refer to their internal
3779 declarations, which are not real functions. Fortunately those
3780 don't have trees to be saved, so we can tell by checking their
3781 gimple_body. */
3782 if (!DECL_INITIAL (fn)
3783 && DECL_ABSTRACT_ORIGIN (fn)
3784 && gimple_has_body_p (DECL_ABSTRACT_ORIGIN (fn)))
3785 fn = DECL_ABSTRACT_ORIGIN (fn);
3787 /* Don't try to inline functions that are not well-suited to inlining. */
3788 if (cg_edge->inline_failed)
3790 reason = cg_edge->inline_failed;
3791 /* If this call was originally indirect, we do not want to emit any
3792 inlining-related warnings or sorry messages because there are no
3793 guarantees regarding those. */
3794 if (cg_edge->indirect_inlining_edge)
3795 goto egress;
3797 if (lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn))
3798 /* Avoid warnings during early inline pass. */
3799 && cgraph_global_info_ready
3800 /* PR 20090218-1_0.c. Body can be provided by another module. */
3801 && (reason != CIF_BODY_NOT_AVAILABLE || !flag_generate_lto))
3803 error ("inlining failed in call to always_inline %q+F: %s", fn,
3804 cgraph_inline_failed_string (reason));
3805 error ("called from here");
3807 else if (warn_inline
3808 && DECL_DECLARED_INLINE_P (fn)
3809 && !DECL_NO_INLINE_WARNING_P (fn)
3810 && !DECL_IN_SYSTEM_HEADER (fn)
3811 && reason != CIF_UNSPECIFIED
3812 && !lookup_attribute ("noinline", DECL_ATTRIBUTES (fn))
3813 /* Do not warn about recursive calls that were not inlined. */
3814 && !cgraph_edge_recursive_p (cg_edge)
3815 /* Avoid warnings during early inline pass. */
3816 && cgraph_global_info_ready)
3818 warning (OPT_Winline, "inlining failed in call to %q+F: %s",
3819 fn, _(cgraph_inline_failed_string (reason)));
3820 warning (OPT_Winline, "called from here");
3822 goto egress;
3824 fn = cg_edge->callee->symbol.decl;
3826 #ifdef ENABLE_CHECKING
3827 if (cg_edge->callee->symbol.decl != id->dst_node->symbol.decl)
3828 verify_cgraph_node (cg_edge->callee);
3829 #endif
3831 /* We will be inlining this callee. */
3832 id->eh_lp_nr = lookup_stmt_eh_lp (stmt);
3834 /* Update the caller's EH personality. */
3835 if (DECL_FUNCTION_PERSONALITY (cg_edge->callee->symbol.decl))
3836 DECL_FUNCTION_PERSONALITY (cg_edge->caller->symbol.decl)
3837 = DECL_FUNCTION_PERSONALITY (cg_edge->callee->symbol.decl);
3839 /* Split the block holding the GIMPLE_CALL. */
3840 e = split_block (bb, stmt);
3841 bb = e->src;
3842 return_block = e->dest;
3843 remove_edge (e);
3845 /* split_block splits after the statement; work around this by
3846 moving the call into the second block manually. Not pretty,
3847 but seems easier than doing the CFG manipulation by hand
3848 when the GIMPLE_CALL is the last statement of BB. */
3849 stmt_gsi = gsi_last_bb (bb);
3850 gsi_remove (&stmt_gsi, false);
3852 /* If the GIMPLE_CALL was the last statement of BB, it may have
3853 been the source of abnormal edges. In this case, schedule
3854 the removal of dead abnormal edges. */
3855 gsi = gsi_start_bb (return_block);
3856 if (gsi_end_p (gsi))
3858 gsi_insert_after (&gsi, stmt, GSI_NEW_STMT);
3859 purge_dead_abnormal_edges = true;
3861 else
3863 gsi_insert_before (&gsi, stmt, GSI_NEW_STMT);
3864 purge_dead_abnormal_edges = false;
3867 stmt_gsi = gsi_start_bb (return_block);
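/* At this point BB ends where the call used to be, the call statement
   itself now heads RETURN_BLOCK, and STMT_GSI points at it so the call
   can later be replaced by the inlined return value.  */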
3869 /* Build a block containing code to initialize the arguments, the
3870 actual inline expansion of the body, and a label for the return
3871 statements within the function to jump to. The type of the
3872 statement expression is the return type of the function call. */
3873 id->block = make_node (BLOCK);
3874 BLOCK_ABSTRACT_ORIGIN (id->block) = fn;
3875 BLOCK_SOURCE_LOCATION (id->block) = input_location;
3876 prepend_lexical_block (gimple_block (stmt), id->block);
3878 /* Local declarations will be replaced by their equivalents in this
3879 map. */
3880 st = id->decl_map;
3881 id->decl_map = pointer_map_create ();
3882 dst = id->debug_map;
3883 id->debug_map = NULL;
3885 /* Record the function we are about to inline. */
3886 id->src_fn = fn;
3887 id->src_node = cg_edge->callee;
3888 id->src_cfun = DECL_STRUCT_FUNCTION (fn);
3889 id->gimple_call = stmt;
3891 gcc_assert (!id->src_cfun->after_inlining);
3893 id->entry_bb = bb;
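/* If the callee is explicitly marked cold, emit a predictor telling
   the rest of the compiler that the path through the inlined body is
   not expected to be taken.  */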
3894 if (lookup_attribute ("cold", DECL_ATTRIBUTES (fn)))
3896 gimple_stmt_iterator si = gsi_last_bb (bb);
3897 gsi_insert_after (&si, gimple_build_predict (PRED_COLD_FUNCTION,
3898 NOT_TAKEN),
3899 GSI_NEW_STMT);
3901 initialize_inlined_parameters (id, stmt, fn, bb);
3903 if (DECL_INITIAL (fn))
3904 prepend_lexical_block (id->block, remap_blocks (DECL_INITIAL (fn), id));
3906 /* Return statements in the function body will be replaced by jumps
3907 to the RET_LABEL. */
3908 gcc_assert (DECL_INITIAL (fn));
3909 gcc_assert (TREE_CODE (DECL_INITIAL (fn)) == BLOCK);
3911 /* Find the LHS to which the result of this call is assigned. */
3912 return_slot = NULL;
3913 if (gimple_call_lhs (stmt))
3915 modify_dest = gimple_call_lhs (stmt);
3917 /* The function which we are inlining might not return a value,
3918 in which case we should issue a warning that the function
3919 does not return a value. In that case the optimizers will
3920 see that the variable to which the value is assigned was not
3921 initialized. We do not want to issue a warning about that
3922 uninitialized variable. */
3923 if (DECL_P (modify_dest))
3924 TREE_NO_WARNING (modify_dest) = 1;
3926 if (gimple_call_return_slot_opt_p (stmt))
3928 return_slot = modify_dest;
3929 modify_dest = NULL;
3932 else
3933 modify_dest = NULL;
3935 /* If we are inlining a call to the C++ operator new, we don't want
3936 to use type based alias analysis on the return value. Otherwise
3937 we may get confused if the compiler sees that the inlined new
3938 function returns a pointer which was just deleted. See bug
3939 33407. */
3940 if (DECL_IS_OPERATOR_NEW (fn))
3942 return_slot = NULL;
3943 modify_dest = NULL;
3946 /* Declare the return variable for the function. */
3947 use_retvar = declare_return_variable (id, return_slot, modify_dest, bb);
3949 /* Add local vars in this inlined callee to caller. */
3950 add_local_variables (id->src_cfun, cfun, id);
3952 if (dump_file && (dump_flags & TDF_DETAILS))
3954 fprintf (dump_file, "Inlining ");
3955 print_generic_expr (dump_file, id->src_fn, 0);
3956 fprintf (dump_file, " to ");
3957 print_generic_expr (dump_file, id->dst_fn, 0);
3958 fprintf (dump_file, " with frequency %i\n", cg_edge->frequency);
3961 /* This is it. Duplicate the callee body. Assume callee is
3962 pre-gimplified. Note that we must not alter the caller
3963 function in any way before this point, as this CALL_EXPR may be
3964 a self-referential call; if we're calling ourselves, we need to
3965 duplicate our body before altering anything. */
3966 copy_body (id, bb->count,
3967 cg_edge->frequency * REG_BR_PROB_BASE / CGRAPH_FREQ_BASE,
3968 bb, return_block, NULL, NULL);
3970 /* Reset the escaped solution. */
3971 if (cfun->gimple_df)
3972 pt_solution_reset (&cfun->gimple_df->escaped);
3974 /* Clean up. */
3975 if (id->debug_map)
3977 pointer_map_destroy (id->debug_map);
3978 id->debug_map = dst;
3980 pointer_map_destroy (id->decl_map);
3981 id->decl_map = st;
3983 /* Unlink the call's virtual operands before replacing it. */
3984 unlink_stmt_vdef (stmt);
3986 /* If the inlined function returns a result that we care about,
3987 substitute the GIMPLE_CALL with an assignment of the return
3988 variable to the LHS of the call. That is, if STMT was
3989 'a = foo (...)', substitute the call with 'a = USE_RETVAR'. */
3990 if (use_retvar && gimple_call_lhs (stmt))
3992 gimple old_stmt = stmt;
3993 stmt = gimple_build_assign (gimple_call_lhs (stmt), use_retvar);
3994 gsi_replace (&stmt_gsi, stmt, false);
3995 maybe_clean_or_replace_eh_stmt (old_stmt, stmt);
3997 else
3999 /* Handle the case of inlining a function with no return
4000 statement, which causes the return value to become undefined. */
4001 if (gimple_call_lhs (stmt)
4002 && TREE_CODE (gimple_call_lhs (stmt)) == SSA_NAME)
4004 tree name = gimple_call_lhs (stmt);
4005 tree var = SSA_NAME_VAR (name);
4006 tree def = ssa_default_def (cfun, var);
4008 if (def)
4010 /* If the variable already has a default definition, make this
4011 name undefined via a move from it. */
4012 stmt = gimple_build_assign (gimple_call_lhs (stmt), def);
4013 gsi_replace (&stmt_gsi, stmt, true);
4015 else
4017 /* Otherwise make this variable undefined. */
4018 gsi_remove (&stmt_gsi, true);
4019 set_ssa_default_def (cfun, var, name);
4020 SSA_NAME_DEF_STMT (name) = gimple_build_nop ();
4023 else
4024 gsi_remove (&stmt_gsi, true);
4027 if (purge_dead_abnormal_edges)
4029 gimple_purge_dead_eh_edges (return_block);
4030 gimple_purge_dead_abnormal_call_edges (return_block);
4033 /* If the value of the new expression is ignored, that's OK. We
4034 don't warn about this for CALL_EXPRs, so we shouldn't warn about
4035 the equivalent inlined version either. */
4036 if (is_gimple_assign (stmt))
4038 gcc_assert (gimple_assign_single_p (stmt)
4039 || CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt)));
4040 TREE_USED (gimple_assign_rhs1 (stmt)) = 1;
4043 /* Output the inlining info for this abstract function, since it has been
4044 inlined. If we don't do this now, we can lose the information about the
4045 variables in the function when the blocks get blown away as soon as we
4046 remove the cgraph node. */
4047 (*debug_hooks->outlining_inline_function) (cg_edge->callee->symbol.decl);
4049 /* Update callgraph if needed. */
4050 cgraph_remove_node (cg_edge->callee);
4052 id->block = NULL_TREE;
4053 successfully_inlined = TRUE;
4055 egress:
4056 input_location = saved_location;
4057 return successfully_inlined;
4060 /* Expand call statements reachable from basic block BB.
4061 We can only have CALL_EXPRs as the "toplevel" tree code or nested
4062 in a MODIFY_EXPR. */
4064 static bool
4065 gimple_expand_calls_inline (basic_block bb, copy_body_data *id)
4067 gimple_stmt_iterator gsi;
4069 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
4071 gimple stmt = gsi_stmt (gsi);
4073 if (is_gimple_call (stmt)
4074 && expand_call_inline (bb, stmt, id))
4075 return true;
4078 return false;
4082 /* Walk all basic blocks created after FIRST and try to fold every statement
4083 in the STATEMENTS pointer set. */
4085 static void
4086 fold_marked_statements (int first, struct pointer_set_t *statements)
4088 for (; first < n_basic_blocks; first++)
4089 if (BASIC_BLOCK (first))
4091 gimple_stmt_iterator gsi;
4093 for (gsi = gsi_start_bb (BASIC_BLOCK (first));
4094 !gsi_end_p (gsi);
4095 gsi_next (&gsi))
4096 if (pointer_set_contains (statements, gsi_stmt (gsi)))
4098 gimple old_stmt = gsi_stmt (gsi);
4099 tree old_decl = is_gimple_call (old_stmt) ? gimple_call_fndecl (old_stmt) : 0;
4101 if (old_decl && DECL_BUILT_IN (old_decl))
4103 /* Folding builtins can create multiple instructions,
4104 we need to look at all of them. */
4105 gimple_stmt_iterator i2 = gsi;
4106 gsi_prev (&i2);
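/* I2 now trails GSI by one statement (or is the end iterator if the
   call was first in the block); after folding, advancing I2 visits
   each statement the folding produced, up to the one GSI points at.  */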
4107 if (fold_stmt (&gsi))
4109 gimple new_stmt;
4110 /* If a builtin at the end of a bb folded into nothing,
4111 the following loop won't work. */
4112 if (gsi_end_p (gsi))
4114 cgraph_update_edges_for_call_stmt (old_stmt,
4115 old_decl, NULL);
4116 break;
4118 if (gsi_end_p (i2))
4119 i2 = gsi_start_bb (BASIC_BLOCK (first));
4120 else
4121 gsi_next (&i2);
4122 while (1)
4124 new_stmt = gsi_stmt (i2);
4125 update_stmt (new_stmt);
4126 cgraph_update_edges_for_call_stmt (old_stmt, old_decl,
4127 new_stmt);
4129 if (new_stmt == gsi_stmt (gsi))
4131 /* It is okay to check only the very last of these
4132 statements.  If it is a throwing statement nothing
4133 will change.  If it isn't, this can remove EH edges.
4134 The only problematic case would be an intermediate
4135 statement throwing while the last one does not; that
4136 would require splitting the block, which we cannot do
4137 here and would lose anyway.  And as builtins probably
4138 never throw, this all is moot anyway. */
4141 if (maybe_clean_or_replace_eh_stmt (old_stmt,
4142 new_stmt))
4143 gimple_purge_dead_eh_edges (BASIC_BLOCK (first));
4144 break;
4146 gsi_next (&i2);
4150 else if (fold_stmt (&gsi))
4152 /* Re-read the statement from GSI as fold_stmt() may
4153 have changed it. */
4154 gimple new_stmt = gsi_stmt (gsi);
4155 update_stmt (new_stmt);
4157 if (is_gimple_call (old_stmt)
4158 || is_gimple_call (new_stmt))
4159 cgraph_update_edges_for_call_stmt (old_stmt, old_decl,
4160 new_stmt);
4162 if (maybe_clean_or_replace_eh_stmt (old_stmt, new_stmt))
4163 gimple_purge_dead_eh_edges (BASIC_BLOCK (first));
4169 /* Return true if BB has at least one abnormal outgoing edge. */
4171 static inline bool
4172 has_abnormal_outgoing_edge_p (basic_block bb)
4174 edge e;
4175 edge_iterator ei;
4177 FOR_EACH_EDGE (e, ei, bb->succs)
4178 if (e->flags & EDGE_ABNORMAL)
4179 return true;
4181 return false;
4184 /* Expand calls to inline functions in the body of FN. */
4186 unsigned int
4187 optimize_inline_calls (tree fn)
4189 copy_body_data id;
4190 basic_block bb;
4191 int last = n_basic_blocks;
4192 struct gimplify_ctx gctx;
4193 bool inlined_p = false;
4195 /* Clear out ID. */
4196 memset (&id, 0, sizeof (id));
4198 id.src_node = id.dst_node = cgraph_get_node (fn);
4199 gcc_assert (id.dst_node->analyzed);
4200 id.dst_fn = fn;
4201 /* Or any functions that aren't finished yet. */
4202 if (current_function_decl)
4203 id.dst_fn = current_function_decl;
4205 id.copy_decl = copy_decl_maybe_to_var;
4206 id.transform_call_graph_edges = CB_CGE_DUPLICATE;
4207 id.transform_new_cfg = false;
4208 id.transform_return_to_modify = true;
4209 id.transform_lang_insert_block = NULL;
4210 id.statements_to_fold = pointer_set_create ();
4212 push_gimplify_context (&gctx);
4214 /* We make no attempts to keep dominance info up-to-date. */
4215 free_dominance_info (CDI_DOMINATORS);
4216 free_dominance_info (CDI_POST_DOMINATORS);
4218 /* Register specific gimple functions. */
4219 gimple_register_cfg_hooks ();
4221 /* Reach the trees by walking over the CFG, and note the
4222 enclosing basic-blocks in the call edges. */
4223 /* We walk the blocks going forward, because inlined function bodies
4224 will split id->current_basic_block, and the new blocks will
4225 follow it; we'll trudge through them, processing their CALL_EXPRs
4226 along the way. */
4227 FOR_EACH_BB (bb)
4228 inlined_p |= gimple_expand_calls_inline (bb, &id);
4230 pop_gimplify_context (NULL);
4232 #ifdef ENABLE_CHECKING
4234 struct cgraph_edge *e;
4236 verify_cgraph_node (id.dst_node);
4238 /* Double check that we inlined everything we are supposed to inline. */
4239 for (e = id.dst_node->callees; e; e = e->next_callee)
4240 gcc_assert (e->inline_failed);
4242 #endif
4244 /* Fold queued statements. */
4245 fold_marked_statements (last, id.statements_to_fold);
4246 pointer_set_destroy (id.statements_to_fold);
4248 gcc_assert (!id.debug_stmts);
4250 /* If we didn't inline into the function there is nothing to do. */
4251 if (!inlined_p)
4252 return 0;
4254 /* Renumber the lexical scoping (non-code) blocks consecutively. */
4255 number_blocks (fn);
4257 delete_unreachable_blocks_update_callgraph (&id);
4258 #ifdef ENABLE_CHECKING
4259 verify_cgraph_node (id.dst_node);
4260 #endif
4262 /* It would be nice to check SSA/CFG/statement consistency here, but it is
4263 not possible yet - the IPA passes might make various functions not
4264 throw, and they don't care to proactively update local EH info.  This is
4265 done later in the fixup_cfg pass, which also executes the verification. */
4266 return (TODO_update_ssa
4267 | TODO_cleanup_cfg
4268 | (gimple_in_ssa_p (cfun) ? TODO_remove_unused_locals : 0)
4269 | (gimple_in_ssa_p (cfun) ? TODO_update_address_taken : 0)
4270 | (profile_status != PROFILE_ABSENT ? TODO_rebuild_frequencies : 0));
4273 /* Passed to walk_tree. Copies the node pointed to, if appropriate. */
4275 tree
4276 copy_tree_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
4278 enum tree_code code = TREE_CODE (*tp);
4279 enum tree_code_class cl = TREE_CODE_CLASS (code);
4281 /* We make copies of most nodes. */
4282 if (IS_EXPR_CODE_CLASS (cl)
4283 || code == TREE_LIST
4284 || code == TREE_VEC
4285 || code == TYPE_DECL
4286 || code == OMP_CLAUSE)
4288 /* Because the chain gets clobbered when we make a copy, we save it
4289 here. */
4290 tree chain = NULL_TREE, new_tree;
4292 if (CODE_CONTAINS_STRUCT (code, TS_COMMON))
4293 chain = TREE_CHAIN (*tp);
4295 /* Copy the node. */
4296 new_tree = copy_node (*tp);
4298 /* Propagate mudflap marked-ness. */
4299 if (flag_mudflap && mf_marked_p (*tp))
4300 mf_mark (new_tree);
4302 *tp = new_tree;
4304 /* Now, restore the chain, if appropriate. That will cause
4305 walk_tree to walk into the chain as well. */
4306 if (code == PARM_DECL
4307 || code == TREE_LIST
4308 || code == OMP_CLAUSE)
4309 TREE_CHAIN (*tp) = chain;
4311 /* For now, we don't update BLOCKs when we make copies. So, we
4312 have to nullify all BIND_EXPRs. */
4313 if (TREE_CODE (*tp) == BIND_EXPR)
4314 BIND_EXPR_BLOCK (*tp) = NULL_TREE;
4316 else if (code == CONSTRUCTOR)
4318 /* CONSTRUCTOR nodes need special handling because
4319 we need to duplicate the vector of elements. */
4320 tree new_tree;
4322 new_tree = copy_node (*tp);
4324 /* Propagate mudflap marked-ness. */
4325 if (flag_mudflap && mf_marked_p (*tp))
4326 mf_mark (new_tree);
4328 CONSTRUCTOR_ELTS (new_tree) = VEC_copy (constructor_elt, gc,
4329 CONSTRUCTOR_ELTS (*tp));
4330 *tp = new_tree;
4332 else if (code == STATEMENT_LIST)
4333 /* We used to just abort on STATEMENT_LIST, but we can run into them
4334 with statement-expressions (c++/40975). */
4335 copy_statement_list (tp);
4336 else if (TREE_CODE_CLASS (code) == tcc_type)
4337 *walk_subtrees = 0;
4338 else if (TREE_CODE_CLASS (code) == tcc_declaration)
4339 *walk_subtrees = 0;
4340 else if (TREE_CODE_CLASS (code) == tcc_constant)
4341 *walk_subtrees = 0;
4342 return NULL_TREE;
4345 /* The SAVE_EXPR pointed to by TP is being copied. If ST contains
4346 information indicating to what new SAVE_EXPR this one should be mapped,
4347 use that one. Otherwise, create a new node and enter it in ST. FN is
4348 the function into which the copy will be placed. */
4350 static void
4351 remap_save_expr (tree *tp, void *st_, int *walk_subtrees)
4353 struct pointer_map_t *st = (struct pointer_map_t *) st_;
4354 tree *n;
4355 tree t;
4357 /* See if we already encountered this SAVE_EXPR. */
4358 n = (tree *) pointer_map_contains (st, *tp);
4360 /* If we didn't already remap this SAVE_EXPR, do so now. */
4361 if (!n)
4363 t = copy_node (*tp);
4365 /* Remember this SAVE_EXPR. */
4366 *pointer_map_insert (st, *tp) = t;
4367 /* Make sure we don't remap an already-remapped SAVE_EXPR. */
4368 *pointer_map_insert (st, t) = t;
4370 else
4372 /* We've already walked into this SAVE_EXPR; don't do it again. */
4373 *walk_subtrees = 0;
4374 t = *n;
4377 /* Replace this SAVE_EXPR with the copy. */
4378 *tp = t;
4381 /* Called via walk_tree.  If *TP points to a LABEL_EXPR for a local label,
4382 copies the label's declaration and enters it in the decl map in DATA
4383 (which is really a `copy_body_data *'). */
4385 static tree
4386 mark_local_for_remap_r (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED,
4387 void *data)
4389 copy_body_data *id = (copy_body_data *) data;
4391 /* Don't walk into types. */
4392 if (TYPE_P (*tp))
4393 *walk_subtrees = 0;
4395 else if (TREE_CODE (*tp) == LABEL_EXPR)
4397 tree decl = TREE_OPERAND (*tp, 0);
4399 /* Copy the decl and remember the copy. */
4400 insert_decl_map (id, decl, id->copy_decl (decl, id));
4403 return NULL_TREE;
4406 /* Perform any modifications to EXPR required when it is unsaved. Does
4407 not recurse into EXPR's subtrees. */
4409 static void
4410 unsave_expr_1 (tree expr)
4412 switch (TREE_CODE (expr))
4414 case TARGET_EXPR:
4415 /* Don't mess with a TARGET_EXPR that hasn't been expanded.
4416 It's OK for this to happen if it was part of a subtree that
4417 isn't immediately expanded, such as operand 2 of another
4418 TARGET_EXPR. */
4419 if (TREE_OPERAND (expr, 1))
4420 break;
4422 TREE_OPERAND (expr, 1) = TREE_OPERAND (expr, 3);
4423 TREE_OPERAND (expr, 3) = NULL_TREE;
4424 break;
4426 default:
4427 break;
4431 /* Called via walk_tree when an expression is unsaved.  Using the
4432 decl map pointed to by ST (really a `pointer_map_t'), remaps all
4433 local declarations to appropriate replacements. */
4435 static tree
4436 unsave_r (tree *tp, int *walk_subtrees, void *data)
4438 copy_body_data *id = (copy_body_data *) data;
4439 struct pointer_map_t *st = id->decl_map;
4440 tree *n;
4442 /* Only a local declaration (variable or label). */
4443 if ((TREE_CODE (*tp) == VAR_DECL && !TREE_STATIC (*tp))
4444 || TREE_CODE (*tp) == LABEL_DECL)
4446 /* Lookup the declaration. */
4447 n = (tree *) pointer_map_contains (st, *tp);
4449 /* If it's there, remap it. */
4450 if (n)
4451 *tp = *n;
4454 else if (TREE_CODE (*tp) == STATEMENT_LIST)
4455 gcc_unreachable ();
4456 else if (TREE_CODE (*tp) == BIND_EXPR)
4457 copy_bind_expr (tp, walk_subtrees, id);
4458 else if (TREE_CODE (*tp) == SAVE_EXPR
4459 || TREE_CODE (*tp) == TARGET_EXPR)
4460 remap_save_expr (tp, st, walk_subtrees);
4461 else
4463 copy_tree_r (tp, walk_subtrees, NULL);
4465 /* Do whatever unsaving is required. */
4466 unsave_expr_1 (*tp);
4469 /* Keep iterating. */
4470 return NULL_TREE;
4473 /* Copies everything in EXPR and replaces variables, labels
4474 and SAVE_EXPRs local to EXPR. */
4476 tree
4477 unsave_expr_now (tree expr)
4479 copy_body_data id;
4481 /* There's nothing to do for NULL_TREE. */
4482 if (expr == 0)
4483 return expr;
4485 /* Set up ID. */
4486 memset (&id, 0, sizeof (id));
4487 id.src_fn = current_function_decl;
4488 id.dst_fn = current_function_decl;
4489 id.decl_map = pointer_map_create ();
4490 id.debug_map = NULL;
4492 id.copy_decl = copy_decl_no_change;
4493 id.transform_call_graph_edges = CB_CGE_DUPLICATE;
4494 id.transform_new_cfg = false;
4495 id.transform_return_to_modify = false;
4496 id.transform_lang_insert_block = NULL;
4498 /* Walk the tree once to find local labels. */
4499 walk_tree_without_duplicates (&expr, mark_local_for_remap_r, &id);
4501 /* Walk the tree again, copying, remapping, and unsaving. */
4502 walk_tree (&expr, unsave_r, &id, NULL);
4504 /* Clean up. */
4505 pointer_map_destroy (id.decl_map);
4506 if (id.debug_map)
4507 pointer_map_destroy (id.debug_map);
4509 return expr;
4512 /* Called via walk_gimple_seq.  If *GSIP points to a GIMPLE_LABEL for a local
4513 label, copies the declaration and enters it in the decl map in DATA (which
4514 is really a 'copy_body_data *'). */
4516 static tree
4517 mark_local_labels_stmt (gimple_stmt_iterator *gsip,
4518 bool *handled_ops_p ATTRIBUTE_UNUSED,
4519 struct walk_stmt_info *wi)
4521 copy_body_data *id = (copy_body_data *) wi->info;
4522 gimple stmt = gsi_stmt (*gsip);
4524 if (gimple_code (stmt) == GIMPLE_LABEL)
4526 tree decl = gimple_label_label (stmt);
4528 /* Copy the decl and remember the copy. */
4529 insert_decl_map (id, decl, id->copy_decl (decl, id));
4532 return NULL_TREE;
4536 /* Called via walk_gimple_seq by copy_gimple_seq_and_replace_locals.
4537 Using the decl map pointed to by ST (really a `pointer_map_t'),
4538 remaps all local declarations to appropriate replacements in gimple
4539 operands. */
4541 static tree
4542 replace_locals_op (tree *tp, int *walk_subtrees, void *data)
4544 struct walk_stmt_info *wi = (struct walk_stmt_info*) data;
4545 copy_body_data *id = (copy_body_data *) wi->info;
4546 struct pointer_map_t *st = id->decl_map;
4547 tree *n;
4548 tree expr = *tp;
4550 /* Only a local declaration (variable or label). */
4551 if ((TREE_CODE (expr) == VAR_DECL
4552 && !TREE_STATIC (expr))
4553 || TREE_CODE (expr) == LABEL_DECL)
4555 /* Lookup the declaration. */
4556 n = (tree *) pointer_map_contains (st, expr);
4558 /* If it's there, remap it. */
4559 if (n)
4560 *tp = *n;
4561 *walk_subtrees = 0;
4563 else if (TREE_CODE (expr) == STATEMENT_LIST
4564 || TREE_CODE (expr) == BIND_EXPR
4565 || TREE_CODE (expr) == SAVE_EXPR)
4566 gcc_unreachable ();
4567 else if (TREE_CODE (expr) == TARGET_EXPR)
4569 /* Don't mess with a TARGET_EXPR that hasn't been expanded.
4570 It's OK for this to happen if it was part of a subtree that
4571 isn't immediately expanded, such as operand 2 of another
4572 TARGET_EXPR. */
4573 if (!TREE_OPERAND (expr, 1))
4575 TREE_OPERAND (expr, 1) = TREE_OPERAND (expr, 3);
4576 TREE_OPERAND (expr, 3) = NULL_TREE;
4580 /* Keep iterating. */
4581 return NULL_TREE;
4585 /* Called via walk_gimple_seq by copy_gimple_seq_and_replace_locals.
4586 Using the decl map carried in the `copy_body_data' passed via
4587 WI->INFO, remaps all local declarations to appropriate replacements
4588 in gimple statements. */
4590 static tree
4591 replace_locals_stmt (gimple_stmt_iterator *gsip,
4592 bool *handled_ops_p ATTRIBUTE_UNUSED,
4593 struct walk_stmt_info *wi)
4595 copy_body_data *id = (copy_body_data *) wi->info;
4596 gimple stmt = gsi_stmt (*gsip);
4598 if (gimple_code (stmt) == GIMPLE_BIND)
4600 tree block = gimple_bind_block (stmt);
4602 if (block)
4604 remap_block (&block, id);
4605 gimple_bind_set_block (stmt, block);
4608 /* This will remap a lot of the same decls again, but this should be
4609 harmless. */
4610 if (gimple_bind_vars (stmt))
4611 gimple_bind_set_vars (stmt, remap_decls (gimple_bind_vars (stmt), NULL, id));
4614 /* Keep iterating. */
4615 return NULL_TREE;
4619 /* Copies everything in SEQ and replaces variables and labels local to
4620 current_function_decl. */
4622 gimple_seq
4623 copy_gimple_seq_and_replace_locals (gimple_seq seq)
4625 copy_body_data id;
4626 struct walk_stmt_info wi;
4627 struct pointer_set_t *visited;
4628 gimple_seq copy;
4630 /* There's nothing to do for an empty sequence. */
4631 if (seq == NULL)
4632 return seq;
4634 /* Set up ID. */
4635 memset (&id, 0, sizeof (id));
4636 id.src_fn = current_function_decl;
4637 id.dst_fn = current_function_decl;
4638 id.decl_map = pointer_map_create ();
4639 id.debug_map = NULL;
4641 id.copy_decl = copy_decl_no_change;
4642 id.transform_call_graph_edges = CB_CGE_DUPLICATE;
4643 id.transform_new_cfg = false;
4644 id.transform_return_to_modify = false;
4645 id.transform_lang_insert_block = NULL;
4647 /* Walk the tree once to find local labels. */
4648 memset (&wi, 0, sizeof (wi));
4649 visited = pointer_set_create ();
4650 wi.info = &id;
4651 wi.pset = visited;
4652 walk_gimple_seq (seq, mark_local_labels_stmt, NULL, &wi);
4653 pointer_set_destroy (visited);
4655 copy = gimple_seq_copy (seq);
4657 /* Walk the copy, remapping decls. */
4658 memset (&wi, 0, sizeof (wi));
4659 wi.info = &id;
4660 walk_gimple_seq (copy, replace_locals_stmt, replace_locals_op, &wi);
4662 /* Clean up. */
4663 pointer_map_destroy (id.decl_map);
4664 if (id.debug_map)
4665 pointer_map_destroy (id.debug_map);
4667 return copy;
4671 /* Allow someone to determine if SEARCH is a child of TOP from gdb. */
4673 static tree
4674 debug_find_tree_1 (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED, void *data)
4676 if (*tp == data)
4677 return (tree) data;
4678 else
4679 return NULL;
4682 DEBUG_FUNCTION bool
4683 debug_find_tree (tree top, tree search)
4685 return walk_tree_without_duplicates (&top, debug_find_tree_1, search) != 0;
4689 /* Declare the variables created by the inliner.  Add all the variables in
4690 VARS to BLOCK. */
4692 static void
4693 declare_inline_vars (tree block, tree vars)
4695 tree t;
4696 for (t = vars; t; t = DECL_CHAIN (t))
4698 DECL_SEEN_IN_BIND_EXPR_P (t) = 1;
4699 gcc_assert (!TREE_STATIC (t) && !TREE_ASM_WRITTEN (t));
4700 add_local_decl (cfun, t);
4703 if (block)
4704 BLOCK_VARS (block) = chainon (BLOCK_VARS (block), vars);
4707 /* Finish up COPY, a copy of DECL.  The DECL originally was in the
4708 source function of ID, but the copy will now live in ID's
4709 destination function. */
4711 static tree
4712 copy_decl_for_dup_finish (copy_body_data *id, tree decl, tree copy)
4714 /* Don't generate debug information for the copy if we wouldn't have
4715 generated it for the original either. */
4716 DECL_ARTIFICIAL (copy) = DECL_ARTIFICIAL (decl);
4717 DECL_IGNORED_P (copy) = DECL_IGNORED_P (decl);
4719 /* Set the DECL_ABSTRACT_ORIGIN so the debugging routines know what
4720 declaration inspired this copy. */
4721 DECL_ABSTRACT_ORIGIN (copy) = DECL_ORIGIN (decl);
4723 /* The new variable/label has no RTL, yet. */
4724 if (CODE_CONTAINS_STRUCT (TREE_CODE (copy), TS_DECL_WRTL)
4725 && !TREE_STATIC (copy) && !DECL_EXTERNAL (copy))
4726 SET_DECL_RTL (copy, 0);
4728 /* These args would always appear unused, if not for this. */
4729 TREE_USED (copy) = 1;
4731 /* Set the context for the new declaration. */
4732 if (!DECL_CONTEXT (decl))
4733 /* Globals stay global. */
4735 else if (DECL_CONTEXT (decl) != id->src_fn)
4736 /* Things that weren't in the scope of the function we're inlining
4737 from aren't in the scope we're inlining to, either. */
4739 else if (TREE_STATIC (decl))
4740 /* Function-scoped static variables should stay in the original
4741 function. */
4743 else
4744 /* Ordinary automatic local variables are now in the scope of the
4745 new function. */
4746 DECL_CONTEXT (copy) = id->dst_fn;
4748 return copy;
4751 static tree
4752 copy_decl_to_var (tree decl, copy_body_data *id)
4754 tree copy, type;
4756 gcc_assert (TREE_CODE (decl) == PARM_DECL
4757 || TREE_CODE (decl) == RESULT_DECL);
4759 type = TREE_TYPE (decl);
4761 copy = build_decl (DECL_SOURCE_LOCATION (id->dst_fn),
4762 VAR_DECL, DECL_NAME (decl), type);
4763 if (DECL_PT_UID_SET_P (decl))
4764 SET_DECL_PT_UID (copy, DECL_PT_UID (decl));
4765 TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (decl);
4766 TREE_READONLY (copy) = TREE_READONLY (decl);
4767 TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (decl);
4768 DECL_GIMPLE_REG_P (copy) = DECL_GIMPLE_REG_P (decl);
4770 return copy_decl_for_dup_finish (id, decl, copy);
4773 /* Like copy_decl_to_var, but create a return slot object instead of a
4774 pointer variable for return by invisible reference. */
4776 static tree
4777 copy_result_decl_to_var (tree decl, copy_body_data *id)
4779 tree copy, type;
4781 gcc_assert (TREE_CODE (decl) == PARM_DECL
4782 || TREE_CODE (decl) == RESULT_DECL);
4784 type = TREE_TYPE (decl);
4785 if (DECL_BY_REFERENCE (decl))
4786 type = TREE_TYPE (type);
4788 copy = build_decl (DECL_SOURCE_LOCATION (id->dst_fn),
4789 VAR_DECL, DECL_NAME (decl), type);
4790 if (DECL_PT_UID_SET_P (decl))
4791 SET_DECL_PT_UID (copy, DECL_PT_UID (decl));
4792 TREE_READONLY (copy) = TREE_READONLY (decl);
4793 TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (decl);
4794 if (!DECL_BY_REFERENCE (decl))
4796 TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (decl);
4797 DECL_GIMPLE_REG_P (copy) = DECL_GIMPLE_REG_P (decl);
4800 return copy_decl_for_dup_finish (id, decl, copy);
4803 tree
4804 copy_decl_no_change (tree decl, copy_body_data *id)
4806 tree copy;
4808 copy = copy_node (decl);
4810 /* The COPY is not abstract; it will be generated in DST_FN. */
4811 DECL_ABSTRACT (copy) = 0;
4812 lang_hooks.dup_lang_specific_decl (copy);
4814 /* TREE_ADDRESSABLE isn't used to indicate that a label's address has
4815 been taken; it's for internal bookkeeping in expand_goto_internal. */
4816 if (TREE_CODE (copy) == LABEL_DECL)
4818 TREE_ADDRESSABLE (copy) = 0;
4819 LABEL_DECL_UID (copy) = -1;
4822 return copy_decl_for_dup_finish (id, decl, copy);
4825 static tree
4826 copy_decl_maybe_to_var (tree decl, copy_body_data *id)
4828 if (TREE_CODE (decl) == PARM_DECL || TREE_CODE (decl) == RESULT_DECL)
4829 return copy_decl_to_var (decl, id);
4830 else
4831 return copy_decl_no_change (decl, id);
4834 /* Return a copy of the function's argument tree. */
4835 static tree
4836 copy_arguments_for_versioning (tree orig_parm, copy_body_data * id,
4837 bitmap args_to_skip, tree *vars)
4839 tree arg, *parg;
4840 tree new_parm = NULL;
4841 int i = 0;
4843 parg = &new_parm;
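/* PARG always points at the DECL_CHAIN slot where the next retained
   parameter must be linked, so the copied list keeps the original
   argument order.  */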
4845 for (arg = orig_parm; arg; arg = DECL_CHAIN (arg), i++)
4846 if (!args_to_skip || !bitmap_bit_p (args_to_skip, i))
4848 tree new_tree = remap_decl (arg, id);
4849 if (TREE_CODE (new_tree) != PARM_DECL)
4850 new_tree = id->copy_decl (arg, id);
4851 lang_hooks.dup_lang_specific_decl (new_tree);
4852 *parg = new_tree;
4853 parg = &DECL_CHAIN (new_tree);
4855 else if (!pointer_map_contains (id->decl_map, arg))
4857 /* Make an equivalent VAR_DECL.  If the argument was used
4858 as a temporary variable later in the function, the uses will be
4859 replaced by a local variable. */
4860 tree var = copy_decl_to_var (arg, id);
4861 insert_decl_map (id, arg, var);
4862 /* Declare this new variable. */
4863 DECL_CHAIN (var) = *vars;
4864 *vars = var;
4866 return new_parm;
4869 /* Return a copy of the function's static chain. */
4870 static tree
4871 copy_static_chain (tree static_chain, copy_body_data * id)
4873 tree *chain_copy, *pvar;
4875 chain_copy = &static_chain;
4876 for (pvar = chain_copy; *pvar; pvar = &DECL_CHAIN (*pvar))
4878 tree new_tree = remap_decl (*pvar, id);
4879 lang_hooks.dup_lang_specific_decl (new_tree);
4880 DECL_CHAIN (new_tree) = DECL_CHAIN (*pvar);
4881 *pvar = new_tree;
4883 return static_chain;
4886 /* Return true if the function is allowed to be versioned.
4887 This is a guard for the versioning functionality. */
4889 bool
4890 tree_versionable_function_p (tree fndecl)
4892 return (!lookup_attribute ("noclone", DECL_ATTRIBUTES (fndecl))
4893 && copy_forbidden (DECL_STRUCT_FUNCTION (fndecl), fndecl) == NULL);
4896 /* Delete all unreachable basic blocks and update callgraph.
4897 Doing so is somewhat nontrivial because we need to update all clones and
4898 remove inline functions that become unreachable. */
4900 static bool
4901 delete_unreachable_blocks_update_callgraph (copy_body_data *id)
4903 bool changed = false;
4904 basic_block b, next_bb;
4906 find_unreachable_blocks ();
4908 /* Delete all unreachable basic blocks. */
4910 for (b = ENTRY_BLOCK_PTR->next_bb; b != EXIT_BLOCK_PTR; b = next_bb)
4912 next_bb = b->next_bb;
4914 if (!(b->flags & BB_REACHABLE))
4916 gimple_stmt_iterator bsi;
4918 for (bsi = gsi_start_bb (b); !gsi_end_p (bsi); gsi_next (&bsi))
4919 if (gimple_code (gsi_stmt (bsi)) == GIMPLE_CALL)
4921 struct cgraph_edge *e;
4922 struct cgraph_node *node;
4924 if ((e = cgraph_edge (id->dst_node, gsi_stmt (bsi))) != NULL)
4926 if (!e->inline_failed)
4927 cgraph_remove_node_and_inline_clones (e->callee, id->dst_node);
4928 else
4929 cgraph_remove_edge (e);
4931 if (id->transform_call_graph_edges == CB_CGE_MOVE_CLONES
4932 && id->dst_node->clones)
4933 for (node = id->dst_node->clones; node != id->dst_node;)
4935 if ((e = cgraph_edge (node, gsi_stmt (bsi))) != NULL)
4937 if (!e->inline_failed)
4938 cgraph_remove_node_and_inline_clones (e->callee, id->dst_node);
4939 else
4940 cgraph_remove_edge (e);
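/* Step to the next node in a preorder walk of the clone tree:
   descend into clones first, then visit the next sibling, and
   otherwise climb back up until an ancestor has an unvisited
   sibling.  */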
4943 if (node->clones)
4944 node = node->clones;
4945 else if (node->next_sibling_clone)
4946 node = node->next_sibling_clone;
4947 else
4949 while (node != id->dst_node && !node->next_sibling_clone)
4950 node = node->clone_of;
4951 if (node != id->dst_node)
4952 node = node->next_sibling_clone;
4956 delete_basic_block (b);
4957 changed = true;
4961 return changed;
4964 /* Update clone info after duplication. */
4966 static void
4967 update_clone_info (copy_body_data * id)
4969 struct cgraph_node *node;
4970 if (!id->dst_node->clones)
4971 return;
4972 for (node = id->dst_node->clones; node != id->dst_node;)
4974 /* First update replace maps to match the new body. */
4975 if (node->clone.tree_map)
4977 unsigned int i;
4978 for (i = 0; i < VEC_length (ipa_replace_map_p, node->clone.tree_map); i++)
4980 struct ipa_replace_map *replace_info;
4981 replace_info = VEC_index (ipa_replace_map_p, node->clone.tree_map, i);
4982 walk_tree (&replace_info->old_tree, copy_tree_body_r, id, NULL);
4983 walk_tree (&replace_info->new_tree, copy_tree_body_r, id, NULL);
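/* Step to the next clone, in the same preorder fashion as in
   delete_unreachable_blocks_update_callgraph above.  */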
4986 if (node->clones)
4987 node = node->clones;
4988 else if (node->next_sibling_clone)
4989 node = node->next_sibling_clone;
4990 else
4992 while (node != id->dst_node && !node->next_sibling_clone)
4993 node = node->clone_of;
4994 if (node != id->dst_node)
4995 node = node->next_sibling_clone;
5000 /* Create a copy of a function's tree.
5001 OLD_DECL and NEW_DECL are FUNCTION_DECL tree nodes
5002 of the original function and the new copied function
5003 respectively. In case we want to replace a DECL
5004 tree with another tree while duplicating the function's
5005 body, TREE_MAP represents the mapping between these
5006 trees. If UPDATE_CLONES is set, the call_stmt fields
5007 of edges of clones of the function will be updated.
5009 If non-NULL, ARGS_TO_SKIP determines which function parameters to
5010 remove from the new version.
5011 If SKIP_RETURN is true, the new version will return void.
5012 If non-NULL, BLOCKS_TO_COPY determines which basic blocks to copy.
5013 If non-NULL, NEW_ENTRY determines the new entry BB of the clone.
5015 void
5016 tree_function_versioning (tree old_decl, tree new_decl,
5017 VEC(ipa_replace_map_p,gc)* tree_map,
5018 bool update_clones, bitmap args_to_skip,
5019 bool skip_return, bitmap blocks_to_copy,
5020 basic_block new_entry)
5022 struct cgraph_node *old_version_node;
5023 struct cgraph_node *new_version_node;
5024 copy_body_data id;
5025 tree p;
5026 unsigned i;
5027 struct ipa_replace_map *replace_info;
5028 basic_block old_entry_block, bb;
5029 VEC (gimple, heap) *init_stmts = VEC_alloc (gimple, heap, 10);
5031 tree old_current_function_decl = current_function_decl;
5032 tree vars = NULL_TREE;
5034 gcc_assert (TREE_CODE (old_decl) == FUNCTION_DECL
5035 && TREE_CODE (new_decl) == FUNCTION_DECL);
5036 DECL_POSSIBLY_INLINED (old_decl) = 1;
5038 old_version_node = cgraph_get_node (old_decl);
5039 gcc_checking_assert (old_version_node);
5040 new_version_node = cgraph_get_node (new_decl);
5041 gcc_checking_assert (new_version_node);
5043 /* Copy over debug args. */
5044 if (DECL_HAS_DEBUG_ARGS_P (old_decl))
5046 VEC(tree, gc) **new_debug_args, **old_debug_args;
5047 gcc_checking_assert (decl_debug_args_lookup (new_decl) == NULL);
5048 DECL_HAS_DEBUG_ARGS_P (new_decl) = 0;
5049 old_debug_args = decl_debug_args_lookup (old_decl);
5050 if (old_debug_args)
5052 new_debug_args = decl_debug_args_insert (new_decl);
5053 *new_debug_args = VEC_copy (tree, gc, *old_debug_args);
5057 /* Output the inlining info for this abstract function, since it has been
5058 inlined. If we don't do this now, we can lose the information about the
5059 variables in the function when the blocks get blown away as soon as we
5060 remove the cgraph node. */
5061 (*debug_hooks->outlining_inline_function) (old_decl);
5063 DECL_ARTIFICIAL (new_decl) = 1;
5064 DECL_ABSTRACT_ORIGIN (new_decl) = DECL_ORIGIN (old_decl);
5065 DECL_FUNCTION_PERSONALITY (new_decl) = DECL_FUNCTION_PERSONALITY (old_decl);
5067 /* Prepare the data structures for the tree copy. */
5068 memset (&id, 0, sizeof (id));
5070 /* Generate a new name for the new version. */
5071 id.statements_to_fold = pointer_set_create ();
5073 id.decl_map = pointer_map_create ();
5074 id.debug_map = NULL;
5075 id.src_fn = old_decl;
5076 id.dst_fn = new_decl;
5077 id.src_node = old_version_node;
5078 id.dst_node = new_version_node;
5079 id.src_cfun = DECL_STRUCT_FUNCTION (old_decl);
5080 if (id.src_node->ipa_transforms_to_apply)
5082 VEC(ipa_opt_pass,heap) * old_transforms_to_apply = id.dst_node->ipa_transforms_to_apply;
5083 unsigned int i;
5085 id.dst_node->ipa_transforms_to_apply = VEC_copy (ipa_opt_pass, heap,
5086 id.src_node->ipa_transforms_to_apply);
5087 for (i = 0; i < VEC_length (ipa_opt_pass, old_transforms_to_apply); i++)
5088 VEC_safe_push (ipa_opt_pass, heap, id.dst_node->ipa_transforms_to_apply,
5089 VEC_index (ipa_opt_pass,
5090 old_transforms_to_apply,
5091 i));
5092 VEC_free (ipa_opt_pass, heap, old_transforms_to_apply);
5095 id.copy_decl = copy_decl_no_change;
5096 id.transform_call_graph_edges
5097 = update_clones ? CB_CGE_MOVE_CLONES : CB_CGE_MOVE;
5098 id.transform_new_cfg = true;
5099 id.transform_return_to_modify = false;
5100 id.transform_lang_insert_block = NULL;
5102 current_function_decl = new_decl;
5103 old_entry_block = ENTRY_BLOCK_PTR_FOR_FUNCTION
5104 (DECL_STRUCT_FUNCTION (old_decl));
5105 initialize_cfun (new_decl, old_decl,
5106 old_entry_block->count);
5107 DECL_STRUCT_FUNCTION (new_decl)->gimple_df->ipa_pta
5108 = id.src_cfun->gimple_df->ipa_pta;
5109 push_cfun (DECL_STRUCT_FUNCTION (new_decl));
5111 /* Copy the function's static chain. */
5112 p = DECL_STRUCT_FUNCTION (old_decl)->static_chain_decl;
5113 if (p)
5114 DECL_STRUCT_FUNCTION (new_decl)->static_chain_decl =
5115 copy_static_chain (DECL_STRUCT_FUNCTION (old_decl)->static_chain_decl,
5116 &id);
5118 /* If there's a tree_map, prepare for substitution. */
5119 if (tree_map)
5120 for (i = 0; i < VEC_length (ipa_replace_map_p, tree_map); i++)
5122 gimple init;
5123 replace_info = VEC_index (ipa_replace_map_p, tree_map, i);
5124 if (replace_info->replace_p)
5126 tree op = replace_info->new_tree;
5127 if (!replace_info->old_tree)
5129 int i = replace_info->parm_num;
5130 tree parm;
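/* Walk the argument list to find the PARM_NUM-th parameter of
   OLD_DECL; it is the declaration being replaced.  */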
5131 for (parm = DECL_ARGUMENTS (old_decl); i; parm = DECL_CHAIN (parm))
5132 i --;
5133 replace_info->old_tree = parm;
5137 STRIP_NOPS (op);
5139 if (TREE_CODE (op) == VIEW_CONVERT_EXPR)
5140 op = TREE_OPERAND (op, 0);
5142 gcc_assert (TREE_CODE (replace_info->old_tree) == PARM_DECL);
5143 init = setup_one_parameter (&id, replace_info->old_tree,
5144 replace_info->new_tree, id.src_fn,
5145 NULL,
5146 &vars);
5147 if (init)
5148 VEC_safe_push (gimple, heap, init_stmts, init);
5151 /* Copy the function's arguments. */
5152 if (DECL_ARGUMENTS (old_decl) != NULL_TREE)
5153 DECL_ARGUMENTS (new_decl) =
5154 copy_arguments_for_versioning (DECL_ARGUMENTS (old_decl), &id,
5155 args_to_skip, &vars);
5157 DECL_INITIAL (new_decl) = remap_blocks (DECL_INITIAL (id.src_fn), &id);
5158 BLOCK_SUPERCONTEXT (DECL_INITIAL (new_decl)) = new_decl;
5160 declare_inline_vars (DECL_INITIAL (new_decl), vars);
5162 if (!VEC_empty (tree, DECL_STRUCT_FUNCTION (old_decl)->local_decls))
5163 /* Add local vars. */
5164 add_local_variables (DECL_STRUCT_FUNCTION (old_decl), cfun, &id);
5166 if (DECL_RESULT (old_decl) == NULL_TREE)
5168 else if (skip_return && !VOID_TYPE_P (TREE_TYPE (DECL_RESULT (old_decl))))
5170 DECL_RESULT (new_decl)
5171 = build_decl (DECL_SOURCE_LOCATION (DECL_RESULT (old_decl)),
5172 RESULT_DECL, NULL_TREE, void_type_node);
5173 DECL_CONTEXT (DECL_RESULT (new_decl)) = new_decl;
5174 cfun->returns_struct = 0;
5175 cfun->returns_pcc_struct = 0;
5177 else
5179 tree old_name;
5180 DECL_RESULT (new_decl) = remap_decl (DECL_RESULT (old_decl), &id);
5181 lang_hooks.dup_lang_specific_decl (DECL_RESULT (new_decl));
5182 if (gimple_in_ssa_p (id.src_cfun)
5183 && DECL_BY_REFERENCE (DECL_RESULT (old_decl))
5184 && (old_name = ssa_default_def (id.src_cfun, DECL_RESULT (old_decl))))
5186 tree new_name = make_ssa_name (DECL_RESULT (new_decl), NULL);
5187 insert_decl_map (&id, old_name, new_name);
5188 SSA_NAME_DEF_STMT (new_name) = gimple_build_nop ();
5189 set_ssa_default_def (cfun, DECL_RESULT (new_decl), new_name);
5193 /* Copy the function's body. */
5194 copy_body (&id, old_entry_block->count, REG_BR_PROB_BASE,
5195 ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR, blocks_to_copy, new_entry);
5197 /* Renumber the lexical scoping (non-code) blocks consecutively. */
5198 number_blocks (new_decl);
5200 /* We want to create the BB unconditionally, so that the addition of
5201 debug stmts doesn't affect BB count, which may in the end cause
5202 codegen differences. */
5203 bb = split_edge (single_succ_edge (ENTRY_BLOCK_PTR));
5204 while (VEC_length (gimple, init_stmts))
5205 insert_init_stmt (&id, bb, VEC_pop (gimple, init_stmts));
5206 update_clone_info (&id);
5208 /* Remap the nonlocal_goto_save_area, if any. */
5209 if (cfun->nonlocal_goto_save_area)
5211 struct walk_stmt_info wi;
5213 memset (&wi, 0, sizeof (wi));
5214 wi.info = &id;
5215 walk_tree (&cfun->nonlocal_goto_save_area, remap_gimple_op_r, &wi, NULL);
5218 /* Clean up. */
5219 pointer_map_destroy (id.decl_map);
5220 if (id.debug_map)
5221 pointer_map_destroy (id.debug_map);
5222 free_dominance_info (CDI_DOMINATORS);
5223 free_dominance_info (CDI_POST_DOMINATORS);
5225 fold_marked_statements (0, id.statements_to_fold);
5226 pointer_set_destroy (id.statements_to_fold);
5227 fold_cond_expr_cond ();
5228 delete_unreachable_blocks_update_callgraph (&id);
5229 if (id.dst_node->analyzed)
5230 cgraph_rebuild_references ();
5231 update_ssa (TODO_update_ssa);
5233 /* After partial cloning we need to rescale frequencies, so they are
5234 within proper range in the cloned function. */
5235 if (new_entry)
5237 struct cgraph_edge *e;
5238 rebuild_frequencies ();
5240 new_version_node->count = ENTRY_BLOCK_PTR->count;
5241 for (e = new_version_node->callees; e; e = e->next_callee)
5243 basic_block bb = gimple_bb (e->call_stmt);
5244 e->frequency = compute_call_stmt_bb_frequency (current_function_decl,
5245 bb);
5246 e->count = bb->count;
5248 for (e = new_version_node->indirect_calls; e; e = e->next_callee)
5250 basic_block bb = gimple_bb (e->call_stmt);
5251 e->frequency = compute_call_stmt_bb_frequency (current_function_decl,
5252 bb);
5253 e->count = bb->count;
5257 free_dominance_info (CDI_DOMINATORS);
5258 free_dominance_info (CDI_POST_DOMINATORS);
5260 gcc_assert (!id.debug_stmts);
5261 VEC_free (gimple, heap, init_stmts);
5262 pop_cfun ();
5263 current_function_decl = old_current_function_decl;
5264 gcc_assert (!current_function_decl
5265 || DECL_STRUCT_FUNCTION (current_function_decl) == cfun);
5266 return;
5269 /* EXP is a CALL_EXPR present in a GENERIC expression tree.  Try to integrate
5270 the callee and return the inlined body on success. */
5272 tree
5273 maybe_inline_call_in_expr (tree exp)
5275 tree fn = get_callee_fndecl (exp);
5277 /* We can only try to inline "const" functions. */
5278 if (fn && TREE_READONLY (fn) && DECL_SAVED_TREE (fn))
5280 struct pointer_map_t *decl_map = pointer_map_create ();
5281 call_expr_arg_iterator iter;
5282 copy_body_data id;
5283 tree param, arg, t;
5285 /* Remap the parameters. */
5286 for (param = DECL_ARGUMENTS (fn), arg = first_call_expr_arg (exp, &iter);
5287 param;
5288 param = DECL_CHAIN (param), arg = next_call_expr_arg (&iter))
5289 *pointer_map_insert (decl_map, param) = arg;
5291 memset (&id, 0, sizeof (id));
5292 id.src_fn = fn;
5293 id.dst_fn = current_function_decl;
5294 id.src_cfun = DECL_STRUCT_FUNCTION (fn);
5295 id.decl_map = decl_map;
5297 id.copy_decl = copy_decl_no_change;
5298 id.transform_call_graph_edges = CB_CGE_DUPLICATE;
5299 id.transform_new_cfg = false;
5300 id.transform_return_to_modify = true;
5301 id.transform_lang_insert_block = NULL;
5303 /* Make sure not to unshare trees behind the front-end's back
5304 since front-end specific mechanisms may rely on sharing. */
5305 id.regimplify = false;
5306 id.do_not_unshare = true;
5308 /* We're not inside any EH region. */
5309 id.eh_lp_nr = 0;
5311 t = copy_tree_body (&id);
5312 pointer_map_destroy (decl_map);
5314 /* We can only return something suitable for use in a GENERIC
5315 expression tree. */
5316 if (TREE_CODE (t) == MODIFY_EXPR)
5317 return TREE_OPERAND (t, 1);
5320 return NULL_TREE;
5323 /* Duplicate a type, fields and all. */
5325 tree
5326 build_duplicate_type (tree type)
5328 struct copy_body_data id;
5330 memset (&id, 0, sizeof (id));
5331 id.src_fn = current_function_decl;
5332 id.dst_fn = current_function_decl;
5333 id.src_cfun = cfun;
5334 id.decl_map = pointer_map_create ();
5335 id.debug_map = NULL;
5336 id.copy_decl = copy_decl_no_change;
5338 type = remap_type_1 (type, &id);
5340 pointer_map_destroy (id.decl_map);
5341 if (id.debug_map)
5342 pointer_map_destroy (id.debug_map);
5344 TYPE_CANONICAL (type) = type;
5346 return type;