/* Tree inlining.
   Copyright (C) 2001-2013 Free Software Foundation, Inc.
   Contributed by Alexandre Oliva <aoliva@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "diagnostic-core.h"
#include "tree.h"
#include "tree-inline.h"
#include "flags.h"
#include "params.h"
#include "input.h"
#include "insn-config.h"
#include "hashtab.h"
#include "langhooks.h"
#include "basic-block.h"
#include "tree-iterator.h"
#include "cgraph.h"
#include "intl.h"
#include "tree-mudflap.h"
#include "tree-flow.h"
#include "function.h"
#include "tree-pretty-print.h"
#include "except.h"
#include "debug.h"
#include "pointer-set.h"
#include "ipa-prop.h"
#include "value-prof.h"
#include "tree-pass.h"
#include "target.h"

#include "rtl.h"	/* FIXME: For asm_str_count.  */

/* I'm not really happy about this, but we need to handle gimple and
   non-gimple trees.  */
#include "gimple.h"
/* Inlining, Cloning, Versioning, Parallelization

   Inlining: a function body is duplicated, but the PARM_DECLs are
   remapped into VAR_DECLs, and non-void RETURN_EXPRs become
   MODIFY_EXPRs that store to a dedicated returned-value variable.
   The duplicated eh_region info of the copy will later be appended
   to the info for the caller; the eh_region info in copied throwing
   statements and RESX statements is adjusted accordingly.

   Cloning: (only in C++) We have one body for a con/de/structor, and
   multiple function decls, each with a unique parameter list.
   Duplicate the body, using the given splay tree; some parameters
   will become constants (like 0 or 1).

   Versioning: a function body is duplicated and the result is a new
   function, rather than being inlined into blocks of an existing
   function.  Some parameters will become constants.

   Parallelization: a region of a function is duplicated, resulting in
   a new function.  Variables may be replaced with complex expressions
   to enable shared variable semantics.

   All of these will simultaneously look up any callgraph edges.  If
   we're going to inline the duplicated function body, and the given
   function has some cloned callgraph nodes (one for each place this
   function will be inlined) those callgraph edges will be duplicated.
   If we're cloning the body, those callgraph edges will be
   updated to point into the new body.  (Note that the original
   callgraph node and edge list will not be altered.)

   See the CALL_EXPR handling case in copy_tree_body_r ().  */
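
/* For illustration (not part of the original sources): inlining a call
   `r = f (a)' where `f' is `int f (int x) { return x + 1; }' remaps the
   PARM_DECL `x' to a local VAR_DECL initialized from `a', and the
   RETURN_EXPR becomes roughly `retval.0 = x.1 + 1', with `retval.0'
   then feeding the use of `r' in the caller; the temporary names here
   are hypothetical.  */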
/* To Do:

   o In order to make inlining-on-trees work, we pessimized
     function-local static constants.  In particular, they are now
     always output, even when not addressed.  Fix this by treating
     function-local static constants just like global static
     constants; the back-end already knows not to output them if they
     are not needed.

   o Provide heuristics to clamp inlining of recursive template
     calls?  */
/* Weights that estimate_num_insns uses to estimate the size of the
   produced code.  */

eni_weights eni_size_weights;

/* Weights that estimate_num_insns uses to estimate the time necessary
   to execute the produced code.  */

eni_weights eni_time_weights;
/* Prototypes.  */

static tree declare_return_variable (copy_body_data *, tree, tree, basic_block);
static void remap_block (tree *, copy_body_data *);
static void copy_bind_expr (tree *, int *, copy_body_data *);
static void declare_inline_vars (tree, tree);
static void remap_save_expr (tree *, void *, int *);
static void prepend_lexical_block (tree current_block, tree new_block);
static tree copy_decl_to_var (tree, copy_body_data *);
static tree copy_result_decl_to_var (tree, copy_body_data *);
static tree copy_decl_maybe_to_var (tree, copy_body_data *);
static gimple remap_gimple_stmt (gimple, copy_body_data *);
static bool delete_unreachable_blocks_update_callgraph (copy_body_data *id);
/* Insert a tree->tree mapping for ID.  Although the name suggests the
   trees should be variables, the map is used for more than that.  */

void
insert_decl_map (copy_body_data *id, tree key, tree value)
{
  *pointer_map_insert (id->decl_map, key) = value;

  /* Always insert an identity map as well.  If we see this same new
     node again, we won't want to duplicate it a second time.  */
  if (key != value)
    *pointer_map_insert (id->decl_map, value) = value;
}
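
/* An illustrative note: remapping decl D to its copy D' records both
   D -> D' and D' -> D', so if a later walk reaches D' (already a new
   node) it maps to itself instead of being duplicated again.  */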
/* Insert a tree->tree mapping for ID.  This is only used for
   variables.  */

static void
insert_debug_decl_map (copy_body_data *id, tree key, tree value)
{
  if (!gimple_in_ssa_p (id->src_cfun))
    return;

  if (!MAY_HAVE_DEBUG_STMTS)
    return;

  if (!target_for_debug_bind (key))
    return;

  gcc_assert (TREE_CODE (key) == PARM_DECL);
  gcc_assert (TREE_CODE (value) == VAR_DECL);

  if (!id->debug_map)
    id->debug_map = pointer_map_create ();

  *pointer_map_insert (id->debug_map, key) = value;
}
/* If nonzero, we're remapping the contents of inlined debug
   statements.  If negative, an error has occurred, such as a
   reference to a variable that isn't available in the inlined
   context.  */
static int processing_debug_stmt = 0;
/* Construct new SSA name for old NAME.  ID is the inline context.  */

static tree
remap_ssa_name (tree name, copy_body_data *id)
{
  tree new_tree, var;
  tree *n;

  gcc_assert (TREE_CODE (name) == SSA_NAME);

  n = (tree *) pointer_map_contains (id->decl_map, name);
  if (n)
    return unshare_expr (*n);

  if (processing_debug_stmt)
    {
      if (SSA_NAME_IS_DEFAULT_DEF (name)
	  && TREE_CODE (SSA_NAME_VAR (name)) == PARM_DECL
	  && id->entry_bb == NULL
	  && single_succ_p (ENTRY_BLOCK_PTR))
	{
	  tree vexpr = make_node (DEBUG_EXPR_DECL);
	  gimple def_temp;
	  gimple_stmt_iterator gsi;
	  tree val = SSA_NAME_VAR (name);

	  n = (tree *) pointer_map_contains (id->decl_map, val);
	  if (n != NULL)
	    val = *n;
	  if (TREE_CODE (val) != PARM_DECL)
	    {
	      processing_debug_stmt = -1;
	      return name;
	    }
	  def_temp = gimple_build_debug_source_bind (vexpr, val, NULL);
	  DECL_ARTIFICIAL (vexpr) = 1;
	  TREE_TYPE (vexpr) = TREE_TYPE (name);
	  DECL_MODE (vexpr) = DECL_MODE (SSA_NAME_VAR (name));
	  gsi = gsi_after_labels (single_succ (ENTRY_BLOCK_PTR));
	  gsi_insert_before (&gsi, def_temp, GSI_SAME_STMT);
	  return vexpr;
	}

      processing_debug_stmt = -1;
      return name;
    }

  /* Remap anonymous SSA names or SSA names of anonymous decls.  */
  var = SSA_NAME_VAR (name);
  if (!var
      || (!SSA_NAME_IS_DEFAULT_DEF (name)
	  && TREE_CODE (var) == VAR_DECL
	  && !VAR_DECL_IS_VIRTUAL_OPERAND (var)
	  && DECL_ARTIFICIAL (var)
	  && DECL_IGNORED_P (var)
	  && !DECL_NAME (var)))
    {
      struct ptr_info_def *pi;
      new_tree = make_ssa_name (remap_type (TREE_TYPE (name), id), NULL);
      if (!var && SSA_NAME_IDENTIFIER (name))
	SET_SSA_NAME_VAR_OR_IDENTIFIER (new_tree, SSA_NAME_IDENTIFIER (name));
      insert_decl_map (id, name, new_tree);
      SSA_NAME_OCCURS_IN_ABNORMAL_PHI (new_tree)
	= SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name);
      /* At least IPA points-to info can be directly transferred.  */
      if (id->src_cfun->gimple_df
	  && id->src_cfun->gimple_df->ipa_pta
	  && (pi = SSA_NAME_PTR_INFO (name))
	  && !pi->pt.anything)
	{
	  struct ptr_info_def *new_pi = get_ptr_info (new_tree);
	  new_pi->pt = pi->pt;
	}
      return new_tree;
    }

  /* Do not set DEF_STMT yet as statement is not copied yet.  We do that
     in copy_bb.  */
  new_tree = remap_decl (var, id);

  /* We might've substituted a constant or another SSA_NAME for
     the variable.

     Replace the SSA name representing the RESULT_DECL by the variable
     during inlining: this saves us from having to introduce a PHI node
     in case the return value is only partly initialized.  */
  if ((TREE_CODE (new_tree) == VAR_DECL || TREE_CODE (new_tree) == PARM_DECL)
      && (!SSA_NAME_VAR (name)
	  || TREE_CODE (SSA_NAME_VAR (name)) != RESULT_DECL
	  || !id->transform_return_to_modify))
    {
      struct ptr_info_def *pi;
      new_tree = make_ssa_name (new_tree, NULL);
      insert_decl_map (id, name, new_tree);
      SSA_NAME_OCCURS_IN_ABNORMAL_PHI (new_tree)
	= SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name);
      /* At least IPA points-to info can be directly transferred.  */
      if (id->src_cfun->gimple_df
	  && id->src_cfun->gimple_df->ipa_pta
	  && (pi = SSA_NAME_PTR_INFO (name))
	  && !pi->pt.anything)
	{
	  struct ptr_info_def *new_pi = get_ptr_info (new_tree);
	  new_pi->pt = pi->pt;
	}
      if (SSA_NAME_IS_DEFAULT_DEF (name))
	{
	  /* By inlining a function that has an uninitialized variable,
	     we might extend its lifetime (the variable might get reused).
	     This causes an ICE if we end up extending the lifetime of an
	     SSA name across an abnormal edge, and it also increases
	     register pressure.

	     We simply initialize all uninitialized vars to 0, except
	     when we are inlining into the very first BB.  We could avoid
	     this for all BBs that are not inside strongly connected
	     regions of the CFG, but that is expensive to test.  */
	  if (id->entry_bb
	      && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name)
	      && (!SSA_NAME_VAR (name)
		  || TREE_CODE (SSA_NAME_VAR (name)) != PARM_DECL)
	      && (id->entry_bb != EDGE_SUCC (ENTRY_BLOCK_PTR, 0)->dest
		  || EDGE_COUNT (id->entry_bb->preds) != 1))
	    {
	      gimple_stmt_iterator gsi = gsi_last_bb (id->entry_bb);
	      gimple init_stmt;
	      tree zero = build_zero_cst (TREE_TYPE (new_tree));

	      init_stmt = gimple_build_assign (new_tree, zero);
	      gsi_insert_after (&gsi, init_stmt, GSI_NEW_STMT);
	      SSA_NAME_IS_DEFAULT_DEF (new_tree) = 0;
	    }
	  else
	    {
	      SSA_NAME_DEF_STMT (new_tree) = gimple_build_nop ();
	      set_ssa_default_def (cfun, SSA_NAME_VAR (new_tree), new_tree);
	    }
	}
    }
  else
    insert_decl_map (id, name, new_tree);
  return new_tree;
}
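
/* An illustrative sketch of the zero-initialization above (names are
   hypothetical): for a default definition u_1(D) of an uninitialized
   local `u' that appears in an abnormal PHI, the copy receives an
   explicit `u.2 = 0' in ID->entry_bb, so its live range cannot extend
   uninitialized across the abnormal edge.  */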
/* Remap DECL during the copying of the BLOCK tree for the function.  */

tree
remap_decl (tree decl, copy_body_data *id)
{
  tree *n;

  /* We only remap local variables in the current function.  */

  /* See if we have remapped this declaration.  */

  n = (tree *) pointer_map_contains (id->decl_map, decl);

  if (!n && processing_debug_stmt)
    {
      processing_debug_stmt = -1;
      return decl;
    }

  /* If we didn't already have an equivalent for this declaration,
     create one now.  */
  if (!n)
    {
      /* Make a copy of the variable or label.  */
      tree t = id->copy_decl (decl, id);

      /* Remember it, so that if we encounter this local entity again
	 we can reuse this copy.  Do this early because remap_type may
	 need this decl for TYPE_STUB_DECL.  */
      insert_decl_map (id, decl, t);

      if (!DECL_P (t))
	return t;

      /* Remap types, if necessary.  */
      TREE_TYPE (t) = remap_type (TREE_TYPE (t), id);
      if (TREE_CODE (t) == TYPE_DECL)
	DECL_ORIGINAL_TYPE (t) = remap_type (DECL_ORIGINAL_TYPE (t), id);

      /* Remap sizes as necessary.  */
      walk_tree (&DECL_SIZE (t), copy_tree_body_r, id, NULL);
      walk_tree (&DECL_SIZE_UNIT (t), copy_tree_body_r, id, NULL);

      /* If fields, do likewise for offset and qualifier.  */
      if (TREE_CODE (t) == FIELD_DECL)
	{
	  walk_tree (&DECL_FIELD_OFFSET (t), copy_tree_body_r, id, NULL);
	  if (TREE_CODE (DECL_CONTEXT (t)) == QUAL_UNION_TYPE)
	    walk_tree (&DECL_QUALIFIER (t), copy_tree_body_r, id, NULL);
	}

      return t;
    }

  if (id->do_not_unshare)
    return *n;
  else
    return unshare_expr (*n);
}
static tree
remap_type_1 (tree type, copy_body_data *id)
{
  tree new_tree, t;

  /* We do need a copy.  Build and register it now.  If this is a pointer or
     reference type, remap the designated type and make a new pointer or
     reference type.  */
  if (TREE_CODE (type) == POINTER_TYPE)
    {
      new_tree = build_pointer_type_for_mode (remap_type (TREE_TYPE (type), id),
					      TYPE_MODE (type),
					      TYPE_REF_CAN_ALIAS_ALL (type));
      if (TYPE_ATTRIBUTES (type) || TYPE_QUALS (type))
	new_tree = build_type_attribute_qual_variant (new_tree,
						      TYPE_ATTRIBUTES (type),
						      TYPE_QUALS (type));
      insert_decl_map (id, type, new_tree);
      return new_tree;
    }
  else if (TREE_CODE (type) == REFERENCE_TYPE)
    {
      new_tree = build_reference_type_for_mode (remap_type (TREE_TYPE (type), id),
						TYPE_MODE (type),
						TYPE_REF_CAN_ALIAS_ALL (type));
      if (TYPE_ATTRIBUTES (type) || TYPE_QUALS (type))
	new_tree = build_type_attribute_qual_variant (new_tree,
						      TYPE_ATTRIBUTES (type),
						      TYPE_QUALS (type));
      insert_decl_map (id, type, new_tree);
      return new_tree;
    }
  else
    new_tree = copy_node (type);

  insert_decl_map (id, type, new_tree);

  /* This is a new type, not a copy of an old type.  Need to reassociate
     variants.  We can handle everything except the main variant lazily.  */
  t = TYPE_MAIN_VARIANT (type);
  if (type != t)
    {
      t = remap_type (t, id);
      TYPE_MAIN_VARIANT (new_tree) = t;
      TYPE_NEXT_VARIANT (new_tree) = TYPE_NEXT_VARIANT (t);
      TYPE_NEXT_VARIANT (t) = new_tree;
    }
  else
    {
      TYPE_MAIN_VARIANT (new_tree) = new_tree;
      TYPE_NEXT_VARIANT (new_tree) = NULL;
    }

  if (TYPE_STUB_DECL (type))
    TYPE_STUB_DECL (new_tree) = remap_decl (TYPE_STUB_DECL (type), id);

  /* Lazily create pointer and reference types.  */
  TYPE_POINTER_TO (new_tree) = NULL;
  TYPE_REFERENCE_TO (new_tree) = NULL;

  switch (TREE_CODE (new_tree))
    {
    case INTEGER_TYPE:
    case REAL_TYPE:
    case FIXED_POINT_TYPE:
    case ENUMERAL_TYPE:
    case BOOLEAN_TYPE:
      t = TYPE_MIN_VALUE (new_tree);
      if (t && TREE_CODE (t) != INTEGER_CST)
	walk_tree (&TYPE_MIN_VALUE (new_tree), copy_tree_body_r, id, NULL);

      t = TYPE_MAX_VALUE (new_tree);
      if (t && TREE_CODE (t) != INTEGER_CST)
	walk_tree (&TYPE_MAX_VALUE (new_tree), copy_tree_body_r, id, NULL);
      return new_tree;

    case FUNCTION_TYPE:
      TREE_TYPE (new_tree) = remap_type (TREE_TYPE (new_tree), id);
      walk_tree (&TYPE_ARG_TYPES (new_tree), copy_tree_body_r, id, NULL);
      return new_tree;

    case ARRAY_TYPE:
      TREE_TYPE (new_tree) = remap_type (TREE_TYPE (new_tree), id);
      TYPE_DOMAIN (new_tree) = remap_type (TYPE_DOMAIN (new_tree), id);
      break;

    case RECORD_TYPE:
    case UNION_TYPE:
    case QUAL_UNION_TYPE:
      {
	tree f, nf = NULL;

	for (f = TYPE_FIELDS (new_tree); f ; f = DECL_CHAIN (f))
	  {
	    t = remap_decl (f, id);
	    DECL_CONTEXT (t) = new_tree;
	    DECL_CHAIN (t) = nf;
	    nf = t;
	  }
	TYPE_FIELDS (new_tree) = nreverse (nf);
      }
      break;

    case OFFSET_TYPE:
    default:
      /* Shouldn't have been thought variable sized.  */
      gcc_unreachable ();
    }

  walk_tree (&TYPE_SIZE (new_tree), copy_tree_body_r, id, NULL);
  walk_tree (&TYPE_SIZE_UNIT (new_tree), copy_tree_body_r, id, NULL);

  return new_tree;
}
tree
remap_type (tree type, copy_body_data *id)
{
  tree *node;
  tree tmp;

  if (type == NULL)
    return type;

  /* See if we have remapped this type.  */
  node = (tree *) pointer_map_contains (id->decl_map, type);
  if (node)
    return *node;

  /* The type only needs remapping if it's variably modified.  */
  if (! variably_modified_type_p (type, id->src_fn))
    {
      insert_decl_map (id, type, type);
      return type;
    }

  id->remapping_type_depth++;
  tmp = remap_type_1 (type, id);
  id->remapping_type_depth--;

  return tmp;
}
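
/* For example (illustrative, not from the original sources): in
   `void f (int n) { int a[n]; ... }' the type `int[n]' is variably
   modified, so copying the body must remap it so that its size refers
   to the copy of `n'; a plain `int' is simply mapped to itself by the
   early-out above.  */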
/* Decide if DECL can be put into BLOCK_NONLOCAL_VARs.  */

static bool
can_be_nonlocal (tree decl, copy_body_data *id)
{
  /* We cannot duplicate function decls.  */
  if (TREE_CODE (decl) == FUNCTION_DECL)
    return true;

  /* Local static vars must be non-local or we get multiple declaration
     problems.  */
  if (TREE_CODE (decl) == VAR_DECL
      && !auto_var_in_fn_p (decl, id->src_fn))
    return true;

  return false;
}
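
/* E.g. a `static int counter;' declared inside the source function is
   not an auto var, so it is treated as nonlocal above and keeps
   referring to the one original DECL instead of being duplicated per
   inlined copy (illustrative example).  */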
static tree
remap_decls (tree decls, vec<tree, va_gc> **nonlocalized_list,
	     copy_body_data *id)
{
  tree old_var;
  tree new_decls = NULL_TREE;

  /* Remap its variables.  */
  for (old_var = decls; old_var; old_var = DECL_CHAIN (old_var))
    {
      tree new_var;

      if (can_be_nonlocal (old_var, id))
	{
	  /* We need to add this variable to the local decls as otherwise
	     nothing else will do so.  */
	  if (TREE_CODE (old_var) == VAR_DECL
	      && ! DECL_EXTERNAL (old_var))
	    add_local_decl (cfun, old_var);
	  if ((!optimize || debug_info_level > DINFO_LEVEL_TERSE)
	      && !DECL_IGNORED_P (old_var)
	      && nonlocalized_list)
	    vec_safe_push (*nonlocalized_list, old_var);
	  continue;
	}

      /* Remap the variable.  */
      new_var = remap_decl (old_var, id);

      /* If we didn't remap this variable, we can't mess with its
	 TREE_CHAIN.  If we remapped this variable to the return slot, it's
	 already declared somewhere else, so don't declare it here.  */

      if (new_var == id->retvar)
	;
      else if (!new_var)
	{
	  if ((!optimize || debug_info_level > DINFO_LEVEL_TERSE)
	      && !DECL_IGNORED_P (old_var)
	      && nonlocalized_list)
	    vec_safe_push (*nonlocalized_list, old_var);
	}
      else
	{
	  gcc_assert (DECL_P (new_var));
	  DECL_CHAIN (new_var) = new_decls;
	  new_decls = new_var;

	  /* Also copy value-expressions.  */
	  if (TREE_CODE (new_var) == VAR_DECL
	      && DECL_HAS_VALUE_EXPR_P (new_var))
	    {
	      tree tem = DECL_VALUE_EXPR (new_var);
	      bool old_regimplify = id->regimplify;
	      id->remapping_type_depth++;
	      walk_tree (&tem, copy_tree_body_r, id, NULL);
	      id->remapping_type_depth--;
	      id->regimplify = old_regimplify;
	      SET_DECL_VALUE_EXPR (new_var, tem);
	    }
	}
    }

  return nreverse (new_decls);
}
/* Copy the BLOCK to contain remapped versions of the variables
   therein.  And hook the new block into the block-tree.  */

static void
remap_block (tree *block, copy_body_data *id)
{
  tree old_block;
  tree new_block;

  /* Make the new block.  */
  old_block = *block;
  new_block = make_node (BLOCK);
  TREE_USED (new_block) = TREE_USED (old_block);
  BLOCK_ABSTRACT_ORIGIN (new_block) = old_block;
  BLOCK_SOURCE_LOCATION (new_block) = BLOCK_SOURCE_LOCATION (old_block);
  BLOCK_NONLOCALIZED_VARS (new_block)
    = vec_safe_copy (BLOCK_NONLOCALIZED_VARS (old_block));
  *block = new_block;

  /* Remap its variables.  */
  BLOCK_VARS (new_block) = remap_decls (BLOCK_VARS (old_block),
					&BLOCK_NONLOCALIZED_VARS (new_block),
					id);

  if (id->transform_lang_insert_block)
    id->transform_lang_insert_block (new_block);

  /* Remember the remapped block.  */
  insert_decl_map (id, old_block, new_block);
}
/* Copy the whole block tree and root it in id->block.  */
static tree
remap_blocks (tree block, copy_body_data *id)
{
  tree t;
  tree new_tree = block;

  if (!block)
    return NULL;

  remap_block (&new_tree, id);
  gcc_assert (new_tree != block);
  for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
    prepend_lexical_block (new_tree, remap_blocks (t, id));
  /* Blocks are in arbitrary order, but make things slightly prettier and do
     not swap order when producing a copy.  */
  BLOCK_SUBBLOCKS (new_tree) = blocks_nreverse (BLOCK_SUBBLOCKS (new_tree));
  return new_tree;
}
/* Remap the block tree rooted at BLOCK to nothing.  */
static void
remap_blocks_to_null (tree block, copy_body_data *id)
{
  tree t;
  insert_decl_map (id, block, NULL_TREE);
  for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
    remap_blocks_to_null (t, id);
}
static void
copy_statement_list (tree *tp)
{
  tree_stmt_iterator oi, ni;
  tree new_tree;

  new_tree = alloc_stmt_list ();
  ni = tsi_start (new_tree);
  oi = tsi_start (*tp);
  TREE_TYPE (new_tree) = TREE_TYPE (*tp);
  *tp = new_tree;

  for (; !tsi_end_p (oi); tsi_next (&oi))
    {
      tree stmt = tsi_stmt (oi);
      if (TREE_CODE (stmt) == STATEMENT_LIST)
	/* This copy is not redundant; tsi_link_after will smash this
	   STATEMENT_LIST into the end of the one we're building, and we
	   don't want to do that with the original.  */
	copy_statement_list (&stmt);
      tsi_link_after (&ni, stmt, TSI_CONTINUE_LINKING);
    }
}
static void
copy_bind_expr (tree *tp, int *walk_subtrees, copy_body_data *id)
{
  tree block = BIND_EXPR_BLOCK (*tp);
  /* Copy (and replace) the statement.  */
  copy_tree_r (tp, walk_subtrees, NULL);
  if (block)
    {
      remap_block (&block, id);
      BIND_EXPR_BLOCK (*tp) = block;
    }

  if (BIND_EXPR_VARS (*tp))
    /* This will remap a lot of the same decls again, but this should be
       harmless.  */
    BIND_EXPR_VARS (*tp) = remap_decls (BIND_EXPR_VARS (*tp), NULL, id);
}
/* Create a new gimple_seq by remapping all the statements in BODY
   using the inlining information in ID.  */

static gimple_seq
remap_gimple_seq (gimple_seq body, copy_body_data *id)
{
  gimple_stmt_iterator si;
  gimple_seq new_body = NULL;

  for (si = gsi_start (body); !gsi_end_p (si); gsi_next (&si))
    {
      gimple new_stmt = remap_gimple_stmt (gsi_stmt (si), id);
      gimple_seq_add_stmt (&new_body, new_stmt);
    }

  return new_body;
}
/* Copy a GIMPLE_BIND statement STMT, remapping all the symbols in its
   block using the mapping information in ID.  */

static gimple
copy_gimple_bind (gimple stmt, copy_body_data *id)
{
  gimple new_bind;
  tree new_block, new_vars;
  gimple_seq body, new_body;

  /* Copy the statement.  Note that we purposely don't use copy_stmt
     here because we need to remap statements as we copy.  */
  body = gimple_bind_body (stmt);
  new_body = remap_gimple_seq (body, id);

  new_block = gimple_bind_block (stmt);
  if (new_block)
    remap_block (&new_block, id);

  /* This will remap a lot of the same decls again, but this should be
     harmless.  */
  new_vars = gimple_bind_vars (stmt);
  if (new_vars)
    new_vars = remap_decls (new_vars, NULL, id);

  new_bind = gimple_build_bind (new_vars, new_body, new_block);

  return new_bind;
}
/* Remap the GIMPLE operand pointed to by *TP.  DATA is really a
   'struct walk_stmt_info *'.  DATA->INFO is a 'copy_body_data *'.
   WALK_SUBTREES is used to tell walk_gimple_op whether to keep
   recursing into the children nodes of *TP.  */

static tree
remap_gimple_op_r (tree *tp, int *walk_subtrees, void *data)
{
  struct walk_stmt_info *wi_p = (struct walk_stmt_info *) data;
  copy_body_data *id = (copy_body_data *) wi_p->info;
  tree fn = id->src_fn;

  if (TREE_CODE (*tp) == SSA_NAME)
    {
      *tp = remap_ssa_name (*tp, id);
      *walk_subtrees = 0;
      return NULL;
    }
  else if (auto_var_in_fn_p (*tp, fn))
    {
      /* Local variables and labels need to be replaced by equivalent
	 variables.  We don't want to copy static variables; there's
	 only one of those, no matter how many times we inline the
	 containing function.  Similarly for globals from an outer
	 function.  */
      tree new_decl;

      /* Remap the declaration.  */
      new_decl = remap_decl (*tp, id);
      gcc_assert (new_decl);
      /* Replace this variable with the copy.  */
      STRIP_TYPE_NOPS (new_decl);
      /* ???  The C++ frontend uses void * pointer zero to initialize
	 any other type.  This confuses the middle-end type verification.
	 As cloned bodies do not go through gimplification again the fixup
	 there doesn't trigger.  */
      if (TREE_CODE (new_decl) == INTEGER_CST
	  && !useless_type_conversion_p (TREE_TYPE (*tp), TREE_TYPE (new_decl)))
	new_decl = fold_convert (TREE_TYPE (*tp), new_decl);
      *tp = new_decl;
      *walk_subtrees = 0;
    }
  else if (TREE_CODE (*tp) == STATEMENT_LIST)
    gcc_unreachable ();
  else if (TREE_CODE (*tp) == SAVE_EXPR)
    gcc_unreachable ();
  else if (TREE_CODE (*tp) == LABEL_DECL
	   && (!DECL_CONTEXT (*tp)
	       || decl_function_context (*tp) == id->src_fn))
    /* These may need to be remapped for EH handling.  */
    *tp = remap_decl (*tp, id);
  else if (TREE_CODE (*tp) == FIELD_DECL)
    {
      /* If the enclosing record type is variably_modified_type_p, the field
	 has already been remapped.  Otherwise, it need not be.  */
      tree *n = (tree *) pointer_map_contains (id->decl_map, *tp);
      if (n)
	*tp = *n;
      *walk_subtrees = 0;
    }
  else if (TYPE_P (*tp))
    /* Types may need remapping as well.  */
    *tp = remap_type (*tp, id);
  else if (CONSTANT_CLASS_P (*tp))
    {
      /* If this is a constant, we have to copy the node iff the type
	 will be remapped.  copy_tree_r will not copy a constant.  */
      tree new_type = remap_type (TREE_TYPE (*tp), id);

      if (new_type == TREE_TYPE (*tp))
	*walk_subtrees = 0;

      else if (TREE_CODE (*tp) == INTEGER_CST)
	*tp = build_int_cst_wide (new_type, TREE_INT_CST_LOW (*tp),
				  TREE_INT_CST_HIGH (*tp));
      else
	{
	  *tp = copy_node (*tp);
	  TREE_TYPE (*tp) = new_type;
	}
    }
  else
    {
      /* Otherwise, just copy the node.  Note that copy_tree_r already
	 knows not to copy VAR_DECLs, etc., so this is safe.  */

      if (TREE_CODE (*tp) == MEM_REF)
	{
	  tree ptr = TREE_OPERAND (*tp, 0);
	  tree type = remap_type (TREE_TYPE (*tp), id);
	  tree old = *tp;

	  /* We need to re-canonicalize MEM_REFs from inline substitutions
	     that can happen when a pointer argument is an ADDR_EXPR.
	     Recurse here manually to allow that.  */
	  walk_tree (&ptr, remap_gimple_op_r, data, NULL);
	  *tp = fold_build2 (MEM_REF, type,
			     ptr, TREE_OPERAND (*tp, 1));
	  TREE_THIS_NOTRAP (*tp) = TREE_THIS_NOTRAP (old);
	  TREE_THIS_VOLATILE (*tp) = TREE_THIS_VOLATILE (old);
	  TREE_SIDE_EFFECTS (*tp) = TREE_SIDE_EFFECTS (old);
	  TREE_NO_WARNING (*tp) = TREE_NO_WARNING (old);
	  *walk_subtrees = 0;
	  return NULL;
	}

      /* Here is the "usual case".  Copy this tree node, and then
	 tweak some special cases.  */
      copy_tree_r (tp, walk_subtrees, NULL);

      if (TREE_CODE (*tp) != OMP_CLAUSE)
	TREE_TYPE (*tp) = remap_type (TREE_TYPE (*tp), id);

      if (TREE_CODE (*tp) == TARGET_EXPR && TREE_OPERAND (*tp, 3))
	{
	  /* The copied TARGET_EXPR has never been expanded, even if the
	     original node was expanded already.  */
	  TREE_OPERAND (*tp, 1) = TREE_OPERAND (*tp, 3);
	  TREE_OPERAND (*tp, 3) = NULL_TREE;
	}
      else if (TREE_CODE (*tp) == ADDR_EXPR)
	{
	  /* Variable substitution need not be simple; in particular it
	     may involve the MEM_REF substitution above.  Make sure that
	     TREE_CONSTANT and friends are up-to-date.  */
	  int invariant = is_gimple_min_invariant (*tp);
	  walk_tree (&TREE_OPERAND (*tp, 0), remap_gimple_op_r, data, NULL);
	  recompute_tree_invariant_for_addr_expr (*tp);

	  /* If this used to be invariant, but is not any longer,
	     then regimplification is probably needed.  */
	  if (invariant && !is_gimple_min_invariant (*tp))
	    id->regimplify = true;

	  *walk_subtrees = 0;
	}
    }

  /* Update the TREE_BLOCK for the cloned expr.  */
  if (EXPR_P (*tp))
    {
      tree new_block = id->remapping_type_depth == 0 ? id->block : NULL;
      tree old_block = TREE_BLOCK (*tp);
      if (old_block)
	{
	  tree *n;
	  n = (tree *) pointer_map_contains (id->decl_map,
					     TREE_BLOCK (*tp));
	  if (n)
	    new_block = *n;
	}
      TREE_SET_BLOCK (*tp, new_block);
    }

  /* Keep iterating.  */
  return NULL_TREE;
}
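
/* A sketch of the MEM_REF re-canonicalization (hypothetical names):
   after substituting `&x' for the pointer parameter `p', a use
   MEM[p + 0] becomes MEM[&x + 0], which the fold_build2 call above
   folds back into a direct reference to `x'.  */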
/* Called from copy_body_id via walk_tree.  DATA is really a
   `copy_body_data *'.  */

tree
copy_tree_body_r (tree *tp, int *walk_subtrees, void *data)
{
  copy_body_data *id = (copy_body_data *) data;
  tree fn = id->src_fn;
  tree new_block;

  /* Begin by recognizing trees that we'll completely rewrite for the
     inlining context.  Our output for these trees is completely
     different from our input (e.g. RETURN_EXPR is deleted and morphs
     into an edge).  Further down, we'll handle trees that get
     duplicated and/or tweaked.  */

  /* When requested, RETURN_EXPRs should be transformed to just the
     contained MODIFY_EXPR.  The branch semantics of the return will
     be handled elsewhere by manipulating the CFG rather than a statement.  */
  if (TREE_CODE (*tp) == RETURN_EXPR && id->transform_return_to_modify)
    {
      tree assignment = TREE_OPERAND (*tp, 0);

      /* If we're returning something, just turn that into an
	 assignment into the equivalent of the original RESULT_DECL.
	 If the "assignment" is just the result decl, the result
	 decl has already been set (e.g. a recent "foo (&result_decl,
	 ...)"); just toss the entire RETURN_EXPR.  */
      if (assignment && TREE_CODE (assignment) == MODIFY_EXPR)
	{
	  /* Replace the RETURN_EXPR with (a copy of) the
	     MODIFY_EXPR hanging underneath.  */
	  *tp = copy_node (assignment);
	}
      else /* Else the RETURN_EXPR returns no value.  */
	{
	  *tp = NULL;
	  return (tree) (void *)1;
	}
    }
  else if (TREE_CODE (*tp) == SSA_NAME)
    {
      *tp = remap_ssa_name (*tp, id);
      *walk_subtrees = 0;
      return NULL;
    }

  /* Local variables and labels need to be replaced by equivalent
     variables.  We don't want to copy static variables; there's only
     one of those, no matter how many times we inline the containing
     function.  Similarly for globals from an outer function.  */
  else if (auto_var_in_fn_p (*tp, fn))
    {
      tree new_decl;

      /* Remap the declaration.  */
      new_decl = remap_decl (*tp, id);
      gcc_assert (new_decl);
      /* Replace this variable with the copy.  */
      STRIP_TYPE_NOPS (new_decl);
      *tp = new_decl;
      *walk_subtrees = 0;
    }
  else if (TREE_CODE (*tp) == STATEMENT_LIST)
    copy_statement_list (tp);
  else if (TREE_CODE (*tp) == SAVE_EXPR
	   || TREE_CODE (*tp) == TARGET_EXPR)
    remap_save_expr (tp, id->decl_map, walk_subtrees);
  else if (TREE_CODE (*tp) == LABEL_DECL
	   && (! DECL_CONTEXT (*tp)
	       || decl_function_context (*tp) == id->src_fn))
    /* These may need to be remapped for EH handling.  */
    *tp = remap_decl (*tp, id);
  else if (TREE_CODE (*tp) == BIND_EXPR)
    copy_bind_expr (tp, walk_subtrees, id);
  /* Types may need remapping as well.  */
  else if (TYPE_P (*tp))
    *tp = remap_type (*tp, id);

  /* If this is a constant, we have to copy the node iff the type will be
     remapped.  copy_tree_r will not copy a constant.  */
  else if (CONSTANT_CLASS_P (*tp))
    {
      tree new_type = remap_type (TREE_TYPE (*tp), id);

      if (new_type == TREE_TYPE (*tp))
	*walk_subtrees = 0;

      else if (TREE_CODE (*tp) == INTEGER_CST)
	*tp = build_int_cst_wide (new_type, TREE_INT_CST_LOW (*tp),
				  TREE_INT_CST_HIGH (*tp));
      else
	{
	  *tp = copy_node (*tp);
	  TREE_TYPE (*tp) = new_type;
	}
    }

  /* Otherwise, just copy the node.  Note that copy_tree_r already
     knows not to copy VAR_DECLs, etc., so this is safe.  */
  else
    {
      /* Here we handle trees that are not completely rewritten.
	 First we detect some inlining-induced bogosities for
	 discarding.  */
      if (TREE_CODE (*tp) == MODIFY_EXPR
	  && TREE_OPERAND (*tp, 0) == TREE_OPERAND (*tp, 1)
	  && (auto_var_in_fn_p (TREE_OPERAND (*tp, 0), fn)))
	{
	  /* Some assignments VAR = VAR; don't generate any rtl code
	     and thus don't count as variable modification.  Avoid
	     keeping bogosities like 0 = 0.  */
	  tree decl = TREE_OPERAND (*tp, 0), value;
	  tree *n;

	  n = (tree *) pointer_map_contains (id->decl_map, decl);
	  if (n)
	    {
	      value = *n;
	      STRIP_TYPE_NOPS (value);
	      if (TREE_CONSTANT (value) || TREE_READONLY (value))
		{
		  *tp = build_empty_stmt (EXPR_LOCATION (*tp));
		  return copy_tree_body_r (tp, walk_subtrees, data);
		}
	    }
	}
      else if (TREE_CODE (*tp) == INDIRECT_REF)
	{
	  /* Get rid of *& from inline substitutions that can happen when a
	     pointer argument is an ADDR_EXPR.  */
	  tree decl = TREE_OPERAND (*tp, 0);
	  tree *n;

	  n = (tree *) pointer_map_contains (id->decl_map, decl);
	  if (n)
	    {
	      tree new_tree;
	      tree old;
	      /* If we happen to get an ADDR_EXPR in n->value, strip
		 it manually here as we'll eventually get ADDR_EXPRs
		 which lie about their types pointed to.  In this case
		 build_fold_indirect_ref wouldn't strip the INDIRECT_REF,
		 but we absolutely rely on that.  As fold_indirect_ref
		 does other useful transformations, try that first, though.  */
	      tree type = TREE_TYPE (TREE_TYPE (*n));
	      if (id->do_not_unshare)
		new_tree = *n;
	      else
		new_tree = unshare_expr (*n);
	      old = *tp;
	      *tp = gimple_fold_indirect_ref (new_tree);
	      if (! *tp)
		{
		  if (TREE_CODE (new_tree) == ADDR_EXPR)
		    {
		      *tp = fold_indirect_ref_1 (EXPR_LOCATION (new_tree),
						 type, new_tree);
		      /* ???  We should either assert here or build
			 a VIEW_CONVERT_EXPR instead of blindly leaking
			 incompatible types to our IL.  */
		      if (! *tp)
			*tp = TREE_OPERAND (new_tree, 0);
		    }
		  else
		    {
		      *tp = build1 (INDIRECT_REF, type, new_tree);
		      TREE_THIS_VOLATILE (*tp) = TREE_THIS_VOLATILE (old);
		      TREE_SIDE_EFFECTS (*tp) = TREE_SIDE_EFFECTS (old);
		      TREE_READONLY (*tp) = TREE_READONLY (old);
		      TREE_THIS_NOTRAP (*tp) = TREE_THIS_NOTRAP (old);
		    }
		}
	      *walk_subtrees = 0;
	      return NULL;
	    }
	}
      else if (TREE_CODE (*tp) == MEM_REF)
	{
	  tree ptr = TREE_OPERAND (*tp, 0);
	  tree type = remap_type (TREE_TYPE (*tp), id);
	  tree old = *tp;

	  /* We need to re-canonicalize MEM_REFs from inline substitutions
	     that can happen when a pointer argument is an ADDR_EXPR.
	     Recurse here manually to allow that.  */
	  walk_tree (&ptr, copy_tree_body_r, data, NULL);
	  *tp = fold_build2 (MEM_REF, type,
			     ptr, TREE_OPERAND (*tp, 1));
	  TREE_THIS_NOTRAP (*tp) = TREE_THIS_NOTRAP (old);
	  TREE_THIS_VOLATILE (*tp) = TREE_THIS_VOLATILE (old);
	  TREE_SIDE_EFFECTS (*tp) = TREE_SIDE_EFFECTS (old);
	  TREE_NO_WARNING (*tp) = TREE_NO_WARNING (old);
	  *walk_subtrees = 0;
	  return NULL;
	}

      /* Here is the "usual case".  Copy this tree node, and then
	 tweak some special cases.  */
      copy_tree_r (tp, walk_subtrees, NULL);

      /* If EXPR has block defined, map it to newly constructed block.
	 When inlining we want EXPRs without block to appear in the block
	 of the function call if we are not remapping a type.  */
      if (EXPR_P (*tp))
	{
	  new_block = id->remapping_type_depth == 0 ? id->block : NULL;
	  if (TREE_BLOCK (*tp))
	    {
	      tree *n;
	      n = (tree *) pointer_map_contains (id->decl_map,
						 TREE_BLOCK (*tp));
	      if (n)
		new_block = *n;
	    }
	  TREE_SET_BLOCK (*tp, new_block);
	}

      if (TREE_CODE (*tp) != OMP_CLAUSE)
	TREE_TYPE (*tp) = remap_type (TREE_TYPE (*tp), id);

      /* The copied TARGET_EXPR has never been expanded, even if the
	 original node was expanded already.  */
      if (TREE_CODE (*tp) == TARGET_EXPR && TREE_OPERAND (*tp, 3))
	{
	  TREE_OPERAND (*tp, 1) = TREE_OPERAND (*tp, 3);
	  TREE_OPERAND (*tp, 3) = NULL_TREE;
	}

      /* Variable substitution need not be simple; in particular it may
	 involve the INDIRECT_REF substitution above.  Make sure that
	 TREE_CONSTANT and friends are up-to-date.  */
      else if (TREE_CODE (*tp) == ADDR_EXPR)
	{
	  int invariant = is_gimple_min_invariant (*tp);
	  walk_tree (&TREE_OPERAND (*tp, 0), copy_tree_body_r, id, NULL);

	  /* Handle the case where we substituted an INDIRECT_REF
	     into the operand of the ADDR_EXPR.  */
	  if (TREE_CODE (TREE_OPERAND (*tp, 0)) == INDIRECT_REF)
	    *tp = TREE_OPERAND (TREE_OPERAND (*tp, 0), 0);
	  else
	    recompute_tree_invariant_for_addr_expr (*tp);

	  /* If this used to be invariant, but is not any longer,
	     then regimplification is probably needed.  */
	  if (invariant && !is_gimple_min_invariant (*tp))
	    id->regimplify = true;

	  *walk_subtrees = 0;
	}
    }

  /* Keep iterating.  */
  return NULL_TREE;
}
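
/* Similarly, for the INDIRECT_REF case above (hypothetical names):
   once `p' is mapped to ADDR_EXPR <y>, the dereference `*p' would
   read `*&y'; gimple_fold_indirect_ref collapses that to a plain use
   of `y' instead of leaking the `*&' form into the IL.  */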
/* Helper for remap_gimple_stmt.  Given an EH region number for the
   source function, map that to the duplicate EH region number in
   the destination function.  */

static int
remap_eh_region_nr (int old_nr, copy_body_data *id)
{
  eh_region old_r, new_r;
  void **slot;

  old_r = get_eh_region_from_number_fn (id->src_cfun, old_nr);
  slot = pointer_map_contains (id->eh_map, old_r);
  new_r = (eh_region) *slot;

  return new_r->index;
}
/* Similar, but operate on INTEGER_CSTs.  */

static tree
remap_eh_region_tree_nr (tree old_t_nr, copy_body_data *id)
{
  int old_nr, new_nr;

  old_nr = tree_low_cst (old_t_nr, 0);
  new_nr = remap_eh_region_nr (old_nr, id);

  return build_int_cst (integer_type_node, new_nr);
}
/* Helper for copy_bb.  Remap statement STMT using the inlining
   information in ID.  Return the new statement copy.  */

static gimple
remap_gimple_stmt (gimple stmt, copy_body_data *id)
{
  gimple copy = NULL;
  struct walk_stmt_info wi;
  bool skip_first = false;

  /* Begin by recognizing trees that we'll completely rewrite for the
     inlining context.  Our output for these trees is completely
     different from our input (e.g. RETURN_EXPR is deleted and morphs
     into an edge).  Further down, we'll handle trees that get
     duplicated and/or tweaked.  */

  /* When requested, GIMPLE_RETURNs should be transformed to just the
     contained GIMPLE_ASSIGN.  The branch semantics of the return will
     be handled elsewhere by manipulating the CFG rather than the
     statement.  */
  if (gimple_code (stmt) == GIMPLE_RETURN && id->transform_return_to_modify)
    {
      tree retval = gimple_return_retval (stmt);

      /* If we're returning something, just turn that into an
	 assignment into the equivalent of the original RESULT_DECL.
	 If RETVAL is just the result decl, the result decl has
	 already been set (e.g. a recent "foo (&result_decl, ...)");
	 just toss the entire GIMPLE_RETURN.  */
      if (retval
	  && (TREE_CODE (retval) != RESULT_DECL
	      && (TREE_CODE (retval) != SSA_NAME
		  || ! SSA_NAME_VAR (retval)
		  || TREE_CODE (SSA_NAME_VAR (retval)) != RESULT_DECL)))
	{
	  copy = gimple_build_assign (id->retvar, retval);
	  /* id->retvar is already substituted.  Skip it on later remapping.  */
	  skip_first = true;
	}
      else
	return gimple_build_nop ();
    }
  else if (gimple_has_substatements (stmt))
    {
      gimple_seq s1, s2;

      /* When cloning bodies from the C++ front end, we will be handed bodies
	 in High GIMPLE form.  Handle here all the High GIMPLE statements that
	 have embedded statements.  */
      switch (gimple_code (stmt))
	{
	case GIMPLE_BIND:
	  copy = copy_gimple_bind (stmt, id);
	  break;

	case GIMPLE_CATCH:
	  s1 = remap_gimple_seq (gimple_catch_handler (stmt), id);
	  copy = gimple_build_catch (gimple_catch_types (stmt), s1);
	  break;

	case GIMPLE_EH_FILTER:
	  s1 = remap_gimple_seq (gimple_eh_filter_failure (stmt), id);
	  copy = gimple_build_eh_filter (gimple_eh_filter_types (stmt), s1);
	  break;

	case GIMPLE_TRY:
	  s1 = remap_gimple_seq (gimple_try_eval (stmt), id);
	  s2 = remap_gimple_seq (gimple_try_cleanup (stmt), id);
	  copy = gimple_build_try (s1, s2, gimple_try_kind (stmt));
	  break;

	case GIMPLE_WITH_CLEANUP_EXPR:
	  s1 = remap_gimple_seq (gimple_wce_cleanup (stmt), id);
	  copy = gimple_build_wce (s1);
	  break;

	case GIMPLE_OMP_PARALLEL:
	  s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
	  copy = gimple_build_omp_parallel
		   (s1,
		    gimple_omp_parallel_clauses (stmt),
		    gimple_omp_parallel_child_fn (stmt),
		    gimple_omp_parallel_data_arg (stmt));
	  break;

	case GIMPLE_OMP_TASK:
	  s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
	  copy = gimple_build_omp_task
		   (s1,
		    gimple_omp_task_clauses (stmt),
		    gimple_omp_task_child_fn (stmt),
		    gimple_omp_task_data_arg (stmt),
		    gimple_omp_task_copy_fn (stmt),
		    gimple_omp_task_arg_size (stmt),
		    gimple_omp_task_arg_align (stmt));
	  break;

	case GIMPLE_OMP_FOR:
	  s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
	  s2 = remap_gimple_seq (gimple_omp_for_pre_body (stmt), id);
	  copy = gimple_build_omp_for (s1, gimple_omp_for_clauses (stmt),
				       gimple_omp_for_collapse (stmt), s2);
	  {
	    size_t i;
	    for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
	      {
		gimple_omp_for_set_index (copy, i,
					  gimple_omp_for_index (stmt, i));
		gimple_omp_for_set_initial (copy, i,
					    gimple_omp_for_initial (stmt, i));
		gimple_omp_for_set_final (copy, i,
					  gimple_omp_for_final (stmt, i));
		gimple_omp_for_set_incr (copy, i,
					 gimple_omp_for_incr (stmt, i));
		gimple_omp_for_set_cond (copy, i,
					 gimple_omp_for_cond (stmt, i));
	      }
	  }
	  break;

	case GIMPLE_OMP_MASTER:
	  s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
	  copy = gimple_build_omp_master (s1);
	  break;

	case GIMPLE_OMP_ORDERED:
	  s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
	  copy = gimple_build_omp_ordered (s1);
	  break;

	case GIMPLE_OMP_SECTION:
	  s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
	  copy = gimple_build_omp_section (s1);
	  break;

	case GIMPLE_OMP_SECTIONS:
	  s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
	  copy = gimple_build_omp_sections
		   (s1, gimple_omp_sections_clauses (stmt));
	  break;

	case GIMPLE_OMP_SINGLE:
	  s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
	  copy = gimple_build_omp_single
		   (s1, gimple_omp_single_clauses (stmt));
	  break;

	case GIMPLE_OMP_CRITICAL:
	  s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
	  copy
	    = gimple_build_omp_critical (s1, gimple_omp_critical_name (stmt));
	  break;

	case GIMPLE_TRANSACTION:
	  s1 = remap_gimple_seq (gimple_transaction_body (stmt), id);
	  copy = gimple_build_transaction (s1, gimple_transaction_label (stmt));
	  gimple_transaction_set_subcode (copy, gimple_transaction_subcode (stmt));
	  break;

	default:
	  gcc_unreachable ();
	}
    }
  else
    {
      if (gimple_assign_copy_p (stmt)
	  && gimple_assign_lhs (stmt) == gimple_assign_rhs1 (stmt)
	  && auto_var_in_fn_p (gimple_assign_lhs (stmt), id->src_fn))
	{
	  /* Here we handle statements that are not completely rewritten.
	     First we detect some inlining-induced bogosities for
	     discarding.  */

	  /* Some assignments VAR = VAR; don't generate any rtl code
	     and thus don't count as variable modification.  Avoid
	     keeping bogosities like 0 = 0.  */
	  tree decl = gimple_assign_lhs (stmt), value;
	  tree *n;

	  n = (tree *) pointer_map_contains (id->decl_map, decl);
	  if (n)
	    {
	      value = *n;
	      STRIP_TYPE_NOPS (value);
	      if (TREE_CONSTANT (value) || TREE_READONLY (value))
		return gimple_build_nop ();
	    }
	}

      if (gimple_debug_bind_p (stmt))
	{
	  copy = gimple_build_debug_bind (gimple_debug_bind_get_var (stmt),
					  gimple_debug_bind_get_value (stmt),
					  stmt);
	  id->debug_stmts.safe_push (copy);
	  return copy;
	}
      if (gimple_debug_source_bind_p (stmt))
	{
	  copy = gimple_build_debug_source_bind
		   (gimple_debug_source_bind_get_var (stmt),
		    gimple_debug_source_bind_get_value (stmt), stmt);
	  id->debug_stmts.safe_push (copy);
	  return copy;
	}

      /* Create a new deep copy of the statement.  */
      copy = gimple_copy (stmt);

      /* Remap the region numbers for __builtin_eh_{pointer,filter},
	 RESX and EH_DISPATCH.  */
      if (id->eh_map)
	switch (gimple_code (copy))
	  {
	  case GIMPLE_CALL:
	    {
	      tree r, fndecl = gimple_call_fndecl (copy);
	      if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
		switch (DECL_FUNCTION_CODE (fndecl))
		  {
		  case BUILT_IN_EH_COPY_VALUES:
		    r = gimple_call_arg (copy, 1);
		    r = remap_eh_region_tree_nr (r, id);
		    gimple_call_set_arg (copy, 1, r);
		    /* FALLTHRU */

		  case BUILT_IN_EH_POINTER:
		  case BUILT_IN_EH_FILTER:
		    r = gimple_call_arg (copy, 0);
		    r = remap_eh_region_tree_nr (r, id);
		    gimple_call_set_arg (copy, 0, r);
		    break;

		  default:
		    break;
		  }

	      /* Reset alias info if we didn't apply measures to
		 keep it valid over inlining by setting DECL_PT_UID.  */
	      if (!id->src_cfun->gimple_df
		  || !id->src_cfun->gimple_df->ipa_pta)
		gimple_call_reset_alias_info (copy);
	    }
	    break;

	  case GIMPLE_RESX:
	    {
	      int r = gimple_resx_region (copy);
	      r = remap_eh_region_nr (r, id);
	      gimple_resx_set_region (copy, r);
	    }
	    break;

	  case GIMPLE_EH_DISPATCH:
	    {
	      int r = gimple_eh_dispatch_region (copy);
	      r = remap_eh_region_nr (r, id);
	      gimple_eh_dispatch_set_region (copy, r);
	    }
	    break;

	  default:
	    break;
	  }
    }

  /* If STMT has a block defined, map it to the newly constructed
     block.  */
  if (gimple_block (copy))
    {
      tree *n;
      n = (tree *) pointer_map_contains (id->decl_map, gimple_block (copy));
      gcc_assert (n);
      gimple_set_block (copy, *n);
    }

  if (gimple_debug_bind_p (copy) || gimple_debug_source_bind_p (copy))
    return copy;

  /* Remap all the operands in COPY.  */
  memset (&wi, 0, sizeof (wi));
  wi.info = id;
  if (skip_first)
    walk_tree (gimple_op_ptr (copy, 1), remap_gimple_op_r, &wi, NULL);
  else
    walk_gimple_op (copy, remap_gimple_op_r, &wi);

  /* Clear the copied virtual operands.  We are not remapping them here
     but are going to recreate them from scratch.  */
  if (gimple_has_mem_ops (copy))
    {
      gimple_set_vdef (copy, NULL_TREE);
      gimple_set_vuse (copy, NULL_TREE);
    }

  return copy;
}
/* Copy basic block, scale profile accordingly.  Edges will be taken
   care of later.  */

static basic_block
copy_bb (copy_body_data *id, basic_block bb, int frequency_scale,
	 gcov_type count_scale)
{
  gimple_stmt_iterator gsi, copy_gsi, seq_gsi;
  basic_block copy_basic_block;
  tree decl;
  gcov_type freq;
  basic_block prev;

  /* Search for previous copied basic block.  */
  prev = bb->prev_bb;
  while (!prev->aux)
    prev = prev->prev_bb;

  /* create_basic_block() will append every new block to
     basic_block_info automatically.  */
  copy_basic_block = create_basic_block (NULL, (void *) 0,
					 (basic_block) prev->aux);
  /* Update to use apply_probability().  */
  copy_basic_block->count = bb->count * count_scale / REG_BR_PROB_BASE;

  /* We are going to rebuild frequencies from scratch.  These values
     have just small importance to drive canonicalize_loop_headers.  */
  /* Update to use EDGE_FREQUENCY.  */
  freq = ((gcov_type)bb->frequency * frequency_scale / REG_BR_PROB_BASE);

  /* We recompute frequencies after inlining, so this is quite safe.  */
  if (freq > BB_FREQ_MAX)
    freq = BB_FREQ_MAX;
  copy_basic_block->frequency = freq;

  copy_gsi = gsi_start_bb (copy_basic_block);

  for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      gimple stmt = gsi_stmt (gsi);
      gimple orig_stmt = stmt;

      id->regimplify = false;
      stmt = remap_gimple_stmt (stmt, id);
      if (gimple_nop_p (stmt))
	continue;

      gimple_duplicate_stmt_histograms (cfun, stmt, id->src_cfun, orig_stmt);
      seq_gsi = copy_gsi;

      /* With return slot optimization we can end up with
	 non-gimple (foo *)&this->m, fix that here.  */
      if (is_gimple_assign (stmt)
	  && gimple_assign_rhs_code (stmt) == NOP_EXPR
	  && !is_gimple_val (gimple_assign_rhs1 (stmt)))
	{
	  tree new_rhs;
	  new_rhs = force_gimple_operand_gsi (&seq_gsi,
					      gimple_assign_rhs1 (stmt),
					      true, NULL, false,
					      GSI_CONTINUE_LINKING);
	  gimple_assign_set_rhs1 (stmt, new_rhs);
	  id->regimplify = false;
	}

      gsi_insert_after (&seq_gsi, stmt, GSI_NEW_STMT);

      if (id->regimplify)
	gimple_regimplify_operands (stmt, &seq_gsi);

      /* If copy_basic_block was empty at the start of this iteration,
	 call gsi_start_bb again to get at the newly added statements.  */
      if (gsi_end_p (copy_gsi))
	copy_gsi = gsi_start_bb (copy_basic_block);
      else
	gsi_next (&copy_gsi);

      /* Process the new statement.  The call to gimple_regimplify_operands
	 possibly turned the statement into multiple statements, so we
	 need to process all of them.  */
      do
	{
	  tree fn;

	  stmt = gsi_stmt (copy_gsi);
	  if (is_gimple_call (stmt)
	      && gimple_call_va_arg_pack_p (stmt)
	      && id->gimple_call)
	    {
	      /* __builtin_va_arg_pack () should be replaced by
		 all arguments corresponding to ... in the caller.  */
	      tree p;
	      gimple new_call;
	      vec<tree> argarray;
	      size_t nargs = gimple_call_num_args (id->gimple_call);
	      size_t n;

	      for (p = DECL_ARGUMENTS (id->src_fn); p; p = DECL_CHAIN (p))
		nargs--;

	      /* Create the new array of arguments.  */
	      n = nargs + gimple_call_num_args (stmt);
	      argarray.create (n);
	      argarray.safe_grow_cleared (n);

	      /* Copy all the arguments before '...'  */
	      memcpy (argarray.address (),
		      gimple_call_arg_ptr (stmt, 0),
		      gimple_call_num_args (stmt) * sizeof (tree));

	      /* Append the arguments passed in '...'  */
	      memcpy (argarray.address () + gimple_call_num_args (stmt),
		      gimple_call_arg_ptr (id->gimple_call, 0)
		      + (gimple_call_num_args (id->gimple_call) - nargs),
		      nargs * sizeof (tree));

	      new_call = gimple_build_call_vec (gimple_call_fn (stmt),
						argarray);

	      argarray.release ();

	      /* Copy all GIMPLE_CALL flags, location and block, except
		 GF_CALL_VA_ARG_PACK.  */
	      gimple_call_copy_flags (new_call, stmt);
	      gimple_call_set_va_arg_pack (new_call, false);
	      gimple_set_location (new_call, gimple_location (stmt));
	      gimple_set_block (new_call, gimple_block (stmt));
	      gimple_call_set_lhs (new_call, gimple_call_lhs (stmt));

	      gsi_replace (&copy_gsi, new_call, false);
	      stmt = new_call;
	    }
	  else if (is_gimple_call (stmt)
		   && id->gimple_call
		   && (decl = gimple_call_fndecl (stmt))
		   && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
		   && DECL_FUNCTION_CODE (decl) == BUILT_IN_VA_ARG_PACK_LEN)
	    {
	      /* __builtin_va_arg_pack_len () should be replaced by
		 the number of anonymous arguments.  */
	      size_t nargs = gimple_call_num_args (id->gimple_call);
	      tree count, p;
	      gimple new_stmt;

	      for (p = DECL_ARGUMENTS (id->src_fn); p; p = DECL_CHAIN (p))
		nargs--;

	      count = build_int_cst (integer_type_node, nargs);
	      new_stmt = gimple_build_assign (gimple_call_lhs (stmt), count);
	      gsi_replace (&copy_gsi, new_stmt, false);
	      stmt = new_stmt;
	    }

	  /* Statements produced by inlining can be unfolded, especially
	     when we constant propagated some operands.  We can't fold
	     them right now for two reasons:
	     1) folding requires SSA_NAME_DEF_STMTs to be correct
	     2) we can't change function calls to builtins.
	     So we just mark statements for later folding.  We mark
	     all new statements, instead of just the statements that changed
	     by some nontrivial substitution, so that even statements made
	     foldable indirectly are updated.  If this turns out to be
	     expensive, copy_body can be told to watch for nontrivial
	     changes.  */
	  if (id->statements_to_fold)
	    pointer_set_insert (id->statements_to_fold, stmt);

	  /* We're duplicating a CALL_EXPR.  Find any corresponding
	     callgraph edges and update or duplicate them.  */
	  if (is_gimple_call (stmt))
	    {
	      struct cgraph_edge *edge;
	      int flags;

	      switch (id->transform_call_graph_edges)
		{
		case CB_CGE_DUPLICATE:
		  edge = cgraph_edge (id->src_node, orig_stmt);
		  if (edge)
		    {
		      int edge_freq = edge->frequency;
		      edge = cgraph_clone_edge (edge, id->dst_node, stmt,
						gimple_uid (stmt),
						REG_BR_PROB_BASE, CGRAPH_FREQ_BASE,
						true);
		      /* We could also just rescale the frequency, but
			 doing so would introduce roundoff errors and make
			 verifier unhappy.  */
		      edge->frequency
			= compute_call_stmt_bb_frequency (id->dst_node->symbol.decl,
							  copy_basic_block);
		      if (dump_file
			  && profile_status_for_function (cfun) != PROFILE_ABSENT
			  && (edge_freq > edge->frequency + 10
			      || edge_freq < edge->frequency - 10))
			{
			  fprintf (dump_file, "Edge frequency estimated by "
				   "cgraph %i diverge from inliner's estimate %i\n",
				   edge_freq,
				   edge->frequency);
			  fprintf (dump_file,
				   "Orig bb: %i, orig bb freq %i, new bb freq %i\n",
				   bb->index,
				   bb->frequency,
				   copy_basic_block->frequency);
			}
		      stmt = cgraph_redirect_edge_call_stmt_to_callee (edge);
		    }
		  break;

		case CB_CGE_MOVE_CLONES:
		  cgraph_set_call_stmt_including_clones (id->dst_node,
							 orig_stmt, stmt);
		  edge = cgraph_edge (id->dst_node, stmt);
		  break;

		case CB_CGE_MOVE:
		  edge = cgraph_edge (id->dst_node, orig_stmt);
		  if (edge)
		    cgraph_set_call_stmt (edge, stmt);
		  break;

		default:
		  gcc_unreachable ();
		}

	      /* Constant propagation on arguments done during inlining
		 may create a new direct call.  Produce an edge for it.  */
	      if ((!edge
		   || (edge->indirect_inlining_edge
		       && id->transform_call_graph_edges == CB_CGE_MOVE_CLONES))
		  && id->dst_node->analyzed
		  && (fn = gimple_call_fndecl (stmt)) != NULL)
		{
		  struct cgraph_node *dest = cgraph_get_node (fn);

		  /* We have a missing edge in the callgraph.  This can happen
		     when previous inlining turned an indirect call into a
		     direct call by constant propagating arguments or we are
		     producing a dead clone (for further cloning).  In all
		     other cases we hit a bug (incorrect node sharing is the
		     most common reason for missing edges).  */
		  gcc_assert (!dest->analyzed
			      || dest->symbol.address_taken
			      || !id->src_node->analyzed
			      || !id->dst_node->analyzed);
		  if (id->transform_call_graph_edges == CB_CGE_MOVE_CLONES)
		    cgraph_create_edge_including_clones
		      (id->dst_node, dest, orig_stmt, stmt, bb->count,
		       compute_call_stmt_bb_frequency (id->dst_node->symbol.decl,
						       copy_basic_block),
		       CIF_ORIGINALLY_INDIRECT_CALL);
		  else
		    cgraph_create_edge (id->dst_node, dest, stmt,
					bb->count,
					compute_call_stmt_bb_frequency
					  (id->dst_node->symbol.decl,
					   copy_basic_block))->inline_failed
		      = CIF_ORIGINALLY_INDIRECT_CALL;
		  if (dump_file)
		    {
		      fprintf (dump_file, "Created new direct edge to %s\n",
			       cgraph_node_name (dest));
		    }
		}

	      flags = gimple_call_flags (stmt);
	      if (flags & ECF_MAY_BE_ALLOCA)
		cfun->calls_alloca = true;
	      if (flags & ECF_RETURNS_TWICE)
		cfun->calls_setjmp = true;
	    }

	  maybe_duplicate_eh_stmt_fn (cfun, stmt, id->src_cfun, orig_stmt,
				      id->eh_map, id->eh_lp_nr);

	  if (gimple_in_ssa_p (cfun) && !is_gimple_debug (stmt))
	    {
	      ssa_op_iter i;
	      tree def;

	      FOR_EACH_SSA_TREE_OPERAND (def, stmt, i, SSA_OP_DEF)
		if (TREE_CODE (def) == SSA_NAME)
		  SSA_NAME_DEF_STMT (def) = stmt;
	    }

	  gsi_next (&copy_gsi);
	}
      while (!gsi_end_p (copy_gsi));

      copy_gsi = gsi_last_bb (copy_basic_block);
    }

  return copy_basic_block;
}
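
/* A sketch of the __builtin_va_arg_pack () handling above
   (hypothetical example): if `int f (int x, ...)' contains the call
   `g (x, __builtin_va_arg_pack ())' and `f (1, 2, 3)' is inlined,
   the copied call becomes `g (x, 2, 3)', the pack being replaced by
   the caller's anonymous arguments.  */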
/* Inserting a Single Entry Multiple Exit region in SSA form into code in SSA
   form is quite easy, since the dominator relationship for old basic blocks
   does not change.

   There is, however, an exception: inlining might change the dominator
   relation across EH edges from basic blocks within inlined functions to
   landing pads in the function we inline into.

   The function fills in PHI_RESULTs of such PHI nodes if they refer
   to gimple regs.  Otherwise, the function marks the PHI_RESULT of such
   PHI nodes for renaming.  For non-gimple regs, renaming is safe: the
   EH edges are abnormal and SSA_NAME_OCCURS_IN_ABNORMAL_PHI must be
   set, and this means that there will be no overlapping live ranges
   for the underlying symbol.

   This might change in the future if we allow redirecting of EH edges,
   and then we might want to change the way we build the CFG pre-inlining
   to include all the possible edges.  */
static void
update_ssa_across_abnormal_edges (basic_block bb, basic_block ret_bb,
				  bool can_throw, bool nonlocal_goto)
{
  edge e;
  edge_iterator ei;

  FOR_EACH_EDGE (e, ei, bb->succs)
    if (!e->dest->aux
	|| ((basic_block)e->dest->aux)->index == ENTRY_BLOCK)
      {
	gimple phi;
	gimple_stmt_iterator si;

	if (!nonlocal_goto)
	  gcc_assert (e->flags & EDGE_EH);

	if (!can_throw)
	  gcc_assert (!(e->flags & EDGE_EH));

	for (si = gsi_start_phis (e->dest); !gsi_end_p (si); gsi_next (&si))
	  {
	    edge re;

	    phi = gsi_stmt (si);

	    /* There shouldn't be any PHI nodes in the ENTRY_BLOCK.  */
	    gcc_assert (!e->dest->aux);

	    gcc_assert ((e->flags & EDGE_EH)
			|| SSA_NAME_OCCURS_IN_ABNORMAL_PHI (PHI_RESULT (phi)));

	    if (virtual_operand_p (PHI_RESULT (phi)))
	      {
		mark_virtual_operands_for_renaming (cfun);
		continue;
	      }

	    re = find_edge (ret_bb, e->dest);
	    gcc_assert (re);
	    gcc_assert ((re->flags & (EDGE_EH | EDGE_ABNORMAL))
			== (e->flags & (EDGE_EH | EDGE_ABNORMAL)));

	    SET_USE (PHI_ARG_DEF_PTR_FROM_EDGE (phi, e),
		     USE_FROM_PTR (PHI_ARG_DEF_PTR_FROM_EDGE (phi, re)));
	  }
      }
}
1863 /* Copy edges from BB into its copy constructed earlier, scale profile
1864 accordingly. Edges will be taken care of later. Assume aux
1865 pointers point to the copies of each BB. Return true if any
1866 debug stmts are left after a statement that must end the basic block. */
1868 static bool
1869 copy_edges_for_bb (basic_block bb, gcov_type count_scale, basic_block ret_bb,
1870 bool can_make_abnormal_goto)
1872 basic_block new_bb = (basic_block) bb->aux;
1873 edge_iterator ei;
1874 edge old_edge;
1875 gimple_stmt_iterator si;
1876 int flags;
1877 bool need_debug_cleanup = false;
1879 /* Use the indices from the original blocks to create edges for the
1880 new ones. */
1881 FOR_EACH_EDGE (old_edge, ei, bb->succs)
1882 if (!(old_edge->flags & EDGE_EH))
1884 edge new_edge;
1886 flags = old_edge->flags;
1888 /* Return edges do get a FALLTHRU flag when they get inlined. */
1889 if (old_edge->dest->index == EXIT_BLOCK && !old_edge->flags
1890 && old_edge->dest->aux != EXIT_BLOCK_PTR)
1891 flags |= EDGE_FALLTHRU;
1892 new_edge = make_edge (new_bb, (basic_block) old_edge->dest->aux, flags);
1893 /* Update to use apply_probability(). */
1894 new_edge->count = old_edge->count * count_scale / REG_BR_PROB_BASE;
1895 new_edge->probability = old_edge->probability;
1898 if (bb->index == ENTRY_BLOCK || bb->index == EXIT_BLOCK)
1899 return false;
1901 for (si = gsi_start_bb (new_bb); !gsi_end_p (si);)
1903 gimple copy_stmt;
1904 bool can_throw, nonlocal_goto;
1906 copy_stmt = gsi_stmt (si);
1907 if (!is_gimple_debug (copy_stmt))
1908 update_stmt (copy_stmt);
1910 /* Do this before the possible split_block. */
1911 gsi_next (&si);
1913 /* If this tree could throw an exception, there are two
1914 cases where we need to add abnormal edge(s): the
1915 tree wasn't in a region and there is a "current
1916 region" in the caller; or the original tree had
1917 EH edges. In both cases split the block after the tree,
1918 and add abnormal edge(s) as needed; we need both
1919 those from the callee and the caller.
1920 We check whether the copy can throw, because the const
1921 propagation can change an INDIRECT_REF which throws
1922 into a COMPONENT_REF which doesn't. If the copy
1923 can throw, the original could also throw. */
1924 can_throw = stmt_can_throw_internal (copy_stmt);
1925 /* If the call we inline cannot make an abnormal goto, do not add
1926 additional abnormal edges but only retain those already present
1927 in the original function body. */
1928 nonlocal_goto
1929 = can_make_abnormal_goto && stmt_can_make_abnormal_goto (copy_stmt);
1931 if (can_throw || nonlocal_goto)
1933 if (!gsi_end_p (si))
1935 while (!gsi_end_p (si) && is_gimple_debug (gsi_stmt (si)))
1936 gsi_next (&si);
1937 if (gsi_end_p (si))
1938 need_debug_cleanup = true;
1940 if (!gsi_end_p (si))
1941 /* Note that bb's predecessor edges aren't necessarily
1942 right at this point; split_block doesn't care. */
1944 edge e = split_block (new_bb, copy_stmt);
1946 new_bb = e->dest;
1947 new_bb->aux = e->src->aux;
1948 si = gsi_start_bb (new_bb);
1952 if (gimple_code (copy_stmt) == GIMPLE_EH_DISPATCH)
1953 make_eh_dispatch_edges (copy_stmt);
1954 else if (can_throw)
1955 make_eh_edges (copy_stmt);
1957 if (nonlocal_goto)
1958 make_abnormal_goto_edges (gimple_bb (copy_stmt), true);
1960 if ((can_throw || nonlocal_goto)
1961 && gimple_in_ssa_p (cfun))
1962 update_ssa_across_abnormal_edges (gimple_bb (copy_stmt), ret_bb,
1963 can_throw, nonlocal_goto);
1965 return need_debug_cleanup;
1968 /* Copy the PHIs. All blocks and edges are copied, some blocks
1969 were possibly split and new outgoing EH edges inserted.
1970 BB points to the block of the original function and AUX pointers
1971 link the original and newly copied blocks. */
1973 static void
1974 copy_phis_for_bb (basic_block bb, copy_body_data *id)
1976 basic_block const new_bb = (basic_block) bb->aux;
1977 edge_iterator ei;
1978 gimple phi;
1979 gimple_stmt_iterator si;
1980 edge new_edge;
1981 bool inserted = false;
1983 for (si = gsi_start_phis (bb); !gsi_end_p (si); gsi_next (&si))
1985 tree res, new_res;
1986 gimple new_phi;
1988 phi = gsi_stmt (si);
1989 res = PHI_RESULT (phi);
1990 new_res = res;
1991 if (!virtual_operand_p (res))
1993 walk_tree (&new_res, copy_tree_body_r, id, NULL);
1994 new_phi = create_phi_node (new_res, new_bb);
1995 FOR_EACH_EDGE (new_edge, ei, new_bb->preds)
1997 edge old_edge = find_edge ((basic_block) new_edge->src->aux, bb);
1998 tree arg;
1999 tree new_arg;
2000 edge_iterator ei2;
2001 location_t locus;
2003 /* When doing partial cloning, we allow PHIs on the entry block
2004 as long as all the arguments are the same. Find any input
2005 edge to see which argument to copy. */
2006 if (!old_edge)
2007 FOR_EACH_EDGE (old_edge, ei2, bb->preds)
2008 if (!old_edge->src->aux)
2009 break;
2011 arg = PHI_ARG_DEF_FROM_EDGE (phi, old_edge);
2012 new_arg = arg;
2013 walk_tree (&new_arg, copy_tree_body_r, id, NULL);
2014 gcc_assert (new_arg);
2015 /* With return slot optimization we can end up with
2016 non-gimple (foo *)&this->m, fix that here. */
2017 if (TREE_CODE (new_arg) != SSA_NAME
2018 && TREE_CODE (new_arg) != FUNCTION_DECL
2019 && !is_gimple_val (new_arg))
2021 gimple_seq stmts = NULL;
2022 new_arg = force_gimple_operand (new_arg, &stmts, true, NULL);
2023 gsi_insert_seq_on_edge (new_edge, stmts);
2024 inserted = true;
2026 locus = gimple_phi_arg_location_from_edge (phi, old_edge);
2027 if (LOCATION_BLOCK (locus))
2029 tree *n;
2030 n = (tree *) pointer_map_contains (id->decl_map,
2031 LOCATION_BLOCK (locus));
2032 gcc_assert (n);
2033 locus = COMBINE_LOCATION_DATA (line_table, locus, *n);
2035 else
2036 locus = LOCATION_LOCUS (locus);
2038 add_phi_arg (new_phi, new_arg, new_edge, locus);
2043 /* Commit the delayed edge insertions. */
2044 if (inserted)
2045 FOR_EACH_EDGE (new_edge, ei, new_bb->preds)
2046 gsi_commit_one_edge_insert (new_edge, NULL);
2050 /* Wrapper for remap_decl so it can be used as a callback. */
2052 static tree
2053 remap_decl_1 (tree decl, void *data)
2055 return remap_decl (decl, (copy_body_data *) data);
2058 /* Build a struct function and associated datastructures for the new clone
2059 NEW_FNDECL to be built. CALLEE_FNDECL is the original. The function
2060 changes cfun to the function of new_fndecl (and current_function_decl too). */
2062 static void
2063 initialize_cfun (tree new_fndecl, tree callee_fndecl, gcov_type count)
2065 struct function *src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
2066 gcov_type count_scale;
2068 /* Update to use GCOV_COMPUTE_SCALE. */
2069 if (ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count)
2070 count_scale = (REG_BR_PROB_BASE * count
2071 / ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count);
2072 else
2073 count_scale = REG_BR_PROB_BASE;
2075 /* Register specific tree functions. */
2076 gimple_register_cfg_hooks ();
2078 /* Get clean struct function. */
2079 push_struct_function (new_fndecl);
2081 /* We will rebuild these, so just sanity check that they are empty. */
2082 gcc_assert (VALUE_HISTOGRAMS (cfun) == NULL);
2083 gcc_assert (cfun->local_decls == NULL);
2084 gcc_assert (cfun->cfg == NULL);
2085 gcc_assert (cfun->decl == new_fndecl);
2087 /* Copy items we preserve during cloning. */
2088 cfun->static_chain_decl = src_cfun->static_chain_decl;
2089 cfun->nonlocal_goto_save_area = src_cfun->nonlocal_goto_save_area;
2090 cfun->function_end_locus = src_cfun->function_end_locus;
2091 cfun->curr_properties = src_cfun->curr_properties & ~PROP_loops;
2092 cfun->last_verified = src_cfun->last_verified;
2093 cfun->va_list_gpr_size = src_cfun->va_list_gpr_size;
2094 cfun->va_list_fpr_size = src_cfun->va_list_fpr_size;
2095 cfun->has_nonlocal_label = src_cfun->has_nonlocal_label;
2096 cfun->stdarg = src_cfun->stdarg;
2097 cfun->after_inlining = src_cfun->after_inlining;
2098 cfun->can_throw_non_call_exceptions
2099 = src_cfun->can_throw_non_call_exceptions;
2100 cfun->can_delete_dead_exceptions = src_cfun->can_delete_dead_exceptions;
2101 cfun->returns_struct = src_cfun->returns_struct;
2102 cfun->returns_pcc_struct = src_cfun->returns_pcc_struct;
2104 init_empty_tree_cfg ();
2106 profile_status_for_function (cfun) = profile_status_for_function (src_cfun);
2107 ENTRY_BLOCK_PTR->count =
2108 (ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count * count_scale /
2109 REG_BR_PROB_BASE);
2110 ENTRY_BLOCK_PTR->frequency
2111 = ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->frequency;
2112 EXIT_BLOCK_PTR->count =
2113 (EXIT_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count * count_scale /
2114 REG_BR_PROB_BASE);
2115 EXIT_BLOCK_PTR->frequency =
2116 EXIT_BLOCK_PTR_FOR_FUNCTION (src_cfun)->frequency;
2117 if (src_cfun->eh)
2118 init_eh_for_function ();
2120 if (src_cfun->gimple_df)
2122 init_tree_ssa (cfun);
2123 cfun->gimple_df->in_ssa_p = true;
2124 init_ssa_operands (cfun);
2128 /* Helper function for copy_cfg_body. Move debug stmts from the end
2129 of NEW_BB to the beginning of successor basic blocks when needed. If the
2130 successor has multiple predecessors, reset the values of the moved
2131 debug stmts, otherwise keep them. */
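/* A sketch of the situation this handles (hypothetical GIMPLE):
x_1 = may_throw (); followed by # DEBUG y => x_1. After the copied
block is split at the throwing statement, the debug bind would trail
the statement that must end the block, so it is moved (for the last
successor edge) or copied (for the others) to the start of each
successor; when a successor has several predecessors the bound value
is reset, since it need not hold on every incoming path. */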
2133 static void
2134 maybe_move_debug_stmts_to_successors (copy_body_data *id, basic_block new_bb)
2136 edge e;
2137 edge_iterator ei;
2138 gimple_stmt_iterator si = gsi_last_nondebug_bb (new_bb);
2140 if (gsi_end_p (si)
2141 || gsi_one_before_end_p (si)
2142 || !(stmt_can_throw_internal (gsi_stmt (si))
2143 || stmt_can_make_abnormal_goto (gsi_stmt (si))))
2144 return;
2146 FOR_EACH_EDGE (e, ei, new_bb->succs)
2148 gimple_stmt_iterator ssi = gsi_last_bb (new_bb);
2149 gimple_stmt_iterator dsi = gsi_after_labels (e->dest);
2150 while (is_gimple_debug (gsi_stmt (ssi)))
2152 gimple stmt = gsi_stmt (ssi), new_stmt;
2153 tree var;
2154 tree value;
2156 /* For the last edge move the debug stmts instead of copying
2157 them. */
2158 if (ei_one_before_end_p (ei))
2160 si = ssi;
2161 gsi_prev (&ssi);
2162 if (!single_pred_p (e->dest) && gimple_debug_bind_p (stmt))
2163 gimple_debug_bind_reset_value (stmt);
2164 gsi_remove (&si, false);
2165 gsi_insert_before (&dsi, stmt, GSI_SAME_STMT);
2166 continue;
2169 if (gimple_debug_bind_p (stmt))
2171 var = gimple_debug_bind_get_var (stmt);
2172 if (single_pred_p (e->dest))
2174 value = gimple_debug_bind_get_value (stmt);
2175 value = unshare_expr (value);
2177 else
2178 value = NULL_TREE;
2179 new_stmt = gimple_build_debug_bind (var, value, stmt);
2181 else if (gimple_debug_source_bind_p (stmt))
2183 var = gimple_debug_source_bind_get_var (stmt);
2184 value = gimple_debug_source_bind_get_value (stmt);
2185 new_stmt = gimple_build_debug_source_bind (var, value, stmt);
2187 else
2188 gcc_unreachable ();
2189 gsi_insert_before (&dsi, new_stmt, GSI_SAME_STMT);
2190 id->debug_stmts.safe_push (new_stmt);
2191 gsi_prev (&ssi);
2196 /* Make a copy of the body of FN so that it can be inserted inline in
2197 another function. Walks FN via CFG, returns new fndecl. */
2199 static tree
2200 copy_cfg_body (copy_body_data * id, gcov_type count, int frequency_scale,
2201 basic_block entry_block_map, basic_block exit_block_map,
2202 bitmap blocks_to_copy, basic_block new_entry)
2204 tree callee_fndecl = id->src_fn;
2205 /* Original cfun for the callee, doesn't change. */
2206 struct function *src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
2207 struct function *cfun_to_copy;
2208 basic_block bb;
2209 tree new_fndecl = NULL;
2210 bool need_debug_cleanup = false;
2211 gcov_type count_scale;
2212 int last;
2213 int incoming_frequency = 0;
2214 gcov_type incoming_count = 0;
2216 /* Update to use GCOV_COMPUTE_SCALE. */
2217 if (ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count)
2218 count_scale = (REG_BR_PROB_BASE * count
2219 / ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count);
2220 else
2221 count_scale = REG_BR_PROB_BASE;
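/* A worked example with illustrative numbers: if the callee's
profiled entry count is 1000 and this call site contributes
COUNT == 250, count_scale becomes REG_BR_PROB_BASE / 4, so every
copied block and edge count below is multiplied by count_scale and
divided by REG_BR_PROB_BASE, i.e. quartered. */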
2223 /* Register specific tree functions. */
2224 gimple_register_cfg_hooks ();
2226 /* If we are inlining just a region of the function, make sure to connect
2227 the new entry to ENTRY_BLOCK_PTR. Since the new entry can be part of a
2228 loop, we must compute the frequency and probability of ENTRY_BLOCK_PTR
2229 based on the frequencies and probabilities of edges incoming from the nonduplicated region. */
2230 if (new_entry)
2232 edge e;
2233 edge_iterator ei;
2235 FOR_EACH_EDGE (e, ei, new_entry->preds)
2236 if (!e->src->aux)
2238 incoming_frequency += EDGE_FREQUENCY (e);
2239 incoming_count += e->count;
2241 /* Update to use apply_probability(). */
2242 incoming_count = incoming_count * count_scale / REG_BR_PROB_BASE;
2243 /* Update to use EDGE_FREQUENCY. */
2244 incoming_frequency
2245 = incoming_frequency * frequency_scale / REG_BR_PROB_BASE;
2246 ENTRY_BLOCK_PTR->count = incoming_count;
2247 ENTRY_BLOCK_PTR->frequency = incoming_frequency;
2250 /* Must have a CFG here at this point. */
2251 gcc_assert (ENTRY_BLOCK_PTR_FOR_FUNCTION
2252 (DECL_STRUCT_FUNCTION (callee_fndecl)));
2254 cfun_to_copy = id->src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
2256 ENTRY_BLOCK_PTR_FOR_FUNCTION (cfun_to_copy)->aux = entry_block_map;
2257 EXIT_BLOCK_PTR_FOR_FUNCTION (cfun_to_copy)->aux = exit_block_map;
2258 entry_block_map->aux = ENTRY_BLOCK_PTR_FOR_FUNCTION (cfun_to_copy);
2259 exit_block_map->aux = EXIT_BLOCK_PTR_FOR_FUNCTION (cfun_to_copy);
2261 /* Duplicate any exception-handling regions. */
2262 if (cfun->eh)
2263 id->eh_map = duplicate_eh_regions (cfun_to_copy, NULL, id->eh_lp_nr,
2264 remap_decl_1, id);
2266 /* Use aux pointers to map the original blocks to their copies. */
2267 FOR_EACH_BB_FN (bb, cfun_to_copy)
2268 if (!blocks_to_copy || bitmap_bit_p (blocks_to_copy, bb->index))
2270 basic_block new_bb = copy_bb (id, bb, frequency_scale, count_scale);
2271 bb->aux = new_bb;
2272 new_bb->aux = bb;
2275 last = last_basic_block;
2277 /* Now that we've duplicated the blocks, duplicate their edges. */
2278 bool can_make_abnormal_goto
2279 = id->gimple_call && stmt_can_make_abnormal_goto (id->gimple_call);
2280 FOR_ALL_BB_FN (bb, cfun_to_copy)
2281 if (!blocks_to_copy
2282 || (bb->index > 0 && bitmap_bit_p (blocks_to_copy, bb->index)))
2283 need_debug_cleanup |= copy_edges_for_bb (bb, count_scale, exit_block_map,
2284 can_make_abnormal_goto);
2286 if (new_entry)
2288 edge e = make_edge (entry_block_map, (basic_block)new_entry->aux, EDGE_FALLTHRU);
2289 e->probability = REG_BR_PROB_BASE;
2290 e->count = incoming_count;
2293 if (gimple_in_ssa_p (cfun))
2294 FOR_ALL_BB_FN (bb, cfun_to_copy)
2295 if (!blocks_to_copy
2296 || (bb->index > 0 && bitmap_bit_p (blocks_to_copy, bb->index)))
2297 copy_phis_for_bb (bb, id);
2299 FOR_ALL_BB_FN (bb, cfun_to_copy)
2300 if (bb->aux)
2302 if (need_debug_cleanup
2303 && bb->index != ENTRY_BLOCK
2304 && bb->index != EXIT_BLOCK)
2305 maybe_move_debug_stmts_to_successors (id, (basic_block) bb->aux);
2306 ((basic_block)bb->aux)->aux = NULL;
2307 bb->aux = NULL;
2310 /* Zero out AUX fields of blocks newly created during EH edge
2311 insertion. */
2312 for (; last < last_basic_block; last++)
2314 if (need_debug_cleanup)
2315 maybe_move_debug_stmts_to_successors (id, BASIC_BLOCK (last));
2316 BASIC_BLOCK (last)->aux = NULL;
2318 entry_block_map->aux = NULL;
2319 exit_block_map->aux = NULL;
2321 if (id->eh_map)
2323 pointer_map_destroy (id->eh_map);
2324 id->eh_map = NULL;
2327 return new_fndecl;
2330 /* Copy the debug STMT using ID. We deal with these statements in a
2331 special way: if any variable in their VALUE expression wasn't
2332 remapped yet, we won't remap it, because that would get decl uids
2333 out of sync, causing codegen differences between -g and -g0. If
2334 this arises, we drop the VALUE expression altogether. */
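/* For instance (a hypothetical bind, not from any testcase), when
# DEBUG y => a_1 + 1 is copied into a context where a_1 has not been
remapped, we do not remap it just for the debug stmt; instead the
statement is turned into # DEBUG y => NULL by
gimple_debug_bind_reset_value below. */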
2336 static void
2337 copy_debug_stmt (gimple stmt, copy_body_data *id)
2339 tree t, *n;
2340 struct walk_stmt_info wi;
2342 if (gimple_block (stmt))
2344 n = (tree *) pointer_map_contains (id->decl_map, gimple_block (stmt));
2345 gimple_set_block (stmt, n ? *n : id->block);
2348 /* Remap all the operands in COPY. */
2349 memset (&wi, 0, sizeof (wi));
2350 wi.info = id;
2352 processing_debug_stmt = 1;
2354 if (gimple_debug_source_bind_p (stmt))
2355 t = gimple_debug_source_bind_get_var (stmt);
2356 else
2357 t = gimple_debug_bind_get_var (stmt);
2359 if (TREE_CODE (t) == PARM_DECL && id->debug_map
2360 && (n = (tree *) pointer_map_contains (id->debug_map, t)))
2362 gcc_assert (TREE_CODE (*n) == VAR_DECL);
2363 t = *n;
2365 else if (TREE_CODE (t) == VAR_DECL
2366 && !is_global_var (t)
2367 && !pointer_map_contains (id->decl_map, t))
2368 /* T is a non-localized variable. */;
2369 else
2370 walk_tree (&t, remap_gimple_op_r, &wi, NULL);
2372 if (gimple_debug_bind_p (stmt))
2374 gimple_debug_bind_set_var (stmt, t);
2376 if (gimple_debug_bind_has_value_p (stmt))
2377 walk_tree (gimple_debug_bind_get_value_ptr (stmt),
2378 remap_gimple_op_r, &wi, NULL);
2380 /* Punt if any decl couldn't be remapped. */
2381 if (processing_debug_stmt < 0)
2382 gimple_debug_bind_reset_value (stmt);
2384 else if (gimple_debug_source_bind_p (stmt))
2386 gimple_debug_source_bind_set_var (stmt, t);
2387 walk_tree (gimple_debug_source_bind_get_value_ptr (stmt),
2388 remap_gimple_op_r, &wi, NULL);
2389 /* When inlining, if a source bind refers to one of the optimized-away
2390 parameters, change the source bind into a normal debug bind referring
2391 to the corresponding DEBUG_EXPR_DECL that should have been bound
2392 before the call stmt. */
2393 t = gimple_debug_source_bind_get_value (stmt);
2394 if (t != NULL_TREE
2395 && TREE_CODE (t) == PARM_DECL
2396 && id->gimple_call)
2398 vec<tree, va_gc> **debug_args = decl_debug_args_lookup (id->src_fn);
2399 unsigned int i;
2400 if (debug_args != NULL)
2402 for (i = 0; i < vec_safe_length (*debug_args); i += 2)
2403 if ((**debug_args)[i] == DECL_ORIGIN (t)
2404 && TREE_CODE ((**debug_args)[i + 1]) == DEBUG_EXPR_DECL)
2406 t = (**debug_args)[i + 1];
2407 stmt->gsbase.subcode = GIMPLE_DEBUG_BIND;
2408 gimple_debug_bind_set_value (stmt, t);
2409 break;
2415 processing_debug_stmt = 0;
2417 update_stmt (stmt);
2420 /* Process deferred debug stmts. In order to give values better odds
2421 of being successfully remapped, we delay the processing of debug
2422 stmts until all other stmts that might require remapping are
2423 processed. */
2425 static void
2426 copy_debug_stmts (copy_body_data *id)
2428 size_t i;
2429 gimple stmt;
2431 if (!id->debug_stmts.exists ())
2432 return;
2434 FOR_EACH_VEC_ELT (id->debug_stmts, i, stmt)
2435 copy_debug_stmt (stmt, id);
2437 id->debug_stmts.release ();
2440 /* Make a copy of the body of SRC_FN so that it can be inserted inline in
2441 another function. */
2443 static tree
2444 copy_tree_body (copy_body_data *id)
2446 tree fndecl = id->src_fn;
2447 tree body = DECL_SAVED_TREE (fndecl);
2449 walk_tree (&body, copy_tree_body_r, id, NULL);
2451 return body;
2454 /* Make a copy of the body of FN so that it can be inserted inline in
2455 another function. */
2457 static tree
2458 copy_body (copy_body_data *id, gcov_type count, int frequency_scale,
2459 basic_block entry_block_map, basic_block exit_block_map,
2460 bitmap blocks_to_copy, basic_block new_entry)
2462 tree fndecl = id->src_fn;
2463 tree body;
2465 /* If this body has a CFG, walk CFG and copy. */
2466 gcc_assert (ENTRY_BLOCK_PTR_FOR_FUNCTION (DECL_STRUCT_FUNCTION (fndecl)));
2467 body = copy_cfg_body (id, count, frequency_scale, entry_block_map, exit_block_map,
2468 blocks_to_copy, new_entry);
2469 copy_debug_stmts (id);
2471 return body;
2474 /* Return true if VALUE is an ADDR_EXPR of an automatic variable
2475 defined in function FN, or of a data member thereof. */
2477 static bool
2478 self_inlining_addr_expr (tree value, tree fn)
2480 tree var;
2482 if (TREE_CODE (value) != ADDR_EXPR)
2483 return false;
2485 var = get_base_address (TREE_OPERAND (value, 0));
2487 return var && auto_var_in_fn_p (var, fn);
2490 /* Append to BB a debug annotation that binds VAR to VALUE, inheriting
2491 lexical block and line number information from base_stmt, if given,
2492 or from the last stmt of the block otherwise. */
2494 static gimple
2495 insert_init_debug_bind (copy_body_data *id,
2496 basic_block bb, tree var, tree value,
2497 gimple base_stmt)
2499 gimple note;
2500 gimple_stmt_iterator gsi;
2501 tree tracked_var;
2503 if (!gimple_in_ssa_p (id->src_cfun))
2504 return NULL;
2506 if (!MAY_HAVE_DEBUG_STMTS)
2507 return NULL;
2509 tracked_var = target_for_debug_bind (var);
2510 if (!tracked_var)
2511 return NULL;
2513 if (bb)
2515 gsi = gsi_last_bb (bb);
2516 if (!base_stmt && !gsi_end_p (gsi))
2517 base_stmt = gsi_stmt (gsi);
2520 note = gimple_build_debug_bind (tracked_var, value, base_stmt);
2522 if (bb)
2524 if (!gsi_end_p (gsi))
2525 gsi_insert_after (&gsi, note, GSI_SAME_STMT);
2526 else
2527 gsi_insert_before (&gsi, note, GSI_SAME_STMT);
2530 return note;
2533 static void
2534 insert_init_stmt (copy_body_data *id, basic_block bb, gimple init_stmt)
2536 /* If VAR represents a zero-sized variable, it's possible that the
2537 assignment statement may result in no gimple statements. */
2538 if (init_stmt)
2540 gimple_stmt_iterator si = gsi_last_bb (bb);
2542 /* We can end up with init statements that store to a non-register
2543 from a rhs with a conversion. Handle that here by forcing the
2544 rhs into a temporary. gimple_regimplify_operands is not
2545 prepared to do this for us. */
2546 if (!is_gimple_debug (init_stmt)
2547 && !is_gimple_reg (gimple_assign_lhs (init_stmt))
2548 && is_gimple_reg_type (TREE_TYPE (gimple_assign_lhs (init_stmt)))
2549 && gimple_assign_rhs_class (init_stmt) == GIMPLE_UNARY_RHS)
2551 tree rhs = build1 (gimple_assign_rhs_code (init_stmt),
2552 gimple_expr_type (init_stmt),
2553 gimple_assign_rhs1 (init_stmt));
2554 rhs = force_gimple_operand_gsi (&si, rhs, true, NULL_TREE, false,
2555 GSI_NEW_STMT);
2556 gimple_assign_set_rhs_code (init_stmt, TREE_CODE (rhs));
2557 gimple_assign_set_rhs1 (init_stmt, rhs);
2559 gsi_insert_after (&si, init_stmt, GSI_NEW_STMT);
2560 gimple_regimplify_operands (init_stmt, &si);
2562 if (!is_gimple_debug (init_stmt) && MAY_HAVE_DEBUG_STMTS)
2564 tree def = gimple_assign_lhs (init_stmt);
2565 insert_init_debug_bind (id, bb, def, def, init_stmt);
2570 /* Initialize parameter P with VALUE. If needed, produce an init statement
2571 at the end of BB. When BB is NULL, we return the init statement to be
2572 output later. */
2573 static gimple
2574 setup_one_parameter (copy_body_data *id, tree p, tree value, tree fn,
2575 basic_block bb, tree *vars)
2577 gimple init_stmt = NULL;
2578 tree var;
2579 tree rhs = value;
2580 tree def = (gimple_in_ssa_p (cfun)
2581 ? ssa_default_def (id->src_cfun, p) : NULL);
2583 if (value
2584 && value != error_mark_node
2585 && !useless_type_conversion_p (TREE_TYPE (p), TREE_TYPE (value)))
2587 /* If we can match up types by promotion/demotion do so. */
2588 if (fold_convertible_p (TREE_TYPE (p), value))
2589 rhs = fold_convert (TREE_TYPE (p), value);
2590 else
2592 /* ??? For valid programs we should not end up here.
2593 Still if we end up with truly mismatched types here, fall back
2594 to using a VIEW_CONVERT_EXPR or a literal zero to not leak invalid
2595 GIMPLE to the following passes. */
2596 if (!is_gimple_reg_type (TREE_TYPE (value))
2597 || TYPE_SIZE (TREE_TYPE (p)) == TYPE_SIZE (TREE_TYPE (value)))
2598 rhs = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (p), value);
2599 else
2600 rhs = build_zero_cst (TREE_TYPE (p));
2604 /* Make an equivalent VAR_DECL. Note that we must NOT remap the type
2605 here since the type of this decl must be visible to the calling
2606 function. */
2607 var = copy_decl_to_var (p, id);
2609 /* Declare this new variable. */
2610 DECL_CHAIN (var) = *vars;
2611 *vars = var;
2613 /* Make gimplifier happy about this variable. */
2614 DECL_SEEN_IN_BIND_EXPR_P (var) = 1;
2616 /* If the parameter is never assigned to and has no SSA_NAMEs created,
2617 we would not need to create a new variable here at all, if it
2618 weren't for debug info. Still, we can just use the argument
2619 value. */
2620 if (TREE_READONLY (p)
2621 && !TREE_ADDRESSABLE (p)
2622 && value && !TREE_SIDE_EFFECTS (value)
2623 && !def)
2625 /* We may produce non-gimple trees by adding NOPs or introduce
2626 invalid sharing when the operand is not really constant.
2627 It is not a big deal to prohibit constant propagation here, as
2628 we will constant propagate in the DOM1 pass anyway. */
2629 if (is_gimple_min_invariant (value)
2630 && useless_type_conversion_p (TREE_TYPE (p),
2631 TREE_TYPE (value))
2632 /* We have to be very careful about ADDR_EXPR. Make sure
2633 the base variable isn't a local variable of the inlined
2634 function, e.g., when doing recursive inlining, direct or
2635 mutually-recursive or whatever, which is why we don't
2636 just test whether fn == current_function_decl. */
2637 && ! self_inlining_addr_expr (value, fn))
2639 insert_decl_map (id, p, value);
2640 insert_debug_decl_map (id, p, var);
2641 return insert_init_debug_bind (id, bb, var, value, NULL);
2645 /* Register the VAR_DECL as the equivalent for the PARM_DECL;
2646 that way, when the PARM_DECL is encountered, it will be
2647 automatically replaced by the VAR_DECL. */
2648 insert_decl_map (id, p, var);
2650 /* Even if P was TREE_READONLY, the new VAR should not be.
2651 In the original code, we would have constructed a
2652 temporary, and then the function body would have never
2653 changed the value of P. However, now, we will be
2654 constructing VAR directly. The constructor body may
2655 change its value multiple times as it is being
2656 constructed. Therefore, it must not be TREE_READONLY;
2657 the back-end assumes that a TREE_READONLY variable is
2658 assigned to only once. */
2659 if (TYPE_NEEDS_CONSTRUCTING (TREE_TYPE (p)))
2660 TREE_READONLY (var) = 0;
2662 /* If there is no setup required and we are in SSA, take the easy route
2663 replacing all SSA names representing the function parameter by the
2664 SSA name passed to the function.
2666 We need to construct a map for the variable anyway, as it might be used
2667 in different SSA names when the parameter is set in the function.
2669 Do the replacement at -O0 for const arguments replaced by a constant.
2670 This is important for builtin_constant_p and other constructs requiring
2671 a constant argument to be visible in the inlined function body. */
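/* For instance (illustrative names): when inlining a call foo (x_3)
where parameter P has default definition p_1, we simply record the
mapping p_1 -> x_3, so every use of p_1 in the copied body becomes a
use of x_3 and no initialization statement is emitted. */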
2672 if (gimple_in_ssa_p (cfun) && rhs && def && is_gimple_reg (p)
2673 && (optimize
2674 || (TREE_READONLY (p)
2675 && is_gimple_min_invariant (rhs)))
2676 && (TREE_CODE (rhs) == SSA_NAME
2677 || is_gimple_min_invariant (rhs))
2678 && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (def))
2680 insert_decl_map (id, def, rhs);
2681 return insert_init_debug_bind (id, bb, var, rhs, NULL);
2684 /* If the value of the argument is never used, don't bother initializing
2685 it. */
2686 if (optimize && gimple_in_ssa_p (cfun) && !def && is_gimple_reg (p))
2688 gcc_assert (!value || !TREE_SIDE_EFFECTS (value));
2689 return insert_init_debug_bind (id, bb, var, rhs, NULL);
2692 /* Initialize this VAR_DECL from the equivalent argument. Convert
2693 the argument to the proper type in case it was promoted. */
2694 if (value)
2696 if (rhs == error_mark_node)
2698 insert_decl_map (id, p, var);
2699 return insert_init_debug_bind (id, bb, var, rhs, NULL);
2702 STRIP_USELESS_TYPE_CONVERSION (rhs);
2704 /* If we are in SSA form, properly remap the default definition
2705 or assign to a dummy SSA name if the parameter is unused and
2706 we are not optimizing. */
2707 if (gimple_in_ssa_p (cfun) && is_gimple_reg (p))
2709 if (def)
2711 def = remap_ssa_name (def, id);
2712 init_stmt = gimple_build_assign (def, rhs);
2713 SSA_NAME_IS_DEFAULT_DEF (def) = 0;
2714 set_ssa_default_def (cfun, var, NULL);
2716 else if (!optimize)
2718 def = make_ssa_name (var, NULL);
2719 init_stmt = gimple_build_assign (def, rhs);
2722 else
2723 init_stmt = gimple_build_assign (var, rhs);
2725 if (bb && init_stmt)
2726 insert_init_stmt (id, bb, init_stmt);
2728 return init_stmt;
2731 /* Generate code to initialize the parameters of the function at the
2732 top of the stack in ID from the GIMPLE_CALL STMT. */
2734 static void
2735 initialize_inlined_parameters (copy_body_data *id, gimple stmt,
2736 tree fn, basic_block bb)
2738 tree parms;
2739 size_t i;
2740 tree p;
2741 tree vars = NULL_TREE;
2742 tree static_chain = gimple_call_chain (stmt);
2744 /* Figure out what the parameters are. */
2745 parms = DECL_ARGUMENTS (fn);
2747 /* Loop through the parameter declarations, replacing each with an
2748 equivalent VAR_DECL, appropriately initialized. */
2749 for (p = parms, i = 0; p; p = DECL_CHAIN (p), i++)
2751 tree val;
2752 val = i < gimple_call_num_args (stmt) ? gimple_call_arg (stmt, i) : NULL;
2753 setup_one_parameter (id, p, val, fn, bb, &vars);
2755 /* After remapping parameters remap their types. This has to be done
2756 in a second loop over all parameters to appropriately remap
2757 variable sized arrays when the size is specified in a
2758 parameter following the array. */
2759 for (p = parms, i = 0; p; p = DECL_CHAIN (p), i++)
2761 tree *varp = (tree *) pointer_map_contains (id->decl_map, p);
2762 if (varp
2763 && TREE_CODE (*varp) == VAR_DECL)
2765 tree def = (gimple_in_ssa_p (cfun) && is_gimple_reg (p)
2766 ? ssa_default_def (id->src_cfun, p) : NULL);
2767 tree var = *varp;
2768 TREE_TYPE (var) = remap_type (TREE_TYPE (var), id);
2769 /* Also remap the default definition if it was remapped
2770 to the default definition of the parameter replacement
2771 by the parameter setup. */
2772 if (def)
2774 tree *defp = (tree *) pointer_map_contains (id->decl_map, def);
2775 if (defp
2776 && TREE_CODE (*defp) == SSA_NAME
2777 && SSA_NAME_VAR (*defp) == var)
2778 TREE_TYPE (*defp) = TREE_TYPE (var);
2783 /* Initialize the static chain. */
2784 p = DECL_STRUCT_FUNCTION (fn)->static_chain_decl;
2785 gcc_assert (fn != current_function_decl);
2786 if (p)
2788 /* No static chain? Seems like a bug in tree-nested.c. */
2789 gcc_assert (static_chain);
2791 setup_one_parameter (id, p, static_chain, fn, bb, &vars);
2794 declare_inline_vars (id->block, vars);
2798 /* Declare a return variable to replace the RESULT_DECL for the
2799 function we are calling. An appropriate DECL_STMT is returned.
2800 The USE_STMT is filled to contain a use of the declaration to
2801 indicate the return value of the function.
2803 RETURN_SLOT, if non-null, is the place where to store the result. It
2804 is set only for CALL_EXPR_RETURN_SLOT_OPT. MODIFY_DEST, if non-null,
2805 was the LHS of the MODIFY_EXPR to which this call is the RHS.
2807 The return value is a (possibly null) value that holds the result
2808 as seen by the caller. */
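/* A sketch of the common case (illustrative GIMPLE): for a call
y = foo (), MODIFY_DEST is y, and when it is safe to reuse it (no
type promotion, not TREE_ADDRESSABLE, not a global) the callee's
RESULT_DECL is mapped directly to y, so no temporary is created;
otherwise a fresh VAR_DECL is declared and returned as the use seen
by the caller. */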
2810 static tree
2811 declare_return_variable (copy_body_data *id, tree return_slot, tree modify_dest,
2812 basic_block entry_bb)
2814 tree callee = id->src_fn;
2815 tree result = DECL_RESULT (callee);
2816 tree callee_type = TREE_TYPE (result);
2817 tree caller_type;
2818 tree var, use;
2820 /* Handle type-mismatches in the function declaration return type
2821 vs. the call expression. */
2822 if (modify_dest)
2823 caller_type = TREE_TYPE (modify_dest);
2824 else
2825 caller_type = TREE_TYPE (TREE_TYPE (callee));
2827 /* We don't need to do anything for functions that don't return anything. */
2828 if (VOID_TYPE_P (callee_type))
2829 return NULL_TREE;
2831 /* If there was a return slot, then the return value is the
2832 dereferenced address of that object. */
2833 if (return_slot)
2835 /* The front end shouldn't have used both return_slot and
2836 a modify expression. */
2837 gcc_assert (!modify_dest);
2838 if (DECL_BY_REFERENCE (result))
2840 tree return_slot_addr = build_fold_addr_expr (return_slot);
2841 STRIP_USELESS_TYPE_CONVERSION (return_slot_addr);
2843 /* We are going to construct *&return_slot and we can't do that
2844 for variables believed not to be addressable.
2846 FIXME: This check can possibly match, because values returned
2847 via return slot optimization are not believed to have their address
2848 taken by alias analysis. */
2849 gcc_assert (TREE_CODE (return_slot) != SSA_NAME);
2850 var = return_slot_addr;
2852 else
2854 var = return_slot;
2855 gcc_assert (TREE_CODE (var) != SSA_NAME);
2856 TREE_ADDRESSABLE (var) |= TREE_ADDRESSABLE (result);
2858 if ((TREE_CODE (TREE_TYPE (result)) == COMPLEX_TYPE
2859 || TREE_CODE (TREE_TYPE (result)) == VECTOR_TYPE)
2860 && !DECL_GIMPLE_REG_P (result)
2861 && DECL_P (var))
2862 DECL_GIMPLE_REG_P (var) = 0;
2863 use = NULL;
2864 goto done;
2867 /* All types requiring non-trivial constructors should have been handled. */
2868 gcc_assert (!TREE_ADDRESSABLE (callee_type));
2870 /* Attempt to avoid creating a new temporary variable. */
2871 if (modify_dest
2872 && TREE_CODE (modify_dest) != SSA_NAME)
2874 bool use_it = false;
2876 /* We can't use MODIFY_DEST if there's type promotion involved. */
2877 if (!useless_type_conversion_p (callee_type, caller_type))
2878 use_it = false;
2880 /* ??? If we're assigning to a variable sized type, then we must
2881 reuse the destination variable, because we've no good way to
2882 create variable sized temporaries at this point. */
2883 else if (TREE_CODE (TYPE_SIZE_UNIT (caller_type)) != INTEGER_CST)
2884 use_it = true;
2886 /* If the callee cannot possibly modify MODIFY_DEST, then we can
2887 reuse it as the result of the call directly. Don't do this if
2888 it would promote MODIFY_DEST to addressable. */
2889 else if (TREE_ADDRESSABLE (result))
2890 use_it = false;
2891 else
2893 tree base_m = get_base_address (modify_dest);
2895 /* If the base isn't a decl, then it's a pointer, and we don't
2896 know where that's going to go. */
2897 if (!DECL_P (base_m))
2898 use_it = false;
2899 else if (is_global_var (base_m))
2900 use_it = false;
2901 else if ((TREE_CODE (TREE_TYPE (result)) == COMPLEX_TYPE
2902 || TREE_CODE (TREE_TYPE (result)) == VECTOR_TYPE)
2903 && !DECL_GIMPLE_REG_P (result)
2904 && DECL_GIMPLE_REG_P (base_m))
2905 use_it = false;
2906 else if (!TREE_ADDRESSABLE (base_m))
2907 use_it = true;
2910 if (use_it)
2912 var = modify_dest;
2913 use = NULL;
2914 goto done;
2918 gcc_assert (TREE_CODE (TYPE_SIZE_UNIT (callee_type)) == INTEGER_CST);
2920 var = copy_result_decl_to_var (result, id);
2921 DECL_SEEN_IN_BIND_EXPR_P (var) = 1;
2923 /* Do not have the rest of GCC warn about this variable as it should
2924 not be visible to the user. */
2925 TREE_NO_WARNING (var) = 1;
2927 declare_inline_vars (id->block, var);
2929 /* Build the use expr. If the return type of the function was
2930 promoted, convert it back to the expected type. */
2931 use = var;
2932 if (!useless_type_conversion_p (caller_type, TREE_TYPE (var)))
2934 /* If we can match up types by promotion/demotion do so. */
2935 if (fold_convertible_p (caller_type, var))
2936 use = fold_convert (caller_type, var);
2937 else
2939 /* ??? For valid programs we should not end up here.
2940 Still if we end up with truly mismatched types here, fall back
2941 to using a MEM_REF to not leak invalid GIMPLE to the following
2942 passes. */
2943 /* Prevent var from being written into SSA form. */
2944 if (TREE_CODE (TREE_TYPE (var)) == VECTOR_TYPE
2945 || TREE_CODE (TREE_TYPE (var)) == COMPLEX_TYPE)
2946 DECL_GIMPLE_REG_P (var) = false;
2947 else if (is_gimple_reg_type (TREE_TYPE (var)))
2948 TREE_ADDRESSABLE (var) = true;
2949 use = fold_build2 (MEM_REF, caller_type,
2950 build_fold_addr_expr (var),
2951 build_int_cst (ptr_type_node, 0));
2955 STRIP_USELESS_TYPE_CONVERSION (use);
2957 if (DECL_BY_REFERENCE (result))
2959 TREE_ADDRESSABLE (var) = 1;
2960 var = build_fold_addr_expr (var);
2963 done:
2964 /* Register the VAR_DECL as the equivalent for the RESULT_DECL; that
2965 way, when the RESULT_DECL is encountered, it will be
2966 automatically replaced by the VAR_DECL.
2968 When returning by reference, ensure that RESULT_DECL remaps to
2969 gimple_val. */
2970 if (DECL_BY_REFERENCE (result)
2971 && !is_gimple_val (var))
2973 tree temp = create_tmp_var (TREE_TYPE (result), "retvalptr");
2974 insert_decl_map (id, result, temp);
2975 /* When RESULT_DECL is in SSA form, we need to remap and initialize
2976 its default_def SSA_NAME. */
2977 if (gimple_in_ssa_p (id->src_cfun)
2978 && is_gimple_reg (result))
2980 temp = make_ssa_name (temp, NULL);
2981 insert_decl_map (id, ssa_default_def (id->src_cfun, result), temp);
2983 insert_init_stmt (id, entry_bb, gimple_build_assign (temp, var));
2985 else
2986 insert_decl_map (id, result, var);
2988 /* Remember this so we can ignore it in remap_decls. */
2989 id->retvar = var;
2991 return use;
2994 /* Callback through walk_tree. Determine if a DECL_INITIAL makes reference
2995 to a local label. */
2997 static tree
2998 has_label_address_in_static_1 (tree *nodep, int *walk_subtrees, void *fnp)
3000 tree node = *nodep;
3001 tree fn = (tree) fnp;
3003 if (TREE_CODE (node) == LABEL_DECL && DECL_CONTEXT (node) == fn)
3004 return node;
3006 if (TYPE_P (node))
3007 *walk_subtrees = 0;
3009 return NULL_TREE;
3012 /* Determine if the function can be copied. If so return NULL. If
3013 not return a string describing the reason for failure. */
3015 static const char *
3016 copy_forbidden (struct function *fun, tree fndecl)
3018 const char *reason = fun->cannot_be_copied_reason;
3019 tree decl;
3020 unsigned ix;
3022 /* Only examine the function once. */
3023 if (fun->cannot_be_copied_set)
3024 return reason;
3026 /* We cannot copy a function that receives a non-local goto
3027 because we cannot remap the destination label used in the
3028 function that is performing the non-local goto. */
3029 /* ??? Actually, this should be possible, if we work at it.
3030 No doubt there's just a handful of places that simply
3031 assume it doesn't happen and don't substitute properly. */
3032 if (fun->has_nonlocal_label)
3034 reason = G_("function %q+F can never be copied "
3035 "because it receives a non-local goto");
3036 goto fail;
3039 FOR_EACH_LOCAL_DECL (fun, ix, decl)
3040 if (TREE_CODE (decl) == VAR_DECL
3041 && TREE_STATIC (decl)
3042 && !DECL_EXTERNAL (decl)
3043 && DECL_INITIAL (decl)
3044 && walk_tree_without_duplicates (&DECL_INITIAL (decl),
3045 has_label_address_in_static_1,
3046 fndecl))
3048 reason = G_("function %q+F can never be copied because it saves "
3049 "address of local label in a static variable");
3050 goto fail;
3053 fail:
3054 fun->cannot_be_copied_reason = reason;
3055 fun->cannot_be_copied_set = true;
3056 return reason;
3060 static const char *inline_forbidden_reason;
3062 /* A callback for walk_gimple_seq to handle statements. Returns non-null
3063 iff a function cannot be inlined. Also sets the reason why. */
3065 static tree
3066 inline_forbidden_p_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
3067 struct walk_stmt_info *wip)
3069 tree fn = (tree) wip->info;
3070 tree t;
3071 gimple stmt = gsi_stmt (*gsi);
3073 switch (gimple_code (stmt))
3075 case GIMPLE_CALL:
3076 /* Refuse to inline an alloca call unless the user explicitly forced it, as
3077 this may change the program's memory overhead drastically when the
3078 function using alloca is called in a loop. In the GCC present in
3079 SPEC2000, inlining into schedule_block caused it to require 2GB of
3080 RAM instead of 256MB. Don't do so for alloca calls emitted for
3081 VLA objects, as those can't cause unbounded growth (they're always
3082 wrapped inside stack_save/stack_restore regions). */
3083 if (gimple_alloca_call_p (stmt)
3084 && !gimple_call_alloca_for_var_p (stmt)
3085 && !lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn)))
3087 inline_forbidden_reason
3088 = G_("function %q+F can never be inlined because it uses "
3089 "alloca (override using the always_inline attribute)");
3090 *handled_ops_p = true;
3091 return fn;
3094 t = gimple_call_fndecl (stmt);
3095 if (t == NULL_TREE)
3096 break;
3098 /* We cannot inline functions that call setjmp. */
3099 if (setjmp_call_p (t))
3101 inline_forbidden_reason
3102 = G_("function %q+F can never be inlined because it uses setjmp");
3103 *handled_ops_p = true;
3104 return t;
3107 if (DECL_BUILT_IN_CLASS (t) == BUILT_IN_NORMAL)
3108 switch (DECL_FUNCTION_CODE (t))
3110 /* We cannot inline functions that take a variable number of
3111 arguments. */
3112 case BUILT_IN_VA_START:
3113 case BUILT_IN_NEXT_ARG:
3114 case BUILT_IN_VA_END:
3115 inline_forbidden_reason
3116 = G_("function %q+F can never be inlined because it "
3117 "uses variable argument lists");
3118 *handled_ops_p = true;
3119 return t;
3121 case BUILT_IN_LONGJMP:
3122 /* We can't inline functions that call __builtin_longjmp at
3123 all. The non-local goto machinery really requires the
3124 destination be in a different function. If we allow the
3125 function calling __builtin_longjmp to be inlined into the
3126 function calling __builtin_setjmp, Things will Go Awry. */
3127 inline_forbidden_reason
3128 = G_("function %q+F can never be inlined because "
3129 "it uses setjmp-longjmp exception handling");
3130 *handled_ops_p = true;
3131 return t;
3133 case BUILT_IN_NONLOCAL_GOTO:
3134 /* Similarly. */
3135 inline_forbidden_reason
3136 = G_("function %q+F can never be inlined because "
3137 "it uses non-local goto");
3138 *handled_ops_p = true;
3139 return t;
3141 case BUILT_IN_RETURN:
3142 case BUILT_IN_APPLY_ARGS:
3143 /* If a __builtin_apply_args caller would be inlined,
3144 it would be saving arguments of the function it has
3145 been inlined into. Similarly, __builtin_return would
3146 return from the function the call has been inlined into. */
3147 inline_forbidden_reason
3148 = G_("function %q+F can never be inlined because "
3149 "it uses __builtin_return or __builtin_apply_args");
3150 *handled_ops_p = true;
3151 return t;
3153 default:
3154 break;
3156 break;
3158 case GIMPLE_GOTO:
3159 t = gimple_goto_dest (stmt);
3161 /* We will not inline a function which uses computed goto. The
3162 addresses of its local labels, which may be tucked into
3163 global storage, are of course not constant across
3164 instantiations, which causes unexpected behavior. */
3165 if (TREE_CODE (t) != LABEL_DECL)
3167 inline_forbidden_reason
3168 = G_("function %q+F can never be inlined "
3169 "because it contains a computed goto");
3170 *handled_ops_p = true;
3171 return t;
3173 break;
3175 default:
3176 break;
3179 *handled_ops_p = false;
3180 return NULL_TREE;
3183 /* Return true if FNDECL is a function that cannot be inlined into
3184 another one. */
3186 static bool
3187 inline_forbidden_p (tree fndecl)
3189 struct function *fun = DECL_STRUCT_FUNCTION (fndecl);
3190 struct walk_stmt_info wi;
3191 struct pointer_set_t *visited_nodes;
3192 basic_block bb;
3193 bool forbidden_p = false;
3195 /* First check for shared reasons not to copy the code. */
3196 inline_forbidden_reason = copy_forbidden (fun, fndecl);
3197 if (inline_forbidden_reason != NULL)
3198 return true;
3200 /* Next, walk the statements of the function looking for
3201 constructs we can't handle, or that are non-optimal for inlining. */
3202 visited_nodes = pointer_set_create ();
3203 memset (&wi, 0, sizeof (wi));
3204 wi.info = (void *) fndecl;
3205 wi.pset = visited_nodes;
3207 FOR_EACH_BB_FN (bb, fun)
3209 gimple ret;
3210 gimple_seq seq = bb_seq (bb);
3211 ret = walk_gimple_seq (seq, inline_forbidden_p_stmt, NULL, &wi);
3212 forbidden_p = (ret != NULL);
3213 if (forbidden_p)
3214 break;
3217 pointer_set_destroy (visited_nodes);
3218 return forbidden_p;
3221 /* Return false if the function FNDECL cannot be inlined on account of its
3222 attributes, true otherwise. */
3223 static bool
3224 function_attribute_inlinable_p (const_tree fndecl)
3226 if (targetm.attribute_table)
3228 const_tree a;
3230 for (a = DECL_ATTRIBUTES (fndecl); a; a = TREE_CHAIN (a))
3232 const_tree name = TREE_PURPOSE (a);
3233 int i;
3235 for (i = 0; targetm.attribute_table[i].name != NULL; i++)
3236 if (is_attribute_p (targetm.attribute_table[i].name, name))
3237 return targetm.function_attribute_inlinable_p (fndecl);
3241 return true;
3244 /* Returns nonzero if FN is a function that does not have any
3245 fundamental inline blocking properties. */
3247 bool
3248 tree_inlinable_function_p (tree fn)
3250 bool inlinable = true;
3251 bool do_warning;
3252 tree always_inline;
3254 /* If we've already decided this function shouldn't be inlined,
3255 there's no need to check again. */
3256 if (DECL_UNINLINABLE (fn))
3257 return false;
3259 /* We only warn for functions declared `inline' by the user. */
3260 do_warning = (warn_inline
3261 && DECL_DECLARED_INLINE_P (fn)
3262 && !DECL_NO_INLINE_WARNING_P (fn)
3263 && !DECL_IN_SYSTEM_HEADER (fn));
3265 always_inline = lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn));
3267 if (flag_no_inline
3268 && always_inline == NULL)
3270 if (do_warning)
3271 warning (OPT_Winline, "function %q+F can never be inlined because it "
3272 "is suppressed using -fno-inline", fn);
3273 inlinable = false;
3276 else if (!function_attribute_inlinable_p (fn))
3278 if (do_warning)
3279 warning (OPT_Winline, "function %q+F can never be inlined because it "
3280 "uses attributes conflicting with inlining", fn);
3281 inlinable = false;
3284 else if (inline_forbidden_p (fn))
3286 /* See if we should warn about uninlinable functions. Previously,
3287 some of these warnings would be issued while trying to expand
3288 the function inline, but that would cause multiple warnings
3289 about functions that would for example call alloca. But since
3290 this is a property of the function, just one warning is enough.
3291 As a bonus we can now give more details about the reason why a
3292 function is not inlinable. */
3293 if (always_inline)
3294 error (inline_forbidden_reason, fn);
3295 else if (do_warning)
3296 warning (OPT_Winline, inline_forbidden_reason, fn);
3298 inlinable = false;
3301 /* Squirrel away the result so that we don't have to check again. */
3302 DECL_UNINLINABLE (fn) = !inlinable;
3304 return inlinable;
3307 /* Estimate the cost of a memory move. Use the machine-dependent
3308 word size and take a possible memcpy call into account. */
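/* A worked example with illustrative target numbers: assuming
MOVE_MAX_PIECES == 8 and a large enough MOVE_RATIO, a 20-byte struct
costs (20 + 8 - 1) / 8 == 3 piecewise moves, while a struct too
large to move piecewise falls back to the flat memcpy cost of 4
(three argument setups plus the call). */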
3310 int
3311 estimate_move_cost (tree type)
3313 HOST_WIDE_INT size;
3315 gcc_assert (!VOID_TYPE_P (type));
3317 if (TREE_CODE (type) == VECTOR_TYPE)
3319 enum machine_mode inner = TYPE_MODE (TREE_TYPE (type));
3320 enum machine_mode simd
3321 = targetm.vectorize.preferred_simd_mode (inner);
3322 int simd_mode_size = GET_MODE_SIZE (simd);
3323 return ((GET_MODE_SIZE (TYPE_MODE (type)) + simd_mode_size - 1)
3324 / simd_mode_size);
3327 size = int_size_in_bytes (type);
3329 if (size < 0 || size > MOVE_MAX_PIECES * MOVE_RATIO (!optimize_size))
3330 /* Cost of a memcpy call, 3 arguments and the call. */
3331 return 4;
3332 else
3333 return ((size + MOVE_MAX_PIECES - 1) / MOVE_MAX_PIECES);
3336 /* Returns cost of operation CODE, according to WEIGHTS */
3338 static int
3339 estimate_operator_cost (enum tree_code code, eni_weights *weights,
3340 tree op1 ATTRIBUTE_UNUSED, tree op2)
3342 switch (code)
3344 /* These are "free" conversions, or their presumed cost
3345 is folded into other operations. */
3346 case RANGE_EXPR:
3347 CASE_CONVERT:
3348 case COMPLEX_EXPR:
3349 case PAREN_EXPR:
3350 case VIEW_CONVERT_EXPR:
3351 return 0;
3353 /* Assign a cost of 1 to usual operations.
3354 ??? We may consider mapping RTL costs to this. */
3355 case COND_EXPR:
3356 case VEC_COND_EXPR:
3357 case VEC_PERM_EXPR:
3359 case PLUS_EXPR:
3360 case POINTER_PLUS_EXPR:
3361 case MINUS_EXPR:
3362 case MULT_EXPR:
3363 case MULT_HIGHPART_EXPR:
3364 case FMA_EXPR:
3366 case ADDR_SPACE_CONVERT_EXPR:
3367 case FIXED_CONVERT_EXPR:
3368 case FIX_TRUNC_EXPR:
3370 case NEGATE_EXPR:
3371 case FLOAT_EXPR:
3372 case MIN_EXPR:
3373 case MAX_EXPR:
3374 case ABS_EXPR:
3376 case LSHIFT_EXPR:
3377 case RSHIFT_EXPR:
3378 case LROTATE_EXPR:
3379 case RROTATE_EXPR:
3380 case VEC_LSHIFT_EXPR:
3381 case VEC_RSHIFT_EXPR:
3383 case BIT_IOR_EXPR:
3384 case BIT_XOR_EXPR:
3385 case BIT_AND_EXPR:
3386 case BIT_NOT_EXPR:
3388 case TRUTH_ANDIF_EXPR:
3389 case TRUTH_ORIF_EXPR:
3390 case TRUTH_AND_EXPR:
3391 case TRUTH_OR_EXPR:
3392 case TRUTH_XOR_EXPR:
3393 case TRUTH_NOT_EXPR:
3395 case LT_EXPR:
3396 case LE_EXPR:
3397 case GT_EXPR:
3398 case GE_EXPR:
3399 case EQ_EXPR:
3400 case NE_EXPR:
3401 case ORDERED_EXPR:
3402 case UNORDERED_EXPR:
3404 case UNLT_EXPR:
3405 case UNLE_EXPR:
3406 case UNGT_EXPR:
3407 case UNGE_EXPR:
3408 case UNEQ_EXPR:
3409 case LTGT_EXPR:
3411 case CONJ_EXPR:
3413 case PREDECREMENT_EXPR:
3414 case PREINCREMENT_EXPR:
3415 case POSTDECREMENT_EXPR:
3416 case POSTINCREMENT_EXPR:
3418 case REALIGN_LOAD_EXPR:
3420 case REDUC_MAX_EXPR:
3421 case REDUC_MIN_EXPR:
3422 case REDUC_PLUS_EXPR:
3423 case WIDEN_SUM_EXPR:
3424 case WIDEN_MULT_EXPR:
3425 case DOT_PROD_EXPR:
3426 case WIDEN_MULT_PLUS_EXPR:
3427 case WIDEN_MULT_MINUS_EXPR:
3428 case WIDEN_LSHIFT_EXPR:
3430 case VEC_WIDEN_MULT_HI_EXPR:
3431 case VEC_WIDEN_MULT_LO_EXPR:
3432 case VEC_WIDEN_MULT_EVEN_EXPR:
3433 case VEC_WIDEN_MULT_ODD_EXPR:
3434 case VEC_UNPACK_HI_EXPR:
3435 case VEC_UNPACK_LO_EXPR:
3436 case VEC_UNPACK_FLOAT_HI_EXPR:
3437 case VEC_UNPACK_FLOAT_LO_EXPR:
3438 case VEC_PACK_TRUNC_EXPR:
3439 case VEC_PACK_SAT_EXPR:
3440 case VEC_PACK_FIX_TRUNC_EXPR:
3441 case VEC_WIDEN_LSHIFT_HI_EXPR:
3442 case VEC_WIDEN_LSHIFT_LO_EXPR:
3444 return 1;
3446 /* A few special cases of expensive operations. This is useful
3447 to avoid inlining functions that have too many of these. */
3448 case TRUNC_DIV_EXPR:
3449 case CEIL_DIV_EXPR:
3450 case FLOOR_DIV_EXPR:
3451 case ROUND_DIV_EXPR:
3452 case EXACT_DIV_EXPR:
3453 case TRUNC_MOD_EXPR:
3454 case CEIL_MOD_EXPR:
3455 case FLOOR_MOD_EXPR:
3456 case ROUND_MOD_EXPR:
3457 case RDIV_EXPR:
3458 if (TREE_CODE (op2) != INTEGER_CST)
3459 return weights->div_mod_cost;
3460 return 1;
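/* The case above means that, e.g., a / b with non-constant b is
charged weights->div_mod_cost (10 in eni_time_weights below), while
a / 4 is charged 1, on the assumption that division by a constant
is strength-reduced. */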
3462 default:
3463 /* We expect a copy assignment with no operator. */
3464 gcc_assert (get_gimple_rhs_class (code) == GIMPLE_SINGLE_RHS);
3465 return 0;
3470 /* Estimate number of instructions that will be created by expanding
3471 the statements in the statement sequence STMTS.
3472 WEIGHTS contains weights attributed to various constructs. */
3474 static
3475 int estimate_num_insns_seq (gimple_seq stmts, eni_weights *weights)
3477 int cost;
3478 gimple_stmt_iterator gsi;
3480 cost = 0;
3481 for (gsi = gsi_start (stmts); !gsi_end_p (gsi); gsi_next (&gsi))
3482 cost += estimate_num_insns (gsi_stmt (gsi), weights);
3484 return cost;
3488 /* Estimate number of instructions that will be created by expanding STMT.
3489 WEIGHTS contains weights attributed to various constructs. */
3491 int
3492 estimate_num_insns (gimple stmt, eni_weights *weights)
3494 unsigned cost, i;
3495 enum gimple_code code = gimple_code (stmt);
3496 tree lhs;
3497 tree rhs;
3499 switch (code)
3501 case GIMPLE_ASSIGN:
3502 /* Try to estimate the cost of assignments. We have two cases to
3503 deal with:
3504 1) Simple assignments to registers;
3505 2) Stores to things that must live in memory. This includes
3506 "normal" stores to scalars, but also assignments of large
3507 structures, or constructors of big arrays;
3509 Let us look at these two cases, assuming we have "a = b + C":
3510 <GIMPLE_ASSIGN <var_decl "a">
3511 <plus_expr <var_decl "b"> <constant C>>
3512 If "a" is a GIMPLE register, the assignment to it is free on almost
3513 any target, because "a" usually ends up in a real register. Hence
3514 the only cost of this expression comes from the PLUS_EXPR, and we
3515 can ignore the GIMPLE_ASSIGN.
3516 If "a" is not a GIMPLE register, the assignment to "a" will most
3517 likely be a real store, so the cost of the GIMPLE_ASSIGN is the cost
3518 of moving something into "a", which we compute using the function
3519 estimate_move_cost. */
3520 if (gimple_clobber_p (stmt))
3521 return 0; /* ={v} {CLOBBER} stmt expands to nothing. */
3523 lhs = gimple_assign_lhs (stmt);
3524 rhs = gimple_assign_rhs1 (stmt);
3526 cost = 0;
3528 /* Account for the cost of moving to / from memory. */
3529 if (gimple_store_p (stmt))
3530 cost += estimate_move_cost (TREE_TYPE (lhs));
3531 if (gimple_assign_load_p (stmt))
3532 cost += estimate_move_cost (TREE_TYPE (rhs));
3534 cost += estimate_operator_cost (gimple_assign_rhs_code (stmt), weights,
3535 gimple_assign_rhs1 (stmt),
3536 get_gimple_rhs_class (gimple_assign_rhs_code (stmt))
3537 == GIMPLE_BINARY_RHS
3538 ? gimple_assign_rhs2 (stmt) : NULL);
3539 break;
3541 case GIMPLE_COND:
3542 cost = 1 + estimate_operator_cost (gimple_cond_code (stmt), weights,
3543 gimple_op (stmt, 0),
3544 gimple_op (stmt, 1));
3545 break;
3547 case GIMPLE_SWITCH:
3548 /* Take into account the cost of the switch + guess 2 conditional jumps
3549 for each case label.
3551 TODO: once the switch expansion logic is sufficiently separated, we can
3552 do a better job of estimating the cost of the switch. */
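/* For example, a switch with 9 case labels is charged
floor_log2 (9) * 2 == 6 when estimating time (roughly a balanced
decision tree) and 9 * 2 == 18 when estimating size. */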
3553 if (weights->time_based)
3554 cost = floor_log2 (gimple_switch_num_labels (stmt)) * 2;
3555 else
3556 cost = gimple_switch_num_labels (stmt) * 2;
3557 break;
3559 case GIMPLE_CALL:
3561 tree decl = gimple_call_fndecl (stmt);
3562 struct cgraph_node *node = NULL;
3564 /* Do not special-case builtins where we see the body.
3565 This just confuses the inliner. */
3566 if (!decl || !(node = cgraph_get_node (decl)) || node->analyzed)
3568 /* For builtins that are likely expanded to nothing or
3569 inlined, do not account for operand costs. */
3570 else if (is_simple_builtin (decl))
3571 return 0;
3572 else if (is_inexpensive_builtin (decl))
3573 return weights->target_builtin_call_cost;
3574 else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
3576 /* We canonicalize x * x to pow (x, 2.0) with -ffast-math, so
3577 specialize the cheap expansion we do here.
3578 ??? This asks for a more general solution. */
3579 switch (DECL_FUNCTION_CODE (decl))
3581 case BUILT_IN_POW:
3582 case BUILT_IN_POWF:
3583 case BUILT_IN_POWL:
3584 if (TREE_CODE (gimple_call_arg (stmt, 1)) == REAL_CST
3585 && REAL_VALUES_EQUAL
3586 (TREE_REAL_CST (gimple_call_arg (stmt, 1)), dconst2))
3587 return estimate_operator_cost (MULT_EXPR, weights,
3588 gimple_call_arg (stmt, 0),
3589 gimple_call_arg (stmt, 0));
3590 break;
3592 default:
3593 break;
3597 cost = node ? weights->call_cost : weights->indirect_call_cost;
3598 if (gimple_call_lhs (stmt))
3599 cost += estimate_move_cost (TREE_TYPE (gimple_call_lhs (stmt)));
3600 for (i = 0; i < gimple_call_num_args (stmt); i++)
3602 tree arg = gimple_call_arg (stmt, i);
3603 cost += estimate_move_cost (TREE_TYPE (arg));
3605 break;
3608 case GIMPLE_RETURN:
3609 return weights->return_cost;
3611 case GIMPLE_GOTO:
3612 case GIMPLE_LABEL:
3613 case GIMPLE_NOP:
3614 case GIMPLE_PHI:
3615 case GIMPLE_PREDICT:
3616 case GIMPLE_DEBUG:
3617 return 0;
3619 case GIMPLE_ASM:
3620 return asm_str_count (gimple_asm_string (stmt));
3622 case GIMPLE_RESX:
3623 /* This is either going to be an external function call with one
3624 argument, or two register copy statements plus a goto. */
3625 return 2;
3627 case GIMPLE_EH_DISPATCH:
3628 /* ??? This is going to turn into a switch statement. Ideally
3629 we'd have a look at the eh region and estimate the number of
3630 edges involved. */
3631 return 10;
3633 case GIMPLE_BIND:
3634 return estimate_num_insns_seq (gimple_bind_body (stmt), weights);
3636 case GIMPLE_EH_FILTER:
3637 return estimate_num_insns_seq (gimple_eh_filter_failure (stmt), weights);
3639 case GIMPLE_CATCH:
3640 return estimate_num_insns_seq (gimple_catch_handler (stmt), weights);
3642 case GIMPLE_TRY:
3643 return (estimate_num_insns_seq (gimple_try_eval (stmt), weights)
3644 + estimate_num_insns_seq (gimple_try_cleanup (stmt), weights));
3646 /* OpenMP directives are generally very expensive. */
3648 case GIMPLE_OMP_RETURN:
3649 case GIMPLE_OMP_SECTIONS_SWITCH:
3650 case GIMPLE_OMP_ATOMIC_STORE:
3651 case GIMPLE_OMP_CONTINUE:
3652 /* ...except these, which are cheap. */
3653 return 0;
3655 case GIMPLE_OMP_ATOMIC_LOAD:
3656 return weights->omp_cost;
3658 case GIMPLE_OMP_FOR:
3659 return (weights->omp_cost
3660 + estimate_num_insns_seq (gimple_omp_body (stmt), weights)
3661 + estimate_num_insns_seq (gimple_omp_for_pre_body (stmt), weights));
3663 case GIMPLE_OMP_PARALLEL:
3664 case GIMPLE_OMP_TASK:
3665 case GIMPLE_OMP_CRITICAL:
3666 case GIMPLE_OMP_MASTER:
3667 case GIMPLE_OMP_ORDERED:
3668 case GIMPLE_OMP_SECTION:
3669 case GIMPLE_OMP_SECTIONS:
3670 case GIMPLE_OMP_SINGLE:
3671 return (weights->omp_cost
3672 + estimate_num_insns_seq (gimple_omp_body (stmt), weights));
3674 case GIMPLE_TRANSACTION:
3675 return (weights->tm_cost
3676 + estimate_num_insns_seq (gimple_transaction_body (stmt),
3677 weights));
3679 default:
3680 gcc_unreachable ();
3683 return cost;
3686 /* Estimate number of instructions that will be created by expanding
3687 function FNDECL. WEIGHTS contains weights attributed to various
3688 constructs. */
3690 int
3691 estimate_num_insns_fn (tree fndecl, eni_weights *weights)
3693 struct function *my_function = DECL_STRUCT_FUNCTION (fndecl);
3694 gimple_stmt_iterator bsi;
3695 basic_block bb;
3696 int n = 0;
3698 gcc_assert (my_function && my_function->cfg);
3699 FOR_EACH_BB_FN (bb, my_function)
3701 for (bsi = gsi_start_bb (bb); !gsi_end_p (bsi); gsi_next (&bsi))
3702 n += estimate_num_insns (gsi_stmt (bsi), weights);
3705 return n;
3709 /* Initializes weights used by estimate_num_insns. */
3711 void
3712 init_inline_once (void)
3714 eni_size_weights.call_cost = 1;
3715 eni_size_weights.indirect_call_cost = 3;
3716 eni_size_weights.target_builtin_call_cost = 1;
3717 eni_size_weights.div_mod_cost = 1;
3718 eni_size_weights.omp_cost = 40;
3719 eni_size_weights.tm_cost = 10;
3720 eni_size_weights.time_based = false;
3721 eni_size_weights.return_cost = 1;
3723 /* Estimating time for call is difficult, since we have no idea what the
3724 called function does. In the current uses of eni_time_weights,
3725 underestimating the cost does less harm than overestimating it, so
3726 we choose a rather small value here. */
3727 eni_time_weights.call_cost = 10;
3728 eni_time_weights.indirect_call_cost = 15;
3729 eni_time_weights.target_builtin_call_cost = 1;
3730 eni_time_weights.div_mod_cost = 10;
3731 eni_time_weights.omp_cost = 40;
3732 eni_time_weights.tm_cost = 40;
3733 eni_time_weights.time_based = true;
3734 eni_time_weights.return_cost = 2;
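/* For illustration: with the two weight sets above, a statement such
as "a = b / c" contributes 1 unit to the size estimate but 10 units
to the time estimate (div_mod_cost), and a direct call contributes
1 vs. 10 (call_cost); roughly speaking, the size weights feed
code-growth decisions while the time weights feed speed-oriented
inlining heuristics. */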
3737 /* Estimate the number of instructions in a gimple_seq. */
3739 int
3740 count_insns_seq (gimple_seq seq, eni_weights *weights)
3742 gimple_stmt_iterator gsi;
3743 int n = 0;
3744 for (gsi = gsi_start (seq); !gsi_end_p (gsi); gsi_next (&gsi))
3745 n += estimate_num_insns (gsi_stmt (gsi), weights);
3747 return n;
3751 /* Install new lexical TREE_BLOCK underneath 'current_block'. */
3753 static void
3754 prepend_lexical_block (tree current_block, tree new_block)
3756 BLOCK_CHAIN (new_block) = BLOCK_SUBBLOCKS (current_block);
3757 BLOCK_SUBBLOCKS (current_block) = new_block;
3758 BLOCK_SUPERCONTEXT (new_block) = current_block;
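/* Sketch of the effect (illustrative): if CURRENT_BLOCK already has
subblocks B1 -> B2, then after

prepend_lexical_block (current_block, new_block);

the subblock chain reads NEW_BLOCK -> B1 -> B2, and NEW_BLOCK's
supercontext points back at CURRENT_BLOCK. */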
3761 /* Add local variables from CALLEE to CALLER. */
3763 static inline void
3764 add_local_variables (struct function *callee, struct function *caller,
3765 copy_body_data *id)
3767 tree var;
3768 unsigned ix;
3770 FOR_EACH_LOCAL_DECL (callee, ix, var)
3771 if (!can_be_nonlocal (var, id))
3773 tree new_var = remap_decl (var, id);
3775 /* Remap debug-expressions. */
3776 if (TREE_CODE (new_var) == VAR_DECL
3777 && DECL_HAS_DEBUG_EXPR_P (var)
3778 && new_var != var)
3780 tree tem = DECL_DEBUG_EXPR (var);
3781 bool old_regimplify = id->regimplify;
3782 id->remapping_type_depth++;
3783 walk_tree (&tem, copy_tree_body_r, id, NULL);
3784 id->remapping_type_depth--;
3785 id->regimplify = old_regimplify;
3786 SET_DECL_DEBUG_EXPR (new_var, tem);
3787 DECL_HAS_DEBUG_EXPR_P (new_var) = 1;
3789 add_local_decl (caller, new_var);
3793 /* If STMT is a GIMPLE_CALL, replace it with its inline expansion. */
3795 static bool
3796 expand_call_inline (basic_block bb, gimple stmt, copy_body_data *id)
3798 tree use_retvar;
3799 tree fn;
3800 struct pointer_map_t *st, *dst;
3801 tree return_slot;
3802 tree modify_dest;
3803 location_t saved_location;
3804 struct cgraph_edge *cg_edge;
3805 cgraph_inline_failed_t reason;
3806 basic_block return_block;
3807 edge e;
3808 gimple_stmt_iterator gsi, stmt_gsi;
3809 bool successfully_inlined = FALSE;
3810 bool purge_dead_abnormal_edges;
3812 /* Set input_location here so we get the right instantiation context
3813 if we call instantiate_decl from inlinable_function_p. */
3814 /* FIXME: instantiate_decl isn't called by inlinable_function_p. */
3815 saved_location = input_location;
3816 input_location = gimple_location (stmt);
3818 /* From here on, we're only interested in CALL_EXPRs. */
3819 if (gimple_code (stmt) != GIMPLE_CALL)
3820 goto egress;
3822 cg_edge = cgraph_edge (id->dst_node, stmt);
3823 gcc_checking_assert (cg_edge);
3824 /* First, see if we can figure out what function is being called.
3825 If we cannot, then there is no hope of inlining the function. */
3826 if (cg_edge->indirect_unknown_callee)
3827 goto egress;
3828 fn = cg_edge->callee->symbol.decl;
3829 gcc_checking_assert (fn);
3831 /* If FN is a declaration of a function in a nested scope that was
3832 globally declared inline, we don't set its DECL_INITIAL.
3833 However, we can't blindly follow DECL_ABSTRACT_ORIGIN because the
3834 C++ front-end uses it for cdtors to refer to their internal
3835 declarations, which are not real functions. Fortunately those
3836 don't have trees to be saved, so we can tell by checking their
3837 gimple_body. */
3838 if (!DECL_INITIAL (fn)
3839 && DECL_ABSTRACT_ORIGIN (fn)
3840 && gimple_has_body_p (DECL_ABSTRACT_ORIGIN (fn)))
3841 fn = DECL_ABSTRACT_ORIGIN (fn);
3843 /* Don't try to inline functions that are not well-suited to inlining. */
3844 if (cg_edge->inline_failed)
3846 reason = cg_edge->inline_failed;
3847 /* If this call was originally indirect, we do not want to emit any
3848 inlining related warnings or sorry messages because there are no
3849 guarantees regarding those. */
3850 if (cg_edge->indirect_inlining_edge)
3851 goto egress;
3853 if (lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn))
3854 /* For extern inline functions that get redefined we have
3855 always silently ignored the always_inline flag. Better
3856 behaviour would be to keep both bodies and use the extern
3857 inline body for inlining, but we can't do that because
3858 front ends overwrite the body. */
3859 && !cg_edge->callee->local.redefined_extern_inline
3860 /* Avoid warnings during early inline pass. */
3861 && cgraph_global_info_ready
3862 /* PR 20090218-1_0.c. Body can be provided by another module. */
3863 && (reason != CIF_BODY_NOT_AVAILABLE || !flag_generate_lto))
3865 error ("inlining failed in call to always_inline %q+F: %s", fn,
3866 cgraph_inline_failed_string (reason));
3867 error ("called from here");
3869 else if (warn_inline
3870 && DECL_DECLARED_INLINE_P (fn)
3871 && !DECL_NO_INLINE_WARNING_P (fn)
3872 && !DECL_IN_SYSTEM_HEADER (fn)
3873 && reason != CIF_UNSPECIFIED
3874 && !lookup_attribute ("noinline", DECL_ATTRIBUTES (fn))
3875 /* Do not warn about not inlined recursive calls. */
3876 && !cgraph_edge_recursive_p (cg_edge)
3877 /* Avoid warnings during early inline pass. */
3878 && cgraph_global_info_ready)
3880 warning (OPT_Winline, "inlining failed in call to %q+F: %s",
3881 fn, _(cgraph_inline_failed_string (reason)));
3882 warning (OPT_Winline, "called from here");
3884 goto egress;
3886 fn = cg_edge->callee->symbol.decl;
3888 #ifdef ENABLE_CHECKING
3889 if (cg_edge->callee->symbol.decl != id->dst_node->symbol.decl)
3890 verify_cgraph_node (cg_edge->callee);
3891 #endif
3893 /* We will be inlining this callee. */
3894 id->eh_lp_nr = lookup_stmt_eh_lp (stmt);
3896 /* Update the caller's EH personality. */
3897 if (DECL_FUNCTION_PERSONALITY (cg_edge->callee->symbol.decl))
3898 DECL_FUNCTION_PERSONALITY (cg_edge->caller->symbol.decl)
3899 = DECL_FUNCTION_PERSONALITY (cg_edge->callee->symbol.decl);
3901 /* Split the block holding the GIMPLE_CALL. */
3902 e = split_block (bb, stmt);
3903 bb = e->src;
3904 return_block = e->dest;
3905 remove_edge (e);
3907 /* split_block splits after the statement; work around this by
3908 moving the call into the second block manually. Not pretty,
3909 but seems easier than doing the CFG manipulation by hand
3910 when the GIMPLE_CALL is in the last statement of BB. */
3911 stmt_gsi = gsi_last_bb (bb);
3912 gsi_remove (&stmt_gsi, false);
3914 /* If the GIMPLE_CALL was in the last statement of BB, it may have
3915 been the source of abnormal edges. In this case, schedule
3916 the removal of dead abnormal edges. */
3917 gsi = gsi_start_bb (return_block);
3918 if (gsi_end_p (gsi))
3920 gsi_insert_after (&gsi, stmt, GSI_NEW_STMT);
3921 purge_dead_abnormal_edges = true;
3923 else
3925 gsi_insert_before (&gsi, stmt, GSI_NEW_STMT);
3926 purge_dead_abnormal_edges = false;
3929 stmt_gsi = gsi_start_bb (return_block);
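/* Illustrative sketch of the CFG surgery above: starting from

BB: s1; s2; call;

split_block leaves the call at the end of BB with an empty
RETURN_BLOCK after it; moving the call by hand then yields

BB: s1; s2; RETURN_BLOCK: call;

so the copied body can later be spliced in between the two. */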
3931 /* Build a block containing code to initialize the arguments, the
3932 actual inline expansion of the body, and a label for the return
3933 statements within the function to jump to. The type of the
3934 statement expression is the return type of the function call.
3935 ??? If the call does not have an associated block then we will
3936 remap all callee blocks to NULL, effectively dropping most of
3937 its debug information. This should only happen for calls to
3938 artificial decls inserted by the compiler itself. We need to
3939 either link the inlined blocks into the caller block tree or
3940 not refer to them in any way to not break GC for locations. */
3941 if (gimple_block (stmt))
3943 id->block = make_node (BLOCK);
3944 BLOCK_ABSTRACT_ORIGIN (id->block) = fn;
3945 BLOCK_SOURCE_LOCATION (id->block) = LOCATION_LOCUS (input_location);
3946 prepend_lexical_block (gimple_block (stmt), id->block);
3949 /* Local declarations will be replaced by their equivalents in this
3950 map. */
3951 st = id->decl_map;
3952 id->decl_map = pointer_map_create ();
3953 dst = id->debug_map;
3954 id->debug_map = NULL;
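/* Note: ID is reused for every call inlined into this caller, so the
previous maps are saved in ST and DST and fresh ones installed
here; this keeps the decl substitutions of separate inline
expansions from leaking into one another. The saved maps are
restored in the clean-up code below. */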
3956 /* Record the function we are about to inline. */
3957 id->src_fn = fn;
3958 id->src_node = cg_edge->callee;
3959 id->src_cfun = DECL_STRUCT_FUNCTION (fn);
3960 id->gimple_call = stmt;
3962 gcc_assert (!id->src_cfun->after_inlining);
3964 id->entry_bb = bb;
3965 if (lookup_attribute ("cold", DECL_ATTRIBUTES (fn)))
3967 gimple_stmt_iterator si = gsi_last_bb (bb);
3968 gsi_insert_after (&si, gimple_build_predict (PRED_COLD_FUNCTION,
3969 NOT_TAKEN),
3970 GSI_NEW_STMT);
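/* E.g. (illustrative): inlining a callee declared

void fail (const char *) __attribute__ ((cold));

plants a PRED_COLD_FUNCTION/NOT_TAKEN predictor at the call site,
so the surrounding code keeps treating the inlined path as
unlikely to execute. */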
3972 initialize_inlined_parameters (id, stmt, fn, bb);
3974 if (DECL_INITIAL (fn))
3976 if (gimple_block (stmt))
3978 tree *var;
3980 prepend_lexical_block (id->block,
3981 remap_blocks (DECL_INITIAL (fn), id));
3982 gcc_checking_assert (BLOCK_SUBBLOCKS (id->block)
3983 && (BLOCK_CHAIN (BLOCK_SUBBLOCKS (id->block))
3984 == NULL_TREE));
3985 /* Move vars for PARM_DECLs from DECL_INITIAL block to id->block,
3986 otherwise for DWARF DW_TAG_formal_parameter will not be children of
3987 DW_TAG_inlined_subroutine, but of a DW_TAG_lexical_block
3988 under it. The parameters can be then evaluated in the debugger,
3989 but don't show in backtraces. */
3990 for (var = &BLOCK_VARS (BLOCK_SUBBLOCKS (id->block)); *var; )
3991 if (TREE_CODE (DECL_ORIGIN (*var)) == PARM_DECL)
3993 tree v = *var;
3994 *var = TREE_CHAIN (v);
3995 TREE_CHAIN (v) = BLOCK_VARS (id->block);
3996 BLOCK_VARS (id->block) = v;
3998 else
3999 var = &TREE_CHAIN (*var);
4001 else
4002 remap_blocks_to_null (DECL_INITIAL (fn), id);
4005 /* Return statements in the function body will be replaced by jumps
4006 to the RET_LABEL. */
4007 gcc_assert (DECL_INITIAL (fn));
4008 gcc_assert (TREE_CODE (DECL_INITIAL (fn)) == BLOCK);
4010 /* Find the LHS to which the result of this call is assigned. */
4011 return_slot = NULL;
4012 if (gimple_call_lhs (stmt))
4014 modify_dest = gimple_call_lhs (stmt);
4016 /* The function which we are inlining might not return a value,
4017 in which case we should issue a warning that the function
4018 does not return a value. In that case the optimizers will
4019 see that the variable to which the value is assigned was not
4020 initialized. We do not want to issue a warning about that
4021 uninitialized variable. */
4022 if (DECL_P (modify_dest))
4023 TREE_NO_WARNING (modify_dest) = 1;
4025 if (gimple_call_return_slot_opt_p (stmt))
4027 return_slot = modify_dest;
4028 modify_dest = NULL;
4031 else
4032 modify_dest = NULL;
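/* Illustrative examples of the cases above (not from the sources):
for "x = f ();" MODIFY_DEST is x and RETURN_SLOT stays NULL; for
"s = g ();" with the return-slot optimization applied to the call,
RETURN_SLOT becomes s and MODIFY_DEST is cleared; for a plain
"f ();" both stay NULL and the return value is dropped. */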
4034 /* If we are inlining a call to the C++ operator new, we don't want
4035 to use type based alias analysis on the return value. Otherwise
4036 we may get confused if the compiler sees that the inlined new
4037 function returns a pointer which was just deleted. See bug
4038 33407. */
4039 if (DECL_IS_OPERATOR_NEW (fn))
4041 return_slot = NULL;
4042 modify_dest = NULL;
4045 /* Declare the return variable for the function. */
4046 use_retvar = declare_return_variable (id, return_slot, modify_dest, bb);
4048 /* Add local vars in this inlined callee to caller. */
4049 add_local_variables (id->src_cfun, cfun, id);
4051 if (dump_file && (dump_flags & TDF_DETAILS))
4053 fprintf (dump_file, "Inlining ");
4054 print_generic_expr (dump_file, id->src_fn, 0);
4055 fprintf (dump_file, " to ");
4056 print_generic_expr (dump_file, id->dst_fn, 0);
4057 fprintf (dump_file, " with frequency %i\n", cg_edge->frequency);
4060 /* This is it. Duplicate the callee body. Assume callee is
4061 pre-gimplified. Note that we must not alter the caller
4062 function in any way before this point, as this CALL_EXPR may be
4063 a self-referential call; if we're calling ourselves, we need to
4064 duplicate our body before altering anything. */
4065 copy_body (id, bb->count,
4066 /* Update to use GCOV_COMPUTE_SCALE. */
4067 cg_edge->frequency * REG_BR_PROB_BASE / CGRAPH_FREQ_BASE,
4068 bb, return_block, NULL, NULL);
4070 /* Reset the escaped solution. */
4071 if (cfun->gimple_df)
4072 pt_solution_reset (&cfun->gimple_df->escaped);
4074 /* Clean up. */
4075 if (id->debug_map)
4077 pointer_map_destroy (id->debug_map);
4078 id->debug_map = dst;
4080 pointer_map_destroy (id->decl_map);
4081 id->decl_map = st;
4083 /* Unlink the call's virtual operands before replacing it. */
4084 unlink_stmt_vdef (stmt);
4086 /* If the inlined function returns a result that we care about,
4087 substitute the GIMPLE_CALL with an assignment of the return
4088 variable to the LHS of the call. That is, if STMT was
4089 'a = foo (...)', substitute the call with 'a = USE_RETVAR'. */
4090 if (use_retvar && gimple_call_lhs (stmt))
4092 gimple old_stmt = stmt;
4093 stmt = gimple_build_assign (gimple_call_lhs (stmt), use_retvar);
4094 gsi_replace (&stmt_gsi, stmt, false);
4095 maybe_clean_or_replace_eh_stmt (old_stmt, stmt);
4097 else
4099 /* Handle the case of inlining a function with no return
4100 statement, which causes the return value to become undefined. */
4101 if (gimple_call_lhs (stmt)
4102 && TREE_CODE (gimple_call_lhs (stmt)) == SSA_NAME)
4104 tree name = gimple_call_lhs (stmt);
4105 tree var = SSA_NAME_VAR (name);
4106 tree def = ssa_default_def (cfun, var);
4108 if (def)
4110 /* If the variable is used undefined, make this name
4111 undefined via a move. */
4112 stmt = gimple_build_assign (gimple_call_lhs (stmt), def);
4113 gsi_replace (&stmt_gsi, stmt, true);
4115 else
4117 /* Otherwise make this variable undefined. */
4118 gsi_remove (&stmt_gsi, true);
4119 set_ssa_default_def (cfun, var, name);
4120 SSA_NAME_DEF_STMT (name) = gimple_build_nop ();
4123 else
4124 gsi_remove (&stmt_gsi, true);
4127 if (purge_dead_abnormal_edges)
4129 gimple_purge_dead_eh_edges (return_block);
4130 gimple_purge_dead_abnormal_call_edges (return_block);
4133 /* If the value of the new expression is ignored, that's OK. We
4134 don't warn about this for CALL_EXPRs, so we shouldn't warn about
4135 the equivalent inlined version either. */
4136 if (is_gimple_assign (stmt))
4138 gcc_assert (gimple_assign_single_p (stmt)
4139 || CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt)));
4140 TREE_USED (gimple_assign_rhs1 (stmt)) = 1;
4143 /* Output the inlining info for this abstract function, since it has been
4144 inlined. If we don't do this now, we can lose the information about the
4145 variables in the function when the blocks get blown away as soon as we
4146 remove the cgraph node. */
4147 if (gimple_block (stmt))
4148 (*debug_hooks->outlining_inline_function) (cg_edge->callee->symbol.decl);
4150 /* Update callgraph if needed. */
4151 cgraph_remove_node (cg_edge->callee);
4153 id->block = NULL_TREE;
4154 successfully_inlined = TRUE;
4156 egress:
4157 input_location = saved_location;
4158 return successfully_inlined;
4161 /* Expand inlinable call statements in basic block BB.
4162 We can only have CALL_EXPRs as the "toplevel" tree code or nested
4163 in a MODIFY_EXPR. */
4165 static bool
4166 gimple_expand_calls_inline (basic_block bb, copy_body_data *id)
4168 gimple_stmt_iterator gsi;
4170 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
4172 gimple stmt = gsi_stmt (gsi);
4174 if (is_gimple_call (stmt)
4175 && expand_call_inline (bb, stmt, id))
4176 return true;
4179 return false;
4183 /* Walk all basic blocks created after FIRST and try to fold every statement
4184 in the STATEMENTS pointer set. */
4186 static void
4187 fold_marked_statements (int first, struct pointer_set_t *statements)
4189 for (; first < n_basic_blocks; first++)
4190 if (BASIC_BLOCK (first))
4192 gimple_stmt_iterator gsi;
4194 for (gsi = gsi_start_bb (BASIC_BLOCK (first));
4195 !gsi_end_p (gsi);
4196 gsi_next (&gsi))
4197 if (pointer_set_contains (statements, gsi_stmt (gsi)))
4199 gimple old_stmt = gsi_stmt (gsi);
4200 tree old_decl = is_gimple_call (old_stmt) ? gimple_call_fndecl (old_stmt) : 0;
4202 if (old_decl && DECL_BUILT_IN (old_decl))
4204 /* Folding builtins can create multiple instructions,
4205 we need to look at all of them. */
4206 gimple_stmt_iterator i2 = gsi;
4207 gsi_prev (&i2);
4208 if (fold_stmt (&gsi))
4210 gimple new_stmt;
4211 /* If a builtin at the end of a bb folded into nothing,
4212 the following loop won't work. */
4213 if (gsi_end_p (gsi))
4215 cgraph_update_edges_for_call_stmt (old_stmt,
4216 old_decl, NULL);
4217 break;
4219 if (gsi_end_p (i2))
4220 i2 = gsi_start_bb (BASIC_BLOCK (first));
4221 else
4222 gsi_next (&i2);
4223 while (1)
4225 new_stmt = gsi_stmt (i2);
4226 update_stmt (new_stmt);
4227 cgraph_update_edges_for_call_stmt (old_stmt, old_decl,
4228 new_stmt);
4230 if (new_stmt == gsi_stmt (gsi))
4232 /* It is okay to check only for the very last
4233 of these statements. If it is a throwing
4234 statement nothing will change. If it isn't,
4235 this can remove EH edges. The only case this
4236 would be wrong is if some intermediate stmts
4237 could throw while the last one cannot; that
4238 would mean we'd have to split the block, which
4239 we can't do here and we'd lose anyway. And as
4240 builtins probably never throw, this all
4241 is moot anyway. */
4242 if (maybe_clean_or_replace_eh_stmt (old_stmt,
4243 new_stmt))
4244 gimple_purge_dead_eh_edges (BASIC_BLOCK (first));
4245 break;
4247 gsi_next (&i2);
4251 else if (fold_stmt (&gsi))
4253 /* Re-read the statement from GSI as fold_stmt() may
4254 have changed it. */
4255 gimple new_stmt = gsi_stmt (gsi);
4256 update_stmt (new_stmt);
4258 if (is_gimple_call (old_stmt)
4259 || is_gimple_call (new_stmt))
4260 cgraph_update_edges_for_call_stmt (old_stmt, old_decl,
4261 new_stmt);
4263 if (maybe_clean_or_replace_eh_stmt (old_stmt, new_stmt))
4264 gimple_purge_dead_eh_edges (BASIC_BLOCK (first));
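/* For instance (illustrative): folding a copied call such as

__builtin_memcpy (&a, &b, sizeof (int));

can replace the GIMPLE_CALL with one or more ordinary assignments,
which is why the builtin branch above revisits every statement the
folding produced, updating call edges and EH information for
each. */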
4270 /* Return true if BB has at least one abnormal outgoing edge. */
4272 static inline bool
4273 has_abnormal_outgoing_edge_p (basic_block bb)
4275 edge e;
4276 edge_iterator ei;
4278 FOR_EACH_EDGE (e, ei, bb->succs)
4279 if (e->flags & EDGE_ABNORMAL)
4280 return true;
4282 return false;
4285 /* Expand calls to inline functions in the body of FN. */
4287 unsigned int
4288 optimize_inline_calls (tree fn)
4290 copy_body_data id;
4291 basic_block bb;
4292 int last = n_basic_blocks;
4293 struct gimplify_ctx gctx;
4294 bool inlined_p = false;
4296 /* Clear out ID. */
4297 memset (&id, 0, sizeof (id));
4299 id.src_node = id.dst_node = cgraph_get_node (fn);
4300 gcc_assert (id.dst_node->analyzed);
4301 id.dst_fn = fn;
4302 /* Or any functions that aren't finished yet. */
4303 if (current_function_decl)
4304 id.dst_fn = current_function_decl;
4306 id.copy_decl = copy_decl_maybe_to_var;
4307 id.transform_call_graph_edges = CB_CGE_DUPLICATE;
4308 id.transform_new_cfg = false;
4309 id.transform_return_to_modify = true;
4310 id.transform_lang_insert_block = NULL;
4311 id.statements_to_fold = pointer_set_create ();
4313 push_gimplify_context (&gctx);
4315 /* We make no attempts to keep dominance info up-to-date. */
4316 free_dominance_info (CDI_DOMINATORS);
4317 free_dominance_info (CDI_POST_DOMINATORS);
4319 /* Register specific gimple functions. */
4320 gimple_register_cfg_hooks ();
4322 /* Reach the trees by walking over the CFG, and note the
4323 enclosing basic-blocks in the call edges. */
4324 /* We walk the blocks going forward, because inlined function bodies
4325 will split id->current_basic_block, and the new blocks will
4326 follow it; we'll trudge through them, processing their CALL_EXPRs
4327 along the way. */
4328 FOR_EACH_BB (bb)
4329 inlined_p |= gimple_expand_calls_inline (bb, &id);
4331 pop_gimplify_context (NULL);
4333 #ifdef ENABLE_CHECKING
4335 struct cgraph_edge *e;
4337 verify_cgraph_node (id.dst_node);
4339 /* Double check that we inlined everything we are supposed to inline. */
4340 for (e = id.dst_node->callees; e; e = e->next_callee)
4341 gcc_assert (e->inline_failed);
4343 #endif
4345 /* Fold queued statements. */
4346 fold_marked_statements (last, id.statements_to_fold);
4347 pointer_set_destroy (id.statements_to_fold);
4349 gcc_assert (!id.debug_stmts.exists ());
4351 /* If we didn't inline into the function there is nothing to do. */
4352 if (!inlined_p)
4353 return 0;
4355 /* Renumber the lexical scoping (non-code) blocks consecutively. */
4356 number_blocks (fn);
4358 delete_unreachable_blocks_update_callgraph (&id);
4359 #ifdef ENABLE_CHECKING
4360 verify_cgraph_node (id.dst_node);
4361 #endif
4363 /* It would be nice to check SSA/CFG/statement consistency here, but it is
4364 not possible yet - the IPA passes might make various functions not
4365 throw and they don't care to proactively update local EH info. This is
4366 done later in the fixup_cfg pass, which also executes the verification. */
4367 return (TODO_update_ssa
4368 | TODO_cleanup_cfg
4369 | (gimple_in_ssa_p (cfun) ? TODO_remove_unused_locals : 0)
4370 | (gimple_in_ssa_p (cfun) ? TODO_update_address_taken : 0)
4371 | (profile_status != PROFILE_ABSENT ? TODO_rebuild_frequencies : 0));
4374 /* Passed to walk_tree. Copies the node pointed to, if appropriate. */
4376 tree
4377 copy_tree_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
4379 enum tree_code code = TREE_CODE (*tp);
4380 enum tree_code_class cl = TREE_CODE_CLASS (code);
4382 /* We make copies of most nodes. */
4383 if (IS_EXPR_CODE_CLASS (cl)
4384 || code == TREE_LIST
4385 || code == TREE_VEC
4386 || code == TYPE_DECL
4387 || code == OMP_CLAUSE)
4389 /* Because the chain gets clobbered when we make a copy, we save it
4390 here. */
4391 tree chain = NULL_TREE, new_tree;
4393 if (CODE_CONTAINS_STRUCT (code, TS_COMMON))
4394 chain = TREE_CHAIN (*tp);
4396 /* Copy the node. */
4397 new_tree = copy_node (*tp);
4399 /* Propagate mudflap marked-ness. */
4400 if (flag_mudflap && mf_marked_p (*tp))
4401 mf_mark (new_tree);
4403 *tp = new_tree;
4405 /* Now, restore the chain, if appropriate. That will cause
4406 walk_tree to walk into the chain as well. */
4407 if (code == PARM_DECL
4408 || code == TREE_LIST
4409 || code == OMP_CLAUSE)
4410 TREE_CHAIN (*tp) = chain;
4412 /* For now, we don't update BLOCKs when we make copies. So, we
4413 have to nullify all BIND_EXPRs. */
4414 if (TREE_CODE (*tp) == BIND_EXPR)
4415 BIND_EXPR_BLOCK (*tp) = NULL_TREE;
4417 else if (code == CONSTRUCTOR)
4419 /* CONSTRUCTOR nodes need special handling because
4420 we need to duplicate the vector of elements. */
4421 tree new_tree;
4423 new_tree = copy_node (*tp);
4425 /* Propagate mudflap marked-ness. */
4426 if (flag_mudflap && mf_marked_p (*tp))
4427 mf_mark (new_tree);
4429 CONSTRUCTOR_ELTS (new_tree) = vec_safe_copy (CONSTRUCTOR_ELTS (*tp));
4430 *tp = new_tree;
4432 else if (code == STATEMENT_LIST)
4433 /* We used to just abort on STATEMENT_LIST, but we can run into them
4434 with statement-expressions (c++/40975). */
4435 copy_statement_list (tp);
4436 else if (TREE_CODE_CLASS (code) == tcc_type)
4437 *walk_subtrees = 0;
4438 else if (TREE_CODE_CLASS (code) == tcc_declaration)
4439 *walk_subtrees = 0;
4440 else if (TREE_CODE_CLASS (code) == tcc_constant)
4441 *walk_subtrees = 0;
4442 return NULL_TREE;
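/* Sketch of the resulting sharing (illustrative): copying the
expression a + b produces a fresh PLUS_EXPR node, while the
VAR_DECLs a and b and their type stay shared with the original;
only nodes of the classes handled above are duplicated, and the
walk stops at types, declarations and constants. */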
4445 /* The SAVE_EXPR pointed to by TP is being copied. If ST contains
4446 information indicating to what new SAVE_EXPR this one should be mapped,
4447 use that one. Otherwise, create a new node and enter it in ST. FN is
4448 the function into which the copy will be placed. */
4450 static void
4451 remap_save_expr (tree *tp, void *st_, int *walk_subtrees)
4453 struct pointer_map_t *st = (struct pointer_map_t *) st_;
4454 tree *n;
4455 tree t;
4457 /* See if we already encountered this SAVE_EXPR. */
4458 n = (tree *) pointer_map_contains (st, *tp);
4460 /* If we didn't already remap this SAVE_EXPR, do so now. */
4461 if (!n)
4463 t = copy_node (*tp);
4465 /* Remember this SAVE_EXPR. */
4466 *pointer_map_insert (st, *tp) = t;
4467 /* Make sure we don't remap an already-remapped SAVE_EXPR. */
4468 *pointer_map_insert (st, t) = t;
4470 else
4472 /* We've already walked into this SAVE_EXPR; don't do it again. */
4473 *walk_subtrees = 0;
4474 t = *n;
4477 /* Replace this SAVE_EXPR with the copy. */
4478 *tp = t;
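/* Illustrative example: in a tree such as

SAVE_EXPR <x + 1> * SAVE_EXPR <x + 1>

both operands reference the same node; the map in ST guarantees
that both references are rewritten to the same copy, preserving
the evaluate-once semantics of SAVE_EXPR. */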
4481 /* Called via walk_gimple_seq. If *GSIP points to a GIMPLE_LABEL for a local
4482 label, copies the declaration and enters it in the splay_tree in DATA (which
4483 is really a 'copy_body_data *'). */
4485 static tree
4486 mark_local_labels_stmt (gimple_stmt_iterator *gsip,
4487 bool *handled_ops_p ATTRIBUTE_UNUSED,
4488 struct walk_stmt_info *wi)
4490 copy_body_data *id = (copy_body_data *) wi->info;
4491 gimple stmt = gsi_stmt (*gsip);
4493 if (gimple_code (stmt) == GIMPLE_LABEL)
4495 tree decl = gimple_label_label (stmt);
4497 /* Copy the decl and remember the copy. */
4498 insert_decl_map (id, decl, id->copy_decl (decl, id));
4501 return NULL_TREE;
4505 /* Called via walk_gimple_seq by copy_gimple_seq_and_replace_locals.
4506 Using the pointer_map pointed to by ST (which is really the decl_map
4507 of a 'copy_body_data *'), remaps all local declarations to appropriate
4508 replacements in gimple operands. */
4510 static tree
4511 replace_locals_op (tree *tp, int *walk_subtrees, void *data)
4513 struct walk_stmt_info *wi = (struct walk_stmt_info*) data;
4514 copy_body_data *id = (copy_body_data *) wi->info;
4515 struct pointer_map_t *st = id->decl_map;
4516 tree *n;
4517 tree expr = *tp;
4519 /* Only a local declaration (variable or label). */
4520 if ((TREE_CODE (expr) == VAR_DECL
4521 && !TREE_STATIC (expr))
4522 || TREE_CODE (expr) == LABEL_DECL)
4524 /* Lookup the declaration. */
4525 n = (tree *) pointer_map_contains (st, expr);
4527 /* If it's there, remap it. */
4528 if (n)
4529 *tp = *n;
4530 *walk_subtrees = 0;
4532 else if (TREE_CODE (expr) == STATEMENT_LIST
4533 || TREE_CODE (expr) == BIND_EXPR
4534 || TREE_CODE (expr) == SAVE_EXPR)
4535 gcc_unreachable ();
4536 else if (TREE_CODE (expr) == TARGET_EXPR)
4538 /* Don't mess with a TARGET_EXPR that hasn't been expanded.
4539 It's OK for this to happen if it was part of a subtree that
4540 isn't immediately expanded, such as operand 2 of another
4541 TARGET_EXPR. */
4542 if (!TREE_OPERAND (expr, 1))
4544 TREE_OPERAND (expr, 1) = TREE_OPERAND (expr, 3);
4545 TREE_OPERAND (expr, 3) = NULL_TREE;
4549 /* Keep iterating. */
4550 return NULL_TREE;
4554 /* Called via walk_gimple_seq by copy_gimple_seq_and_replace_locals.
4555 Using the pointer_map pointed to by ST (which is really the decl_map
4556 of a 'copy_body_data *'), remaps all local declarations to appropriate
4557 replacements in gimple statements. */
4559 static tree
4560 replace_locals_stmt (gimple_stmt_iterator *gsip,
4561 bool *handled_ops_p ATTRIBUTE_UNUSED,
4562 struct walk_stmt_info *wi)
4564 copy_body_data *id = (copy_body_data *) wi->info;
4565 gimple stmt = gsi_stmt (*gsip);
4567 if (gimple_code (stmt) == GIMPLE_BIND)
4569 tree block = gimple_bind_block (stmt);
4571 if (block)
4573 remap_block (&block, id);
4574 gimple_bind_set_block (stmt, block);
4577 /* This will remap a lot of the same decls again, but this should be
4578 harmless. */
4579 if (gimple_bind_vars (stmt))
4580 gimple_bind_set_vars (stmt, remap_decls (gimple_bind_vars (stmt),
4581 NULL, id));
4584 /* Keep iterating. */
4585 return NULL_TREE;
4589 /* Copies everything in SEQ and replaces variables and labels local to
4590 current_function_decl. */
4592 gimple_seq
4593 copy_gimple_seq_and_replace_locals (gimple_seq seq)
4595 copy_body_data id;
4596 struct walk_stmt_info wi;
4597 struct pointer_set_t *visited;
4598 gimple_seq copy;
4600 /* There's nothing to do for NULL_TREE. */
4601 if (seq == NULL)
4602 return seq;
4604 /* Set up ID. */
4605 memset (&id, 0, sizeof (id));
4606 id.src_fn = current_function_decl;
4607 id.dst_fn = current_function_decl;
4608 id.decl_map = pointer_map_create ();
4609 id.debug_map = NULL;
4611 id.copy_decl = copy_decl_no_change;
4612 id.transform_call_graph_edges = CB_CGE_DUPLICATE;
4613 id.transform_new_cfg = false;
4614 id.transform_return_to_modify = false;
4615 id.transform_lang_insert_block = NULL;
4617 /* Walk the tree once to find local labels. */
4618 memset (&wi, 0, sizeof (wi));
4619 visited = pointer_set_create ();
4620 wi.info = &id;
4621 wi.pset = visited;
4622 walk_gimple_seq (seq, mark_local_labels_stmt, NULL, &wi);
4623 pointer_set_destroy (visited);
4625 copy = gimple_seq_copy (seq);
4627 /* Walk the copy, remapping decls. */
4628 memset (&wi, 0, sizeof (wi));
4629 wi.info = &id;
4630 walk_gimple_seq (copy, replace_locals_stmt, replace_locals_op, &wi);
4632 /* Clean up. */
4633 pointer_map_destroy (id.decl_map);
4634 if (id.debug_map)
4635 pointer_map_destroy (id.debug_map);
4637 return copy;
4641 /* Allow someone to determine if SEARCH is a child of TOP from gdb. */
4643 static tree
4644 debug_find_tree_1 (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED, void *data)
4646 if (*tp == data)
4647 return (tree) data;
4648 else
4649 return NULL;
4652 DEBUG_FUNCTION bool
4653 debug_find_tree (tree top, tree search)
4655 return walk_tree_without_duplicates (&top, debug_find_tree_1, search) != 0;
4659 /* Declare the variables created by the inliner. Add all the variables in
4660 VARS to BLOCK. */
4662 static void
4663 declare_inline_vars (tree block, tree vars)
4665 tree t;
4666 for (t = vars; t; t = DECL_CHAIN (t))
4668 DECL_SEEN_IN_BIND_EXPR_P (t) = 1;
4669 gcc_assert (!TREE_STATIC (t) && !TREE_ASM_WRITTEN (t));
4670 add_local_decl (cfun, t);
4673 if (block)
4674 BLOCK_VARS (block) = chainon (BLOCK_VARS (block), vars);
4677 /* Finish up the copy COPY of DECL. The original DECL was in
4678 ID->src_fn; the copy will live in ID->dst_fn. Fix up debug info,
4679 the abstract origin, RTL and the declaration context. */
4681 static tree
4682 copy_decl_for_dup_finish (copy_body_data *id, tree decl, tree copy)
4684 /* Don't generate debug information for the copy if we wouldn't have
4685 generated it for the original either. */
4686 DECL_ARTIFICIAL (copy) = DECL_ARTIFICIAL (decl);
4687 DECL_IGNORED_P (copy) = DECL_IGNORED_P (decl);
4689 /* Set the DECL_ABSTRACT_ORIGIN so the debugging routines know what
4690 declaration inspired this copy. */
4691 DECL_ABSTRACT_ORIGIN (copy) = DECL_ORIGIN (decl);
4693 /* The new variable/label has no RTL, yet. */
4694 if (CODE_CONTAINS_STRUCT (TREE_CODE (copy), TS_DECL_WRTL)
4695 && !TREE_STATIC (copy) && !DECL_EXTERNAL (copy))
4696 SET_DECL_RTL (copy, 0);
4698 /* These args would always appear unused, if not for this. */
4699 TREE_USED (copy) = 1;
4701 /* Set the context for the new declaration. */
4702 if (!DECL_CONTEXT (decl))
4703 /* Globals stay global. */
4705 else if (DECL_CONTEXT (decl) != id->src_fn)
4706 /* Things that weren't in the scope of the function we're inlining
4707 from aren't in the scope we're inlining to, either. */
4709 else if (TREE_STATIC (decl))
4710 /* Function-scoped static variables should stay in the original
4711 function. */
4713 else
4714 /* Ordinary automatic local variables are now in the scope of the
4715 new function. */
4716 DECL_CONTEXT (copy) = id->dst_fn;
4718 return copy;
4721 static tree
4722 copy_decl_to_var (tree decl, copy_body_data *id)
4724 tree copy, type;
4726 gcc_assert (TREE_CODE (decl) == PARM_DECL
4727 || TREE_CODE (decl) == RESULT_DECL);
4729 type = TREE_TYPE (decl);
4731 copy = build_decl (DECL_SOURCE_LOCATION (id->dst_fn),
4732 VAR_DECL, DECL_NAME (decl), type);
4733 if (DECL_PT_UID_SET_P (decl))
4734 SET_DECL_PT_UID (copy, DECL_PT_UID (decl));
4735 TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (decl);
4736 TREE_READONLY (copy) = TREE_READONLY (decl);
4737 TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (decl);
4738 DECL_GIMPLE_REG_P (copy) = DECL_GIMPLE_REG_P (decl);
4740 return copy_decl_for_dup_finish (id, decl, copy);
4743 /* Like copy_decl_to_var, but create a return slot object instead of a
4744 pointer variable for return by invisible reference. */
4746 static tree
4747 copy_result_decl_to_var (tree decl, copy_body_data *id)
4749 tree copy, type;
4751 gcc_assert (TREE_CODE (decl) == PARM_DECL
4752 || TREE_CODE (decl) == RESULT_DECL);
4754 type = TREE_TYPE (decl);
4755 if (DECL_BY_REFERENCE (decl))
4756 type = TREE_TYPE (type);
4758 copy = build_decl (DECL_SOURCE_LOCATION (id->dst_fn),
4759 VAR_DECL, DECL_NAME (decl), type);
4760 if (DECL_PT_UID_SET_P (decl))
4761 SET_DECL_PT_UID (copy, DECL_PT_UID (decl));
4762 TREE_READONLY (copy) = TREE_READONLY (decl);
4763 TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (decl);
4764 if (!DECL_BY_REFERENCE (decl))
4766 TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (decl);
4767 DECL_GIMPLE_REG_P (copy) = DECL_GIMPLE_REG_P (decl);
4770 return copy_decl_for_dup_finish (id, decl, copy);
4773 tree
4774 copy_decl_no_change (tree decl, copy_body_data *id)
4776 tree copy;
4778 copy = copy_node (decl);
4780 /* The COPY is not abstract; it will be generated in DST_FN. */
4781 DECL_ABSTRACT (copy) = 0;
4782 lang_hooks.dup_lang_specific_decl (copy);
4784 /* TREE_ADDRESSABLE isn't used to indicate that a label's address has
4785 been taken; it's for internal bookkeeping in expand_goto_internal. */
4786 if (TREE_CODE (copy) == LABEL_DECL)
4788 TREE_ADDRESSABLE (copy) = 0;
4789 LABEL_DECL_UID (copy) = -1;
4792 return copy_decl_for_dup_finish (id, decl, copy);
4795 static tree
4796 copy_decl_maybe_to_var (tree decl, copy_body_data *id)
4798 if (TREE_CODE (decl) == PARM_DECL || TREE_CODE (decl) == RESULT_DECL)
4799 return copy_decl_to_var (decl, id);
4800 else
4801 return copy_decl_no_change (decl, id);
4804 /* Return a copy of the function's argument tree. */
4805 static tree
4806 copy_arguments_for_versioning (tree orig_parm, copy_body_data * id,
4807 bitmap args_to_skip, tree *vars)
4809 tree arg, *parg;
4810 tree new_parm = NULL;
4811 int i = 0;
4813 parg = &new_parm;
4815 for (arg = orig_parm; arg; arg = DECL_CHAIN (arg), i++)
4816 if (!args_to_skip || !bitmap_bit_p (args_to_skip, i))
4818 tree new_tree = remap_decl (arg, id);
4819 if (TREE_CODE (new_tree) != PARM_DECL)
4820 new_tree = id->copy_decl (arg, id);
4821 lang_hooks.dup_lang_specific_decl (new_tree);
4822 *parg = new_tree;
4823 parg = &DECL_CHAIN (new_tree);
4825 else if (!pointer_map_contains (id->decl_map, arg))
4827 /* Make an equivalent VAR_DECL. If the argument was used
4828 as a temporary variable later in the function, the uses will be
4829 replaced by the local variable. */
4830 tree var = copy_decl_to_var (arg, id);
4831 insert_decl_map (id, arg, var);
4832 /* Declare this new variable. */
4833 DECL_CHAIN (var) = *vars;
4834 *vars = var;
4836 return new_parm;
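/* Illustrative example: versioning

int f (int a, int b);

with bit 1 set in ARGS_TO_SKIP produces a clone taking only A,
while B is rewritten into a local VAR_DECL (collected through
*VARS) so that any leftover uses of it in the copied body still
have a declaration to refer to. */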
4839 /* Return a copy of the function's static chain. */
4840 static tree
4841 copy_static_chain (tree static_chain, copy_body_data * id)
4843 tree *chain_copy, *pvar;
4845 chain_copy = &static_chain;
4846 for (pvar = chain_copy; *pvar; pvar = &DECL_CHAIN (*pvar))
4848 tree new_tree = remap_decl (*pvar, id);
4849 lang_hooks.dup_lang_specific_decl (new_tree);
4850 DECL_CHAIN (new_tree) = DECL_CHAIN (*pvar);
4851 *pvar = new_tree;
4853 return static_chain;
4856 /* Return true if the function is allowed to be versioned.
4857 This is a guard for the versioning functionality. */
4859 bool
4860 tree_versionable_function_p (tree fndecl)
4862 return (!lookup_attribute ("noclone", DECL_ATTRIBUTES (fndecl))
4863 && copy_forbidden (DECL_STRUCT_FUNCTION (fndecl), fndecl) == NULL);
4866 /* Delete all unreachable basic blocks and update the callgraph.
4867 Doing so is somewhat nontrivial because we need to update all clones and
4868 remove inline functions that become unreachable. */
4870 static bool
4871 delete_unreachable_blocks_update_callgraph (copy_body_data *id)
4873 bool changed = false;
4874 basic_block b, next_bb;
4876 find_unreachable_blocks ();
4878 /* Delete all unreachable basic blocks. */
4880 for (b = ENTRY_BLOCK_PTR->next_bb; b != EXIT_BLOCK_PTR; b = next_bb)
4882 next_bb = b->next_bb;
4884 if (!(b->flags & BB_REACHABLE))
4886 gimple_stmt_iterator bsi;
4888 for (bsi = gsi_start_bb (b); !gsi_end_p (bsi); gsi_next (&bsi))
4889 if (gimple_code (gsi_stmt (bsi)) == GIMPLE_CALL)
4891 struct cgraph_edge *e;
4892 struct cgraph_node *node;
4894 if ((e = cgraph_edge (id->dst_node, gsi_stmt (bsi))) != NULL)
4896 if (!e->inline_failed)
4897 cgraph_remove_node_and_inline_clones (e->callee, id->dst_node);
4898 else
4899 cgraph_remove_edge (e);
4901 if (id->transform_call_graph_edges == CB_CGE_MOVE_CLONES
4902 && id->dst_node->clones)
4903 for (node = id->dst_node->clones; node != id->dst_node;)
4905 if ((e = cgraph_edge (node, gsi_stmt (bsi))) != NULL)
4907 if (!e->inline_failed)
4908 cgraph_remove_node_and_inline_clones (e->callee, id->dst_node);
4909 else
4910 cgraph_remove_edge (e);
4913 if (node->clones)
4914 node = node->clones;
4915 else if (node->next_sibling_clone)
4916 node = node->next_sibling_clone;
4917 else
4919 while (node != id->dst_node && !node->next_sibling_clone)
4920 node = node->clone_of;
4921 if (node != id->dst_node)
4922 node = node->next_sibling_clone;
4926 delete_basic_block (b);
4927 changed = true;
4931 return changed;
4934 /* Update clone info after duplication. */
4936 static void
4937 update_clone_info (copy_body_data * id)
4939 struct cgraph_node *node;
4940 if (!id->dst_node->clones)
4941 return;
4942 for (node = id->dst_node->clones; node != id->dst_node;)
4944 /* First update replace maps to match the new body. */
4945 if (node->clone.tree_map)
4947 unsigned int i;
4948 for (i = 0; i < vec_safe_length (node->clone.tree_map); i++)
4950 struct ipa_replace_map *replace_info;
4951 replace_info = (*node->clone.tree_map)[i];
4952 walk_tree (&replace_info->old_tree, copy_tree_body_r, id, NULL);
4953 walk_tree (&replace_info->new_tree, copy_tree_body_r, id, NULL);
4956 if (node->clones)
4957 node = node->clones;
4958 else if (node->next_sibling_clone)
4959 node = node->next_sibling_clone;
4960 else
4962 while (node != id->dst_node && !node->next_sibling_clone)
4963 node = node->clone_of;
4964 if (node != id->dst_node)
4965 node = node->next_sibling_clone;
4970 /* Create a copy of a function's tree.
4971 OLD_DECL and NEW_DECL are FUNCTION_DECL tree nodes
4972 of the original function and the new copied function
4973 respectively. In case we want to replace a DECL
4974 tree with another tree while duplicating the function's
4975 body, TREE_MAP represents the mapping between these
4976 trees. If UPDATE_CLONES is set, the call_stmt fields
4977 of edges of clones of the function will be updated.
4979 If non-NULL, ARGS_TO_SKIP determines the function parameters to
4980 remove from the new version.
4981 If SKIP_RETURN is true, the new version will return void.
4982 If non-NULL, BLOCKS_TO_COPY determines which basic blocks to copy.
4983 If non-NULL, NEW_ENTRY determines the new entry BB of the clone. */
4985 void
4986 tree_function_versioning (tree old_decl, tree new_decl,
4987 vec<ipa_replace_map_p, va_gc> *tree_map,
4988 bool update_clones, bitmap args_to_skip,
4989 bool skip_return, bitmap blocks_to_copy,
4990 basic_block new_entry)
4992 struct cgraph_node *old_version_node;
4993 struct cgraph_node *new_version_node;
4994 copy_body_data id;
4995 tree p;
4996 unsigned i;
4997 struct ipa_replace_map *replace_info;
4998 basic_block old_entry_block, bb;
4999 vec<gimple> init_stmts;
5000 init_stmts.create (10);
5001 tree vars = NULL_TREE;
5003 gcc_assert (TREE_CODE (old_decl) == FUNCTION_DECL
5004 && TREE_CODE (new_decl) == FUNCTION_DECL);
5005 DECL_POSSIBLY_INLINED (old_decl) = 1;
5007 old_version_node = cgraph_get_node (old_decl);
5008 gcc_checking_assert (old_version_node);
5009 new_version_node = cgraph_get_node (new_decl);
5010 gcc_checking_assert (new_version_node);
5012 /* Copy over debug args. */
5013 if (DECL_HAS_DEBUG_ARGS_P (old_decl))
5015 vec<tree, va_gc> **new_debug_args, **old_debug_args;
5016 gcc_checking_assert (decl_debug_args_lookup (new_decl) == NULL);
5017 DECL_HAS_DEBUG_ARGS_P (new_decl) = 0;
5018 old_debug_args = decl_debug_args_lookup (old_decl);
5019 if (old_debug_args)
5021 new_debug_args = decl_debug_args_insert (new_decl);
5022 *new_debug_args = vec_safe_copy (*old_debug_args);
5026 /* Output the inlining info for this abstract function, since it has been
5027 inlined. If we don't do this now, we can lose the information about the
5028 variables in the function when the blocks get blown away as soon as we
5029 remove the cgraph node. */
5030 (*debug_hooks->outlining_inline_function) (old_decl);
5032 DECL_ARTIFICIAL (new_decl) = 1;
5033 DECL_ABSTRACT_ORIGIN (new_decl) = DECL_ORIGIN (old_decl);
5034 DECL_FUNCTION_PERSONALITY (new_decl) = DECL_FUNCTION_PERSONALITY (old_decl);
5036 /* Prepare the data structures for the tree copy. */
5037 memset (&id, 0, sizeof (id));
5039 /* Collect the statements that will need folding after copying. */
5040 id.statements_to_fold = pointer_set_create ();
5042 id.decl_map = pointer_map_create ();
5043 id.debug_map = NULL;
5044 id.src_fn = old_decl;
5045 id.dst_fn = new_decl;
5046 id.src_node = old_version_node;
5047 id.dst_node = new_version_node;
5048 id.src_cfun = DECL_STRUCT_FUNCTION (old_decl);
5049 if (id.src_node->ipa_transforms_to_apply.exists ())
5051 vec<ipa_opt_pass> old_transforms_to_apply
5052 = id.dst_node->ipa_transforms_to_apply;
5053 unsigned int i;
5055 id.dst_node->ipa_transforms_to_apply
5056 = id.src_node->ipa_transforms_to_apply.copy ();
5057 for (i = 0; i < old_transforms_to_apply.length (); i++)
5058 id.dst_node->ipa_transforms_to_apply.safe_push (old_transforms_to_apply[i]);
5059 old_transforms_to_apply.release ();
5062 id.copy_decl = copy_decl_no_change;
5063 id.transform_call_graph_edges
5064 = update_clones ? CB_CGE_MOVE_CLONES : CB_CGE_MOVE;
5065 id.transform_new_cfg = true;
5066 id.transform_return_to_modify = false;
5067 id.transform_lang_insert_block = NULL;
5069 old_entry_block = ENTRY_BLOCK_PTR_FOR_FUNCTION
5070 (DECL_STRUCT_FUNCTION (old_decl));
5071 initialize_cfun (new_decl, old_decl,
5072 old_entry_block->count);
5073 DECL_STRUCT_FUNCTION (new_decl)->gimple_df->ipa_pta
5074 = id.src_cfun->gimple_df->ipa_pta;
5076 /* Copy the function's static chain. */
5077 p = DECL_STRUCT_FUNCTION (old_decl)->static_chain_decl;
5078 if (p)
5079 DECL_STRUCT_FUNCTION (new_decl)->static_chain_decl =
5080 copy_static_chain (DECL_STRUCT_FUNCTION (old_decl)->static_chain_decl,
5081 &id);
5083 /* If there's a tree_map, prepare for substitution. */
5084 if (tree_map)
5085 for (i = 0; i < tree_map->length (); i++)
5087 gimple init;
5088 replace_info = (*tree_map)[i];
5089 if (replace_info->replace_p)
5091 if (!replace_info->old_tree)
5093 int i = replace_info->parm_num;
5094 tree parm;
5095 for (parm = DECL_ARGUMENTS (old_decl); i; parm = DECL_CHAIN (parm))
5096 i --;
5097 replace_info->old_tree = parm;
5099 gcc_assert (TREE_CODE (replace_info->old_tree) == PARM_DECL);
5100 init = setup_one_parameter (&id, replace_info->old_tree,
5101 replace_info->new_tree, id.src_fn,
5102 NULL,
5103 &vars);
5104 if (init)
5105 init_stmts.safe_push (init);
5108 /* Copy the function's arguments. */
5109 if (DECL_ARGUMENTS (old_decl) != NULL_TREE)
5110 DECL_ARGUMENTS (new_decl) =
5111 copy_arguments_for_versioning (DECL_ARGUMENTS (old_decl), &id,
5112 args_to_skip, &vars);
5114 DECL_INITIAL (new_decl) = remap_blocks (DECL_INITIAL (id.src_fn), &id);
5115 BLOCK_SUPERCONTEXT (DECL_INITIAL (new_decl)) = new_decl;
5117 declare_inline_vars (DECL_INITIAL (new_decl), vars);
5119 if (!vec_safe_is_empty (DECL_STRUCT_FUNCTION (old_decl)->local_decls))
5120 /* Add local vars. */
5121 add_local_variables (DECL_STRUCT_FUNCTION (old_decl), cfun, &id);
5123 if (DECL_RESULT (old_decl) == NULL_TREE)
5125 else if (skip_return && !VOID_TYPE_P (TREE_TYPE (DECL_RESULT (old_decl))))
5127 DECL_RESULT (new_decl)
5128 = build_decl (DECL_SOURCE_LOCATION (DECL_RESULT (old_decl)),
5129 RESULT_DECL, NULL_TREE, void_type_node);
5130 DECL_CONTEXT (DECL_RESULT (new_decl)) = new_decl;
5131 cfun->returns_struct = 0;
5132 cfun->returns_pcc_struct = 0;
5134 else
5136 tree old_name;
5137 DECL_RESULT (new_decl) = remap_decl (DECL_RESULT (old_decl), &id);
5138 lang_hooks.dup_lang_specific_decl (DECL_RESULT (new_decl));
5139 if (gimple_in_ssa_p (id.src_cfun)
5140 && DECL_BY_REFERENCE (DECL_RESULT (old_decl))
5141 && (old_name = ssa_default_def (id.src_cfun, DECL_RESULT (old_decl))))
5143 tree new_name = make_ssa_name (DECL_RESULT (new_decl), NULL);
5144 insert_decl_map (&id, old_name, new_name);
5145 SSA_NAME_DEF_STMT (new_name) = gimple_build_nop ();
5146 set_ssa_default_def (cfun, DECL_RESULT (new_decl), new_name);
5150 /* Copy the Function's body. */
5151 copy_body (&id, old_entry_block->count, REG_BR_PROB_BASE,
5152 ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR, blocks_to_copy, new_entry);
5154 /* Renumber the lexical scoping (non-code) blocks consecutively. */
5155 number_blocks (new_decl);
5157 /* We want to create the BB unconditionally, so that the addition of
5158 debug stmts doesn't affect BB count, which may in the end cause
5159 codegen differences. */
5160 bb = split_edge (single_succ_edge (ENTRY_BLOCK_PTR));
5161 while (init_stmts.length ())
5162 insert_init_stmt (&id, bb, init_stmts.pop ());
5163 update_clone_info (&id);
5165 /* Remap the nonlocal_goto_save_area, if any. */
5166 if (cfun->nonlocal_goto_save_area)
5168 struct walk_stmt_info wi;
5170 memset (&wi, 0, sizeof (wi));
5171 wi.info = &id;
5172 walk_tree (&cfun->nonlocal_goto_save_area, remap_gimple_op_r, &wi, NULL);
5175 /* Clean up. */
5176 pointer_map_destroy (id.decl_map);
5177 if (id.debug_map)
5178 pointer_map_destroy (id.debug_map);
5179 free_dominance_info (CDI_DOMINATORS);
5180 free_dominance_info (CDI_POST_DOMINATORS);
5182 fold_marked_statements (0, id.statements_to_fold);
5183 pointer_set_destroy (id.statements_to_fold);
5184 fold_cond_expr_cond ();
5185 delete_unreachable_blocks_update_callgraph (&id);
5186 if (id.dst_node->analyzed)
5187 cgraph_rebuild_references ();
5188 update_ssa (TODO_update_ssa);
5190 /* After partial cloning we need to rescale frequencies, so they are
5191 within proper range in the cloned function. */
5192 if (new_entry)
5194 struct cgraph_edge *e;
5195 rebuild_frequencies ();
5197 new_version_node->count = ENTRY_BLOCK_PTR->count;
5198 for (e = new_version_node->callees; e; e = e->next_callee)
5200 basic_block bb = gimple_bb (e->call_stmt);
5201 e->frequency = compute_call_stmt_bb_frequency (current_function_decl,
5202 bb);
5203 e->count = bb->count;
5205 for (e = new_version_node->indirect_calls; e; e = e->next_callee)
5207 basic_block bb = gimple_bb (e->call_stmt);
5208 e->frequency = compute_call_stmt_bb_frequency (current_function_decl,
5209 bb);
5210 e->count = bb->count;
5214 free_dominance_info (CDI_DOMINATORS);
5215 free_dominance_info (CDI_POST_DOMINATORS);
5217 gcc_assert (!id.debug_stmts.exists ());
5218 init_stmts.release ();
5219 pop_cfun ();
5220 return;
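/* Illustrative use (hypothetical values): IPA constant propagation
can clone

int f (int n) { return n + 1; }

by passing a TREE_MAP entry that replaces parameter N with the
constant 5 and setting bit 0 of ARGS_TO_SKIP; the resulting
version takes no arguments and its body folds down to "return 6;"
through the setup_one_parameter / fold_marked_statements machinery
above. */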
5223 /* EXP is a CALL_EXPR present in a GENERIC expression tree. Try to integrate
5224 the callee and return the inlined body on success. */
5226 tree
5227 maybe_inline_call_in_expr (tree exp)
5229 tree fn = get_callee_fndecl (exp);
5231 /* We can only try to inline "const" functions. */
5232 if (fn && TREE_READONLY (fn) && DECL_SAVED_TREE (fn))
5234 struct pointer_map_t *decl_map = pointer_map_create ();
5235 call_expr_arg_iterator iter;
5236 copy_body_data id;
5237 tree param, arg, t;
5239 /* Remap the parameters. */
5240 for (param = DECL_ARGUMENTS (fn), arg = first_call_expr_arg (exp, &iter);
5241 param;
5242 param = DECL_CHAIN (param), arg = next_call_expr_arg (&iter))
5243 *pointer_map_insert (decl_map, param) = arg;
5245 memset (&id, 0, sizeof (id));
5246 id.src_fn = fn;
5247 id.dst_fn = current_function_decl;
5248 id.src_cfun = DECL_STRUCT_FUNCTION (fn);
5249 id.decl_map = decl_map;
5251 id.copy_decl = copy_decl_no_change;
5252 id.transform_call_graph_edges = CB_CGE_DUPLICATE;
5253 id.transform_new_cfg = false;
5254 id.transform_return_to_modify = true;
5255 id.transform_lang_insert_block = NULL;
5257 /* Make sure not to unshare trees behind the front-end's back
5258 since front-end specific mechanisms may rely on sharing. */
5259 id.regimplify = false;
5260 id.do_not_unshare = true;
5262 /* We're not inside any EH region. */
5263 id.eh_lp_nr = 0;
5265 t = copy_tree_body (&id);
5266 pointer_map_destroy (decl_map);
5268 /* We can only return something suitable for use in a GENERIC
5269 expression tree. */
5270 if (TREE_CODE (t) == MODIFY_EXPR)
5271 return TREE_OPERAND (t, 1);
5274 return NULL_TREE;
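/* Example (illustrative): given

static int add1 (int x) __attribute__ ((const));

with a saved body "return x + 1;", a GENERIC call add1 (7) is
expanded by mapping X to 7 and copying the body; the right-hand
side of the resulting MODIFY_EXPR, the expression 7 + 1, is then
returned in place of the call. */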
5277 /* Duplicate a type, fields and all. */
5279 tree
5280 build_duplicate_type (tree type)
5282 struct copy_body_data id;
5284 memset (&id, 0, sizeof (id));
5285 id.src_fn = current_function_decl;
5286 id.dst_fn = current_function_decl;
5287 id.src_cfun = cfun;
5288 id.decl_map = pointer_map_create ();
5289 id.debug_map = NULL;
5290 id.copy_decl = copy_decl_no_change;
5292 type = remap_type_1 (type, &id);
5294 pointer_map_destroy (id.decl_map);
5295 if (id.debug_map)
5296 pointer_map_destroy (id.debug_map);
5298 TYPE_CANONICAL (type) = type;
5300 return type;