/* Tree inlining.
   Copyright 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009
   Free Software Foundation, Inc.
   Contributed by Alexandre Oliva <aoliva@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "toplev.h"
#include "tree.h"
#include "tree-inline.h"
#include "rtl.h"
#include "expr.h"
#include "flags.h"
#include "params.h"
#include "input.h"
#include "insn-config.h"
#include "hashtab.h"
#include "langhooks.h"
#include "basic-block.h"
#include "tree-iterator.h"
#include "cgraph.h"
#include "intl.h"
#include "tree-mudflap.h"
#include "tree-flow.h"
#include "function.h"
#include "ggc.h"
#include "diagnostic.h"
#include "except.h"
#include "debug.h"
#include "pointer-set.h"
#include "ipa-prop.h"
#include "value-prof.h"
#include "tree-pass.h"
#include "target.h"
#include "integrate.h"

/* I'm not really happy about this, but we need to handle gimple and
   non-gimple trees.  */
#include "gimple.h"
/* Inlining, Cloning, Versioning, Parallelization

   Inlining: a function body is duplicated, but the PARM_DECLs are
   remapped into VAR_DECLs, and non-void RETURN_EXPRs become
   MODIFY_EXPRs that store to a dedicated returned-value variable.
   The duplicated eh_region info of the copy will later be appended
   to the info for the caller; the eh_region info in copied throwing
   statements and RESX_EXPRs is adjusted accordingly.

   Cloning: (only in C++) We have one body for a con/de/structor, and
   multiple function decls, each with a unique parameter list.
   Duplicate the body, using the given splay tree; some parameters
   will become constants (like 0 or 1).

   Versioning: a function body is duplicated and the result is a new
   function, rather than being inlined into the blocks of an existing
   function as with inlining.  Some parameters will become constants.

   Parallelization: a region of a function is duplicated resulting in
   a new function.  Variables may be replaced with complex expressions
   to enable shared variable semantics.

   All of these will simultaneously look up any callgraph edges.  If
   we're going to inline the duplicated function body, and the given
   function has some cloned callgraph nodes (one for each place this
   function will be inlined) those callgraph edges will be duplicated.
   If we're cloning the body, those callgraph edges will be
   updated to point into the new body.  (Note that the original
   callgraph node and edge list will not be altered.)

   See the CALL_EXPR handling case in copy_tree_body_r ().  */
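/* For illustration only (a sketch, not code from this file): given a
   callee

     int callee (int p) { return p + 1; }

   inlining conceptually remaps the PARM_DECL P to a fresh local
   VAR_DECL initialized from the actual argument and rewrites the
   return into an assignment to a returned-value variable, roughly

     p.1 = arg;
     retval.2 = p.1 + 1;

   The names p.1 and retval.2 are invented for this sketch; the real
   temporaries come from remap_decl and declare_return_variable.  */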
/* To Do:

   o In order to make inlining-on-trees work, we pessimized
     function-local static constants.  In particular, they are now
     always output, even when not addressed.  Fix this by treating
     function-local static constants just like global static
     constants; the back-end already knows not to output them if they
     are not needed.

   o Provide heuristics to clamp inlining of recursive template
     calls?  */
/* Weights that estimate_num_insns uses for heuristics in inlining.  */

eni_weights eni_inlining_weights;

/* Weights that estimate_num_insns uses to estimate the size of the
   produced code.  */

eni_weights eni_size_weights;

/* Weights that estimate_num_insns uses to estimate the time necessary
   to execute the produced code.  */

eni_weights eni_time_weights;
/* Prototypes.  */

static tree declare_return_variable (copy_body_data *, tree, tree, tree *);
static void remap_block (tree *, copy_body_data *);
static void copy_bind_expr (tree *, int *, copy_body_data *);
static tree mark_local_for_remap_r (tree *, int *, void *);
static void unsave_expr_1 (tree);
static tree unsave_r (tree *, int *, void *);
static void declare_inline_vars (tree, tree);
static void remap_save_expr (tree *, void *, int *);
static void prepend_lexical_block (tree current_block, tree new_block);
static tree copy_decl_to_var (tree, copy_body_data *);
static tree copy_result_decl_to_var (tree, copy_body_data *);
static tree copy_decl_maybe_to_var (tree, copy_body_data *);
static gimple remap_gimple_stmt (gimple, copy_body_data *);
static bool delete_unreachable_blocks_update_callgraph (copy_body_data *id);
/* Insert a tree->tree mapping for ID.  Although the name suggests
   that the trees should be variables, it is used for more than that.  */

void
insert_decl_map (copy_body_data *id, tree key, tree value)
{
  *pointer_map_insert (id->decl_map, key) = value;

  /* Always insert an identity map as well.  If we see this same new
     node again, we won't want to duplicate it a second time.  */
  if (key != value)
    *pointer_map_insert (id->decl_map, value) = value;
}
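/* For illustration: after a call such as

     insert_decl_map (id, old_decl, new_decl);

   ID->decl_map contains both OLD_DECL -> NEW_DECL and the identity
   entry NEW_DECL -> NEW_DECL, so a later lookup of NEW_DECL yields
   NEW_DECL itself rather than triggering a second copy.  */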
/* Construct new SSA name for old NAME.  ID is the inline context.  */

static tree
remap_ssa_name (tree name, copy_body_data *id)
{
  tree new_tree;
  tree *n;

  gcc_assert (TREE_CODE (name) == SSA_NAME);

  n = (tree *) pointer_map_contains (id->decl_map, name);
  if (n)
    return unshare_expr (*n);

  /* Do not set DEF_STMT yet as statement is not copied yet.  We do that
     in copy_bb.  */
  new_tree = remap_decl (SSA_NAME_VAR (name), id);

  /* We might've substituted a constant or another SSA_NAME for
     the variable.

     Replace the SSA name representing RESULT_DECL by the variable during
     inlining: this saves us from the need to introduce a PHI node in the
     case the return value is only partly initialized.  */
  if ((TREE_CODE (new_tree) == VAR_DECL || TREE_CODE (new_tree) == PARM_DECL)
      && (TREE_CODE (SSA_NAME_VAR (name)) != RESULT_DECL
          || !id->transform_return_to_modify))
    {
      new_tree = make_ssa_name (new_tree, NULL);
      insert_decl_map (id, name, new_tree);
      SSA_NAME_OCCURS_IN_ABNORMAL_PHI (new_tree)
        = SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name);
      TREE_TYPE (new_tree) = TREE_TYPE (SSA_NAME_VAR (new_tree));
      if (gimple_nop_p (SSA_NAME_DEF_STMT (name)))
        {
          /* By inlining a function having an uninitialized variable, we
             might extend its lifetime (the variable might get reused).
             This causes an ICE in the case we end up extending the
             lifetime of an SSA name across an abnormal edge, and it also
             increases register pressure.

             We simply initialize all uninitialized vars by 0 except
             for the case we are inlining into the very first BB.  We can
             avoid this for all BBs that are not inside strongly connected
             regions of the CFG, but this is expensive to test.  */
          if (id->entry_bb
              && is_gimple_reg (SSA_NAME_VAR (name))
              && TREE_CODE (SSA_NAME_VAR (name)) != PARM_DECL
              && (id->entry_bb != EDGE_SUCC (ENTRY_BLOCK_PTR, 0)->dest
                  || EDGE_COUNT (id->entry_bb->preds) != 1))
            {
              gimple_stmt_iterator gsi = gsi_last_bb (id->entry_bb);
              gimple init_stmt;

              init_stmt = gimple_build_assign (new_tree,
                                               fold_convert (TREE_TYPE (new_tree),
                                                             integer_zero_node));
              gsi_insert_after (&gsi, init_stmt, GSI_NEW_STMT);
              SSA_NAME_IS_DEFAULT_DEF (new_tree) = 0;
            }
          else
            {
              SSA_NAME_DEF_STMT (new_tree) = gimple_build_nop ();
              if (gimple_default_def (id->src_cfun, SSA_NAME_VAR (name))
                  == name)
                set_default_def (SSA_NAME_VAR (new_tree), new_tree);
            }
        }
    }
  else
    insert_decl_map (id, name, new_tree);
  return new_tree;
}
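/* For illustration: if the callee uses a variable whose SSA name has
   no real definition (its def stmt is a GIMPLE_NOP), the code above
   may emit an explicit zero initialization at the end of ID->entry_bb,
   conceptually

     new_tmp = 0;

   so that inlining cannot extend the live range of an undefined value
   across an abnormal edge.  NEW_TMP stands in for the SSA name built
   by make_ssa_name above.  */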
/* Remap DECL during the copying of the BLOCK tree for the function.  */

tree
remap_decl (tree decl, copy_body_data *id)
{
  tree *n;
  tree fn;

  /* We only remap local variables in the current function.  */
  fn = id->src_fn;

  /* See if we have remapped this declaration.  */

  n = (tree *) pointer_map_contains (id->decl_map, decl);

  /* If we didn't already have an equivalent for this declaration,
     create one now.  */
  if (!n)
    {
      /* Make a copy of the variable or label.  */
      tree t = id->copy_decl (decl, id);

      /* Remember it, so that if we encounter this local entity again
         we can reuse this copy.  Do this early because remap_type may
         need this decl for TYPE_STUB_DECL.  */
      insert_decl_map (id, decl, t);

      if (!DECL_P (t))
        return t;

      /* Remap types, if necessary.  */
      TREE_TYPE (t) = remap_type (TREE_TYPE (t), id);
      if (TREE_CODE (t) == TYPE_DECL)
        DECL_ORIGINAL_TYPE (t) = remap_type (DECL_ORIGINAL_TYPE (t), id);

      /* Remap sizes as necessary.  */
      walk_tree (&DECL_SIZE (t), copy_tree_body_r, id, NULL);
      walk_tree (&DECL_SIZE_UNIT (t), copy_tree_body_r, id, NULL);

      /* If fields, do likewise for offset and qualifier.  */
      if (TREE_CODE (t) == FIELD_DECL)
        {
          walk_tree (&DECL_FIELD_OFFSET (t), copy_tree_body_r, id, NULL);
          if (TREE_CODE (DECL_CONTEXT (t)) == QUAL_UNION_TYPE)
            walk_tree (&DECL_QUALIFIER (t), copy_tree_body_r, id, NULL);
        }

      if (cfun && gimple_in_ssa_p (cfun)
          && (TREE_CODE (t) == VAR_DECL
              || TREE_CODE (t) == RESULT_DECL || TREE_CODE (t) == PARM_DECL))
        {
          tree def = gimple_default_def (id->src_cfun, decl);
          get_var_ann (t);
          if (TREE_CODE (decl) != PARM_DECL && def)
            {
              tree map = remap_ssa_name (def, id);
              /* Watch out for RESULT_DECLs whose SSA names map directly
                 to them.  */
              if (TREE_CODE (map) == SSA_NAME
                  && gimple_nop_p (SSA_NAME_DEF_STMT (map)))
                set_default_def (t, map);
            }
          add_referenced_var (t);
        }
      return t;
    }

  if (id->do_not_unshare)
    return *n;
  else
    return unshare_expr (*n);
}
static tree
remap_type_1 (tree type, copy_body_data *id)
{
  tree new_tree, t;

  /* We do need a copy.  Build and register it now.  If this is a pointer or
     reference type, remap the designated type and make a new pointer or
     reference type.  */
  if (TREE_CODE (type) == POINTER_TYPE)
    {
      new_tree = build_pointer_type_for_mode (remap_type (TREE_TYPE (type), id),
                                              TYPE_MODE (type),
                                              TYPE_REF_CAN_ALIAS_ALL (type));
      insert_decl_map (id, type, new_tree);
      return new_tree;
    }
  else if (TREE_CODE (type) == REFERENCE_TYPE)
    {
      new_tree = build_reference_type_for_mode (remap_type (TREE_TYPE (type), id),
                                                TYPE_MODE (type),
                                                TYPE_REF_CAN_ALIAS_ALL (type));
      insert_decl_map (id, type, new_tree);
      return new_tree;
    }
  else
    new_tree = copy_node (type);

  insert_decl_map (id, type, new_tree);

  /* This is a new type, not a copy of an old type.  Need to reassociate
     variants.  We can handle everything except the main variant lazily.  */
  t = TYPE_MAIN_VARIANT (type);
  if (type != t)
    {
      t = remap_type (t, id);
      TYPE_MAIN_VARIANT (new_tree) = t;
      TYPE_NEXT_VARIANT (new_tree) = TYPE_NEXT_VARIANT (t);
      TYPE_NEXT_VARIANT (t) = new_tree;
    }
  else
    {
      TYPE_MAIN_VARIANT (new_tree) = new_tree;
      TYPE_NEXT_VARIANT (new_tree) = NULL;
    }

  if (TYPE_STUB_DECL (type))
    TYPE_STUB_DECL (new_tree) = remap_decl (TYPE_STUB_DECL (type), id);

  /* Lazily create pointer and reference types.  */
  TYPE_POINTER_TO (new_tree) = NULL;
  TYPE_REFERENCE_TO (new_tree) = NULL;

  switch (TREE_CODE (new_tree))
    {
    case INTEGER_TYPE:
    case REAL_TYPE:
    case FIXED_POINT_TYPE:
    case ENUMERAL_TYPE:
    case BOOLEAN_TYPE:
      t = TYPE_MIN_VALUE (new_tree);
      if (t && TREE_CODE (t) != INTEGER_CST)
        walk_tree (&TYPE_MIN_VALUE (new_tree), copy_tree_body_r, id, NULL);

      t = TYPE_MAX_VALUE (new_tree);
      if (t && TREE_CODE (t) != INTEGER_CST)
        walk_tree (&TYPE_MAX_VALUE (new_tree), copy_tree_body_r, id, NULL);
      return new_tree;

    case FUNCTION_TYPE:
      TREE_TYPE (new_tree) = remap_type (TREE_TYPE (new_tree), id);
      walk_tree (&TYPE_ARG_TYPES (new_tree), copy_tree_body_r, id, NULL);
      return new_tree;

    case ARRAY_TYPE:
      TREE_TYPE (new_tree) = remap_type (TREE_TYPE (new_tree), id);
      TYPE_DOMAIN (new_tree) = remap_type (TYPE_DOMAIN (new_tree), id);
      break;

    case RECORD_TYPE:
    case UNION_TYPE:
    case QUAL_UNION_TYPE:
      {
        tree f, nf = NULL;

        for (f = TYPE_FIELDS (new_tree); f ; f = TREE_CHAIN (f))
          {
            t = remap_decl (f, id);
            DECL_CONTEXT (t) = new_tree;
            TREE_CHAIN (t) = nf;
            nf = t;
          }
        TYPE_FIELDS (new_tree) = nreverse (nf);
      }
      break;

    case OFFSET_TYPE:
    default:
      /* Shouldn't have been thought variable sized.  */
      gcc_unreachable ();
    }

  walk_tree (&TYPE_SIZE (new_tree), copy_tree_body_r, id, NULL);
  walk_tree (&TYPE_SIZE_UNIT (new_tree), copy_tree_body_r, id, NULL);

  return new_tree;
}
tree
remap_type (tree type, copy_body_data *id)
{
  tree *node;
  tree tmp;

  if (type == NULL)
    return type;

  /* See if we have remapped this type.  */
  node = (tree *) pointer_map_contains (id->decl_map, type);
  if (node)
    return *node;

  /* The type only needs remapping if it's variably modified.  */
  if (! variably_modified_type_p (type, id->src_fn))
    {
      insert_decl_map (id, type, type);
      return type;
    }

  id->remapping_type_depth++;
  tmp = remap_type_1 (type, id);
  id->remapping_type_depth--;

  return tmp;
}
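/* For illustration: remapping only matters for variably modified
   types.  In a callee such as

     void f (int n) { int a[n]; /* ... */ }

   the domain of A's ARRAY_TYPE refers to N, so the type must be
   rewritten in terms of the remapped copy of N; all other types are
   entered into the map above as their own identity.  */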
/* Return previously remapped type of TYPE in ID.  Return NULL if TYPE
   is NULL or TYPE has not been remapped before.  */

static tree
remapped_type (tree type, copy_body_data *id)
{
  tree *node;

  if (type == NULL)
    return type;

  /* See if we have remapped this type.  */
  node = (tree *) pointer_map_contains (id->decl_map, type);
  if (node)
    return *node;
  else
    return NULL;
}

/* Decide if DECL can be put into BLOCK_NONLOCAL_VARs.  */

static bool
can_be_nonlocal (tree decl, copy_body_data *id)
{
  /* We cannot duplicate function decls.  */
  if (TREE_CODE (decl) == FUNCTION_DECL)
    return true;

  /* Local static vars must be non-local or we get multiple declaration
     problems.  */
  if (TREE_CODE (decl) == VAR_DECL
      && !auto_var_in_fn_p (decl, id->src_fn))
    return true;

  /* At the moment dwarf2out can handle only these types of nodes.  We
     can support more later.  */
  if (TREE_CODE (decl) != VAR_DECL && TREE_CODE (decl) != PARM_DECL)
    return false;

  /* We must use the global type.  We call remapped_type instead of
     remap_type since we don't want to remap this type here if it
     hasn't been remapped before.  */
  if (TREE_TYPE (decl) != remapped_type (TREE_TYPE (decl), id))
    return false;

  /* Without SSA we can't tell if a variable is used.  */
  if (!gimple_in_ssa_p (cfun))
    return false;

  /* Live variables must be copied so we can attach DECL_RTL.  */
  if (var_ann (decl))
    return false;

  return true;
}
static tree
remap_decls (tree decls, VEC(tree,gc) **nonlocalized_list, copy_body_data *id)
{
  tree old_var;
  tree new_decls = NULL_TREE;

  /* Remap its variables.  */
  for (old_var = decls; old_var; old_var = TREE_CHAIN (old_var))
    {
      tree new_var;
      tree origin_var = DECL_ORIGIN (old_var);

      if (can_be_nonlocal (old_var, id))
        {
          if (TREE_CODE (old_var) == VAR_DECL
              && (var_ann (old_var) || !gimple_in_ssa_p (cfun)))
            cfun->local_decls = tree_cons (NULL_TREE, old_var,
                                           cfun->local_decls);
          if ((!optimize || debug_info_level > DINFO_LEVEL_TERSE)
              && !DECL_IGNORED_P (old_var)
              && nonlocalized_list)
            VEC_safe_push (tree, gc, *nonlocalized_list, origin_var);
          continue;
        }

      /* Remap the variable.  */
      new_var = remap_decl (old_var, id);

      /* If we didn't remap this variable, we can't mess with its
         TREE_CHAIN.  If we remapped this variable to the return slot, it's
         already declared somewhere else, so don't declare it here.  */

      if (new_var == id->retvar)
        ;
      else if (!new_var)
        {
          if ((!optimize || debug_info_level > DINFO_LEVEL_TERSE)
              && !DECL_IGNORED_P (old_var)
              && nonlocalized_list)
            VEC_safe_push (tree, gc, *nonlocalized_list, origin_var);
        }
      else
        {
          gcc_assert (DECL_P (new_var));
          TREE_CHAIN (new_var) = new_decls;
          new_decls = new_var;
        }
    }

  return nreverse (new_decls);
}
/* Copy the BLOCK to contain remapped versions of the variables
   therein.  And hook the new block into the block-tree.  */

static void
remap_block (tree *block, copy_body_data *id)
{
  tree old_block;
  tree new_block;
  tree fn;

  /* Make the new block.  */
  old_block = *block;
  new_block = make_node (BLOCK);
  TREE_USED (new_block) = TREE_USED (old_block);
  BLOCK_ABSTRACT_ORIGIN (new_block) = old_block;
  BLOCK_SOURCE_LOCATION (new_block) = BLOCK_SOURCE_LOCATION (old_block);
  BLOCK_NONLOCALIZED_VARS (new_block)
    = VEC_copy (tree, gc, BLOCK_NONLOCALIZED_VARS (old_block));
  *block = new_block;

  /* Remap its variables.  */
  BLOCK_VARS (new_block) = remap_decls (BLOCK_VARS (old_block),
                                        &BLOCK_NONLOCALIZED_VARS (new_block),
                                        id);

  fn = id->dst_fn;

  if (id->transform_lang_insert_block)
    id->transform_lang_insert_block (new_block);

  /* Remember the remapped block.  */
  insert_decl_map (id, old_block, new_block);
}
/* Copy the whole block tree and root it in id->block.  */

static tree
remap_blocks (tree block, copy_body_data *id)
{
  tree t;
  tree new_tree = block;

  if (!block)
    return NULL;

  remap_block (&new_tree, id);
  gcc_assert (new_tree != block);
  for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
    prepend_lexical_block (new_tree, remap_blocks (t, id));
  /* Blocks are in arbitrary order, but make things slightly prettier and do
     not swap order when producing a copy.  */
  BLOCK_SUBBLOCKS (new_tree) = blocks_nreverse (BLOCK_SUBBLOCKS (new_tree));
  return new_tree;
}
static void
copy_statement_list (tree *tp)
{
  tree_stmt_iterator oi, ni;
  tree new_tree;

  new_tree = alloc_stmt_list ();
  ni = tsi_start (new_tree);
  oi = tsi_start (*tp);
  *tp = new_tree;

  for (; !tsi_end_p (oi); tsi_next (&oi))
    tsi_link_after (&ni, tsi_stmt (oi), TSI_NEW_STMT);
}
static void
copy_bind_expr (tree *tp, int *walk_subtrees, copy_body_data *id)
{
  tree block = BIND_EXPR_BLOCK (*tp);
  /* Copy (and replace) the statement.  */
  copy_tree_r (tp, walk_subtrees, NULL);
  if (block)
    {
      remap_block (&block, id);
      BIND_EXPR_BLOCK (*tp) = block;
    }

  if (BIND_EXPR_VARS (*tp))
    /* This will remap a lot of the same decls again, but this should be
       harmless.  */
    BIND_EXPR_VARS (*tp) = remap_decls (BIND_EXPR_VARS (*tp), NULL, id);
}
/* Create a new gimple_seq by remapping all the statements in BODY
   using the inlining information in ID.  */

gimple_seq
remap_gimple_seq (gimple_seq body, copy_body_data *id)
{
  gimple_stmt_iterator si;
  gimple_seq new_body = NULL;

  for (si = gsi_start (body); !gsi_end_p (si); gsi_next (&si))
    {
      gimple new_stmt = remap_gimple_stmt (gsi_stmt (si), id);
      gimple_seq_add_stmt (&new_body, new_stmt);
    }

  return new_body;
}
/* Copy a GIMPLE_BIND statement STMT, remapping all the symbols in its
   block using the mapping information in ID.  */

static gimple
copy_gimple_bind (gimple stmt, copy_body_data *id)
{
  gimple new_bind;
  tree new_block, new_vars;
  gimple_seq body, new_body;

  /* Copy the statement.  Note that we purposely don't use copy_stmt
     here because we need to remap statements as we copy.  */
  body = gimple_bind_body (stmt);
  new_body = remap_gimple_seq (body, id);

  new_block = gimple_bind_block (stmt);
  if (new_block)
    remap_block (&new_block, id);

  /* This will remap a lot of the same decls again, but this should be
     harmless.  */
  new_vars = gimple_bind_vars (stmt);
  if (new_vars)
    new_vars = remap_decls (new_vars, NULL, id);

  new_bind = gimple_build_bind (new_vars, new_body, new_block);

  return new_bind;
}
/* Remap the GIMPLE operand pointed to by *TP.  DATA is really a
   'struct walk_stmt_info *'.  DATA->INFO is a 'copy_body_data *'.
   WALK_SUBTREES is used to indicate to walk_gimple_op whether to keep
   recursing into the children nodes of *TP.  */

static tree
remap_gimple_op_r (tree *tp, int *walk_subtrees, void *data)
{
  struct walk_stmt_info *wi_p = (struct walk_stmt_info *) data;
  copy_body_data *id = (copy_body_data *) wi_p->info;
  tree fn = id->src_fn;

  if (TREE_CODE (*tp) == SSA_NAME)
    {
      *tp = remap_ssa_name (*tp, id);
      *walk_subtrees = 0;
      return NULL;
    }
  else if (auto_var_in_fn_p (*tp, fn))
    {
      /* Local variables and labels need to be replaced by equivalent
         variables.  We don't want to copy static variables; there's
         only one of those, no matter how many times we inline the
         containing function.  Similarly for globals from an outer
         function.  */
      tree new_decl;

      /* Remap the declaration.  */
      new_decl = remap_decl (*tp, id);
      gcc_assert (new_decl);
      /* Replace this variable with the copy.  */
      STRIP_TYPE_NOPS (new_decl);
      /* ???  The C++ frontend uses void * pointer zero to initialize
         any other type.  This confuses the middle-end type verification.
         As cloned bodies do not go through gimplification again the fixup
         there doesn't trigger.  */
      if (TREE_CODE (new_decl) == INTEGER_CST
          && !useless_type_conversion_p (TREE_TYPE (*tp), TREE_TYPE (new_decl)))
        new_decl = fold_convert (TREE_TYPE (*tp), new_decl);
      *tp = new_decl;
      *walk_subtrees = 0;
    }
  else if (TREE_CODE (*tp) == STATEMENT_LIST)
    gcc_unreachable ();
  else if (TREE_CODE (*tp) == SAVE_EXPR)
    gcc_unreachable ();
  else if (TREE_CODE (*tp) == LABEL_DECL
           && (!DECL_CONTEXT (*tp)
               || decl_function_context (*tp) == id->src_fn))
    /* These may need to be remapped for EH handling.  */
    *tp = remap_decl (*tp, id);
  else if (TYPE_P (*tp))
    /* Types may need remapping as well.  */
    *tp = remap_type (*tp, id);
  else if (CONSTANT_CLASS_P (*tp))
    {
      /* If this is a constant, we have to copy the node iff the type
         will be remapped.  copy_tree_r will not copy a constant.  */
      tree new_type = remap_type (TREE_TYPE (*tp), id);

      if (new_type == TREE_TYPE (*tp))
        *walk_subtrees = 0;

      else if (TREE_CODE (*tp) == INTEGER_CST)
        *tp = build_int_cst_wide (new_type, TREE_INT_CST_LOW (*tp),
                                  TREE_INT_CST_HIGH (*tp));
      else
        {
          *tp = copy_node (*tp);
          TREE_TYPE (*tp) = new_type;
        }
    }
  else
    {
      /* Otherwise, just copy the node.  Note that copy_tree_r already
         knows not to copy VAR_DECLs, etc., so this is safe.  */
      if (TREE_CODE (*tp) == INDIRECT_REF)
        {
          /* Get rid of *& from inline substitutions that can happen when a
             pointer argument is an ADDR_EXPR.  */
          tree decl = TREE_OPERAND (*tp, 0);
          tree *n;

          n = (tree *) pointer_map_contains (id->decl_map, decl);
          if (n)
            {
              tree type, new_tree, old;

              /* If we happen to get an ADDR_EXPR in n->value, strip
                 it manually here as we'll eventually get ADDR_EXPRs
                 which lie about their types pointed to.  In this case
                 build_fold_indirect_ref wouldn't strip the
                 INDIRECT_REF, but we absolutely rely on that.  As
                 fold_indirect_ref does other useful transformations,
                 try that first, though.  */
              type = TREE_TYPE (TREE_TYPE (*n));
              new_tree = unshare_expr (*n);
              old = *tp;
              *tp = gimple_fold_indirect_ref (new_tree);
              if (!*tp)
                {
                  if (TREE_CODE (new_tree) == ADDR_EXPR)
                    {
                      *tp = fold_indirect_ref_1 (type, new_tree);
                      /* ???  We should either assert here or build
                         a VIEW_CONVERT_EXPR instead of blindly leaking
                         incompatible types to our IL.  */
                      if (! *tp)
                        *tp = TREE_OPERAND (new_tree, 0);
                    }
                  else
                    {
                      *tp = build1 (INDIRECT_REF, type, new_tree);
                      TREE_THIS_VOLATILE (*tp) = TREE_THIS_VOLATILE (old);
                      TREE_NO_WARNING (*tp) = TREE_NO_WARNING (old);
                    }
                }
              *walk_subtrees = 0;
              return NULL;
            }
        }

      /* Here is the "usual case".  Copy this tree node, and then
         tweak some special cases.  */
      copy_tree_r (tp, walk_subtrees, NULL);

      /* Global variables we haven't seen yet need to go into referenced
         vars.  If not referenced from types only.  */
      if (gimple_in_ssa_p (cfun)
          && TREE_CODE (*tp) == VAR_DECL
          && id->remapping_type_depth == 0)
        add_referenced_var (*tp);

      /* We should never have TREE_BLOCK set on non-statements.  */
      if (EXPR_P (*tp))
        gcc_assert (!TREE_BLOCK (*tp));

      if (TREE_CODE (*tp) != OMP_CLAUSE)
        TREE_TYPE (*tp) = remap_type (TREE_TYPE (*tp), id);

      if (TREE_CODE (*tp) == TARGET_EXPR && TREE_OPERAND (*tp, 3))
        {
          /* The copied TARGET_EXPR has never been expanded, even if the
             original node was expanded already.  */
          TREE_OPERAND (*tp, 1) = TREE_OPERAND (*tp, 3);
          TREE_OPERAND (*tp, 3) = NULL_TREE;
        }
      else if (TREE_CODE (*tp) == ADDR_EXPR)
        {
          /* Variable substitution need not be simple.  In particular,
             the INDIRECT_REF substitution above.  Make sure that
             TREE_CONSTANT and friends are up-to-date.  But make sure
             to not improperly set TREE_BLOCK on some sub-expressions.  */
          int invariant = is_gimple_min_invariant (*tp);
          tree block = id->block;
          id->block = NULL_TREE;
          walk_tree (&TREE_OPERAND (*tp, 0), copy_tree_body_r, id, NULL);
          id->block = block;

          /* Handle the case where we substituted an INDIRECT_REF
             into the operand of the ADDR_EXPR.  */
          if (TREE_CODE (TREE_OPERAND (*tp, 0)) == INDIRECT_REF)
            *tp = TREE_OPERAND (TREE_OPERAND (*tp, 0), 0);
          else
            recompute_tree_invariant_for_addr_expr (*tp);

          /* If this used to be invariant, but is not any longer,
             then regimplification is probably needed.  */
          if (invariant && !is_gimple_min_invariant (*tp))
            id->regimplify = true;

          *walk_subtrees = 0;
        }
    }

  /* Keep iterating.  */
  return NULL_TREE;
}
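/* For illustration: the INDIRECT_REF case above removes the *& pairs
   that inlining introduces.  If a call site passes &x for a pointer
   parameter P, then P is mapped to &x and a callee use of *P would
   otherwise become *&x; gimple_fold_indirect_ref collapses that back
   to plain x.  This describes only the common, type-compatible case.  */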
/* Called from copy_body_id via walk_tree.  DATA is really a
   `copy_body_data *'.  */

tree
copy_tree_body_r (tree *tp, int *walk_subtrees, void *data)
{
  copy_body_data *id = (copy_body_data *) data;
  tree fn = id->src_fn;
  tree new_block;

  /* Begin by recognizing trees that we'll completely rewrite for the
     inlining context.  Our output for these trees is completely
     different from our input (e.g. RETURN_EXPR is deleted, and morphs
     into an edge).  Further down, we'll handle trees that get
     duplicated and/or tweaked.  */

  /* When requested, RETURN_EXPRs should be transformed to just the
     contained MODIFY_EXPR.  The branch semantics of the return will
     be handled elsewhere by manipulating the CFG rather than a statement.  */
  if (TREE_CODE (*tp) == RETURN_EXPR && id->transform_return_to_modify)
    {
      tree assignment = TREE_OPERAND (*tp, 0);

      /* If we're returning something, just turn that into an
         assignment into the equivalent of the original RESULT_DECL.
         If the "assignment" is just the result decl, the result
         decl has already been set (e.g. a recent "foo (&result_decl,
         ...)"); just toss the entire RETURN_EXPR.  */
      if (assignment && TREE_CODE (assignment) == MODIFY_EXPR)
        {
          /* Replace the RETURN_EXPR with (a copy of) the
             MODIFY_EXPR hanging underneath.  */
          *tp = copy_node (assignment);
        }
      else /* Else the RETURN_EXPR returns no value.  */
        {
          *tp = NULL;
          return (tree) (void *)1;
        }
    }
  else if (TREE_CODE (*tp) == SSA_NAME)
    {
      *tp = remap_ssa_name (*tp, id);
      *walk_subtrees = 0;
      return NULL;
    }

  /* Local variables and labels need to be replaced by equivalent
     variables.  We don't want to copy static variables; there's only
     one of those, no matter how many times we inline the containing
     function.  Similarly for globals from an outer function.  */
  else if (auto_var_in_fn_p (*tp, fn))
    {
      tree new_decl;

      /* Remap the declaration.  */
      new_decl = remap_decl (*tp, id);
      gcc_assert (new_decl);
      /* Replace this variable with the copy.  */
      STRIP_TYPE_NOPS (new_decl);
      *tp = new_decl;
      *walk_subtrees = 0;
    }
  else if (TREE_CODE (*tp) == STATEMENT_LIST)
    copy_statement_list (tp);
  else if (TREE_CODE (*tp) == SAVE_EXPR)
    remap_save_expr (tp, id->decl_map, walk_subtrees);
  else if (TREE_CODE (*tp) == LABEL_DECL
           && (! DECL_CONTEXT (*tp)
               || decl_function_context (*tp) == id->src_fn))
    /* These may need to be remapped for EH handling.  */
    *tp = remap_decl (*tp, id);
  else if (TREE_CODE (*tp) == BIND_EXPR)
    copy_bind_expr (tp, walk_subtrees, id);
  /* Types may need remapping as well.  */
  else if (TYPE_P (*tp))
    *tp = remap_type (*tp, id);

  /* If this is a constant, we have to copy the node iff the type will be
     remapped.  copy_tree_r will not copy a constant.  */
  else if (CONSTANT_CLASS_P (*tp))
    {
      tree new_type = remap_type (TREE_TYPE (*tp), id);

      if (new_type == TREE_TYPE (*tp))
        *walk_subtrees = 0;

      else if (TREE_CODE (*tp) == INTEGER_CST)
        *tp = build_int_cst_wide (new_type, TREE_INT_CST_LOW (*tp),
                                  TREE_INT_CST_HIGH (*tp));
      else
        {
          *tp = copy_node (*tp);
          TREE_TYPE (*tp) = new_type;
        }
    }

  /* Otherwise, just copy the node.  Note that copy_tree_r already
     knows not to copy VAR_DECLs, etc., so this is safe.  */
  else
    {
      /* Here we handle trees that are not completely rewritten.
         First we detect some inlining-induced bogosities for
         discarding.  */
      if (TREE_CODE (*tp) == MODIFY_EXPR
          && TREE_OPERAND (*tp, 0) == TREE_OPERAND (*tp, 1)
          && (auto_var_in_fn_p (TREE_OPERAND (*tp, 0), fn)))
        {
          /* Some assignments VAR = VAR; don't generate any rtl code
             and thus don't count as variable modification.  Avoid
             keeping bogosities like 0 = 0.  */
          tree decl = TREE_OPERAND (*tp, 0), value;
          tree *n;

          n = (tree *) pointer_map_contains (id->decl_map, decl);
          if (n)
            {
              value = *n;
              STRIP_TYPE_NOPS (value);
              if (TREE_CONSTANT (value) || TREE_READONLY (value))
                {
                  *tp = build_empty_stmt (EXPR_LOCATION (*tp));
                  return copy_tree_body_r (tp, walk_subtrees, data);
                }
            }
        }
      else if (TREE_CODE (*tp) == INDIRECT_REF)
        {
          /* Get rid of *& from inline substitutions that can happen when a
             pointer argument is an ADDR_EXPR.  */
          tree decl = TREE_OPERAND (*tp, 0);
          tree *n;

          n = (tree *) pointer_map_contains (id->decl_map, decl);
          if (n)
            {
              tree new_tree;
              tree old;
              /* If we happen to get an ADDR_EXPR in n->value, strip
                 it manually here as we'll eventually get ADDR_EXPRs
                 which lie about their types pointed to.  In this case
                 build_fold_indirect_ref wouldn't strip the INDIRECT_REF,
                 but we absolutely rely on that.  As fold_indirect_ref
                 does other useful transformations, try that first, though.  */
              tree type = TREE_TYPE (TREE_TYPE (*n));
              if (id->do_not_unshare)
                new_tree = *n;
              else
                new_tree = unshare_expr (*n);
              old = *tp;
              *tp = gimple_fold_indirect_ref (new_tree);
              if (! *tp)
                {
                  if (TREE_CODE (new_tree) == ADDR_EXPR)
                    {
                      *tp = fold_indirect_ref_1 (type, new_tree);
                      /* ???  We should either assert here or build
                         a VIEW_CONVERT_EXPR instead of blindly leaking
                         incompatible types to our IL.  */
                      if (! *tp)
                        *tp = TREE_OPERAND (new_tree, 0);
                    }
                  else
                    {
                      *tp = build1 (INDIRECT_REF, type, new_tree);
                      TREE_THIS_VOLATILE (*tp) = TREE_THIS_VOLATILE (old);
                      TREE_SIDE_EFFECTS (*tp) = TREE_SIDE_EFFECTS (old);
                    }
                }
              *walk_subtrees = 0;
              return NULL;
            }
        }

      /* Here is the "usual case".  Copy this tree node, and then
         tweak some special cases.  */
      copy_tree_r (tp, walk_subtrees, NULL);

      /* Global variables we haven't seen yet need to go into referenced
         vars.  If not referenced from types only.  */
      if (gimple_in_ssa_p (cfun)
          && TREE_CODE (*tp) == VAR_DECL
          && id->remapping_type_depth == 0)
        add_referenced_var (*tp);

      /* If EXPR has a block defined, map it to the newly constructed block.
         When inlining we want EXPRs without a block to appear in the block
         of the function call.  */
      if (EXPR_P (*tp))
        {
          new_block = id->block;
          if (TREE_BLOCK (*tp))
            {
              tree *n;
              n = (tree *) pointer_map_contains (id->decl_map,
                                                 TREE_BLOCK (*tp));
              gcc_assert (n);
              new_block = *n;
            }
          TREE_BLOCK (*tp) = new_block;
        }

      if (TREE_CODE (*tp) == RESX_EXPR && id->eh_region_offset)
        TREE_OPERAND (*tp, 0) =
          build_int_cst (NULL_TREE,
                         id->eh_region_offset
                         + TREE_INT_CST_LOW (TREE_OPERAND (*tp, 0)));

      if (TREE_CODE (*tp) != OMP_CLAUSE)
        TREE_TYPE (*tp) = remap_type (TREE_TYPE (*tp), id);

      /* The copied TARGET_EXPR has never been expanded, even if the
         original node was expanded already.  */
      if (TREE_CODE (*tp) == TARGET_EXPR && TREE_OPERAND (*tp, 3))
        {
          TREE_OPERAND (*tp, 1) = TREE_OPERAND (*tp, 3);
          TREE_OPERAND (*tp, 3) = NULL_TREE;
        }

      /* Variable substitution need not be simple.  In particular, the
         INDIRECT_REF substitution above.  Make sure that TREE_CONSTANT
         and friends are up-to-date.  */
      else if (TREE_CODE (*tp) == ADDR_EXPR)
        {
          int invariant = is_gimple_min_invariant (*tp);
          walk_tree (&TREE_OPERAND (*tp, 0), copy_tree_body_r, id, NULL);

          /* Handle the case where we substituted an INDIRECT_REF
             into the operand of the ADDR_EXPR.  */
          if (TREE_CODE (TREE_OPERAND (*tp, 0)) == INDIRECT_REF)
            *tp = TREE_OPERAND (TREE_OPERAND (*tp, 0), 0);
          else
            recompute_tree_invariant_for_addr_expr (*tp);

          /* If this used to be invariant, but is not any longer,
             then regimplification is probably needed.  */
          if (invariant && !is_gimple_min_invariant (*tp))
            id->regimplify = true;

          *walk_subtrees = 0;
        }
    }

  /* Keep iterating.  */
  return NULL_TREE;
}
/* Helper for copy_bb.  Remap statement STMT using the inlining
   information in ID.  Return the new statement copy.  */

static gimple
remap_gimple_stmt (gimple stmt, copy_body_data *id)
{
  gimple copy = NULL;
  struct walk_stmt_info wi;
  tree new_block;
  bool skip_first = false;

  /* Begin by recognizing trees that we'll completely rewrite for the
     inlining context.  Our output for these trees is completely
     different from our input (e.g. RETURN_EXPR is deleted, and morphs
     into an edge).  Further down, we'll handle trees that get
     duplicated and/or tweaked.  */

  /* When requested, GIMPLE_RETURNs should be transformed to just the
     contained GIMPLE_ASSIGN.  The branch semantics of the return will
     be handled elsewhere by manipulating the CFG rather than the
     statement.  */
  if (gimple_code (stmt) == GIMPLE_RETURN && id->transform_return_to_modify)
    {
      tree retval = gimple_return_retval (stmt);

      /* If we're returning something, just turn that into an
         assignment into the equivalent of the original RESULT_DECL.
         If RETVAL is just the result decl, the result decl has
         already been set (e.g. a recent "foo (&result_decl, ...)");
         just toss the entire GIMPLE_RETURN.  */
      if (retval && TREE_CODE (retval) != RESULT_DECL)
        {
          copy = gimple_build_assign (id->retvar, retval);
          /* id->retvar is already substituted.  Skip it on later remapping.  */
          skip_first = true;
        }
      else
        return gimple_build_nop ();
    }
  else if (gimple_has_substatements (stmt))
    {
      gimple_seq s1, s2;

      /* When cloning bodies from the C++ front end, we will be handed bodies
         in High GIMPLE form.  Handle here all the High GIMPLE statements that
         have embedded statements.  */
      switch (gimple_code (stmt))
        {
        case GIMPLE_BIND:
          copy = copy_gimple_bind (stmt, id);
          break;

        case GIMPLE_CATCH:
          s1 = remap_gimple_seq (gimple_catch_handler (stmt), id);
          copy = gimple_build_catch (gimple_catch_types (stmt), s1);
          break;

        case GIMPLE_EH_FILTER:
          s1 = remap_gimple_seq (gimple_eh_filter_failure (stmt), id);
          copy = gimple_build_eh_filter (gimple_eh_filter_types (stmt), s1);
          break;

        case GIMPLE_TRY:
          s1 = remap_gimple_seq (gimple_try_eval (stmt), id);
          s2 = remap_gimple_seq (gimple_try_cleanup (stmt), id);
          copy = gimple_build_try (s1, s2, gimple_try_kind (stmt));
          break;

        case GIMPLE_WITH_CLEANUP_EXPR:
          s1 = remap_gimple_seq (gimple_wce_cleanup (stmt), id);
          copy = gimple_build_wce (s1);
          break;

        case GIMPLE_OMP_PARALLEL:
          s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
          copy = gimple_build_omp_parallel
                   (s1,
                    gimple_omp_parallel_clauses (stmt),
                    gimple_omp_parallel_child_fn (stmt),
                    gimple_omp_parallel_data_arg (stmt));
          break;

        case GIMPLE_OMP_TASK:
          s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
          copy = gimple_build_omp_task
                   (s1,
                    gimple_omp_task_clauses (stmt),
                    gimple_omp_task_child_fn (stmt),
                    gimple_omp_task_data_arg (stmt),
                    gimple_omp_task_copy_fn (stmt),
                    gimple_omp_task_arg_size (stmt),
                    gimple_omp_task_arg_align (stmt));
          break;

        case GIMPLE_OMP_FOR:
          s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
          s2 = remap_gimple_seq (gimple_omp_for_pre_body (stmt), id);
          copy = gimple_build_omp_for (s1, gimple_omp_for_clauses (stmt),
                                       gimple_omp_for_collapse (stmt), s2);
          {
            size_t i;
            for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
              {
                gimple_omp_for_set_index (copy, i,
                                          gimple_omp_for_index (stmt, i));
                gimple_omp_for_set_initial (copy, i,
                                            gimple_omp_for_initial (stmt, i));
                gimple_omp_for_set_final (copy, i,
                                          gimple_omp_for_final (stmt, i));
                gimple_omp_for_set_incr (copy, i,
                                         gimple_omp_for_incr (stmt, i));
                gimple_omp_for_set_cond (copy, i,
                                         gimple_omp_for_cond (stmt, i));
              }
          }
          break;

        case GIMPLE_OMP_MASTER:
          s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
          copy = gimple_build_omp_master (s1);
          break;

        case GIMPLE_OMP_ORDERED:
          s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
          copy = gimple_build_omp_ordered (s1);
          break;

        case GIMPLE_OMP_SECTION:
          s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
          copy = gimple_build_omp_section (s1);
          break;

        case GIMPLE_OMP_SECTIONS:
          s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
          copy = gimple_build_omp_sections
                   (s1, gimple_omp_sections_clauses (stmt));
          break;

        case GIMPLE_OMP_SINGLE:
          s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
          copy = gimple_build_omp_single
                   (s1, gimple_omp_single_clauses (stmt));
          break;

        case GIMPLE_OMP_CRITICAL:
          s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
          copy
            = gimple_build_omp_critical (s1, gimple_omp_critical_name (stmt));
          break;

        default:
          gcc_unreachable ();
        }
    }
  else
    {
      if (gimple_assign_copy_p (stmt)
          && gimple_assign_lhs (stmt) == gimple_assign_rhs1 (stmt)
          && auto_var_in_fn_p (gimple_assign_lhs (stmt), id->src_fn))
        {
          /* Here we handle statements that are not completely rewritten.
             First we detect some inlining-induced bogosities for
             discarding.  */

          /* Some assignments VAR = VAR; don't generate any rtl code
             and thus don't count as variable modification.  Avoid
             keeping bogosities like 0 = 0.  */
          tree decl = gimple_assign_lhs (stmt), value;
          tree *n;

          n = (tree *) pointer_map_contains (id->decl_map, decl);
          if (n)
            {
              value = *n;
              STRIP_TYPE_NOPS (value);
              if (TREE_CONSTANT (value) || TREE_READONLY (value))
                return gimple_build_nop ();
            }
        }

      /* Create a new deep copy of the statement.  */
      copy = gimple_copy (stmt);
    }

  /* If STMT has a block defined, map it to the newly constructed
     block.  When inlining we want statements without a block to
     appear in the block of the function call.  */
  new_block = id->block;
  if (gimple_block (copy))
    {
      tree *n;
      n = (tree *) pointer_map_contains (id->decl_map, gimple_block (copy));
      gcc_assert (n);
      new_block = *n;
    }

  gimple_set_block (copy, new_block);

  /* Remap all the operands in COPY.  */
  memset (&wi, 0, sizeof (wi));
  wi.info = id;
  if (skip_first)
    walk_tree (gimple_op_ptr (copy, 1), remap_gimple_op_r, &wi, NULL);
  else
    walk_gimple_op (copy, remap_gimple_op_r, &wi);

  /* Clear the copied virtual operands.  We are not remapping them here
     but are going to recreate them from scratch.  */
  if (gimple_has_mem_ops (copy))
    {
      gimple_set_vdef (copy, NULL_TREE);
      gimple_set_vuse (copy, NULL_TREE);
    }

  /* We have to handle EH region remapping of GIMPLE_RESX specially because
     the region number is not an operand.  */
  if (gimple_code (stmt) == GIMPLE_RESX && id->eh_region_offset)
    {
      gimple_resx_set_region (copy, gimple_resx_region (stmt) + id->eh_region_offset);
    }
  return copy;
}
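/* For illustration: a callee statement "return val;" with VAL not the
   RESULT_DECL is rebuilt above as "id->retvar = val;".  Because
   ID->retvar is already a caller-side tree, SKIP_FIRST makes the
   operand walk visit only operand 1 (the right-hand side) and leave
   the substituted left-hand side alone.  */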
/* Copy basic block, scale profile accordingly.  Edges will be taken care of
   later.  */

static basic_block
copy_bb (copy_body_data *id, basic_block bb, int frequency_scale,
         gcov_type count_scale)
{
  gimple_stmt_iterator gsi, copy_gsi, seq_gsi;
  basic_block copy_basic_block;
  tree decl;

  /* create_basic_block() will append every new block to
     basic_block_info automatically.  */
  copy_basic_block = create_basic_block (NULL, (void *) 0,
                                         (basic_block) bb->prev_bb->aux);
  copy_basic_block->count = bb->count * count_scale / REG_BR_PROB_BASE;

  /* We are going to rebuild frequencies from scratch.  These values
     are of only minor importance for driving canonicalize_loop_headers.  */
  copy_basic_block->frequency = ((gcov_type)bb->frequency
                                 * frequency_scale / REG_BR_PROB_BASE);

  if (copy_basic_block->frequency > BB_FREQ_MAX)
    copy_basic_block->frequency = BB_FREQ_MAX;

  copy_gsi = gsi_start_bb (copy_basic_block);

  for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      gimple stmt = gsi_stmt (gsi);
      gimple orig_stmt = stmt;

      id->regimplify = false;
      stmt = remap_gimple_stmt (stmt, id);
      if (gimple_nop_p (stmt))
        continue;

      gimple_duplicate_stmt_histograms (cfun, stmt, id->src_cfun, orig_stmt);
      seq_gsi = copy_gsi;

      /* With return slot optimization we can end up with
         non-gimple (foo *)&this->m, fix that here.  */
      if (is_gimple_assign (stmt)
          && gimple_assign_rhs_code (stmt) == NOP_EXPR
          && !is_gimple_val (gimple_assign_rhs1 (stmt)))
        {
          tree new_rhs;
          new_rhs = force_gimple_operand_gsi (&seq_gsi,
                                              gimple_assign_rhs1 (stmt),
                                              true, NULL, true, GSI_SAME_STMT);
          gimple_assign_set_rhs1 (stmt, new_rhs);
          id->regimplify = false;
        }

      gsi_insert_after (&seq_gsi, stmt, GSI_NEW_STMT);

      if (id->regimplify)
        gimple_regimplify_operands (stmt, &seq_gsi);

      /* If copy_basic_block has been empty at the start of this iteration,
         call gsi_start_bb again to get at the newly added statements.  */
      if (gsi_end_p (copy_gsi))
        copy_gsi = gsi_start_bb (copy_basic_block);
      else
        gsi_next (&copy_gsi);

      /* Process the new statement.  The call to gimple_regimplify_operands
         possibly turned the statement into multiple statements, we
         need to process all of them.  */
      do
        {
          tree fn;

          stmt = gsi_stmt (copy_gsi);
          if (is_gimple_call (stmt)
              && gimple_call_va_arg_pack_p (stmt)
              && id->gimple_call)
            {
              /* __builtin_va_arg_pack () should be replaced by
                 all arguments corresponding to ... in the caller.  */
              tree p;
              gimple new_call;
              VEC(tree, heap) *argarray;
              size_t nargs = gimple_call_num_args (id->gimple_call);
              size_t n;

              for (p = DECL_ARGUMENTS (id->src_fn); p; p = TREE_CHAIN (p))
                nargs--;

              /* Create the new array of arguments.  */
              n = nargs + gimple_call_num_args (stmt);
              argarray = VEC_alloc (tree, heap, n);
              VEC_safe_grow (tree, heap, argarray, n);

              /* Copy all the arguments before '...'  */
              memcpy (VEC_address (tree, argarray),
                      gimple_call_arg_ptr (stmt, 0),
                      gimple_call_num_args (stmt) * sizeof (tree));

              /* Append the arguments passed in '...'  */
              memcpy (VEC_address (tree, argarray) + gimple_call_num_args (stmt),
                      gimple_call_arg_ptr (id->gimple_call, 0)
                      + (gimple_call_num_args (id->gimple_call) - nargs),
                      nargs * sizeof (tree));

              new_call = gimple_build_call_vec (gimple_call_fn (stmt),
                                                argarray);

              VEC_free (tree, heap, argarray);

              /* Copy all GIMPLE_CALL flags, location and block, except
                 GF_CALL_VA_ARG_PACK.  */
              gimple_call_copy_flags (new_call, stmt);
              gimple_call_set_va_arg_pack (new_call, false);
              gimple_set_location (new_call, gimple_location (stmt));
              gimple_set_block (new_call, gimple_block (stmt));
              gimple_call_set_lhs (new_call, gimple_call_lhs (stmt));

              gsi_replace (&copy_gsi, new_call, false);
              gimple_set_bb (stmt, NULL);
              stmt = new_call;
            }
          else if (is_gimple_call (stmt)
                   && id->gimple_call
                   && (decl = gimple_call_fndecl (stmt))
                   && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
                   && DECL_FUNCTION_CODE (decl) == BUILT_IN_VA_ARG_PACK_LEN)
            {
              /* __builtin_va_arg_pack_len () should be replaced by
                 the number of anonymous arguments.  */
              size_t nargs = gimple_call_num_args (id->gimple_call);
              tree count, p;
              gimple new_stmt;

              for (p = DECL_ARGUMENTS (id->src_fn); p; p = TREE_CHAIN (p))
                nargs--;

              count = build_int_cst (integer_type_node, nargs);
              new_stmt = gimple_build_assign (gimple_call_lhs (stmt), count);
              gsi_replace (&copy_gsi, new_stmt, false);
              stmt = new_stmt;
            }

          /* Statements produced by inlining can be unfolded, especially
             when we constant propagated some operands.  We can't fold
             them right now for two reasons:
             1) folding requires SSA_NAME_DEF_STMTs to be correct
             2) we can't change function calls to builtins.
             So we just mark statements for later folding.  We mark
             all new statements, instead of just statements that have
             changed by some nontrivial substitution, so that even
             statements made foldable indirectly are updated.  If this
             turns out to be expensive, copy_body can be told to watch
             for nontrivial changes.  */
          if (id->statements_to_fold)
            pointer_set_insert (id->statements_to_fold, stmt);

          /* We're duplicating a CALL_EXPR.  Find any corresponding
             callgraph edges and update or duplicate them.  */
          if (is_gimple_call (stmt))
            {
              struct cgraph_edge *edge = cgraph_edge (id->src_node, orig_stmt);
              int flags;

              switch (id->transform_call_graph_edges)
                {
                case CB_CGE_DUPLICATE:
                  if (edge)
                    cgraph_clone_edge (edge, id->dst_node, stmt,
                                       REG_BR_PROB_BASE, 1,
                                       edge->frequency, true);
                  break;

                case CB_CGE_MOVE_CLONES:
                  cgraph_set_call_stmt_including_clones (id->dst_node, orig_stmt, stmt);
                  break;

                case CB_CGE_MOVE:
                  if (edge)
                    cgraph_set_call_stmt (edge, stmt);
                  break;

                default:
                  gcc_unreachable ();
                }

              edge = cgraph_edge (id->src_node, orig_stmt);
              /* Constant propagation on arguments done during inlining
                 may create new direct calls.  Produce an edge for it.  */
              if ((!edge
                   || (edge->indirect_call
                       && id->transform_call_graph_edges == CB_CGE_MOVE_CLONES))
                  && is_gimple_call (stmt)
                  && (fn = gimple_call_fndecl (stmt)) != NULL)
                {
                  struct cgraph_node *dest = cgraph_node (fn);

                  /* We have a missing edge in the callgraph.  This can happen
                     when previous inlining turned an indirect call into a
                     direct call by constant propagating arguments.  In all
                     other cases we hit a bug (incorrect node sharing is the
                     most common reason for missing edges).  */
                  gcc_assert (dest->needed || !dest->analyzed);
                  if (id->transform_call_graph_edges == CB_CGE_MOVE_CLONES)
                    cgraph_create_edge_including_clones (id->dst_node, dest, stmt,
                                                         bb->count,
                                                         compute_call_stmt_bb_frequency (id->dst_node->decl, bb),
                                                         bb->loop_depth,
                                                         CIF_ORIGINALLY_INDIRECT_CALL);
                  else
                    cgraph_create_edge (id->dst_node, dest, stmt,
                                        bb->count, CGRAPH_FREQ_BASE,
                                        bb->loop_depth)->inline_failed
                      = CIF_ORIGINALLY_INDIRECT_CALL;
                  if (dump_file)
                    {
                      fprintf (dump_file, "Created new direct edge to %s",
                               cgraph_node_name (dest));
                    }
                }

              flags = gimple_call_flags (stmt);

              if (flags & ECF_MAY_BE_ALLOCA)
                cfun->calls_alloca = true;
              if (flags & ECF_RETURNS_TWICE)
                cfun->calls_setjmp = true;
            }

          /* If you think we can abort here, you are wrong.
             There is no region 0 in gimple.  */
          gcc_assert (lookup_stmt_eh_region_fn (id->src_cfun, orig_stmt) != 0);

          if (stmt_could_throw_p (stmt)
              /* When we are cloning for inlining, we are supposed to
                 construct a clone that calls precisely the same functions
                 as the original.  However IPA optimizers might've proved
                 earlier that some function calls are non-trapping, which
                 might render some basic blocks dead and unreachable.

                 We can't update SSA with unreachable blocks in the CFG and
                 thus we prevent the scenario by preserving even the "dead"
                 eh edges until the point they are later removed by the
                 fixup_cfg pass.  */
              || (id->transform_call_graph_edges == CB_CGE_MOVE_CLONES
                  && lookup_stmt_eh_region_fn (id->src_cfun, orig_stmt) > 0))
            {
              int region = lookup_stmt_eh_region_fn (id->src_cfun, orig_stmt);

              /* Add an entry for the copied tree in the EH hashtable.
                 When cloning or versioning, use the hashtable in
                 cfun, and just copy the EH number.  When inlining, use the
                 hashtable in the caller, and adjust the region number.  */
              if (region > 0)
                add_stmt_to_eh_region (stmt, region + id->eh_region_offset);

              /* If this tree doesn't have a region associated with it,
                 and there is a "current region,"
                 then associate this tree with the current region
                 and add edges associated with this region.  */
              if (lookup_stmt_eh_region_fn (id->src_cfun, orig_stmt) <= 0
                  && id->eh_region > 0
                  && stmt_could_throw_p (stmt))
                add_stmt_to_eh_region (stmt, id->eh_region);
            }

          if (gimple_in_ssa_p (cfun))
            {
              ssa_op_iter i;
              tree def;

              find_new_referenced_vars (gsi_stmt (copy_gsi));
              FOR_EACH_SSA_TREE_OPERAND (def, stmt, i, SSA_OP_DEF)
                if (TREE_CODE (def) == SSA_NAME)
                  SSA_NAME_DEF_STMT (def) = stmt;
            }

          gsi_next (&copy_gsi);
        }
      while (!gsi_end_p (copy_gsi));

      copy_gsi = gsi_last_bb (copy_basic_block);
    }

  return copy_basic_block;
}
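/* For illustration: suppose we inline foo, declared as
   foo (int a, ...), at the call foo (2, 3, 4), and foo's body contains

     bar (1, __builtin_va_arg_pack ());

   The code above rebuilds the inner call as bar (1, 3, 4), and a use
   of __builtin_va_arg_pack_len () in foo would become the constant 2.
   The functions and values here are invented for this sketch.  */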
/* Inserting a Single Entry Multiple Exit region in SSA form into code in SSA
   form is quite easy, since the dominator relationship for old basic blocks
   does not change.

   There is, however, an exception: inlining might change the dominator
   relation across EH edges from basic blocks within the inlined function
   to landing pads in the function we inline into.

   The function fills in PHI_RESULTs of such PHI nodes if they refer
   to gimple regs.  Otherwise, the function marks the PHI_RESULT of such
   PHI nodes for renaming.  For non-gimple regs, renaming is safe: the
   EH edges are abnormal and SSA_NAME_OCCURS_IN_ABNORMAL_PHI must be
   set, and this means that there will be no overlapping live ranges
   for the underlying symbol.

   This might change in the future if we allow redirecting of EH edges,
   and we might want to change the way we build the CFG pre-inlining to
   include all the possible edges then.  */
static void
update_ssa_across_abnormal_edges (basic_block bb, basic_block ret_bb,
                                  bool can_throw, bool nonlocal_goto)
{
  edge e;
  edge_iterator ei;

  FOR_EACH_EDGE (e, ei, bb->succs)
    if (!e->dest->aux
        || ((basic_block)e->dest->aux)->index == ENTRY_BLOCK)
      {
        gimple phi;
        gimple_stmt_iterator si;

        if (!nonlocal_goto)
          gcc_assert (e->flags & EDGE_EH);

        if (!can_throw)
          gcc_assert (!(e->flags & EDGE_EH));

        for (si = gsi_start_phis (e->dest); !gsi_end_p (si); gsi_next (&si))
          {
            edge re;

            phi = gsi_stmt (si);

            /* There shouldn't be any PHI nodes in the ENTRY_BLOCK.  */
            gcc_assert (!e->dest->aux);

            gcc_assert ((e->flags & EDGE_EH)
                        || SSA_NAME_OCCURS_IN_ABNORMAL_PHI (PHI_RESULT (phi)));

            if (!is_gimple_reg (PHI_RESULT (phi)))
              {
                mark_sym_for_renaming (SSA_NAME_VAR (PHI_RESULT (phi)));
                continue;
              }

            re = find_edge (ret_bb, e->dest);
            gcc_assert (re);
            gcc_assert ((re->flags & (EDGE_EH | EDGE_ABNORMAL))
                        == (e->flags & (EDGE_EH | EDGE_ABNORMAL)));

            SET_USE (PHI_ARG_DEF_PTR_FROM_EDGE (phi, e),
                     USE_FROM_PTR (PHI_ARG_DEF_PTR_FROM_EDGE (phi, re)));
          }
      }
}
/* Copy edges from BB into its copy constructed earlier, scale profile
   accordingly.  Edges will be taken care of later.  Assume aux
   pointers to point to the copies of each BB.  */

static void
copy_edges_for_bb (basic_block bb, gcov_type count_scale, basic_block ret_bb)
{
  basic_block new_bb = (basic_block) bb->aux;
  edge_iterator ei;
  edge old_edge;
  gimple_stmt_iterator si;
  int flags;

  /* Use the indices from the original blocks to create edges for the
     new ones.  */
  FOR_EACH_EDGE (old_edge, ei, bb->succs)
    if (!(old_edge->flags & EDGE_EH))
      {
        edge new_edge;

        flags = old_edge->flags;

        /* Return edges do get a FALLTHRU flag when they get inlined.  */
        if (old_edge->dest->index == EXIT_BLOCK && !old_edge->flags
            && old_edge->dest->aux != EXIT_BLOCK_PTR)
          flags |= EDGE_FALLTHRU;
        new_edge = make_edge (new_bb, (basic_block) old_edge->dest->aux, flags);
        new_edge->count = old_edge->count * count_scale / REG_BR_PROB_BASE;
        new_edge->probability = old_edge->probability;
      }

  if (bb->index == ENTRY_BLOCK || bb->index == EXIT_BLOCK)
    return;

  for (si = gsi_start_bb (new_bb); !gsi_end_p (si);)
    {
      gimple copy_stmt;
      bool can_throw, nonlocal_goto;

      copy_stmt = gsi_stmt (si);
      update_stmt (copy_stmt);
      if (gimple_in_ssa_p (cfun))
        mark_symbols_for_renaming (copy_stmt);

      /* Do this before the possible split_block.  */
      gsi_next (&si);

      /* If this tree could throw an exception, there are two
         cases where we need to add abnormal edge(s): the
         tree wasn't in a region and there is a "current
         region" in the caller; or the original tree had
         EH edges.  In both cases split the block after the tree,
         and add abnormal edge(s) as needed; we need both
         those from the callee and the caller.
         We check whether the copy can throw, because the const
         propagation can change an INDIRECT_REF which throws
         into a COMPONENT_REF which doesn't.  If the copy
         can throw, the original could also throw.  */
      can_throw = stmt_can_throw_internal (copy_stmt);
      nonlocal_goto = stmt_can_make_abnormal_goto (copy_stmt);

      if (can_throw || nonlocal_goto)
        {
          if (!gsi_end_p (si))
            /* Note that bb's predecessor edges aren't necessarily
               right at this point; split_block doesn't care.  */
            {
              edge e = split_block (new_bb, copy_stmt);

              new_bb = e->dest;
              new_bb->aux = e->src->aux;
              si = gsi_start_bb (new_bb);
            }
        }

      if (can_throw)
        make_eh_edges (copy_stmt);

      if (nonlocal_goto)
        make_abnormal_goto_edges (gimple_bb (copy_stmt), true);

      if ((can_throw || nonlocal_goto)
          && gimple_in_ssa_p (cfun))
        update_ssa_across_abnormal_edges (gimple_bb (copy_stmt), ret_bb,
                                          can_throw, nonlocal_goto);
    }
}
/* Copy the PHIs.  All blocks and edges are copied, some blocks
   were possibly split and new outgoing EH edges inserted.
   BB points to the block of the original function and AUX pointers link
   the original and newly copied blocks.  */

static void
copy_phis_for_bb (basic_block bb, copy_body_data *id)
{
  basic_block const new_bb = (basic_block) bb->aux;
  edge_iterator ei;
  gimple phi;
  gimple_stmt_iterator si;

  for (si = gsi_start (phi_nodes (bb)); !gsi_end_p (si); gsi_next (&si))
    {
      tree res, new_res;
      gimple new_phi;
      edge new_edge;

      phi = gsi_stmt (si);
      res = PHI_RESULT (phi);
      new_res = res;
      if (is_gimple_reg (res))
        {
          walk_tree (&new_res, copy_tree_body_r, id, NULL);
          SSA_NAME_DEF_STMT (new_res)
            = new_phi = create_phi_node (new_res, new_bb);
          FOR_EACH_EDGE (new_edge, ei, new_bb->preds)
            {
              edge const old_edge
                = find_edge ((basic_block) new_edge->src->aux, bb);
              tree arg = PHI_ARG_DEF_FROM_EDGE (phi, old_edge);
              tree new_arg = arg;
              tree block = id->block;
              id->block = NULL_TREE;
              walk_tree (&new_arg, copy_tree_body_r, id, NULL);
              id->block = block;
              gcc_assert (new_arg);
              /* With return slot optimization we can end up with
                 non-gimple (foo *)&this->m, fix that here.  */
              if (TREE_CODE (new_arg) != SSA_NAME
                  && TREE_CODE (new_arg) != FUNCTION_DECL
                  && !is_gimple_val (new_arg))
                {
                  gimple_seq stmts = NULL;
                  new_arg = force_gimple_operand (new_arg, &stmts, true, NULL);
                  gsi_insert_seq_on_edge_immediate (new_edge, stmts);
                }
              add_phi_arg (new_phi, new_arg, new_edge);
            }
        }
    }
}
1828 /* Wrapper for remap_decl so it can be used as a callback. */
1830 static tree
1831 remap_decl_1 (tree decl, void *data)
1833 return remap_decl (decl, (copy_body_data *) data);
1836 /* Build struct function and associated data structures for the new clone
1837 NEW_FNDECL to be built. CALLEE_FNDECL is the original. */
1839 static void
1840 initialize_cfun (tree new_fndecl, tree callee_fndecl, gcov_type count,
1841 int frequency)
1843 struct function *src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
1844 gcov_type count_scale, frequency_scale;
1846 if (ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count)
1847 count_scale = (REG_BR_PROB_BASE * count
1848 / ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count);
1849 else
1850 count_scale = 1;
1852 if (ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->frequency)
1853 frequency_scale = (REG_BR_PROB_BASE * frequency
1854 /
1855 ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->frequency);
1856 else
1857 frequency_scale = count_scale;
1859 /* Register specific tree functions. */
1860 gimple_register_cfg_hooks ();
1862 /* Get clean struct function. */
1863 push_struct_function (new_fndecl);
1865 /* We will rebuild these, so just sanity check that they are empty. */
1866 gcc_assert (VALUE_HISTOGRAMS (cfun) == NULL);
1867 gcc_assert (cfun->local_decls == NULL);
1868 gcc_assert (cfun->cfg == NULL);
1869 gcc_assert (cfun->decl == new_fndecl);
1871 /* Copy items we preserve during cloning. */
1872 cfun->static_chain_decl = src_cfun->static_chain_decl;
1873 cfun->nonlocal_goto_save_area = src_cfun->nonlocal_goto_save_area;
1874 cfun->function_end_locus = src_cfun->function_end_locus;
1875 cfun->curr_properties = src_cfun->curr_properties;
1876 cfun->last_verified = src_cfun->last_verified;
1877 if (src_cfun->ipa_transforms_to_apply)
1878 cfun->ipa_transforms_to_apply = VEC_copy (ipa_opt_pass, heap,
1879 src_cfun->ipa_transforms_to_apply);
1880 cfun->va_list_gpr_size = src_cfun->va_list_gpr_size;
1881 cfun->va_list_fpr_size = src_cfun->va_list_fpr_size;
1882 cfun->function_frequency = src_cfun->function_frequency;
1883 cfun->has_nonlocal_label = src_cfun->has_nonlocal_label;
1884 cfun->stdarg = src_cfun->stdarg;
1885 cfun->dont_save_pending_sizes_p = src_cfun->dont_save_pending_sizes_p;
1886 cfun->after_inlining = src_cfun->after_inlining;
1887 cfun->returns_struct = src_cfun->returns_struct;
1888 cfun->returns_pcc_struct = src_cfun->returns_pcc_struct;
1889 cfun->after_tree_profile = src_cfun->after_tree_profile;
1891 init_empty_tree_cfg ();
1893 ENTRY_BLOCK_PTR->count =
1894 (ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count * count_scale /
1895 REG_BR_PROB_BASE);
1896 ENTRY_BLOCK_PTR->frequency =
1897 (ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->frequency *
1898 frequency_scale / REG_BR_PROB_BASE);
1899 EXIT_BLOCK_PTR->count =
1900 (EXIT_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count * count_scale /
1901 REG_BR_PROB_BASE);
1902 EXIT_BLOCK_PTR->frequency =
1903 (EXIT_BLOCK_PTR_FOR_FUNCTION (src_cfun)->frequency *
1904 frequency_scale / REG_BR_PROB_BASE);
1905 if (src_cfun->eh)
1906 init_eh_for_function ();
1908 if (src_cfun->gimple_df)
1910 init_tree_ssa (cfun);
1911 cfun->gimple_df->in_ssa_p = true;
1912 init_ssa_operands ();
1914 pop_cfun ();
1917 /* Make a copy of the body of FN so that it can be inserted inline in
1918 another function. Walks FN via CFG, returns new fndecl. */
1920 static tree
1921 copy_cfg_body (copy_body_data * id, gcov_type count, int frequency,
1922 basic_block entry_block_map, basic_block exit_block_map)
1924 tree callee_fndecl = id->src_fn;
1925 /* Original cfun for the callee, doesn't change. */
1926 struct function *src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
1927 struct function *cfun_to_copy;
1928 basic_block bb;
1929 tree new_fndecl = NULL;
1930 gcov_type count_scale, frequency_scale;
1931 int last;
1933 if (ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count)
1934 count_scale = (REG_BR_PROB_BASE * count
1935 / ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count);
1936 else
1937 count_scale = 1;
1939 if (ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->frequency)
1940 frequency_scale = (REG_BR_PROB_BASE * frequency
1941 /
1942 ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->frequency);
1943 else
1944 frequency_scale = count_scale;
1946 /* Register specific tree functions. */
1947 gimple_register_cfg_hooks ();
1949 /* We must have a CFG at this point. */
1950 gcc_assert (ENTRY_BLOCK_PTR_FOR_FUNCTION
1951 (DECL_STRUCT_FUNCTION (callee_fndecl)));
1953 cfun_to_copy = id->src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
1955 ENTRY_BLOCK_PTR_FOR_FUNCTION (cfun_to_copy)->aux = entry_block_map;
1956 EXIT_BLOCK_PTR_FOR_FUNCTION (cfun_to_copy)->aux = exit_block_map;
1957 entry_block_map->aux = ENTRY_BLOCK_PTR_FOR_FUNCTION (cfun_to_copy);
1958 exit_block_map->aux = EXIT_BLOCK_PTR_FOR_FUNCTION (cfun_to_copy);
1960 /* Duplicate any exception-handling regions. */
1961 if (cfun->eh)
1963 id->eh_region_offset
1964 = duplicate_eh_regions (cfun_to_copy, remap_decl_1, id,
1965 0, id->eh_region);
1968 /* Use aux pointers to map the original blocks to their copies. */
1969 FOR_EACH_BB_FN (bb, cfun_to_copy)
1971 basic_block new_bb = copy_bb (id, bb, frequency_scale, count_scale);
1972 bb->aux = new_bb;
1973 new_bb->aux = bb;
1976 last = last_basic_block;
1978 /* Now that we've duplicated the blocks, duplicate their edges. */
1979 FOR_ALL_BB_FN (bb, cfun_to_copy)
1980 copy_edges_for_bb (bb, count_scale, exit_block_map);
1982 if (gimple_in_ssa_p (cfun))
1983 FOR_ALL_BB_FN (bb, cfun_to_copy)
1984 copy_phis_for_bb (bb, id);
1986 FOR_ALL_BB_FN (bb, cfun_to_copy)
1988 ((basic_block)bb->aux)->aux = NULL;
1989 bb->aux = NULL;
1992 /* Zero out AUX fields of blocks newly created during EH edge
1993 insertion. */
1994 for (; last < last_basic_block; last++)
1995 BASIC_BLOCK (last)->aux = NULL;
1996 entry_block_map->aux = NULL;
1997 exit_block_map->aux = NULL;
1999 return new_fndecl;
2002 /* Make a copy of the body of SRC_FN so that it can be inserted inline in
2003 another function. */
2005 static tree
2006 copy_tree_body (copy_body_data *id)
2008 tree fndecl = id->src_fn;
2009 tree body = DECL_SAVED_TREE (fndecl);
2011 walk_tree (&body, copy_tree_body_r, id, NULL);
2013 return body;
2016 static tree
2017 copy_body (copy_body_data *id, gcov_type count, int frequency,
2018 basic_block entry_block_map, basic_block exit_block_map)
2020 tree fndecl = id->src_fn;
2021 tree body;
2023 /* If this body has a CFG, walk CFG and copy. */
2024 gcc_assert (ENTRY_BLOCK_PTR_FOR_FUNCTION (DECL_STRUCT_FUNCTION (fndecl)));
2025 body = copy_cfg_body (id, count, frequency, entry_block_map, exit_block_map);
2027 return body;
2030 /* Return true if VALUE is an ADDR_EXPR of an automatic variable
2031 defined in function FN, or of a data member thereof. */
2033 static bool
2034 self_inlining_addr_expr (tree value, tree fn)
2036 tree var;
2038 if (TREE_CODE (value) != ADDR_EXPR)
2039 return false;
2041 var = get_base_address (TREE_OPERAND (value, 0));
2043 return var && auto_var_in_fn_p (var, fn);
2046 static void
2047 insert_init_stmt (basic_block bb, gimple init_stmt)
2049 /* If VAR represents a zero-sized variable, the assignment statement
2050 may result in no gimple statements. */
2051 if (init_stmt)
2053 gimple_stmt_iterator si = gsi_last_bb (bb);
2055 /* We can end up with init statements that store to a non-register
2056 from a rhs with a conversion. Handle that here by forcing the
2057 rhs into a temporary. gimple_regimplify_operands is not
2058 prepared to do this for us. */
2059 if (!is_gimple_reg (gimple_assign_lhs (init_stmt))
2060 && is_gimple_reg_type (TREE_TYPE (gimple_assign_lhs (init_stmt)))
2061 && gimple_assign_rhs_class (init_stmt) == GIMPLE_UNARY_RHS)
2063 tree rhs = build1 (gimple_assign_rhs_code (init_stmt),
2064 gimple_expr_type (init_stmt),
2065 gimple_assign_rhs1 (init_stmt));
2066 rhs = force_gimple_operand_gsi (&si, rhs, true, NULL_TREE, false,
2067 GSI_NEW_STMT);
2068 gimple_assign_set_rhs_code (init_stmt, TREE_CODE (rhs));
2069 gimple_assign_set_rhs1 (init_stmt, rhs);
2071 gsi_insert_after (&si, init_stmt, GSI_NEW_STMT);
2072 gimple_regimplify_operands (init_stmt, &si);
2073 mark_symbols_for_renaming (init_stmt);
2077 /* Initialize parameter P with VALUE. If needed, produce an init statement
2078 at the end of BB. When BB is NULL, we return the init statement to be
2079 output later. */
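/* As an illustration (names invented): when inlining
       int sq (int x) { return x * x; }
   at the call y = sq (a + 1), the PARM_DECL x is remapped to a new
   local, say x.42, and the statement x.42 = a + 1; is emitted at the
   end of BB, so the copied body computes x.42 * x.42. */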
2080 static gimple
2081 setup_one_parameter (copy_body_data *id, tree p, tree value, tree fn,
2082 basic_block bb, tree *vars)
2084 gimple init_stmt = NULL;
2085 tree var;
2086 tree rhs = value;
2087 tree def = (gimple_in_ssa_p (cfun)
2088 ? gimple_default_def (id->src_cfun, p) : NULL);
2090 if (value
2091 && value != error_mark_node
2092 && !useless_type_conversion_p (TREE_TYPE (p), TREE_TYPE (value)))
2094 if (fold_convertible_p (TREE_TYPE (p), value))
2095 rhs = fold_build1 (NOP_EXPR, TREE_TYPE (p), value);
2096 else
2097 /* ??? For valid (GIMPLE) programs we should not end up here.
2098 Still, if something has gone wrong and we end up with truly
2099 mismatched types here, fall back to using a VIEW_CONVERT_EXPR
2100 to not leak invalid GIMPLE to the following passes. */
2101 rhs = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (p), value);
2104 /* If the parameter is never assigned to and has no SSA_NAMEs created,
2105 we may not need to create a new variable here at all. Instead, we may
2106 be able to just use the argument value. */
2107 if (TREE_READONLY (p)
2108 && !TREE_ADDRESSABLE (p)
2109 && value && !TREE_SIDE_EFFECTS (value)
2110 && !def)
2112 /* We may produce non-gimple trees by adding NOPs or introduce
2113 invalid sharing when the operand is not really constant.
2114 It is not a big deal to prohibit constant propagation here, as
2115 we will constant propagate in the DOM1 pass anyway. */
2116 if (is_gimple_min_invariant (value)
2117 && useless_type_conversion_p (TREE_TYPE (p),
2118 TREE_TYPE (value))
2119 /* We have to be very careful about ADDR_EXPR. Make sure
2120 the base variable isn't a local variable of the inlined
2121 function, e.g., when doing recursive inlining, direct or
2122 mutually-recursive or whatever, which is why we don't
2123 just test whether fn == current_function_decl. */
2124 && ! self_inlining_addr_expr (value, fn))
2126 insert_decl_map (id, p, value);
2127 return NULL;
2131 /* Make an equivalent VAR_DECL. Note that we must NOT remap the type
2132 here since the type of this decl must be visible to the calling
2133 function. */
2134 var = copy_decl_to_var (p, id);
2135 if (gimple_in_ssa_p (cfun) && TREE_CODE (var) == VAR_DECL)
2137 get_var_ann (var);
2138 add_referenced_var (var);
2141 /* Register the VAR_DECL as the equivalent for the PARM_DECL;
2142 that way, when the PARM_DECL is encountered, it will be
2143 automatically replaced by the VAR_DECL. */
2144 insert_decl_map (id, p, var);
2146 /* Declare this new variable. */
2147 TREE_CHAIN (var) = *vars;
2148 *vars = var;
2150 /* Make gimplifier happy about this variable. */
2151 DECL_SEEN_IN_BIND_EXPR_P (var) = 1;
2153 /* Even if P was TREE_READONLY, the new VAR should not be.
2154 In the original code, we would have constructed a
2155 temporary, and then the function body would have never
2156 changed the value of P. However, now, we will be
2157 constructing VAR directly. The constructor body may
2158 change its value multiple times as it is being
2159 constructed. Therefore, it must not be TREE_READONLY;
2160 the back-end assumes that a TREE_READONLY variable is
2161 assigned to only once. */
2162 if (TYPE_NEEDS_CONSTRUCTING (TREE_TYPE (p)))
2163 TREE_READONLY (var) = 0;
2165 /* If there is no setup required and we are in SSA, take the easy route
2166 replacing all SSA names representing the function parameter by the
2167 SSA name passed to the function.
2169 We need to construct a map for the variable anyway, as it might be used
2170 in different SSA names when the parameter is set in the function.
2172 Do the replacement at -O0 for const arguments replaced by a constant.
2173 This is important for builtin_constant_p and other constructs requiring
2174 a constant argument to be visible in the inlined function body.
2176 FIXME: This usually kills the last connection between the inlined
2177 function parameter and the actual value in debug info. Can we do
2178 better here? If we just inserted the statement, copy propagation
2179 would kill it anyway as it always did in older versions of GCC.
2181 We might want to introduce a notion that a single SSA_NAME might
2182 represent multiple variables for purposes of debugging. */
2183 if (gimple_in_ssa_p (cfun) && rhs && def && is_gimple_reg (p)
2184 && (optimize
2185 || (TREE_READONLY (p)
2186 && is_gimple_min_invariant (rhs)))
2187 && (TREE_CODE (rhs) == SSA_NAME
2188 || is_gimple_min_invariant (rhs))
2189 && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (def))
2191 insert_decl_map (id, def, rhs);
2192 return NULL;
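/* A sketch of the shortcut above (invented names): when inlining
   f (5) and the default definition of the read-only parameter p is
   p_1, we record the mapping p_1 -> 5, so uses of p_1 in the copied
   body become the constant 5 and no initialization statement is
   emitted. */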
2195 /* If the value of the argument is never used, we don't care about
2196 initializing it. */
2197 if (optimize && gimple_in_ssa_p (cfun) && !def && is_gimple_reg (p))
2199 gcc_assert (!value || !TREE_SIDE_EFFECTS (value));
2200 return NULL;
2203 /* Initialize this VAR_DECL from the equivalent argument. Convert
2204 the argument to the proper type in case it was promoted. */
2205 if (value)
2207 if (rhs == error_mark_node)
2209 insert_decl_map (id, p, var);
2210 return NULL;
2213 STRIP_USELESS_TYPE_CONVERSION (rhs);
2215 /* We want to use MODIFY_EXPR, not INIT_EXPR here so that we
2216 keep our trees in gimple form. */
2217 if (def && gimple_in_ssa_p (cfun) && is_gimple_reg (p))
2219 def = remap_ssa_name (def, id);
2220 init_stmt = gimple_build_assign (def, rhs);
2221 SSA_NAME_IS_DEFAULT_DEF (def) = 0;
2222 set_default_def (var, NULL);
2224 else
2225 init_stmt = gimple_build_assign (var, rhs);
2227 if (bb && init_stmt)
2228 insert_init_stmt (bb, init_stmt);
2230 return init_stmt;
2233 /* Generate code to initialize the parameters of the function at the
2234 top of the stack in ID from the GIMPLE_CALL STMT. */
2236 static void
2237 initialize_inlined_parameters (copy_body_data *id, gimple stmt,
2238 tree fn, basic_block bb)
2240 tree parms;
2241 size_t i;
2242 tree p;
2243 tree vars = NULL_TREE;
2244 tree static_chain = gimple_call_chain (stmt);
2246 /* Figure out what the parameters are. */
2247 parms = DECL_ARGUMENTS (fn);
2249 /* Loop through the parameter declarations, replacing each with an
2250 equivalent VAR_DECL, appropriately initialized. */
2251 for (p = parms, i = 0; p; p = TREE_CHAIN (p), i++)
2253 tree val;
2254 val = i < gimple_call_num_args (stmt) ? gimple_call_arg (stmt, i) : NULL;
2255 setup_one_parameter (id, p, val, fn, bb, &vars);
2258 /* Initialize the static chain. */
2259 p = DECL_STRUCT_FUNCTION (fn)->static_chain_decl;
2260 gcc_assert (fn != current_function_decl);
2261 if (p)
2263 /* No static chain? Seems like a bug in tree-nested.c. */
2264 gcc_assert (static_chain);
2266 setup_one_parameter (id, p, static_chain, fn, bb, &vars);
2269 declare_inline_vars (id->block, vars);
2273 /* Declare a return variable to replace the RESULT_DECL for the
2274 function we are calling.
2278 RETURN_SLOT, if non-null, is the place where the result is to be
2279 stored. It is set only for CALL_EXPR_RETURN_SLOT_OPT. MODIFY_DEST,
2280 if non-null, was the LHS of the MODIFY_EXPR to which this call is the RHS.
2282 The return value is a (possibly null) value that is the result of the
2283 function as seen by the callee. *USE_P is a (possibly null) value that
2284 holds the result as seen by the caller. */
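/* Two illustrative situations (details depend on target and front
   end): for s = make_s () returning an aggregate with the return
   slot optimization, RETURN_SLOT is s and the RESULT_DECL is mapped
   onto it directly; for a scalar t = f (), MODIFY_DEST is t and may
   be reused as the return variable, unless doing so would make it
   addressable, in which case a fresh temporary is declared. */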
2286 static tree
2287 declare_return_variable (copy_body_data *id, tree return_slot, tree modify_dest,
2288 tree *use_p)
2290 tree callee = id->src_fn;
2291 tree caller = id->dst_fn;
2292 tree result = DECL_RESULT (callee);
2293 tree callee_type = TREE_TYPE (result);
2294 tree caller_type = TREE_TYPE (TREE_TYPE (callee));
2295 tree var, use;
2297 /* We don't need to do anything for functions that don't return
2298 anything. */
2299 if (!result || VOID_TYPE_P (callee_type))
2301 *use_p = NULL_TREE;
2302 return NULL_TREE;
2305 /* If there was a return slot, then the return value is the
2306 dereferenced address of that object. */
2307 if (return_slot)
2309 /* The front end shouldn't have used both return_slot and
2310 a modify expression. */
2311 gcc_assert (!modify_dest);
2312 if (DECL_BY_REFERENCE (result))
2314 tree return_slot_addr = build_fold_addr_expr (return_slot);
2315 STRIP_USELESS_TYPE_CONVERSION (return_slot_addr);
2317 /* We are going to construct *&return_slot and we can't do that
2318 for variables that are not believed to be addressable.
2320 FIXME: This check can possibly trigger, because values returned
2321 via the return slot optimization are not believed to have their
2322 address taken by alias analysis. */
2323 gcc_assert (TREE_CODE (return_slot) != SSA_NAME);
2324 if (gimple_in_ssa_p (cfun))
2326 HOST_WIDE_INT bitsize;
2327 HOST_WIDE_INT bitpos;
2328 tree offset;
2329 enum machine_mode mode;
2330 int unsignedp;
2331 int volatilep;
2332 tree base;
2333 base = get_inner_reference (return_slot, &bitsize, &bitpos,
2334 &offset,
2335 &mode, &unsignedp, &volatilep,
2336 false);
2337 if (TREE_CODE (base) == INDIRECT_REF)
2338 base = TREE_OPERAND (base, 0);
2339 if (TREE_CODE (base) == SSA_NAME)
2340 base = SSA_NAME_VAR (base);
2341 mark_sym_for_renaming (base);
2343 var = return_slot_addr;
2345 else
2347 var = return_slot;
2348 gcc_assert (TREE_CODE (var) != SSA_NAME);
2349 TREE_ADDRESSABLE (var) |= TREE_ADDRESSABLE (result);
2351 if ((TREE_CODE (TREE_TYPE (result)) == COMPLEX_TYPE
2352 || TREE_CODE (TREE_TYPE (result)) == VECTOR_TYPE)
2353 && !DECL_GIMPLE_REG_P (result)
2354 && DECL_P (var))
2355 DECL_GIMPLE_REG_P (var) = 0;
2356 use = NULL;
2357 goto done;
2360 /* All types requiring non-trivial constructors should have been handled. */
2361 gcc_assert (!TREE_ADDRESSABLE (callee_type));
2363 /* Attempt to avoid creating a new temporary variable. */
2364 if (modify_dest
2365 && TREE_CODE (modify_dest) != SSA_NAME)
2367 bool use_it = false;
2369 /* We can't use MODIFY_DEST if there's type promotion involved. */
2370 if (!useless_type_conversion_p (callee_type, caller_type))
2371 use_it = false;
2373 /* ??? If we're assigning to a variable sized type, then we must
2374 reuse the destination variable, because we've no good way to
2375 create variable sized temporaries at this point. */
2376 else if (TREE_CODE (TYPE_SIZE_UNIT (caller_type)) != INTEGER_CST)
2377 use_it = true;
2379 /* If the callee cannot possibly modify MODIFY_DEST, then we can
2380 reuse it as the result of the call directly. Don't do this if
2381 it would promote MODIFY_DEST to addressable. */
2382 else if (TREE_ADDRESSABLE (result))
2383 use_it = false;
2384 else
2386 tree base_m = get_base_address (modify_dest);
2388 /* If the base isn't a decl, then it's a pointer, and we don't
2389 know where that's going to go. */
2390 if (!DECL_P (base_m))
2391 use_it = false;
2392 else if (is_global_var (base_m))
2393 use_it = false;
2394 else if ((TREE_CODE (TREE_TYPE (result)) == COMPLEX_TYPE
2395 || TREE_CODE (TREE_TYPE (result)) == VECTOR_TYPE)
2396 && !DECL_GIMPLE_REG_P (result)
2397 && DECL_GIMPLE_REG_P (base_m))
2398 use_it = false;
2399 else if (!TREE_ADDRESSABLE (base_m))
2400 use_it = true;
2403 if (use_it)
2405 var = modify_dest;
2406 use = NULL;
2407 goto done;
2411 gcc_assert (TREE_CODE (TYPE_SIZE_UNIT (callee_type)) == INTEGER_CST);
2413 var = copy_result_decl_to_var (result, id);
2414 if (gimple_in_ssa_p (cfun))
2416 get_var_ann (var);
2417 add_referenced_var (var);
2420 DECL_SEEN_IN_BIND_EXPR_P (var) = 1;
2421 DECL_STRUCT_FUNCTION (caller)->local_decls
2422 = tree_cons (NULL_TREE, var,
2423 DECL_STRUCT_FUNCTION (caller)->local_decls);
2425 /* Do not have the rest of GCC warn about this variable as it should
2426 not be visible to the user. */
2427 TREE_NO_WARNING (var) = 1;
2429 declare_inline_vars (id->block, var);
2431 /* Build the use expr. If the return type of the function was
2432 promoted, convert it back to the expected type. */
2433 use = var;
2434 if (!useless_type_conversion_p (caller_type, TREE_TYPE (var)))
2435 use = fold_convert (caller_type, var);
2437 STRIP_USELESS_TYPE_CONVERSION (use);
2439 if (DECL_BY_REFERENCE (result))
2441 TREE_ADDRESSABLE (var) = 1;
2442 var = build_fold_addr_expr (var);
2445 done:
2446 /* Register the VAR_DECL as the equivalent for the RESULT_DECL; that
2447 way, when the RESULT_DECL is encountered, it will be
2448 automatically replaced by the VAR_DECL. */
2449 insert_decl_map (id, result, var);
2451 /* Remember this so we can ignore it in remap_decls. */
2452 id->retvar = var;
2454 *use_p = use;
2455 return var;
2458 /* Callback through walk_tree. Determine if a DECL_INITIAL makes reference
2459 to a local label. */
2461 static tree
2462 has_label_address_in_static_1 (tree *nodep, int *walk_subtrees, void *fnp)
2464 tree node = *nodep;
2465 tree fn = (tree) fnp;
2467 if (TREE_CODE (node) == LABEL_DECL && DECL_CONTEXT (node) == fn)
2468 return node;
2470 if (TYPE_P (node))
2471 *walk_subtrees = 0;
2473 return NULL_TREE;
2476 /* Callback through walk_tree. Determine if we've got an aggregate
2477 type that we can't support; return non-null if so. */
2479 static tree
2480 cannot_copy_type_1 (tree *nodep, int *walk_subtrees ATTRIBUTE_UNUSED,
2481 void *data ATTRIBUTE_UNUSED)
2483 tree t, node = *nodep;
2485 if (TREE_CODE (node) == RECORD_TYPE || TREE_CODE (node) == UNION_TYPE)
2487 /* We cannot inline a function of the form
2489 void F (int i) { struct S { int ar[i]; } s; }
2491 Attempting to do so produces a catch-22.
2492 If walk_tree examines the TYPE_FIELDS chain of RECORD_TYPE/
2493 UNION_TYPE nodes, then it goes into infinite recursion on a
2494 structure containing a pointer to its own type. If it doesn't,
2495 then the type node for S doesn't get adjusted properly when
2496 F is inlined.
2498 ??? This is likely no longer true, but it's too late in the 4.0
2499 cycle to try to find out. This should be checked for 4.1. */
2500 for (t = TYPE_FIELDS (node); t; t = TREE_CHAIN (t))
2501 if (variably_modified_type_p (TREE_TYPE (t), NULL))
2502 return node;
2505 return NULL_TREE;
2509 /* Determine if the function can be copied. If so return NULL. If
2510 not return a string describing the reason for failure. */
2512 static const char *
2513 copy_forbidden (struct function *fun, tree fndecl)
2515 const char *reason = fun->cannot_be_copied_reason;
2516 tree step;
2518 /* Only examine the function once. */
2519 if (fun->cannot_be_copied_set)
2520 return reason;
2522 /* We cannot copy a function that receives a non-local goto
2523 because we cannot remap the destination label used in the
2524 function that is performing the non-local goto. */
2525 /* ??? Actually, this should be possible, if we work at it.
2526 No doubt there's just a handful of places that simply
2527 assume it doesn't happen and don't substitute properly. */
2528 if (fun->has_nonlocal_label)
2530 reason = G_("function %q+F can never be copied "
2531 "because it receives a non-local goto");
2532 goto fail;
2535 for (step = fun->local_decls; step; step = TREE_CHAIN (step))
2537 tree decl = TREE_VALUE (step);
2539 if (TREE_CODE (decl) == VAR_DECL
2540 && TREE_STATIC (decl)
2541 && !DECL_EXTERNAL (decl)
2542 && DECL_INITIAL (decl)
2543 && walk_tree_without_duplicates (&DECL_INITIAL (decl),
2544 has_label_address_in_static_1,
2545 fndecl))
2547 reason = G_("function %q+F can never be copied because it saves "
2548 "address of local label in a static variable");
2549 goto fail;
2552 if (!TREE_STATIC (decl) && !DECL_EXTERNAL (decl)
2553 && variably_modified_type_p (TREE_TYPE (decl), NULL)
2554 && walk_tree_without_duplicates (&TREE_TYPE (decl),
2555 cannot_copy_type_1, NULL))
2557 reason = G_("function %q+F can never be copied "
2558 "because it uses variable sized variables");
2559 goto fail;
2563 fail:
2564 fun->cannot_be_copied_reason = reason;
2565 fun->cannot_be_copied_set = true;
2566 return reason;
2570 static const char *inline_forbidden_reason;
2572 /* A callback for walk_gimple_seq to handle statements. Returns non-null
2573 iff a function cannot be inlined. Also sets the reason why. */
2575 static tree
2576 inline_forbidden_p_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
2577 struct walk_stmt_info *wip)
2579 tree fn = (tree) wip->info;
2580 tree t;
2581 gimple stmt = gsi_stmt (*gsi);
2583 switch (gimple_code (stmt))
2585 case GIMPLE_CALL:
2586 /* Refuse to inline an alloca call unless the user explicitly forced it, as
2587 this may change the program's memory overhead drastically when the
2588 function using alloca is called in a loop. In the GCC sources shipped
2589 in SPEC2000, inlining into schedule_block caused it to require 2GB of
2590 RAM instead of 256MB. */
2591 if (gimple_alloca_call_p (stmt)
2592 && !lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn)))
2594 inline_forbidden_reason
2595 = G_("function %q+F can never be inlined because it uses "
2596 "alloca (override using the always_inline attribute)");
2597 *handled_ops_p = true;
2598 return fn;
2601 t = gimple_call_fndecl (stmt);
2602 if (t == NULL_TREE)
2603 break;
2605 /* We cannot inline functions that call setjmp. */
2606 if (setjmp_call_p (t))
2608 inline_forbidden_reason
2609 = G_("function %q+F can never be inlined because it uses setjmp");
2610 *handled_ops_p = true;
2611 return t;
2614 if (DECL_BUILT_IN_CLASS (t) == BUILT_IN_NORMAL)
2615 switch (DECL_FUNCTION_CODE (t))
2617 /* We cannot inline functions that take a variable number of
2618 arguments. */
2619 case BUILT_IN_VA_START:
2620 case BUILT_IN_NEXT_ARG:
2621 case BUILT_IN_VA_END:
2622 inline_forbidden_reason
2623 = G_("function %q+F can never be inlined because it "
2624 "uses variable argument lists");
2625 *handled_ops_p = true;
2626 return t;
2628 case BUILT_IN_LONGJMP:
2629 /* We can't inline functions that call __builtin_longjmp at
2630 all. The non-local goto machinery really requires the
2631 destination be in a different function. If we allow the
2632 function calling __builtin_longjmp to be inlined into the
2633 function calling __builtin_setjmp, Things will Go Awry. */
2634 inline_forbidden_reason
2635 = G_("function %q+F can never be inlined because "
2636 "it uses setjmp-longjmp exception handling");
2637 *handled_ops_p = true;
2638 return t;
2640 case BUILT_IN_NONLOCAL_GOTO:
2641 /* Similarly. */
2642 inline_forbidden_reason
2643 = G_("function %q+F can never be inlined because "
2644 "it uses non-local goto");
2645 *handled_ops_p = true;
2646 return t;
2648 case BUILT_IN_RETURN:
2649 case BUILT_IN_APPLY_ARGS:
2650 /* If a __builtin_apply_args caller would be inlined,
2651 it would be saving arguments of the function it has
2652 been inlined into. Similarly __builtin_return would
2653 return from the function it has been inlined into. */
2654 inline_forbidden_reason
2655 = G_("function %q+F can never be inlined because "
2656 "it uses __builtin_return or __builtin_apply_args");
2657 *handled_ops_p = true;
2658 return t;
2660 default:
2661 break;
2663 break;
2665 case GIMPLE_GOTO:
2666 t = gimple_goto_dest (stmt);
2668 /* We will not inline a function which uses computed goto. The
2669 addresses of its local labels, which may be tucked into
2670 global storage, are of course not constant across
2671 instantiations, which causes unexpected behavior. */
2672 if (TREE_CODE (t) != LABEL_DECL)
2674 inline_forbidden_reason
2675 = G_("function %q+F can never be inlined "
2676 "because it contains a computed goto");
2677 *handled_ops_p = true;
2678 return t;
2680 break;
2682 default:
2683 break;
2686 *handled_ops_p = false;
2687 return NULL_TREE;
2690 /* Return true if FNDECL is a function that cannot be inlined into
2691 another one. */
2693 static bool
2694 inline_forbidden_p (tree fndecl)
2696 struct function *fun = DECL_STRUCT_FUNCTION (fndecl);
2697 struct walk_stmt_info wi;
2698 struct pointer_set_t *visited_nodes;
2699 basic_block bb;
2700 bool forbidden_p = false;
2702 /* First check for shared reasons not to copy the code. */
2703 inline_forbidden_reason = copy_forbidden (fun, fndecl);
2704 if (inline_forbidden_reason != NULL)
2705 return true;
2707 /* Next, walk the statements of the function looking for
2708 constructs we can't handle, or that are non-optimal for inlining. */
2709 visited_nodes = pointer_set_create ();
2710 memset (&wi, 0, sizeof (wi));
2711 wi.info = (void *) fndecl;
2712 wi.pset = visited_nodes;
2714 FOR_EACH_BB_FN (bb, fun)
2716 gimple ret;
2717 gimple_seq seq = bb_seq (bb);
2718 ret = walk_gimple_seq (seq, inline_forbidden_p_stmt, NULL, &wi);
2719 forbidden_p = (ret != NULL);
2720 if (forbidden_p)
2721 break;
2724 pointer_set_destroy (visited_nodes);
2725 return forbidden_p;
2728 /* Returns nonzero if FN is a function that does not have any
2729 fundamental inline blocking properties. */
2731 bool
2732 tree_inlinable_function_p (tree fn)
2734 bool inlinable = true;
2735 bool do_warning;
2736 tree always_inline;
2738 /* If we've already decided this function shouldn't be inlined,
2739 there's no need to check again. */
2740 if (DECL_UNINLINABLE (fn))
2741 return false;
2743 /* We only warn for functions declared `inline' by the user. */
2744 do_warning = (warn_inline
2745 && DECL_DECLARED_INLINE_P (fn)
2746 && !DECL_NO_INLINE_WARNING_P (fn)
2747 && !DECL_IN_SYSTEM_HEADER (fn));
2749 always_inline = lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn));
2751 if (flag_no_inline
2752 && always_inline == NULL)
2754 if (do_warning)
2755 warning (OPT_Winline, "function %q+F can never be inlined because it "
2756 "is suppressed using -fno-inline", fn);
2757 inlinable = false;
2760 /* Don't auto-inline anything that might not be bound within
2761 this unit of translation. */
2762 else if (!DECL_DECLARED_INLINE_P (fn)
2763 && DECL_REPLACEABLE_P (fn))
2764 inlinable = false;
2766 else if (!function_attribute_inlinable_p (fn))
2768 if (do_warning)
2769 warning (OPT_Winline, "function %q+F can never be inlined because it "
2770 "uses attributes conflicting with inlining", fn);
2771 inlinable = false;
2774 else if (inline_forbidden_p (fn))
2776 /* See if we should warn about uninlinable functions. Previously,
2777 some of these warnings would be issued while trying to expand
2778 the function inline, but that would cause multiple warnings
2779 about functions that would for example call alloca. But since
2780 this is a property of the function, just one warning is enough.
2781 As a bonus we can now give more details about the reason why a
2782 function is not inlinable. */
2783 if (always_inline)
2784 sorry (inline_forbidden_reason, fn);
2785 else if (do_warning)
2786 warning (OPT_Winline, inline_forbidden_reason, fn);
2788 inlinable = false;
2791 /* Squirrel away the result so that we don't have to check again. */
2792 DECL_UNINLINABLE (fn) = !inlinable;
2794 return inlinable;
2797 /* Estimate the cost of a memory move. Use machine-dependent
2798 word size and take a possible memcpy call into account. */
2800 static int
2801 estimate_move_cost (tree type)
2803 HOST_WIDE_INT size;
2805 gcc_assert (!VOID_TYPE_P (type));
2807 size = int_size_in_bytes (type);
2809 if (size < 0 || size > MOVE_MAX_PIECES * MOVE_RATIO (!optimize_size))
2810 /* Cost of a memcpy call, 3 arguments and the call. */
2811 return 4;
2812 else
2813 return ((size + MOVE_MAX_PIECES - 1) / MOVE_MAX_PIECES);
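/* Illustrative arithmetic only, since MOVE_MAX_PIECES and MOVE_RATIO
   are target dependent: assuming MOVE_MAX_PIECES == 8 and a memcpy
   threshold of 64 bytes, a 24-byte structure costs (24 + 7) / 8 = 3
   units, while a 200-byte structure is over the threshold and costs
   the flat memcpy price of 4. */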
2816 /* Returns the cost of operation CODE, according to WEIGHTS. */
2818 static int
2819 estimate_operator_cost (enum tree_code code, eni_weights *weights,
2820 tree op1 ATTRIBUTE_UNUSED, tree op2)
2822 switch (code)
2824 /* These are "free" conversions, or their presumed cost
2825 is folded into other operations. */
2826 case RANGE_EXPR:
2827 CASE_CONVERT:
2828 case COMPLEX_EXPR:
2829 case PAREN_EXPR:
2830 return 0;
2832 /* Assign cost of 1 to usual operations.
2833 ??? We may consider mapping RTL costs to this. */
2834 case COND_EXPR:
2835 case VEC_COND_EXPR:
2837 case PLUS_EXPR:
2838 case POINTER_PLUS_EXPR:
2839 case MINUS_EXPR:
2840 case MULT_EXPR:
2842 case FIXED_CONVERT_EXPR:
2843 case FIX_TRUNC_EXPR:
2845 case NEGATE_EXPR:
2846 case FLOAT_EXPR:
2847 case MIN_EXPR:
2848 case MAX_EXPR:
2849 case ABS_EXPR:
2851 case LSHIFT_EXPR:
2852 case RSHIFT_EXPR:
2853 case LROTATE_EXPR:
2854 case RROTATE_EXPR:
2855 case VEC_LSHIFT_EXPR:
2856 case VEC_RSHIFT_EXPR:
2858 case BIT_IOR_EXPR:
2859 case BIT_XOR_EXPR:
2860 case BIT_AND_EXPR:
2861 case BIT_NOT_EXPR:
2863 case TRUTH_ANDIF_EXPR:
2864 case TRUTH_ORIF_EXPR:
2865 case TRUTH_AND_EXPR:
2866 case TRUTH_OR_EXPR:
2867 case TRUTH_XOR_EXPR:
2868 case TRUTH_NOT_EXPR:
2870 case LT_EXPR:
2871 case LE_EXPR:
2872 case GT_EXPR:
2873 case GE_EXPR:
2874 case EQ_EXPR:
2875 case NE_EXPR:
2876 case ORDERED_EXPR:
2877 case UNORDERED_EXPR:
2879 case UNLT_EXPR:
2880 case UNLE_EXPR:
2881 case UNGT_EXPR:
2882 case UNGE_EXPR:
2883 case UNEQ_EXPR:
2884 case LTGT_EXPR:
2886 case CONJ_EXPR:
2888 case PREDECREMENT_EXPR:
2889 case PREINCREMENT_EXPR:
2890 case POSTDECREMENT_EXPR:
2891 case POSTINCREMENT_EXPR:
2893 case REALIGN_LOAD_EXPR:
2895 case REDUC_MAX_EXPR:
2896 case REDUC_MIN_EXPR:
2897 case REDUC_PLUS_EXPR:
2898 case WIDEN_SUM_EXPR:
2899 case WIDEN_MULT_EXPR:
2900 case DOT_PROD_EXPR:
2902 case VEC_WIDEN_MULT_HI_EXPR:
2903 case VEC_WIDEN_MULT_LO_EXPR:
2904 case VEC_UNPACK_HI_EXPR:
2905 case VEC_UNPACK_LO_EXPR:
2906 case VEC_UNPACK_FLOAT_HI_EXPR:
2907 case VEC_UNPACK_FLOAT_LO_EXPR:
2908 case VEC_PACK_TRUNC_EXPR:
2909 case VEC_PACK_SAT_EXPR:
2910 case VEC_PACK_FIX_TRUNC_EXPR:
2911 case VEC_EXTRACT_EVEN_EXPR:
2912 case VEC_EXTRACT_ODD_EXPR:
2913 case VEC_INTERLEAVE_HIGH_EXPR:
2914 case VEC_INTERLEAVE_LOW_EXPR:
2916 return 1;
2918 /* A few special cases of expensive operations. This is useful
2919 to avoid inlining functions having too many of these. */
2920 case TRUNC_DIV_EXPR:
2921 case CEIL_DIV_EXPR:
2922 case FLOOR_DIV_EXPR:
2923 case ROUND_DIV_EXPR:
2924 case EXACT_DIV_EXPR:
2925 case TRUNC_MOD_EXPR:
2926 case CEIL_MOD_EXPR:
2927 case FLOOR_MOD_EXPR:
2928 case ROUND_MOD_EXPR:
2929 case RDIV_EXPR:
2930 if (TREE_CODE (op2) != INTEGER_CST)
2931 return weights->div_mod_cost;
2932 return 1;
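/* E.g. x / y with a non-constant divisor is charged
   weights->div_mod_cost (10 in eni_time_weights, see init_inline_once
   below), while x / 8 counts as an ordinary unit-cost operation. */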
2934 default:
2935 /* We expect a copy assignment with no operator. */
2936 gcc_assert (get_gimple_rhs_class (code) == GIMPLE_SINGLE_RHS);
2937 return 0;
2942 /* Estimate number of instructions that will be created by expanding
2943 the statements in the statement sequence STMTS.
2944 WEIGHTS contains weights attributed to various constructs. */
2946 static
2947 int estimate_num_insns_seq (gimple_seq stmts, eni_weights *weights)
2949 int cost;
2950 gimple_stmt_iterator gsi;
2952 cost = 0;
2953 for (gsi = gsi_start (stmts); !gsi_end_p (gsi); gsi_next (&gsi))
2954 cost += estimate_num_insns (gsi_stmt (gsi), weights);
2956 return cost;
2960 /* Estimate number of instructions that will be created by expanding STMT.
2961 WEIGHTS contains weights attributed to various constructs. */
2963 int
2964 estimate_num_insns (gimple stmt, eni_weights *weights)
2966 unsigned cost, i;
2967 enum gimple_code code = gimple_code (stmt);
2968 tree lhs;
2969 tree rhs;
2971 switch (code)
2973 case GIMPLE_ASSIGN:
2974 /* Try to estimate the cost of assignments. We have two cases to
2975 deal with:
2976 1) Simple assignments to registers;
2977 2) Stores to things that must live in memory. This includes
2978 "normal" stores to scalars, but also assignments of large
2979 structures, or constructors of big arrays;
2981 Let us look at these two cases, assuming we have "a = b + C":
2982 <GIMPLE_ASSIGN <var_decl "a">
2983 <plus_expr <var_decl "b"> <constant C>>
2984 If "a" is a GIMPLE register, the assignment to it is free on almost
2985 any target, because "a" usually ends up in a real register. Hence
2986 the only cost of this expression comes from the PLUS_EXPR, and we
2987 can ignore the GIMPLE_ASSIGN.
2988 If "a" is not a GIMPLE register, the assignment to "a" will most
2989 likely be a real store, so the cost of the GIMPLE_ASSIGN is the cost
2990 of moving something into "a", which we compute using the function
2991 estimate_move_cost. */
2992 lhs = gimple_assign_lhs (stmt);
2993 rhs = gimple_assign_rhs1 (stmt);
2995 /* EH magic stuff is most probably going to be optimized out.
2996 We rarely really need to save EH info for unwinding
2997 nested exceptions. */
2998 if (TREE_CODE (lhs) == FILTER_EXPR
2999 || TREE_CODE (lhs) == EXC_PTR_EXPR
3000 || TREE_CODE (rhs) == FILTER_EXPR
3001 || TREE_CODE (rhs) == EXC_PTR_EXPR)
3002 return 0;
3003 if (is_gimple_reg (lhs))
3004 cost = 0;
3005 else
3006 cost = estimate_move_cost (TREE_TYPE (lhs));
3008 if (!is_gimple_reg (rhs) && !is_gimple_min_invariant (rhs))
3009 cost += estimate_move_cost (TREE_TYPE (rhs));
3011 cost += estimate_operator_cost (gimple_assign_rhs_code (stmt), weights,
3012 gimple_assign_rhs1 (stmt),
3013 get_gimple_rhs_class (gimple_assign_rhs_code (stmt))
3014 == GIMPLE_BINARY_RHS
3015 ? gimple_assign_rhs2 (stmt) : NULL);
3016 break;
3018 case GIMPLE_COND:
3019 cost = 1 + estimate_operator_cost (gimple_cond_code (stmt), weights,
3020 gimple_op (stmt, 0),
3021 gimple_op (stmt, 1));
3022 break;
3024 case GIMPLE_SWITCH:
3025 /* Take into account cost of the switch + guess 2 conditional jumps for
3026 each case label.
3028 TODO: once the switch expansion logic is sufficiently separated, we can
3029 do a better job of estimating the cost of the switch. */
3030 if (weights->time_based)
3031 cost = floor_log2 (gimple_switch_num_labels (stmt)) * 2;
3032 else
3033 cost = gimple_switch_num_labels (stmt) * 2;
3034 break;
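/* Illustration of the guess above: a switch with 16 labels is
   charged floor_log2 (16) * 2 = 8 when estimating time (roughly a
   balanced decision tree) and 16 * 2 = 32 when estimating size. */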
3036 case GIMPLE_CALL:
3038 tree decl = gimple_call_fndecl (stmt);
3039 tree addr = gimple_call_fn (stmt);
3040 tree funtype = TREE_TYPE (addr);
3042 if (POINTER_TYPE_P (funtype))
3043 funtype = TREE_TYPE (funtype);
3045 if (decl && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_MD)
3046 cost = weights->target_builtin_call_cost;
3047 else
3048 cost = weights->call_cost;
3050 if (decl && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
3051 switch (DECL_FUNCTION_CODE (decl))
3053 case BUILT_IN_CONSTANT_P:
3054 return 0;
3055 case BUILT_IN_EXPECT:
3056 return 0;
3058 /* Prefetch instruction is not expensive. */
3059 case BUILT_IN_PREFETCH:
3060 cost = weights->target_builtin_call_cost;
3061 break;
3063 default:
3064 break;
3067 if (decl)
3068 funtype = TREE_TYPE (decl);
3070 if (!VOID_TYPE_P (TREE_TYPE (funtype)))
3071 cost += estimate_move_cost (TREE_TYPE (funtype));
3072 /* Our cost must be kept in sync with
3073 cgraph_estimate_size_after_inlining, which uses the function
3074 declaration to figure out the arguments. */
3075 if (decl && DECL_ARGUMENTS (decl))
3077 tree arg;
3078 for (arg = DECL_ARGUMENTS (decl); arg; arg = TREE_CHAIN (arg))
3079 if (!VOID_TYPE_P (TREE_TYPE (arg)))
3080 cost += estimate_move_cost (TREE_TYPE (arg));
3082 else if (funtype && prototype_p (funtype))
3084 tree t;
3085 for (t = TYPE_ARG_TYPES (funtype); t && t != void_list_node;
3086 t = TREE_CHAIN (t))
3087 if (!VOID_TYPE_P (TREE_VALUE (t)))
3088 cost += estimate_move_cost (TREE_VALUE (t));
3090 else
3092 for (i = 0; i < gimple_call_num_args (stmt); i++)
3094 tree arg = gimple_call_arg (stmt, i);
3095 if (!VOID_TYPE_P (TREE_TYPE (arg)))
3096 cost += estimate_move_cost (TREE_TYPE (arg));
3100 break;
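/* An illustrative tally (assuming estimate_move_cost of an int is 1
   on the target): for a direct call z = f (a, b) with int arguments
   and result, the time-based estimate is 10 (call_cost) + 1 (return
   value) + 1 + 1 (arguments) = 13, and the size-based estimate is
   1 + 1 + 1 + 1 = 4. */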
3103 case GIMPLE_GOTO:
3104 case GIMPLE_LABEL:
3105 case GIMPLE_NOP:
3106 case GIMPLE_PHI:
3107 case GIMPLE_RETURN:
3108 case GIMPLE_PREDICT:
3109 return 0;
3111 case GIMPLE_ASM:
3112 case GIMPLE_RESX:
3113 return 1;
3115 case GIMPLE_BIND:
3116 return estimate_num_insns_seq (gimple_bind_body (stmt), weights);
3118 case GIMPLE_EH_FILTER:
3119 return estimate_num_insns_seq (gimple_eh_filter_failure (stmt), weights);
3121 case GIMPLE_CATCH:
3122 return estimate_num_insns_seq (gimple_catch_handler (stmt), weights);
3124 case GIMPLE_TRY:
3125 return (estimate_num_insns_seq (gimple_try_eval (stmt), weights)
3126 + estimate_num_insns_seq (gimple_try_cleanup (stmt), weights));
3128 /* OpenMP directives are generally very expensive. */
3130 case GIMPLE_OMP_RETURN:
3131 case GIMPLE_OMP_SECTIONS_SWITCH:
3132 case GIMPLE_OMP_ATOMIC_STORE:
3133 case GIMPLE_OMP_CONTINUE:
3134 /* ...except these, which are cheap. */
3135 return 0;
3137 case GIMPLE_OMP_ATOMIC_LOAD:
3138 return weights->omp_cost;
3140 case GIMPLE_OMP_FOR:
3141 return (weights->omp_cost
3142 + estimate_num_insns_seq (gimple_omp_body (stmt), weights)
3143 + estimate_num_insns_seq (gimple_omp_for_pre_body (stmt), weights));
3145 case GIMPLE_OMP_PARALLEL:
3146 case GIMPLE_OMP_TASK:
3147 case GIMPLE_OMP_CRITICAL:
3148 case GIMPLE_OMP_MASTER:
3149 case GIMPLE_OMP_ORDERED:
3150 case GIMPLE_OMP_SECTION:
3151 case GIMPLE_OMP_SECTIONS:
3152 case GIMPLE_OMP_SINGLE:
3153 return (weights->omp_cost
3154 + estimate_num_insns_seq (gimple_omp_body (stmt), weights));
3156 default:
3157 gcc_unreachable ();
3160 return cost;
3163 /* Estimate number of instructions that will be created by expanding
3164 function FNDECL. WEIGHTS contains weights attributed to various
3165 constructs. */
3167 int
3168 estimate_num_insns_fn (tree fndecl, eni_weights *weights)
3170 struct function *my_function = DECL_STRUCT_FUNCTION (fndecl);
3171 gimple_stmt_iterator bsi;
3172 basic_block bb;
3173 int n = 0;
3175 gcc_assert (my_function && my_function->cfg);
3176 FOR_EACH_BB_FN (bb, my_function)
3178 for (bsi = gsi_start_bb (bb); !gsi_end_p (bsi); gsi_next (&bsi))
3179 n += estimate_num_insns (gsi_stmt (bsi), weights);
3182 return n;
3186 /* Initializes weights used by estimate_num_insns. */
3188 void
3189 init_inline_once (void)
3191 eni_size_weights.call_cost = 1;
3192 eni_size_weights.target_builtin_call_cost = 1;
3193 eni_size_weights.div_mod_cost = 1;
3194 eni_size_weights.omp_cost = 40;
3195 eni_size_weights.time_based = false;
3197 /* Estimating time for call is difficult, since we have no idea what the
3198 called function does. In the current uses of eni_time_weights,
3199 underestimating the cost does less harm than overestimating it, so
3200 we choose a rather small value here. */
3201 eni_time_weights.call_cost = 10;
3202 eni_time_weights.target_builtin_call_cost = 10;
3203 eni_time_weights.div_mod_cost = 10;
3204 eni_time_weights.omp_cost = 40;
3205 eni_time_weights.time_based = true;
3208 /* Estimate the number of instructions in a gimple_seq. */
3210 int
3211 count_insns_seq (gimple_seq seq, eni_weights *weights)
3213 gimple_stmt_iterator gsi;
3214 int n = 0;
3215 for (gsi = gsi_start (seq); !gsi_end_p (gsi); gsi_next (&gsi))
3216 n += estimate_num_insns (gsi_stmt (gsi), weights);
3218 return n;
3222 /* Install new lexical TREE_BLOCK underneath 'current_block'. */
3224 static void
3225 prepend_lexical_block (tree current_block, tree new_block)
3227 BLOCK_CHAIN (new_block) = BLOCK_SUBBLOCKS (current_block);
3228 BLOCK_SUBBLOCKS (current_block) = new_block;
3229 BLOCK_SUPERCONTEXT (new_block) = current_block;
3232 /* Fetch the callee declaration from the call graph edge going from NODE and
3233 associated with the call statement STMT. Return NULL_TREE if not found. */
3234 static tree
3235 get_indirect_callee_fndecl (struct cgraph_node *node, gimple stmt)
3237 struct cgraph_edge *cs;
3239 cs = cgraph_edge (node, stmt);
3240 if (cs)
3241 return cs->callee->decl;
3243 return NULL_TREE;
3246 /* If STMT is a GIMPLE_CALL, replace it with its inline expansion. */
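/* In outline, eliding the SSA and EH bookkeeping below: for
   y = f (a) we split the block at the call, initialize a remapped
   copy of f's parameter from a, copy f's CFG between the two block
   halves with return statements redirected to the second half, and
   finally replace the call with an assignment of the return variable
   to y. */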
3248 static bool
3249 expand_call_inline (basic_block bb, gimple stmt, copy_body_data *id)
3251 tree retvar, use_retvar;
3252 tree fn;
3253 struct pointer_map_t *st;
3254 tree return_slot;
3255 tree modify_dest;
3256 location_t saved_location;
3257 struct cgraph_edge *cg_edge;
3258 cgraph_inline_failed_t reason;
3259 basic_block return_block;
3260 edge e;
3261 gimple_stmt_iterator gsi, stmt_gsi;
3262 bool successfully_inlined = FALSE;
3263 bool purge_dead_abnormal_edges;
3264 tree t_step;
3265 tree var;
3267 /* Set input_location here so we get the right instantiation context
3268 if we call instantiate_decl from inlinable_function_p. */
3269 saved_location = input_location;
3270 if (gimple_has_location (stmt))
3271 input_location = gimple_location (stmt);
3273 /* From here on, we're only interested in CALL_EXPRs. */
3274 if (gimple_code (stmt) != GIMPLE_CALL)
3275 goto egress;
3277 /* First, see if we can figure out what function is being called.
3278 If we cannot, then there is no hope of inlining the function. */
3279 fn = gimple_call_fndecl (stmt);
3280 if (!fn)
3282 fn = get_indirect_callee_fndecl (id->dst_node, stmt);
3283 if (!fn)
3284 goto egress;
3287 /* Turn forward declarations into real ones. */
3288 fn = cgraph_node (fn)->decl;
3290 /* If FN is a declaration of a function in a nested scope that was
3291 globally declared inline, we don't set its DECL_INITIAL.
3292 However, we can't blindly follow DECL_ABSTRACT_ORIGIN because the
3293 C++ front-end uses it for cdtors to refer to their internal
3294 declarations, which are not real functions. Fortunately those
3295 don't have trees to be saved, so we can tell by checking their
3296 gimple_body. */
3297 if (!DECL_INITIAL (fn)
3298 && DECL_ABSTRACT_ORIGIN (fn)
3299 && gimple_has_body_p (DECL_ABSTRACT_ORIGIN (fn)))
3300 fn = DECL_ABSTRACT_ORIGIN (fn);
3302 /* Objective-C and Fortran still call tree_rest_of_compilation directly.
3303 Kill this check once this is fixed. */
3304 if (!id->dst_node->analyzed)
3305 goto egress;
3307 cg_edge = cgraph_edge (id->dst_node, stmt);
3309 /* Don't try to inline functions that are not well-suited to
3310 inlining. */
3311 if (!cgraph_inline_p (cg_edge, &reason))
3313 /* If this call was originally indirect, we do not want to emit any
3314 inlining related warnings or sorry messages because there are no
3315 guarantees regarding those. */
3316 if (cg_edge->indirect_call)
3317 goto egress;
3319 if (lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn))
3320 /* Avoid warnings during early inline pass. */
3321 && cgraph_global_info_ready)
3323 sorry ("inlining failed in call to %q+F: %s", fn,
3324 cgraph_inline_failed_string (reason));
3325 sorry ("called from here");
3327 else if (warn_inline && DECL_DECLARED_INLINE_P (fn)
3328 && !DECL_IN_SYSTEM_HEADER (fn)
3329 && reason != CIF_UNSPECIFIED
3330 && !lookup_attribute ("noinline", DECL_ATTRIBUTES (fn))
3331 /* Avoid warnings during early inline pass. */
3332 && cgraph_global_info_ready)
3334 warning (OPT_Winline, "inlining failed in call to %q+F: %s",
3335 fn, cgraph_inline_failed_string (reason));
3336 warning (OPT_Winline, "called from here");
3338 goto egress;
3340 fn = cg_edge->callee->decl;
3342 #ifdef ENABLE_CHECKING
3343 if (cg_edge->callee->decl != id->dst_node->decl)
3344 verify_cgraph_node (cg_edge->callee);
3345 #endif
3347 /* We will be inlining this callee. */
3348 id->eh_region = lookup_stmt_eh_region (stmt);
3350 /* Split the block holding the GIMPLE_CALL. */
3351 e = split_block (bb, stmt);
3352 bb = e->src;
3353 return_block = e->dest;
3354 remove_edge (e);
3356 /* split_block splits after the statement; work around this by
3357 moving the call into the second block manually. Not pretty,
3358 but seems easier than doing the CFG manipulation by hand
3359 when the GIMPLE_CALL is in the last statement of BB. */
3360 stmt_gsi = gsi_last_bb (bb);
3361 gsi_remove (&stmt_gsi, false);
3363 /* If the GIMPLE_CALL was in the last statement of BB, it may have
3364 been the source of abnormal edges. In this case, schedule
3365 the removal of dead abnormal edges. */
3366 gsi = gsi_start_bb (return_block);
3367 if (gsi_end_p (gsi))
3369 gsi_insert_after (&gsi, stmt, GSI_NEW_STMT);
3370 purge_dead_abnormal_edges = true;
3372 else
3374 gsi_insert_before (&gsi, stmt, GSI_NEW_STMT);
3375 purge_dead_abnormal_edges = false;
3378 stmt_gsi = gsi_start_bb (return_block);
3380 /* Build a block containing code to initialize the arguments, the
3381 actual inline expansion of the body, and a label for the return
3382 statements within the function to jump to. The type of the
3383 statement expression is the return type of the function call. */
3384 id->block = make_node (BLOCK);
3385 BLOCK_ABSTRACT_ORIGIN (id->block) = fn;
3386 BLOCK_SOURCE_LOCATION (id->block) = input_location;
3387 prepend_lexical_block (gimple_block (stmt), id->block);
3389 /* Local declarations will be replaced by their equivalents in this
3390 map. */
3391 st = id->decl_map;
3392 id->decl_map = pointer_map_create ();
3394 /* Record the function we are about to inline. */
3395 id->src_fn = fn;
3396 id->src_node = cg_edge->callee;
3397 id->src_cfun = DECL_STRUCT_FUNCTION (fn);
3398 id->gimple_call = stmt;
3400 gcc_assert (!id->src_cfun->after_inlining);
3402 id->entry_bb = bb;
3403 if (lookup_attribute ("cold", DECL_ATTRIBUTES (fn)))
3405 gimple_stmt_iterator si = gsi_last_bb (bb);
3406 gsi_insert_after (&si, gimple_build_predict (PRED_COLD_FUNCTION,
3407 NOT_TAKEN),
3408 GSI_NEW_STMT);
3410 initialize_inlined_parameters (id, stmt, fn, bb);
3412 if (DECL_INITIAL (fn))
3413 prepend_lexical_block (id->block, remap_blocks (DECL_INITIAL (fn), id));
3415 /* Return statements in the function body will be replaced by jumps
3416 to the RET_LABEL. */
3417 gcc_assert (DECL_INITIAL (fn));
3418 gcc_assert (TREE_CODE (DECL_INITIAL (fn)) == BLOCK);
3420 /* Find the LHS to which the result of this call is assigned. */
3421 return_slot = NULL;
3422 if (gimple_call_lhs (stmt))
3424 modify_dest = gimple_call_lhs (stmt);
3426 /* The function which we are inlining might not return a value,
3427 in which case we should issue a warning that the function
3428 does not return a value. In that case the optimizers will
3429 see that the variable to which the value is assigned was not
3430 initialized. We do not want to issue a warning about that
3431 uninitialized variable. */
3432 if (DECL_P (modify_dest))
3433 TREE_NO_WARNING (modify_dest) = 1;
3435 if (gimple_call_return_slot_opt_p (stmt))
3437 return_slot = modify_dest;
3438 modify_dest = NULL;
3441 else
3442 modify_dest = NULL;
3444 /* If we are inlining a call to the C++ operator new, we don't want
3445 to use type based alias analysis on the return value. Otherwise
3446 we may get confused if the compiler sees that the inlined new
3447 function returns a pointer which was just deleted. See bug
3448 33407. */
3449 if (DECL_IS_OPERATOR_NEW (fn))
3451 return_slot = NULL;
3452 modify_dest = NULL;
3455 /* Declare the return variable for the function. */
3456 retvar = declare_return_variable (id, return_slot, modify_dest, &use_retvar);
3458 /* Add local vars in this inlined callee to caller. */
3459 t_step = id->src_cfun->local_decls;
3460 for (; t_step; t_step = TREE_CHAIN (t_step))
3462 var = TREE_VALUE (t_step);
3463 if (TREE_STATIC (var) && !TREE_ASM_WRITTEN (var))
3465 if (var_ann (var) && add_referenced_var (var))
3466 cfun->local_decls = tree_cons (NULL_TREE, var,
3467 cfun->local_decls);
3469 else if (!can_be_nonlocal (var, id))
3470 cfun->local_decls = tree_cons (NULL_TREE, remap_decl (var, id),
3471 cfun->local_decls);
3474 /* This is it. Duplicate the callee body. Assume callee is
3475 pre-gimplified. Note that we must not alter the caller
3476 function in any way before this point, as this CALL_EXPR may be
3477 a self-referential call; if we're calling ourselves, we need to
3478 duplicate our body before altering anything. */
3479 copy_body (id, bb->count, bb->frequency, bb, return_block);
3481 /* Reset the escaped and callused solutions. */
3482 if (cfun->gimple_df)
3484 pt_solution_reset (&cfun->gimple_df->escaped);
3485 pt_solution_reset (&cfun->gimple_df->callused);
3488 /* Clean up. */
3489 pointer_map_destroy (id->decl_map);
3490 id->decl_map = st;
3492 /* Unlink the call's virtual operands before replacing it. */
3493 unlink_stmt_vdef (stmt);
3495 /* If the inlined function returns a result that we care about,
3496 substitute the GIMPLE_CALL with an assignment of the return
3497 variable to the LHS of the call. That is, if STMT was
3498 'a = foo (...)', substitute the call with 'a = USE_RETVAR'. */
3499 if (use_retvar && gimple_call_lhs (stmt))
3501 gimple old_stmt = stmt;
3502 stmt = gimple_build_assign (gimple_call_lhs (stmt), use_retvar);
3503 gsi_replace (&stmt_gsi, stmt, false);
3504 if (gimple_in_ssa_p (cfun))
3505 mark_symbols_for_renaming (stmt);
3506 maybe_clean_or_replace_eh_stmt (old_stmt, stmt);
3508 else
3510 /* Handle the case of inlining a function with no return
3511 statement, which causes the return value to become undefined. */
3512 if (gimple_call_lhs (stmt)
3513 && TREE_CODE (gimple_call_lhs (stmt)) == SSA_NAME)
3515 tree name = gimple_call_lhs (stmt);
3516 tree var = SSA_NAME_VAR (name);
3517 tree def = gimple_default_def (cfun, var);
3519 if (def)
3521 /* If the variable is used undefined, make this name
3522 undefined via a move. */
3523 stmt = gimple_build_assign (gimple_call_lhs (stmt), def);
3524 gsi_replace (&stmt_gsi, stmt, true);
3526 else
3528 /* Otherwise make this variable undefined. */
3529 gsi_remove (&stmt_gsi, true);
3530 set_default_def (var, name);
3531 SSA_NAME_DEF_STMT (name) = gimple_build_nop ();
3534 else
3535 gsi_remove (&stmt_gsi, true);
3538 if (purge_dead_abnormal_edges)
3539 gimple_purge_dead_abnormal_call_edges (return_block);
3541 /* If the value of the new expression is ignored, that's OK. We
3542 don't warn about this for CALL_EXPRs, so we shouldn't warn about
3543 the equivalent inlined version either. */
3544 if (is_gimple_assign (stmt))
3546 gcc_assert (gimple_assign_single_p (stmt)
3547 || CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt)));
3548 TREE_USED (gimple_assign_rhs1 (stmt)) = 1;
3551 /* Output the inlining info for this abstract function, since it has been
3552 inlined. If we don't do this now, we can lose the information about the
3553 variables in the function when the blocks get blown away as soon as we
3554 remove the cgraph node. */
3555 (*debug_hooks->outlining_inline_function) (cg_edge->callee->decl);
3557 /* Update callgraph if needed. */
3558 cgraph_remove_node (cg_edge->callee);
3560 id->block = NULL_TREE;
3561 successfully_inlined = TRUE;
3563 egress:
3564 input_location = saved_location;
3565 return successfully_inlined;
3568 /* Expand call statements reachable from STMT_P.
3569 We can only have CALL_EXPRs as the "toplevel" tree code or nested
3570 in a MODIFY_EXPR. See tree-gimple.c:get_call_expr_in(). Unfortunately
3571 we cannot use that function here because we need a pointer
3572 to the CALL_EXPR, not the tree itself. */
3574 static bool
3575 gimple_expand_calls_inline (basic_block bb, copy_body_data *id)
3577 gimple_stmt_iterator gsi;
3579 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
3581 gimple stmt = gsi_stmt (gsi);
3583 if (is_gimple_call (stmt)
3584 && expand_call_inline (bb, stmt, id))
3585 return true;
3588 return false;
3592 /* Walk all basic blocks created after FIRST and try to fold every statement
3593 in the STATEMENTS pointer set. */
3595 static void
3596 fold_marked_statements (int first, struct pointer_set_t *statements)
3598 for (; first < n_basic_blocks; first++)
3599 if (BASIC_BLOCK (first))
3601 gimple_stmt_iterator gsi;
3603 for (gsi = gsi_start_bb (BASIC_BLOCK (first));
3604 !gsi_end_p (gsi);
3605 gsi_next (&gsi))
3606 if (pointer_set_contains (statements, gsi_stmt (gsi)))
3608 gimple old_stmt = gsi_stmt (gsi);
3609 tree old_decl = is_gimple_call (old_stmt) ? gimple_call_fndecl (old_stmt) : 0;
3611 if (fold_stmt (&gsi))
3613 /* Re-read the statement from GSI as fold_stmt() may
3614 have changed it. */
3615 gimple new_stmt = gsi_stmt (gsi);
3616 update_stmt (new_stmt);
3618 if (is_gimple_call (old_stmt)
3619 || is_gimple_call (new_stmt))
3620 cgraph_update_edges_for_call_stmt (old_stmt, old_decl, new_stmt);
3622 if (maybe_clean_or_replace_eh_stmt (old_stmt, new_stmt))
3623 gimple_purge_dead_eh_edges (BASIC_BLOCK (first));
3629 /* Return true if BB has at least one abnormal outgoing edge. */
3631 static inline bool
3632 has_abnormal_outgoing_edge_p (basic_block bb)
3634 edge e;
3635 edge_iterator ei;
3637 FOR_EACH_EDGE (e, ei, bb->succs)
3638 if (e->flags & EDGE_ABNORMAL)
3639 return true;
3641 return false;
3644 /* Expand calls to inline functions in the body of FN. */
3646 unsigned int
3647 optimize_inline_calls (tree fn)
3649 copy_body_data id;
3650 tree prev_fn;
3651 basic_block bb;
3652 int last = n_basic_blocks;
3653 struct gimplify_ctx gctx;
3655 /* There is no point in performing inlining if errors have already
3656 occurred -- and we might crash if we try to inline invalid
3657 code. */
3658 if (errorcount || sorrycount)
3659 return 0;
3661 /* Clear out ID. */
3662 memset (&id, 0, sizeof (id));
3664 id.src_node = id.dst_node = cgraph_node (fn);
3665 id.dst_fn = fn;
3666 /* Or any functions that aren't finished yet. */
3667 prev_fn = NULL_TREE;
3668 if (current_function_decl)
3670 id.dst_fn = current_function_decl;
3671 prev_fn = current_function_decl;
3674 id.copy_decl = copy_decl_maybe_to_var;
3675 id.transform_call_graph_edges = CB_CGE_DUPLICATE;
3676 id.transform_new_cfg = false;
3677 id.transform_return_to_modify = true;
3678 id.transform_lang_insert_block = NULL;
3679 id.statements_to_fold = pointer_set_create ();
3681 push_gimplify_context (&gctx);
3683 /* We make no attempts to keep dominance info up-to-date. */
3684 free_dominance_info (CDI_DOMINATORS);
3685 free_dominance_info (CDI_POST_DOMINATORS);
3687 /* Register specific gimple functions. */
3688 gimple_register_cfg_hooks ();
3690 /* Reach the trees by walking over the CFG, and note the
3691 enclosing basic-blocks in the call edges. */
3692 /* We walk the blocks going forward, because inlined function bodies
3693 will split the current basic block, and the new blocks will
3694 follow it; we'll trudge through them, processing their CALL_EXPRs
3695 along the way. */
3696 FOR_EACH_BB (bb)
3697 gimple_expand_calls_inline (bb, &id);
3699 pop_gimplify_context (NULL);
3701 #ifdef ENABLE_CHECKING
3703 struct cgraph_edge *e;
3705 verify_cgraph_node (id.dst_node);
3707 /* Double check that we inlined everything we are supposed to inline. */
3708 for (e = id.dst_node->callees; e; e = e->next_callee)
3709 gcc_assert (e->inline_failed);
3711 #endif
3713 /* Fold the statements before compacting/renumbering the basic blocks. */
3714 fold_marked_statements (last, id.statements_to_fold);
3715 pointer_set_destroy (id.statements_to_fold);
3717 /* Renumber the (code) basic_blocks consecutively. */
3718 compact_blocks ();
3719 /* Renumber the lexical scoping (non-code) blocks consecutively. */
3720 number_blocks (fn);
3722 fold_cond_expr_cond ();
3723 delete_unreachable_blocks_update_callgraph (&id);
3724 #ifdef ENABLE_CHECKING
3725 verify_cgraph_node (id.dst_node);
3726 #endif
3728 /* It would be nice to check SSA/CFG/statement consistency here, but it is
3729 not possible yet - the IPA passes might make various functions not
3730 throw, and they don't bother to proactively update local EH info. This is
3731 done later in the fixup_cfg pass, which also executes the verification. */
3732 return (TODO_update_ssa
3733 | TODO_cleanup_cfg
3734 | (gimple_in_ssa_p (cfun) ? TODO_remove_unused_locals : 0)
3735 | (profile_status != PROFILE_ABSENT ? TODO_rebuild_frequencies : 0));
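
/* Illustrative sketch (the wrapper below is hypothetical): a tree pass
   would typically consume the TODO flags returned above through its
   execute hook,

     static unsigned int
     execute_optimize_inline (void)
     {
       return optimize_inline_calls (current_function_decl);
     }

   letting the pass manager perform the requested SSA update and CFG
   cleanup after the inliner has run.  */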
3738 /* Passed to walk_tree. Copies the node pointed to, if appropriate. */
3740 tree
3741 copy_tree_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
3743 enum tree_code code = TREE_CODE (*tp);
3744 enum tree_code_class cl = TREE_CODE_CLASS (code);
3746 /* We make copies of most nodes. */
3747 if (IS_EXPR_CODE_CLASS (cl)
3748 || code == TREE_LIST
3749 || code == TREE_VEC
3750 || code == TYPE_DECL
3751 || code == OMP_CLAUSE)
3753 /* Because the chain gets clobbered when we make a copy, we save it
3754 here. */
3755 tree chain = NULL_TREE, new_tree;
3757 chain = TREE_CHAIN (*tp);
3759 /* Copy the node. */
3760 new_tree = copy_node (*tp);
3762 /* Propagate mudflap marked-ness. */
3763 if (flag_mudflap && mf_marked_p (*tp))
3764 mf_mark (new_tree);
3766 *tp = new_tree;
3768 /* Now, restore the chain, if appropriate. That will cause
3769 walk_tree to walk into the chain as well. */
3770 if (code == PARM_DECL
3771 || code == TREE_LIST
3772 || code == OMP_CLAUSE)
3773 TREE_CHAIN (*tp) = chain;
3775 /* For now, we don't update BLOCKs when we make copies. So, we
3776 have to nullify all BIND_EXPRs. */
3777 if (TREE_CODE (*tp) == BIND_EXPR)
3778 BIND_EXPR_BLOCK (*tp) = NULL_TREE;
3780 else if (code == CONSTRUCTOR)
3782 /* CONSTRUCTOR nodes need special handling because
3783 we need to duplicate the vector of elements. */
3784 tree new_tree;
3786 new_tree = copy_node (*tp);
3788 /* Propagate mudflap marked-ness. */
3789 if (flag_mudflap && mf_marked_p (*tp))
3790 mf_mark (new_tree);
3792 CONSTRUCTOR_ELTS (new_tree) = VEC_copy (constructor_elt, gc,
3793 CONSTRUCTOR_ELTS (*tp));
3794 *tp = new_tree;
3796 else if (TREE_CODE_CLASS (code) == tcc_type)
3797 *walk_subtrees = 0;
3798 else if (TREE_CODE_CLASS (code) == tcc_declaration)
3799 *walk_subtrees = 0;
3800 else if (TREE_CODE_CLASS (code) == tcc_constant)
3801 *walk_subtrees = 0;
3802 else
3803 gcc_assert (code != STATEMENT_LIST);
3804 return NULL_TREE;
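
/* Illustrative sketch, assuming a GENERIC expression EXPR: copy_tree_r
   is designed to be driven by walk_tree, so a minimal deep copy reads

     tree copy = expr;
     walk_tree (&copy, copy_tree_r, NULL, NULL);

   walk_tree invokes copy_tree_r on each subtree first, replacing the
   node with a fresh copy, and then recurses into the copy's operands,
   duplicating the whole expression top-down.  */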
3807 /* The SAVE_EXPR pointed to by TP is being copied. If ST contains
3808 information indicating to what new SAVE_EXPR this one should be mapped,
3809 use that one. Otherwise, create a new node and enter it in ST. FN is
3810 the function into which the copy will be placed. */
3812 static void
3813 remap_save_expr (tree *tp, void *st_, int *walk_subtrees)
3815 struct pointer_map_t *st = (struct pointer_map_t *) st_;
3816 tree *n;
3817 tree t;
3819 /* See if we already encountered this SAVE_EXPR. */
3820 n = (tree *) pointer_map_contains (st, *tp);
3822 /* If we didn't already remap this SAVE_EXPR, do so now. */
3823 if (!n)
3825 t = copy_node (*tp);
3827 /* Remember this SAVE_EXPR. */
3828 *pointer_map_insert (st, *tp) = t;
3829 /* Make sure we don't remap an already-remapped SAVE_EXPR. */
3830 *pointer_map_insert (st, t) = t;
3832 else
3834 /* We've already walked into this SAVE_EXPR; don't do it again. */
3835 *walk_subtrees = 0;
3836 t = *n;
3839 /* Replace this SAVE_EXPR with the copy. */
3840 *tp = t;
3843 /* Called via walk_tree. If *TP points to a LABEL_EXPR for a local label,
3844 copies the declaration and enters it in the decl map in DATA (which is
3845 really a `copy_body_data *'). */
3847 static tree
3848 mark_local_for_remap_r (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED,
3849 void *data)
3851 copy_body_data *id = (copy_body_data *) data;
3853 /* Don't walk into types. */
3854 if (TYPE_P (*tp))
3855 *walk_subtrees = 0;
3857 else if (TREE_CODE (*tp) == LABEL_EXPR)
3859 tree decl = TREE_OPERAND (*tp, 0);
3861 /* Copy the decl and remember the copy. */
3862 insert_decl_map (id, decl, id->copy_decl (decl, id));
3865 return NULL_TREE;
3868 /* Perform any modifications to EXPR required when it is unsaved. Does
3869 not recurse into EXPR's subtrees. */
3871 static void
3872 unsave_expr_1 (tree expr)
3874 switch (TREE_CODE (expr))
3876 case TARGET_EXPR:
3877 /* Don't mess with a TARGET_EXPR that hasn't been expanded.
3878 It's OK for this to happen if it was part of a subtree that
3879 isn't immediately expanded, such as operand 2 of another
3880 TARGET_EXPR. */
3881 if (TREE_OPERAND (expr, 1))
3882 break;
3884 TREE_OPERAND (expr, 1) = TREE_OPERAND (expr, 3);
3885 TREE_OPERAND (expr, 3) = NULL_TREE;
3886 break;
3888 default:
3889 break;
3893 /* Called via walk_tree when an expression is unsaved. Using the
3894 decl map pointed to by ST (which is really a `pointer_map_t *'),
3895 remaps all local declarations to appropriate replacements. */
3897 static tree
3898 unsave_r (tree *tp, int *walk_subtrees, void *data)
3900 copy_body_data *id = (copy_body_data *) data;
3901 struct pointer_map_t *st = id->decl_map;
3902 tree *n;
3904 /* Only a local declaration (variable or label). */
3905 if ((TREE_CODE (*tp) == VAR_DECL && !TREE_STATIC (*tp))
3906 || TREE_CODE (*tp) == LABEL_DECL)
3908 /* Lookup the declaration. */
3909 n = (tree *) pointer_map_contains (st, *tp);
3911 /* If it's there, remap it. */
3912 if (n)
3913 *tp = *n;
3916 else if (TREE_CODE (*tp) == STATEMENT_LIST)
3917 gcc_unreachable ();
3918 else if (TREE_CODE (*tp) == BIND_EXPR)
3919 copy_bind_expr (tp, walk_subtrees, id);
3920 else if (TREE_CODE (*tp) == SAVE_EXPR)
3921 remap_save_expr (tp, st, walk_subtrees);
3922 else
3924 copy_tree_r (tp, walk_subtrees, NULL);
3926 /* Do whatever unsaving is required. */
3927 unsave_expr_1 (*tp);
3930 /* Keep iterating. */
3931 return NULL_TREE;
3934 /* Copies everything in EXPR and replaces variables, labels
3935 and SAVE_EXPRs local to EXPR. */
3937 tree
3938 unsave_expr_now (tree expr)
3940 copy_body_data id;
3942 /* There's nothing to do for NULL_TREE. */
3943 if (expr == 0)
3944 return expr;
3946 /* Set up ID. */
3947 memset (&id, 0, sizeof (id));
3948 id.src_fn = current_function_decl;
3949 id.dst_fn = current_function_decl;
3950 id.decl_map = pointer_map_create ();
3952 id.copy_decl = copy_decl_no_change;
3953 id.transform_call_graph_edges = CB_CGE_DUPLICATE;
3954 id.transform_new_cfg = false;
3955 id.transform_return_to_modify = false;
3956 id.transform_lang_insert_block = NULL;
3958 /* Walk the tree once to find local labels. */
3959 walk_tree_without_duplicates (&expr, mark_local_for_remap_r, &id);
3961 /* Walk the tree again, copying, remapping, and unsaving. */
3962 walk_tree (&expr, unsave_r, &id, NULL);
3964 /* Clean up. */
3965 pointer_map_destroy (id.decl_map);
3967 return expr;
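
/* Illustrative usage, a sketch rather than code from this file:

     tree dup = unsave_expr_now (expr);

   DUP is a copy of EXPR in which SAVE_EXPRs, local labels and local
   variables have been remapped to fresh nodes, so the copy can be
   expanded independently of the original.  */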
3970 /* Called via walk_gimple_seq. If *GSIP points to a GIMPLE_LABEL for a local
3971 label, copies the declaration and enters it in the decl map in DATA (which
3972 is really a `copy_body_data *'). */
3974 static tree
3975 mark_local_labels_stmt (gimple_stmt_iterator *gsip,
3976 bool *handled_ops_p ATTRIBUTE_UNUSED,
3977 struct walk_stmt_info *wi)
3979 copy_body_data *id = (copy_body_data *) wi->info;
3980 gimple stmt = gsi_stmt (*gsip);
3982 if (gimple_code (stmt) == GIMPLE_LABEL)
3984 tree decl = gimple_label_label (stmt);
3986 /* Copy the decl and remember the copy. */
3987 insert_decl_map (id, decl, id->copy_decl (decl, id));
3990 return NULL_TREE;
3994 /* Called via walk_gimple_seq by copy_gimple_seq_and_replace_locals.
3995 Using the decl map stored in the `copy_body_data' passed through
3996 WI->info, remaps all local declarations to appropriate replacements
3997 in gimple operands. */
3999 static tree
4000 replace_locals_op (tree *tp, int *walk_subtrees, void *data)
4002 struct walk_stmt_info *wi = (struct walk_stmt_info*) data;
4003 copy_body_data *id = (copy_body_data *) wi->info;
4004 struct pointer_map_t *st = id->decl_map;
4005 tree *n;
4006 tree expr = *tp;
4008 /* Only a local declaration (variable or label). */
4009 if ((TREE_CODE (expr) == VAR_DECL
4010 && !TREE_STATIC (expr))
4011 || TREE_CODE (expr) == LABEL_DECL)
4013 /* Lookup the declaration. */
4014 n = (tree *) pointer_map_contains (st, expr);
4016 /* If it's there, remap it. */
4017 if (n)
4018 *tp = *n;
4019 *walk_subtrees = 0;
4021 else if (TREE_CODE (expr) == STATEMENT_LIST
4022 || TREE_CODE (expr) == BIND_EXPR
4023 || TREE_CODE (expr) == SAVE_EXPR)
4024 gcc_unreachable ();
4025 else if (TREE_CODE (expr) == TARGET_EXPR)
4027 /* Don't mess with a TARGET_EXPR that hasn't been expanded.
4028 It's OK for this to happen if it was part of a subtree that
4029 isn't immediately expanded, such as operand 2 of another
4030 TARGET_EXPR. */
4031 if (!TREE_OPERAND (expr, 1))
4033 TREE_OPERAND (expr, 1) = TREE_OPERAND (expr, 3);
4034 TREE_OPERAND (expr, 3) = NULL_TREE;
4038 /* Keep iterating. */
4039 return NULL_TREE;
4043 /* Called via walk_gimple_seq by copy_gimple_seq_and_replace_locals.
4044 Using the decl map stored in the `copy_body_data' passed through
4045 WI->info, remaps all local declarations to appropriate replacements
4046 in gimple statements. */
4048 static tree
4049 replace_locals_stmt (gimple_stmt_iterator *gsip,
4050 bool *handled_ops_p ATTRIBUTE_UNUSED,
4051 struct walk_stmt_info *wi)
4053 copy_body_data *id = (copy_body_data *) wi->info;
4054 gimple stmt = gsi_stmt (*gsip);
4056 if (gimple_code (stmt) == GIMPLE_BIND)
4058 tree block = gimple_bind_block (stmt);
4060 if (block)
4062 remap_block (&block, id);
4063 gimple_bind_set_block (stmt, block);
4066 /* This will remap a lot of the same decls again, but this should be
4067 harmless. */
4068 if (gimple_bind_vars (stmt))
4069 gimple_bind_set_vars (stmt, remap_decls (gimple_bind_vars (stmt), NULL, id));
4072 /* Keep iterating. */
4073 return NULL_TREE;
4077 /* Copies everything in SEQ and replaces variables and labels local to
4078 current_function_decl. */
4080 gimple_seq
4081 copy_gimple_seq_and_replace_locals (gimple_seq seq)
4083 copy_body_data id;
4084 struct walk_stmt_info wi;
4085 struct pointer_set_t *visited;
4086 gimple_seq copy;
4088 /* There's nothing to do for an empty sequence. */
4089 if (seq == NULL)
4090 return seq;
4092 /* Set up ID. */
4093 memset (&id, 0, sizeof (id));
4094 id.src_fn = current_function_decl;
4095 id.dst_fn = current_function_decl;
4096 id.decl_map = pointer_map_create ();
4098 id.copy_decl = copy_decl_no_change;
4099 id.transform_call_graph_edges = CB_CGE_DUPLICATE;
4100 id.transform_new_cfg = false;
4101 id.transform_return_to_modify = false;
4102 id.transform_lang_insert_block = NULL;
4104 /* Walk the tree once to find local labels. */
4105 memset (&wi, 0, sizeof (wi));
4106 visited = pointer_set_create ();
4107 wi.info = &id;
4108 wi.pset = visited;
4109 walk_gimple_seq (seq, mark_local_labels_stmt, NULL, &wi);
4110 pointer_set_destroy (visited);
4112 copy = gimple_seq_copy (seq);
4114 /* Walk the copy, remapping decls. */
4115 memset (&wi, 0, sizeof (wi));
4116 wi.info = &id;
4117 walk_gimple_seq (copy, replace_locals_stmt, replace_locals_op, &wi);
4119 /* Clean up. */
4120 pointer_map_destroy (id.decl_map);
4122 return copy;
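
/* Illustrative usage (sketch; the caller is hypothetical):

     gimple_seq dup = copy_gimple_seq_and_replace_locals (seq);

   The statements are duplicated by gimple_seq_copy and every local
   VAR_DECL and LABEL_DECL is remapped to a fresh declaration, so both
   SEQ and DUP can safely be inserted into the current function.  */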
4126 /* Allow someone to determine if SEARCH is a child of TOP from gdb. */
4128 static tree
4129 debug_find_tree_1 (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED, void *data)
4131 if (*tp == data)
4132 return (tree) data;
4133 else
4134 return NULL;
4137 bool
4138 debug_find_tree (tree top, tree search)
4140 return walk_tree_without_duplicates (&top, debug_find_tree_1, search) != 0;
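
/* For example, from a debugger one might check whether a suspect node
   is reachable from a function body (values here are hypothetical):

     (gdb) call debug_find_tree (DECL_SAVED_TREE (fndecl), node)
     $1 = true
*/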
4144 /* Declare the variables created by the inliner. Add all the variables in
4145 VARS to BLOCK. */
4147 static void
4148 declare_inline_vars (tree block, tree vars)
4150 tree t;
4151 for (t = vars; t; t = TREE_CHAIN (t))
4153 DECL_SEEN_IN_BIND_EXPR_P (t) = 1;
4154 gcc_assert (!TREE_STATIC (t) && !TREE_ASM_WRITTEN (t));
4155 cfun->local_decls = tree_cons (NULL_TREE, t, cfun->local_decls);
4158 if (block)
4159 BLOCK_VARS (block) = chainon (BLOCK_VARS (block), vars);
4162 /* Finish up the copy COPY of declaration DECL. The declaration
4163 originally was in ID->src_fn, but it will now be in ID->dst_fn;
4164 fix up its flags, debug info and DECL_CONTEXT accordingly. */
4166 static tree
4167 copy_decl_for_dup_finish (copy_body_data *id, tree decl, tree copy)
4169 /* Don't generate debug information for the copy if we wouldn't have
4170 generated it for the original either. */
4171 DECL_ARTIFICIAL (copy) = DECL_ARTIFICIAL (decl);
4172 DECL_IGNORED_P (copy) = DECL_IGNORED_P (decl);
4174 /* Set the DECL_ABSTRACT_ORIGIN so the debugging routines know what
4175 declaration inspired this copy. */
4176 DECL_ABSTRACT_ORIGIN (copy) = DECL_ORIGIN (decl);
4178 /* The new variable/label has no RTL, yet. */
4179 if (CODE_CONTAINS_STRUCT (TREE_CODE (copy), TS_DECL_WRTL)
4180 && !TREE_STATIC (copy) && !DECL_EXTERNAL (copy))
4181 SET_DECL_RTL (copy, NULL_RTX);
4183 /* These args would always appear unused, if not for this. */
4184 TREE_USED (copy) = 1;
4186 /* Set the context for the new declaration. */
4187 if (!DECL_CONTEXT (decl))
4188 /* Globals stay global. */
4190 else if (DECL_CONTEXT (decl) != id->src_fn)
4191 /* Things that weren't in the scope of the function we're inlining
4192 from aren't in the scope we're inlining to, either. */
4194 else if (TREE_STATIC (decl))
4195 /* Function-scoped static variables should stay in the original
4196 function. */
4198 else
4199 /* Ordinary automatic local variables are now in the scope of the
4200 new function. */
4201 DECL_CONTEXT (copy) = id->dst_fn;
4203 return copy;
4206 static tree
4207 copy_decl_to_var (tree decl, copy_body_data *id)
4209 tree copy, type;
4211 gcc_assert (TREE_CODE (decl) == PARM_DECL
4212 || TREE_CODE (decl) == RESULT_DECL);
4214 type = TREE_TYPE (decl);
4216 copy = build_decl (DECL_SOURCE_LOCATION (id->dst_fn),
4217 VAR_DECL, DECL_NAME (decl), type);
4218 TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (decl);
4219 TREE_READONLY (copy) = TREE_READONLY (decl);
4220 TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (decl);
4221 DECL_GIMPLE_REG_P (copy) = DECL_GIMPLE_REG_P (decl);
4223 return copy_decl_for_dup_finish (id, decl, copy);
4226 /* Like copy_decl_to_var, but create a return slot object instead of a
4227 pointer variable for return by invisible reference. */
4229 static tree
4230 copy_result_decl_to_var (tree decl, copy_body_data *id)
4232 tree copy, type;
4234 gcc_assert (TREE_CODE (decl) == PARM_DECL
4235 || TREE_CODE (decl) == RESULT_DECL);
4237 type = TREE_TYPE (decl);
4238 if (DECL_BY_REFERENCE (decl))
4239 type = TREE_TYPE (type);
4241 copy = build_decl (DECL_SOURCE_LOCATION (id->dst_fn),
4242 VAR_DECL, DECL_NAME (decl), type);
4243 TREE_READONLY (copy) = TREE_READONLY (decl);
4244 TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (decl);
4245 if (!DECL_BY_REFERENCE (decl))
4247 TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (decl);
4248 DECL_GIMPLE_REG_P (copy) = DECL_GIMPLE_REG_P (decl);
4251 return copy_decl_for_dup_finish (id, decl, copy);
4254 tree
4255 copy_decl_no_change (tree decl, copy_body_data *id)
4257 tree copy;
4259 copy = copy_node (decl);
4261 /* The COPY is not abstract; it will be generated in DST_FN. */
4262 DECL_ABSTRACT (copy) = 0;
4263 lang_hooks.dup_lang_specific_decl (copy);
4265 /* TREE_ADDRESSABLE isn't used to indicate that a label's address has
4266 been taken; it's for internal bookkeeping in expand_goto_internal. */
4267 if (TREE_CODE (copy) == LABEL_DECL)
4269 TREE_ADDRESSABLE (copy) = 0;
4270 LABEL_DECL_UID (copy) = -1;
4273 return copy_decl_for_dup_finish (id, decl, copy);
4276 static tree
4277 copy_decl_maybe_to_var (tree decl, copy_body_data *id)
4279 if (TREE_CODE (decl) == PARM_DECL || TREE_CODE (decl) == RESULT_DECL)
4280 return copy_decl_to_var (decl, id);
4281 else
4282 return copy_decl_no_change (decl, id);
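
/* Illustrative sketch of the dispatch above, not from the original
   sources: when the body of a callee such as

     int callee (int x) { ... }

   is duplicated, the PARM_DECL `x' goes through copy_decl_to_var and
   becomes an ordinary VAR_DECL in the destination function, while a
   local label or variable is copied unchanged by copy_decl_no_change.  */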
4285 /* Return a copy of the function's argument tree. */
4286 static tree
4287 copy_arguments_for_versioning (tree orig_parm, copy_body_data * id,
4288 bitmap args_to_skip, tree *vars)
4290 tree arg, *parg;
4291 tree new_parm = NULL;
4292 int i = 0;
4294 parg = &new_parm;
4296 for (arg = orig_parm; arg; arg = TREE_CHAIN (arg), i++)
4297 if (!args_to_skip || !bitmap_bit_p (args_to_skip, i))
4299 tree new_tree = remap_decl (arg, id);
4300 lang_hooks.dup_lang_specific_decl (new_tree);
4301 *parg = new_tree;
4302 parg = &TREE_CHAIN (new_tree);
4304 else if (!pointer_map_contains (id->decl_map, arg))
4306 /* Make an equivalent VAR_DECL. If the argument was used
4307 as a temporary variable later in the function, its uses will be
4308 replaced by this local variable. */
4309 tree var = copy_decl_to_var (arg, id);
4310 get_var_ann (var);
4311 add_referenced_var (var);
4312 insert_decl_map (id, arg, var);
4313 /* Declare this new variable. */
4314 TREE_CHAIN (var) = *vars;
4315 *vars = var;
4317 return new_parm;
4320 /* Return a copy of the function's static chain. */
4321 static tree
4322 copy_static_chain (tree static_chain, copy_body_data * id)
4324 tree *chain_copy, *pvar;
4326 chain_copy = &static_chain;
4327 for (pvar = chain_copy; *pvar; pvar = &TREE_CHAIN (*pvar))
4329 tree new_tree = remap_decl (*pvar, id);
4330 lang_hooks.dup_lang_specific_decl (new_tree);
4331 TREE_CHAIN (new_tree) = TREE_CHAIN (*pvar);
4332 *pvar = new_tree;
4334 return static_chain;
4337 /* Return true if the function is allowed to be versioned.
4338 This is a guard for the versioning functionality. */
4340 bool
4341 tree_versionable_function_p (tree fndecl)
4343 return copy_forbidden (DECL_STRUCT_FUNCTION (fndecl), fndecl) == NULL;
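
/* Illustrative sketch (hypothetical caller): an IPA transform would
   normally guard cloning on this predicate, along the lines of

     if (tree_versionable_function_p (old_decl))
       tree_function_versioning (old_decl, new_decl, NULL, false, NULL);

   i.e. passing no replacement map, no clone updating and no arguments
   to skip; see tree_function_versioning below for the parameters.  */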
4346 /* Delete all unreachable basic blocks and update the callgraph.
4347 Doing so is somewhat nontrivial because we need to update all clones and
4348 remove inlined functions that become unreachable. */
4350 static bool
4351 delete_unreachable_blocks_update_callgraph (copy_body_data *id)
4353 bool changed = false;
4354 basic_block b, next_bb;
4356 find_unreachable_blocks ();
4358 /* Delete all unreachable basic blocks. */
4360 for (b = ENTRY_BLOCK_PTR->next_bb; b != EXIT_BLOCK_PTR; b = next_bb)
4362 next_bb = b->next_bb;
4364 if (!(b->flags & BB_REACHABLE))
4366 gimple_stmt_iterator bsi;
4368 for (bsi = gsi_start_bb (b); !gsi_end_p (bsi); gsi_next (&bsi))
4369 if (gimple_code (gsi_stmt (bsi)) == GIMPLE_CALL)
4371 struct cgraph_edge *e;
4372 struct cgraph_node *node;
4374 if ((e = cgraph_edge (id->dst_node, gsi_stmt (bsi))) != NULL)
4376 if (!e->inline_failed)
4377 cgraph_remove_node_and_inline_clones (e->callee);
4378 else
4379 cgraph_remove_edge (e);
4381 if (id->transform_call_graph_edges == CB_CGE_MOVE_CLONES
4382 && id->dst_node->clones)
4383 for (node = id->dst_node->clones; node != id->dst_node;)
4385 if ((e = cgraph_edge (node, gsi_stmt (bsi))) != NULL)
4387 if (!e->inline_failed)
4388 cgraph_remove_node_and_inline_clones (e->callee);
4389 else
4390 cgraph_remove_edge (e);
4393 if (node->clones)
4394 node = node->clones;
4395 else if (node->next_sibling_clone)
4396 node = node->next_sibling_clone;
4397 else
4399 while (node != id->dst_node && !node->next_sibling_clone)
4400 node = node->clone_of;
4401 if (node != id->dst_node)
4402 node = node->next_sibling_clone;
4406 delete_basic_block (b);
4407 changed = true;
4411 if (changed)
4412 tidy_fallthru_edges ();
4413 #ifdef ENABLE_CHECKING
4414 verify_cgraph_node (id->dst_node);
4415 if (id->transform_call_graph_edges == CB_CGE_MOVE_CLONES
4416 && id->dst_node->clones)
4418 struct cgraph_node *node;
4419 for (node = id->dst_node->clones; node != id->dst_node;)
4421 verify_cgraph_node (node);
4423 if (node->clones)
4424 node = node->clones;
4425 else if (node->next_sibling_clone)
4426 node = node->next_sibling_clone;
4427 else
4429 while (node != id->dst_node && !node->next_sibling_clone)
4430 node = node->clone_of;
4431 if (node != id->dst_node)
4432 node = node->next_sibling_clone;
4436 #endif
4437 return changed;
4440 /* Create a copy of a function's tree.
4441 OLD_DECL and NEW_DECL are FUNCTION_DECL tree nodes
4442 of the original function and the new copied function
4443 respectively. In case we want to replace a DECL
4444 tree with another tree while duplicating the function's
4445 body, TREE_MAP represents the mapping between these
4446 trees. If UPDATE_CLONES is set, the call_stmt fields
4447 of edges of clones of the function will be updated. */
4448 void
4449 tree_function_versioning (tree old_decl, tree new_decl,
4450 VEC(ipa_replace_map_p,gc)* tree_map,
4451 bool update_clones, bitmap args_to_skip)
4453 struct cgraph_node *old_version_node;
4454 struct cgraph_node *new_version_node;
4455 copy_body_data id;
4456 tree p;
4457 unsigned i;
4458 struct ipa_replace_map *replace_info;
4459 basic_block old_entry_block;
4460 VEC (gimple, heap) *init_stmts = VEC_alloc (gimple, heap, 10);
4462 tree t_step;
4463 tree old_current_function_decl = current_function_decl;
4464 tree vars = NULL_TREE;
4466 gcc_assert (TREE_CODE (old_decl) == FUNCTION_DECL
4467 && TREE_CODE (new_decl) == FUNCTION_DECL);
4468 DECL_POSSIBLY_INLINED (old_decl) = 1;
4470 old_version_node = cgraph_node (old_decl);
4471 new_version_node = cgraph_node (new_decl);
4473 /* Output the inlining info for this abstract function, since it has been
4474 inlined. If we don't do this now, we can lose the information about the
4475 variables in the function when the blocks get blown away as soon as we
4476 remove the cgraph node. */
4477 (*debug_hooks->outlining_inline_function) (old_decl);
4479 DECL_ARTIFICIAL (new_decl) = 1;
4480 DECL_ABSTRACT_ORIGIN (new_decl) = DECL_ORIGIN (old_decl);
4482 /* Prepare the data structures for the tree copy. */
4483 memset (&id, 0, sizeof (id));
4485 /* Create the set of statements to fold once copying is done. */
4486 id.statements_to_fold = pointer_set_create ();
4488 id.decl_map = pointer_map_create ();
4489 id.src_fn = old_decl;
4490 id.dst_fn = new_decl;
4491 id.src_node = old_version_node;
4492 id.dst_node = new_version_node;
4493 id.src_cfun = DECL_STRUCT_FUNCTION (old_decl);
4495 id.copy_decl = copy_decl_no_change;
4496 id.transform_call_graph_edges
4497 = update_clones ? CB_CGE_MOVE_CLONES : CB_CGE_MOVE;
4498 id.transform_new_cfg = true;
4499 id.transform_return_to_modify = false;
4500 id.transform_lang_insert_block = NULL;
4502 current_function_decl = new_decl;
4503 old_entry_block = ENTRY_BLOCK_PTR_FOR_FUNCTION
4504 (DECL_STRUCT_FUNCTION (old_decl));
4505 initialize_cfun (new_decl, old_decl,
4506 old_entry_block->count,
4507 old_entry_block->frequency);
4508 push_cfun (DECL_STRUCT_FUNCTION (new_decl));
4510 /* Copy the function's static chain. */
4511 p = DECL_STRUCT_FUNCTION (old_decl)->static_chain_decl;
4512 if (p)
4513 DECL_STRUCT_FUNCTION (new_decl)->static_chain_decl =
4514 copy_static_chain (DECL_STRUCT_FUNCTION (old_decl)->static_chain_decl,
4515 &id);
4517 /* If there's a tree_map, prepare for substitution. */
4518 if (tree_map)
4519 for (i = 0; i < VEC_length (ipa_replace_map_p, tree_map); i++)
4521 gimple init;
4522 replace_info = VEC_index (ipa_replace_map_p, tree_map, i);
4523 if (replace_info->replace_p)
4525 tree op = replace_info->new_tree;
4527 STRIP_NOPS (op);
4529 if (TREE_CODE (op) == VIEW_CONVERT_EXPR)
4530 op = TREE_OPERAND (op, 0);
4532 if (TREE_CODE (op) == ADDR_EXPR)
4534 op = TREE_OPERAND (op, 0);
4535 while (handled_component_p (op))
4536 op = TREE_OPERAND (op, 0);
4537 if (TREE_CODE (op) == VAR_DECL)
4538 add_referenced_var (op);
4540 gcc_assert (TREE_CODE (replace_info->old_tree) == PARM_DECL);
4541 init = setup_one_parameter (&id, replace_info->old_tree,
4542 replace_info->new_tree, id.src_fn,
4543 NULL,
4544 &vars);
4545 if (init)
4546 VEC_safe_push (gimple, heap, init_stmts, init);
4549 /* Copy the function's arguments. */
4550 if (DECL_ARGUMENTS (old_decl) != NULL_TREE)
4551 DECL_ARGUMENTS (new_decl) =
4552 copy_arguments_for_versioning (DECL_ARGUMENTS (old_decl), &id,
4553 args_to_skip, &vars);
4555 DECL_INITIAL (new_decl) = remap_blocks (DECL_INITIAL (id.src_fn), &id);
4557 /* Renumber the lexical scoping (non-code) blocks consecutively. */
4558 number_blocks (id.dst_fn);
4560 declare_inline_vars (DECL_INITIAL (new_decl), vars);
4562 if (DECL_STRUCT_FUNCTION (old_decl)->local_decls != NULL_TREE)
4563 /* Add local vars. */
4564 for (t_step = DECL_STRUCT_FUNCTION (old_decl)->local_decls;
4565 t_step; t_step = TREE_CHAIN (t_step))
4567 tree var = TREE_VALUE (t_step);
4568 if (TREE_STATIC (var) && !TREE_ASM_WRITTEN (var))
4569 cfun->local_decls = tree_cons (NULL_TREE, var, cfun->local_decls);
4570 else if (!can_be_nonlocal (var, &id))
4571 cfun->local_decls =
4572 tree_cons (NULL_TREE, remap_decl (var, &id),
4573 cfun->local_decls);
4576 /* Copy the function's body. */
4577 copy_body (&id, old_entry_block->count, old_entry_block->frequency,
4578 ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR);
4580 if (DECL_RESULT (old_decl) != NULL_TREE)
4582 tree *res_decl = &DECL_RESULT (old_decl);
4583 DECL_RESULT (new_decl) = remap_decl (*res_decl, &id);
4584 lang_hooks.dup_lang_specific_decl (DECL_RESULT (new_decl));
4587 /* Renumber the lexical scoping (non-code) blocks consecutively. */
4588 number_blocks (new_decl);
4590 if (VEC_length (gimple, init_stmts))
4592 basic_block bb = split_edge (single_succ_edge (ENTRY_BLOCK_PTR));
4593 while (VEC_length (gimple, init_stmts))
4594 insert_init_stmt (bb, VEC_pop (gimple, init_stmts));
4597 /* Remap the nonlocal_goto_save_area, if any. */
4598 if (cfun->nonlocal_goto_save_area)
4600 struct walk_stmt_info wi;
4602 memset (&wi, 0, sizeof (wi));
4603 wi.info = &id;
4604 walk_tree (&cfun->nonlocal_goto_save_area, remap_gimple_op_r, &wi, NULL);
4607 /* Clean up. */
4608 pointer_map_destroy (id.decl_map);
4609 free_dominance_info (CDI_DOMINATORS);
4610 free_dominance_info (CDI_POST_DOMINATORS);
4612 fold_marked_statements (0, id.statements_to_fold);
4613 pointer_set_destroy (id.statements_to_fold);
4614 fold_cond_expr_cond ();
4615 delete_unreachable_blocks_update_callgraph (&id);
4616 update_ssa (TODO_update_ssa);
4617 free_dominance_info (CDI_DOMINATORS);
4618 free_dominance_info (CDI_POST_DOMINATORS);
4620 VEC_free (gimple, heap, init_stmts);
4621 pop_cfun ();
4622 current_function_decl = old_current_function_decl;
4623 gcc_assert (!current_function_decl
4624 || DECL_STRUCT_FUNCTION (current_function_decl) == cfun);
4625 return;
4628 /* EXP is a CALL_EXPR present in a GENERIC expression tree. Try to integrate
4629 the callee and return the inlined body on success. */
4631 tree
4632 maybe_inline_call_in_expr (tree exp)
4634 tree fn = get_callee_fndecl (exp);
4636 /* We can only try to inline "const" functions. */
4637 if (fn && TREE_READONLY (fn) && DECL_SAVED_TREE (fn))
4639 struct pointer_map_t *decl_map = pointer_map_create ();
4640 call_expr_arg_iterator iter;
4641 copy_body_data id;
4642 tree param, arg, t;
4644 /* Remap the parameters. */
4645 for (param = DECL_ARGUMENTS (fn), arg = first_call_expr_arg (exp, &iter);
4646 param;
4647 param = TREE_CHAIN (param), arg = next_call_expr_arg (&iter))
4648 *pointer_map_insert (decl_map, param) = arg;
4650 memset (&id, 0, sizeof (id));
4651 id.src_fn = fn;
4652 id.dst_fn = current_function_decl;
4653 id.src_cfun = DECL_STRUCT_FUNCTION (fn);
4654 id.decl_map = decl_map;
4656 id.copy_decl = copy_decl_no_change;
4657 id.transform_call_graph_edges = CB_CGE_DUPLICATE;
4658 id.transform_new_cfg = false;
4659 id.transform_return_to_modify = true;
4660 id.transform_lang_insert_block = NULL;
4662 /* Make sure not to unshare trees behind the front-end's back
4663 since front-end specific mechanisms may rely on sharing. */
4664 id.regimplify = false;
4665 id.do_not_unshare = true;
4667 /* We're not inside any EH region. */
4668 id.eh_region = -1;
4670 t = copy_tree_body (&id);
4671 pointer_map_destroy (decl_map);
4673 /* We can only return something suitable for use in a GENERIC
4674 expression tree. */
4675 if (TREE_CODE (t) == MODIFY_EXPR)
4676 return TREE_OPERAND (t, 1);
4679 return NULL_TREE;
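
/* Illustrative usage, a sketch with a hypothetical caller: a front end
   folding a call to a "const" function where a GENERIC value is wanted
   could try

     tree folded = maybe_inline_call_in_expr (call);
     if (folded)
       return folded;

   and fall back to emitting the call when NULL_TREE comes back.  */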
4682 /* Duplicate a type, fields and all. */
4684 tree
4685 build_duplicate_type (tree type)
4687 struct copy_body_data id;
4689 memset (&id, 0, sizeof (id));
4690 id.src_fn = current_function_decl;
4691 id.dst_fn = current_function_decl;
4692 id.src_cfun = cfun;
4693 id.decl_map = pointer_map_create ();
4694 id.copy_decl = copy_decl_no_change;
4696 type = remap_type_1 (type, &id);
4698 pointer_map_destroy (id.decl_map);
4700 TYPE_CANONICAL (type) = type;
4702 return type;
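
/* Illustrative usage (sketch, not from the original sources):

     tree dup = build_duplicate_type (orig_type);

   DUP is structurally identical to ORIG_TYPE but is a distinct node
   and its own TYPE_CANONICAL, so the middle end does not treat the
   two as the same type.  */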
4705 /* Return whether it is safe to inline a function even though it uses
4706 different target-specific options or different optimization options. */
4707 bool
4708 tree_can_inline_p (tree caller, tree callee)
4710 #if 0
4711 /* This causes a regression in SPEC in that it prevents a cold function from
4712 inlining a hot function. Perhaps this should only apply to functions
4713 that the user declares hot/cold/optimize explicitly. */
4715 /* Don't inline a function with a higher optimization level than the
4716 caller, or with different space constraints (hot/cold functions). */
4717 tree caller_tree = DECL_FUNCTION_SPECIFIC_OPTIMIZATION (caller);
4718 tree callee_tree = DECL_FUNCTION_SPECIFIC_OPTIMIZATION (callee);
4720 if (caller_tree != callee_tree)
4722 struct cl_optimization *caller_opt
4723 = TREE_OPTIMIZATION ((caller_tree)
4724 ? caller_tree
4725 : optimization_default_node);
4727 struct cl_optimization *callee_opt
4728 = TREE_OPTIMIZATION ((callee_tree)
4729 ? callee_tree
4730 : optimization_default_node);
4732 if ((caller_opt->optimize > callee_opt->optimize)
4733 || (caller_opt->optimize_size != callee_opt->optimize_size))
4734 return false;
4736 #endif
4738 /* Allow the backend to decide if inlining is ok. */
4739 return targetm.target_option.can_inline_p (caller, callee);