[official-gcc.git] / gcc / tree-inline.c
/* Tree inlining.
   Copyright 2001, 2002, 2003, 2004, 2005, 2006 Free Software Foundation, Inc.
   Contributed by Alexandre Oliva <aoliva@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to
the Free Software Foundation, 51 Franklin Street, Fifth Floor,
Boston, MA 02110-1301, USA.  */
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tm.h"
26 #include "toplev.h"
27 #include "tree.h"
28 #include "tree-inline.h"
29 #include "rtl.h"
30 #include "expr.h"
31 #include "flags.h"
32 #include "params.h"
33 #include "input.h"
34 #include "insn-config.h"
35 #include "varray.h"
36 #include "hashtab.h"
37 #include "langhooks.h"
38 #include "basic-block.h"
39 #include "tree-iterator.h"
40 #include "cgraph.h"
41 #include "intl.h"
42 #include "tree-mudflap.h"
43 #include "tree-flow.h"
44 #include "function.h"
45 #include "ggc.h"
46 #include "tree-flow.h"
47 #include "diagnostic.h"
48 #include "except.h"
49 #include "debug.h"
50 #include "pointer-set.h"
51 #include "ipa-prop.h"
53 /* I'm not real happy about this, but we need to handle gimple and
54 non-gimple trees. */
55 #include "tree-gimple.h"
/* Inlining, Cloning, Versioning, Parallelization

   Inlining: a function body is duplicated, but the PARM_DECLs are
   remapped into VAR_DECLs, and non-void RETURN_EXPRs become
   MODIFY_EXPRs that store to a dedicated returned-value variable.
   The duplicated eh_region info of the copy will later be appended
   to the info for the caller; the eh_region info in copied throwing
   statements and RESX_EXPRs is adjusted accordingly.

   Cloning: (only in C++) We have one body for a con/de/structor, and
   multiple function decls, each with a unique parameter list.
   Duplicate the body, using the given splay tree; some parameters
   will become constants (like 0 or 1).

   Versioning: a function body is duplicated and the result is a new
   function, rather than being inlined into the blocks of an existing
   function as with inlining.  Some parameters will become constants.

   Parallelization: a region of a function is duplicated resulting in
   a new function.  Variables may be replaced with complex expressions
   to enable shared variable semantics.

   All of these will simultaneously look up any callgraph edges.  If
   we're going to inline the duplicated function body, and the given
   function has some cloned callgraph nodes (one for each place this
   function will be inlined) those callgraph edges will be duplicated.
   If we're cloning the body, those callgraph edges will be
   updated to point into the new body.  (Note that the original
   callgraph node and edge list will not be altered.)

   See the CALL_EXPR handling case in copy_body_r ().  */
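
/* For illustration only (not part of the algorithm): inlining a call
   such as

     int square (int x) { return x * x; }
     ... y = square (n); ...

   conceptually remaps the PARM_DECL `x' to a new VAR_DECL initialized
   from `n', and rewrites the RETURN_EXPR into an assignment to a
   returned-value variable that then feeds `y'.  The exact trees
   involved are shown in copy_body_r and declare_return_variable
   below.  */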
/* 0 if we should not perform inlining.
   1 if we should expand function calls inline at the tree level.
   2 if we should consider *all* functions to be inline
   candidates.  */

int flag_inline_trees = 0;
/* To Do:

   o In order to make inlining-on-trees work, we pessimized
     function-local static constants.  In particular, they are now
     always output, even when not addressed.  Fix this by treating
     function-local static constants just like global static
     constants; the back-end already knows not to output them if they
     are not needed.

   o Provide heuristics to clamp inlining of recursive template
     calls?  */
/* Prototypes.  */

static tree declare_return_variable (copy_body_data *, tree, tree, tree *);
static tree copy_generic_body (copy_body_data *);
static bool inlinable_function_p (tree);
static void remap_block (tree *, copy_body_data *);
static tree remap_decls (tree, copy_body_data *);
static void copy_bind_expr (tree *, int *, copy_body_data *);
static tree mark_local_for_remap_r (tree *, int *, void *);
static void unsave_expr_1 (tree);
static tree unsave_r (tree *, int *, void *);
static void declare_inline_vars (tree, tree);
static void remap_save_expr (tree *, void *, int *);
static void add_lexical_block (tree current_block, tree new_block);
static tree copy_decl_to_var (tree, copy_body_data *);
static tree copy_result_decl_to_var (tree, copy_body_data *);
static tree copy_decl_no_change (tree, copy_body_data *);
static tree copy_decl_maybe_to_var (tree, copy_body_data *);
127 /* Insert a tree->tree mapping for ID. Despite the name suggests
128 that the trees should be variables, it is used for more than that. */
130 void
131 insert_decl_map (copy_body_data *id, tree key, tree value)
133 splay_tree_insert (id->decl_map, (splay_tree_key) key,
134 (splay_tree_value) value);
136 /* Always insert an identity map as well. If we see this same new
137 node again, we won't want to duplicate it a second time. */
138 if (key != value)
139 splay_tree_insert (id->decl_map, (splay_tree_key) value,
140 (splay_tree_value) value);
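
#if 0
/* Illustrative sketch only; OLD_VAR and ID are hypothetical locals,
   kept under #if 0 because this is an example rather than real code.
   A pass duplicating a local variable registers the pair up front so
   that later tree walks reuse the same copy:  */
tree new_var = copy_decl_to_var (old_var, id);
insert_decl_map (id, old_var, new_var);  /* Also registers new_var->new_var.  */
#endif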
/* Remap DECL during the copying of the BLOCK tree for the function.  */

tree
remap_decl (tree decl, copy_body_data *id)
{
  splay_tree_node n;
  tree fn;

  /* We only remap local variables in the current function.  */
  fn = id->src_fn;

  /* See if we have remapped this declaration.  */

  n = splay_tree_lookup (id->decl_map, (splay_tree_key) decl);

  /* If we didn't already have an equivalent for this declaration,
     create one now.  */
  if (!n)
    {
      /* Make a copy of the variable or label.  */
      tree t = id->copy_decl (decl, id);

      /* Remember it, so that if we encounter this local entity again
         we can reuse this copy.  Do this early because remap_type may
         need this decl for TYPE_STUB_DECL.  */
      insert_decl_map (id, decl, t);

      if (!DECL_P (t))
        return t;

      /* Remap types, if necessary.  */
      TREE_TYPE (t) = remap_type (TREE_TYPE (t), id);
      if (TREE_CODE (t) == TYPE_DECL)
        DECL_ORIGINAL_TYPE (t) = remap_type (DECL_ORIGINAL_TYPE (t), id);

      /* Remap sizes as necessary.  */
      walk_tree (&DECL_SIZE (t), copy_body_r, id, NULL);
      walk_tree (&DECL_SIZE_UNIT (t), copy_body_r, id, NULL);

      /* If fields, do likewise for offset and qualifier.  */
      if (TREE_CODE (t) == FIELD_DECL)
        {
          walk_tree (&DECL_FIELD_OFFSET (t), copy_body_r, id, NULL);
          if (TREE_CODE (DECL_CONTEXT (t)) == QUAL_UNION_TYPE)
            walk_tree (&DECL_QUALIFIER (t), copy_body_r, id, NULL);
        }

      return t;
    }

  return unshare_expr ((tree) n->value);
}
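
/* Example (illustrative only): when inlining

     void f (void) { int i; ... }

   into some caller, remap_decl maps the callee's VAR_DECL `i' to a
   fresh VAR_DECL in the caller, so that two inlined copies of `f'
   get two independent `i's.  A static local, by contrast, is never
   remapped; see remap_decls below.  */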
static tree
remap_type_1 (tree type, copy_body_data *id)
{
  splay_tree_node node;
  tree new, t;

  if (type == NULL)
    return type;

  /* See if we have remapped this type.  */
  node = splay_tree_lookup (id->decl_map, (splay_tree_key) type);
  if (node)
    return (tree) node->value;

  /* The type only needs remapping if it's variably modified.  */
  if (! variably_modified_type_p (type, id->src_fn))
    {
      insert_decl_map (id, type, type);
      return type;
    }

  /* We do need a copy.  Build and register it now.  If this is a pointer or
     reference type, remap the designated type and make a new pointer or
     reference type.  */
  if (TREE_CODE (type) == POINTER_TYPE)
    {
      new = build_pointer_type_for_mode (remap_type (TREE_TYPE (type), id),
                                         TYPE_MODE (type),
                                         TYPE_REF_CAN_ALIAS_ALL (type));
      insert_decl_map (id, type, new);
      return new;
    }
  else if (TREE_CODE (type) == REFERENCE_TYPE)
    {
      new = build_reference_type_for_mode (remap_type (TREE_TYPE (type), id),
                                           TYPE_MODE (type),
                                           TYPE_REF_CAN_ALIAS_ALL (type));
      insert_decl_map (id, type, new);
      return new;
    }
  else
    new = copy_node (type);

  insert_decl_map (id, type, new);

  /* This is a new type, not a copy of an old type.  Need to reassociate
     variants.  We can handle everything except the main variant lazily.  */
  t = TYPE_MAIN_VARIANT (type);
  if (type != t)
    {
      t = remap_type (t, id);
      TYPE_MAIN_VARIANT (new) = t;
      TYPE_NEXT_VARIANT (new) = TYPE_MAIN_VARIANT (t);
      TYPE_NEXT_VARIANT (t) = new;
    }
  else
    {
      TYPE_MAIN_VARIANT (new) = new;
      TYPE_NEXT_VARIANT (new) = NULL;
    }

  if (TYPE_STUB_DECL (type))
    TYPE_STUB_DECL (new) = remap_decl (TYPE_STUB_DECL (type), id);

  /* Lazily create pointer and reference types.  */
  TYPE_POINTER_TO (new) = NULL;
  TYPE_REFERENCE_TO (new) = NULL;

  switch (TREE_CODE (new))
    {
    case INTEGER_TYPE:
    case REAL_TYPE:
    case ENUMERAL_TYPE:
    case BOOLEAN_TYPE:
      t = TYPE_MIN_VALUE (new);
      if (t && TREE_CODE (t) != INTEGER_CST)
        walk_tree (&TYPE_MIN_VALUE (new), copy_body_r, id, NULL);

      t = TYPE_MAX_VALUE (new);
      if (t && TREE_CODE (t) != INTEGER_CST)
        walk_tree (&TYPE_MAX_VALUE (new), copy_body_r, id, NULL);
      return new;

    case FUNCTION_TYPE:
      TREE_TYPE (new) = remap_type (TREE_TYPE (new), id);
      walk_tree (&TYPE_ARG_TYPES (new), copy_body_r, id, NULL);
      return new;

    case ARRAY_TYPE:
      TREE_TYPE (new) = remap_type (TREE_TYPE (new), id);
      TYPE_DOMAIN (new) = remap_type (TYPE_DOMAIN (new), id);
      break;

    case RECORD_TYPE:
    case UNION_TYPE:
    case QUAL_UNION_TYPE:
      {
        tree f, nf = NULL;

        for (f = TYPE_FIELDS (new); f ; f = TREE_CHAIN (f))
          {
            t = remap_decl (f, id);
            DECL_CONTEXT (t) = new;
            TREE_CHAIN (t) = nf;
            nf = t;
          }
        TYPE_FIELDS (new) = nreverse (nf);
      }
      break;

    case OFFSET_TYPE:
    default:
      /* Shouldn't have been thought variable sized.  */
      gcc_unreachable ();
    }

  walk_tree (&TYPE_SIZE (new), copy_body_r, id, NULL);
  walk_tree (&TYPE_SIZE_UNIT (new), copy_body_r, id, NULL);

  return new;
}
tree
remap_type (tree type, copy_body_data *id)
{
  splay_tree_node node;

  if (type == NULL)
    return type;

  /* See if we have remapped this type.  */
  node = splay_tree_lookup (id->decl_map, (splay_tree_key) type);
  if (node)
    return (tree) node->value;

  /* The type only needs remapping if it's variably modified.  */
  if (! variably_modified_type_p (type, id->src_fn))
    {
      insert_decl_map (id, type, type);
      return type;
    }

  return remap_type_1 (type, id);
}
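
/* Example (illustrative only): in

     void g (int n) { int a[n]; ... }

   the type of `a' is variably modified: its TYPE_DOMAIN depends on the
   PARM_DECL `n'.  When `g' is inlined, remap_type must build a new
   ARRAY_TYPE whose bounds refer to the remapped copy of `n'; types
   that mention no local entities are returned unchanged.  */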
static tree
remap_decls (tree decls, copy_body_data *id)
{
  tree old_var;
  tree new_decls = NULL_TREE;

  /* Remap its variables.  */
  for (old_var = decls; old_var; old_var = TREE_CHAIN (old_var))
    {
      tree new_var;

      /* We cannot remap local static declarations: duplicating them
         would break the one-decl rule.  Instead, link the originals
         directly into cfun->unexpanded_var_list.  */
      if (!lang_hooks.tree_inlining.auto_var_in_fn_p (old_var, id->src_fn)
          && !DECL_EXTERNAL (old_var))
        {
          cfun->unexpanded_var_list = tree_cons (NULL_TREE, old_var,
                                                 cfun->unexpanded_var_list);
          continue;
        }

      /* Remap the variable.  */
      new_var = remap_decl (old_var, id);

      /* If we didn't remap this variable, we can't mess with its
         TREE_CHAIN.  If we remapped this variable to the return slot, it's
         already declared somewhere else, so don't declare it here.  */
      if (!new_var || new_var == id->retvar)
        ;
      else
        {
          gcc_assert (DECL_P (new_var));
          TREE_CHAIN (new_var) = new_decls;
          new_decls = new_var;
        }
    }

  return nreverse (new_decls);
}
/* Copy the BLOCK to contain remapped versions of the variables
   therein.  And hook the new block into the block-tree.  */

static void
remap_block (tree *block, copy_body_data *id)
{
  tree old_block;
  tree new_block;
  tree fn;

  /* Make the new block.  */
  old_block = *block;
  new_block = make_node (BLOCK);
  TREE_USED (new_block) = TREE_USED (old_block);
  BLOCK_ABSTRACT_ORIGIN (new_block) = old_block;
  BLOCK_SOURCE_LOCATION (new_block) = BLOCK_SOURCE_LOCATION (old_block);
  *block = new_block;

  /* Remap its variables.  */
  BLOCK_VARS (new_block) = remap_decls (BLOCK_VARS (old_block), id);

  fn = id->dst_fn;

  if (id->transform_lang_insert_block)
    lang_hooks.decls.insert_block (new_block);

  /* Remember the remapped block.  */
  insert_decl_map (id, old_block, new_block);
}
/* Copy the whole block tree and root it in id->block.  */
static tree
remap_blocks (tree block, copy_body_data *id)
{
  tree t;
  tree new = block;

  if (!block)
    return NULL;

  remap_block (&new, id);
  gcc_assert (new != block);
  for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
    add_lexical_block (new, remap_blocks (t, id));
  return new;
}
static void
copy_statement_list (tree *tp)
{
  tree_stmt_iterator oi, ni;
  tree new;

  new = alloc_stmt_list ();
  ni = tsi_start (new);
  oi = tsi_start (*tp);
  *tp = new;

  for (; !tsi_end_p (oi); tsi_next (&oi))
    tsi_link_after (&ni, tsi_stmt (oi), TSI_NEW_STMT);
}
static void
copy_bind_expr (tree *tp, int *walk_subtrees, copy_body_data *id)
{
  tree block = BIND_EXPR_BLOCK (*tp);
  /* Copy (and replace) the statement.  */
  copy_tree_r (tp, walk_subtrees, NULL);
  if (block)
    {
      remap_block (&block, id);
      BIND_EXPR_BLOCK (*tp) = block;
    }

  if (BIND_EXPR_VARS (*tp))
    /* This will remap a lot of the same decls again, but this should be
       harmless.  */
    BIND_EXPR_VARS (*tp) = remap_decls (BIND_EXPR_VARS (*tp), id);
}
/* Called from copy_body via walk_tree.  DATA is really a
   `copy_body_data *'.  */

tree
copy_body_r (tree *tp, int *walk_subtrees, void *data)
{
  copy_body_data *id = (copy_body_data *) data;
  tree fn = id->src_fn;
  tree new_block;

  /* Begin by recognizing trees that we'll completely rewrite for the
     inlining context.  Our output for these trees is completely
     different from our input (e.g. RETURN_EXPR is deleted, and morphs
     into an edge).  Further down, we'll handle trees that get
     duplicated and/or tweaked.  */

  /* When requested, RETURN_EXPRs should be transformed to just the
     contained MODIFY_EXPR.  The branch semantics of the return will
     be handled elsewhere by manipulating the CFG rather than a statement.  */
  if (TREE_CODE (*tp) == RETURN_EXPR && id->transform_return_to_modify)
    {
      tree assignment = TREE_OPERAND (*tp, 0);

      /* If we're returning something, just turn that into an
         assignment into the equivalent of the original RESULT_DECL.
         If the "assignment" is just the result decl, the result
         decl has already been set (e.g. a recent "foo (&result_decl,
         ...)"); just toss the entire RETURN_EXPR.  */
      if (assignment && TREE_CODE (assignment) == MODIFY_EXPR)
        {
          /* Replace the RETURN_EXPR with (a copy of) the
             MODIFY_EXPR hanging underneath.  */
          *tp = copy_node (assignment);
        }
      else /* Else the RETURN_EXPR returns no value.  */
        {
          *tp = NULL;
          return (tree) (void *)1;
        }
    }
  /* Local variables and labels need to be replaced by equivalent
     variables.  We don't want to copy static variables; there's only
     one of those, no matter how many times we inline the containing
     function.  Similarly for globals from an outer function.  */
  else if (lang_hooks.tree_inlining.auto_var_in_fn_p (*tp, fn))
    {
      tree new_decl;

      /* Remap the declaration.  */
      new_decl = remap_decl (*tp, id);
      gcc_assert (new_decl);
      /* Replace this variable with the copy.  */
      STRIP_TYPE_NOPS (new_decl);
      *tp = new_decl;
      *walk_subtrees = 0;
    }
  else if (TREE_CODE (*tp) == STATEMENT_LIST)
    copy_statement_list (tp);
  else if (TREE_CODE (*tp) == SAVE_EXPR)
    remap_save_expr (tp, id->decl_map, walk_subtrees);
  else if (TREE_CODE (*tp) == LABEL_DECL
           && (! DECL_CONTEXT (*tp)
               || decl_function_context (*tp) == id->src_fn))
    /* These may need to be remapped for EH handling.  */
    *tp = remap_decl (*tp, id);
  else if (TREE_CODE (*tp) == BIND_EXPR)
    copy_bind_expr (tp, walk_subtrees, id);
  /* Types may need remapping as well.  */
  else if (TYPE_P (*tp))
    *tp = remap_type (*tp, id);

  /* If this is a constant, we have to copy the node iff the type will be
     remapped.  copy_tree_r will not copy a constant.  */
  else if (CONSTANT_CLASS_P (*tp))
    {
      tree new_type = remap_type (TREE_TYPE (*tp), id);

      if (new_type == TREE_TYPE (*tp))
        *walk_subtrees = 0;

      else if (TREE_CODE (*tp) == INTEGER_CST)
        *tp = build_int_cst_wide (new_type, TREE_INT_CST_LOW (*tp),
                                  TREE_INT_CST_HIGH (*tp));
      else
        {
          *tp = copy_node (*tp);
          TREE_TYPE (*tp) = new_type;
        }
    }

  /* Otherwise, just copy the node.  Note that copy_tree_r already
     knows not to copy VAR_DECLs, etc., so this is safe.  */
  else
    {
      /* Here we handle trees that are not completely rewritten.
         First we detect some inlining-induced bogosities for
         discarding.  */
      if (TREE_CODE (*tp) == MODIFY_EXPR
          && TREE_OPERAND (*tp, 0) == TREE_OPERAND (*tp, 1)
          && (lang_hooks.tree_inlining.auto_var_in_fn_p
              (TREE_OPERAND (*tp, 0), fn)))
        {
          /* Some assignments VAR = VAR; don't generate any rtl code
             and thus don't count as variable modification.  Avoid
             keeping bogosities like 0 = 0.  */
          tree decl = TREE_OPERAND (*tp, 0), value;
          splay_tree_node n;

          n = splay_tree_lookup (id->decl_map, (splay_tree_key) decl);
          if (n)
            {
              value = (tree) n->value;
              STRIP_TYPE_NOPS (value);
              if (TREE_CONSTANT (value) || TREE_READONLY_DECL_P (value))
                {
                  *tp = build_empty_stmt ();
                  return copy_body_r (tp, walk_subtrees, data);
                }
            }
        }
      else if (TREE_CODE (*tp) == INDIRECT_REF)
        {
          /* Get rid of *& from inline substitutions that can happen when a
             pointer argument is an ADDR_EXPR.  */
          tree decl = TREE_OPERAND (*tp, 0);
          splay_tree_node n;

          n = splay_tree_lookup (id->decl_map, (splay_tree_key) decl);
          if (n)
            {
              tree new;
              tree old;
              /* If we happen to get an ADDR_EXPR in n->value, strip
                 it manually here as we'll eventually get ADDR_EXPRs
                 which lie about their types pointed to.  In this case
                 build_fold_indirect_ref wouldn't strip the INDIRECT_REF,
                 but we absolutely rely on that.  As fold_indirect_ref
                 does other useful transformations, try that first, though.  */
              tree type = TREE_TYPE (TREE_TYPE ((tree)n->value));
              new = unshare_expr ((tree)n->value);
              old = *tp;
              *tp = fold_indirect_ref_1 (type, new);
              if (! *tp)
                {
                  if (TREE_CODE (new) == ADDR_EXPR)
                    *tp = TREE_OPERAND (new, 0);
                  else
                    {
                      *tp = build1 (INDIRECT_REF, type, new);
                      TREE_THIS_VOLATILE (*tp) = TREE_THIS_VOLATILE (old);
                    }
                }
              *walk_subtrees = 0;
              return NULL;
            }
        }

      /* Here is the "usual case".  Copy this tree node, and then
         tweak some special cases.  */
      copy_tree_r (tp, walk_subtrees, NULL);

      /* If EXPR has block defined, map it to newly constructed block.
         When inlining we want EXPRs without block to appear in the block
         of the function call.  */
      if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (TREE_CODE (*tp))))
        {
          new_block = id->block;
          if (TREE_BLOCK (*tp))
            {
              splay_tree_node n;
              n = splay_tree_lookup (id->decl_map,
                                     (splay_tree_key) TREE_BLOCK (*tp));
              gcc_assert (n);
              new_block = (tree) n->value;
            }
          TREE_BLOCK (*tp) = new_block;
        }

      if (TREE_CODE (*tp) == RESX_EXPR && id->eh_region_offset)
        TREE_OPERAND (*tp, 0) =
          build_int_cst
            (NULL_TREE,
             id->eh_region_offset + TREE_INT_CST_LOW (TREE_OPERAND (*tp, 0)));

      TREE_TYPE (*tp) = remap_type (TREE_TYPE (*tp), id);

      /* The copied TARGET_EXPR has never been expanded, even if the
         original node was expanded already.  */
      if (TREE_CODE (*tp) == TARGET_EXPR && TREE_OPERAND (*tp, 3))
        {
          TREE_OPERAND (*tp, 1) = TREE_OPERAND (*tp, 3);
          TREE_OPERAND (*tp, 3) = NULL_TREE;
        }

      /* Variable substitution need not be simple.  In particular, the
         INDIRECT_REF substitution above.  Make sure that TREE_CONSTANT
         and friends are up-to-date.  */
      else if (TREE_CODE (*tp) == ADDR_EXPR)
        {
          walk_tree (&TREE_OPERAND (*tp, 0), copy_body_r, id, NULL);
          /* Handle the case where we substituted an INDIRECT_REF
             into the operand of the ADDR_EXPR.  */
          if (TREE_CODE (TREE_OPERAND (*tp, 0)) == INDIRECT_REF)
            *tp = TREE_OPERAND (TREE_OPERAND (*tp, 0), 0);
          else
            recompute_tree_invariant_for_addr_expr (*tp);
          *walk_subtrees = 0;
        }
    }

  /* Keep iterating.  */
  return NULL_TREE;
}
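
/* Example (illustrative only) of the INDIRECT_REF handling above:
   if a callee parameter `int *p' is bound to the argument `&x',
   then a use `*p' in the callee body would naively become `*&x';
   copy_body_r folds that back to plain `x' via fold_indirect_ref_1,
   rather than leaving the gratuitous dereference in the IL.  */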
/* Copy basic block, scale profile accordingly.  Edges will be taken care of
   later.  */

static basic_block
copy_bb (copy_body_data *id, basic_block bb, int frequency_scale, int count_scale)
{
  block_stmt_iterator bsi, copy_bsi;
  basic_block copy_basic_block;

  /* create_basic_block() will append every new block to
     basic_block_info automatically.  */
  copy_basic_block = create_basic_block (NULL, (void *) 0,
                                         (basic_block) bb->prev_bb->aux);
  copy_basic_block->count = bb->count * count_scale / REG_BR_PROB_BASE;
  copy_basic_block->frequency = (bb->frequency
                                 * frequency_scale / REG_BR_PROB_BASE);
  copy_bsi = bsi_start (copy_basic_block);

  for (bsi = bsi_start (bb);
       !bsi_end_p (bsi); bsi_next (&bsi))
    {
      tree stmt = bsi_stmt (bsi);
      tree orig_stmt = stmt;

      walk_tree (&stmt, copy_body_r, id, NULL);

      /* RETURN_EXPR might be removed,
         this is signalled by making stmt pointer NULL.  */
      if (stmt)
        {
          tree call, decl;

          /* With return slot optimization we can end up with
             non-gimple (foo *)&this->m, fix that here.  */
          if (TREE_CODE (stmt) == MODIFY_EXPR
              && TREE_CODE (TREE_OPERAND (stmt, 1)) == NOP_EXPR
              && !is_gimple_val (TREE_OPERAND (TREE_OPERAND (stmt, 1), 0)))
            gimplify_stmt (&stmt);

          bsi_insert_after (&copy_bsi, stmt, BSI_NEW_STMT);
          call = get_call_expr_in (stmt);
          /* We're duplicating a CALL_EXPR.  Find any corresponding
             callgraph edges and update or duplicate them.  */
          if (call && (decl = get_callee_fndecl (call)))
            {
              struct cgraph_node *node;
              struct cgraph_edge *edge;

              switch (id->transform_call_graph_edges)
                {
                case CB_CGE_DUPLICATE:
                  edge = cgraph_edge (id->src_node, orig_stmt);
                  if (edge)
                    cgraph_clone_edge (edge, id->dst_node, stmt,
                                       REG_BR_PROB_BASE, 1, true);
                  break;

                case CB_CGE_MOVE_CLONES:
                  for (node = id->dst_node->next_clone;
                       node;
                       node = node->next_clone)
                    {
                      edge = cgraph_edge (node, orig_stmt);
                      gcc_assert (edge);
                      cgraph_set_call_stmt (edge, stmt);
                    }
                  /* FALLTHRU */

                case CB_CGE_MOVE:
                  edge = cgraph_edge (id->dst_node, orig_stmt);
                  if (edge)
                    cgraph_set_call_stmt (edge, stmt);
                  break;

                default:
                  gcc_unreachable ();
                }
            }
          /* If you think we can abort here, you are wrong.
             There is no region 0 in tree land.  */
          gcc_assert (lookup_stmt_eh_region_fn (id->src_cfun, orig_stmt)
                      != 0);

          if (tree_could_throw_p (stmt))
            {
              int region = lookup_stmt_eh_region_fn (id->src_cfun, orig_stmt);
              /* Add an entry for the copied tree in the EH hashtable.
                 When cloning or versioning, use the hashtable in
                 cfun, and just copy the EH number.  When inlining, use the
                 hashtable in the caller, and adjust the region number.  */
              if (region > 0)
                add_stmt_to_eh_region (stmt, region + id->eh_region_offset);

              /* If this tree doesn't have a region associated with it,
                 and there is a "current region,"
                 then associate this tree with the current region
                 and add edges associated with this region.  */
              if ((lookup_stmt_eh_region_fn (id->src_cfun,
                                             orig_stmt) <= 0
                   && id->eh_region > 0)
                  && tree_could_throw_p (stmt))
                add_stmt_to_eh_region (stmt, id->eh_region);
            }
        }
    }
  return copy_basic_block;
}
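
/* A worked example of the scaling above (illustrative numbers only):
   with REG_BR_PROB_BASE == 10000, a count_scale of 5000 means the call
   site executes half as often as the callee's entry block did, so a
   copied block whose bb->count was 700 receives
   700 * 5000 / 10000 == 350.  */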
/* Copy edges from BB into its copy constructed earlier, scale profile
   accordingly.  Edges will be taken care of later.  Assume aux
   pointers to point to the copies of each BB.  */
static void
copy_edges_for_bb (basic_block bb, int count_scale)
{
  basic_block new_bb = (basic_block) bb->aux;
  edge_iterator ei;
  edge old_edge;
  block_stmt_iterator bsi;
  int flags;

  /* Use the indices from the original blocks to create edges for the
     new ones.  */
  FOR_EACH_EDGE (old_edge, ei, bb->succs)
    if (!(old_edge->flags & EDGE_EH))
      {
        edge new;

        flags = old_edge->flags;

        /* Return edges do get a FALLTHRU flag when they get inlined.  */
        if (old_edge->dest->index == EXIT_BLOCK && !old_edge->flags
            && old_edge->dest->aux != EXIT_BLOCK_PTR)
          flags |= EDGE_FALLTHRU;
        new = make_edge (new_bb, (basic_block) old_edge->dest->aux, flags);
        new->count = old_edge->count * count_scale / REG_BR_PROB_BASE;
        new->probability = old_edge->probability;
      }

  if (bb->index == ENTRY_BLOCK || bb->index == EXIT_BLOCK)
    return;

  for (bsi = bsi_start (new_bb); !bsi_end_p (bsi);)
    {
      tree copy_stmt;

      copy_stmt = bsi_stmt (bsi);
      update_stmt (copy_stmt);
      /* Do this before the possible split_block.  */
      bsi_next (&bsi);

      /* If this tree could throw an exception, there are two
         cases where we need to add abnormal edge(s): the
         tree wasn't in a region and there is a "current
         region" in the caller; or the original tree had
         EH edges.  In both cases split the block after the tree,
         and add abnormal edge(s) as needed; we need both
         those from the callee and the caller.
         We check whether the copy can throw, because the const
         propagation can change an INDIRECT_REF which throws
         into a COMPONENT_REF which doesn't.  If the copy
         can throw, the original could also throw.  */

      if (tree_can_throw_internal (copy_stmt))
        {
          if (!bsi_end_p (bsi))
            /* Note that bb's predecessor edges aren't necessarily
               right at this point; split_block doesn't care.  */
            {
              edge e = split_block (new_bb, copy_stmt);
              new_bb = e->dest;
              bsi = bsi_start (new_bb);
            }

          make_eh_edges (copy_stmt);
        }
    }
}
/* Wrapper for remap_decl so it can be used as a callback.  */
static tree
remap_decl_1 (tree decl, void *data)
{
  return remap_decl (decl, (copy_body_data *) data);
}
/* Make a copy of the body of FN so that it can be inserted inline in
   another function.  Walks FN via CFG, returns new fndecl.  */

static tree
copy_cfg_body (copy_body_data * id, gcov_type count, int frequency,
               basic_block entry_block_map, basic_block exit_block_map)
{
  tree callee_fndecl = id->src_fn;
  /* Original cfun for the callee, doesn't change.  */
  struct function *src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
  /* Copy, built by this function.  */
  struct function *new_cfun;
  /* Place to copy from; when a copy of the function was saved off earlier,
     use that instead of the main copy.  */
  struct function *cfun_to_copy =
    (struct function *) ggc_alloc_cleared (sizeof (struct function));
  basic_block bb;
  tree new_fndecl = NULL;
  int count_scale, frequency_scale;

  if (ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count)
    count_scale = (REG_BR_PROB_BASE * count
                   / ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count);
  else
    count_scale = 1;

  if (ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->frequency)
    frequency_scale = (REG_BR_PROB_BASE * frequency
                       /
                       ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->frequency);
  else
    frequency_scale = count_scale;

  /* Register specific tree functions.  */
  tree_register_cfg_hooks ();

  /* Must have a CFG here at this point.  */
  gcc_assert (ENTRY_BLOCK_PTR_FOR_FUNCTION
              (DECL_STRUCT_FUNCTION (callee_fndecl)));

  *cfun_to_copy = *DECL_STRUCT_FUNCTION (callee_fndecl);

  id->src_cfun = cfun_to_copy;

  /* If requested, create new basic_block_info and label_to_block_maps.
     Otherwise, insert our new blocks and labels into the existing cfg.  */
  if (id->transform_new_cfg)
    {
      new_cfun =
        (struct function *) ggc_alloc_cleared (sizeof (struct function));
      *new_cfun = *DECL_STRUCT_FUNCTION (callee_fndecl);
      new_cfun->cfg = NULL;
      new_cfun->decl = new_fndecl = copy_node (callee_fndecl);
      new_cfun->ib_boundaries_block = NULL;
      DECL_STRUCT_FUNCTION (new_fndecl) = new_cfun;
      push_cfun (new_cfun);
      init_empty_tree_cfg ();

      ENTRY_BLOCK_PTR->count =
        (ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count * count_scale /
         REG_BR_PROB_BASE);
      ENTRY_BLOCK_PTR->frequency =
        (ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->frequency *
         frequency_scale / REG_BR_PROB_BASE);
      EXIT_BLOCK_PTR->count =
        (EXIT_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count * count_scale /
         REG_BR_PROB_BASE);
      EXIT_BLOCK_PTR->frequency =
        (EXIT_BLOCK_PTR_FOR_FUNCTION (src_cfun)->frequency *
         frequency_scale / REG_BR_PROB_BASE);

      entry_block_map = ENTRY_BLOCK_PTR;
      exit_block_map = EXIT_BLOCK_PTR;
    }

  ENTRY_BLOCK_PTR_FOR_FUNCTION (cfun_to_copy)->aux = entry_block_map;
  EXIT_BLOCK_PTR_FOR_FUNCTION (cfun_to_copy)->aux = exit_block_map;

  /* Duplicate any exception-handling regions.  */
  if (cfun->eh)
    {
      if (id->transform_new_cfg)
        init_eh_for_function ();
      id->eh_region_offset
        = duplicate_eh_regions (cfun_to_copy, remap_decl_1, id,
                                0, id->eh_region);
    }
  /* Use aux pointers to map the original blocks to copy.  */
  FOR_EACH_BB_FN (bb, cfun_to_copy)
    bb->aux = copy_bb (id, bb, frequency_scale, count_scale);
  /* Now that we've duplicated the blocks, duplicate their edges.  */
  FOR_ALL_BB_FN (bb, cfun_to_copy)
    copy_edges_for_bb (bb, count_scale);
  FOR_ALL_BB_FN (bb, cfun_to_copy)
    bb->aux = NULL;

  if (id->transform_new_cfg)
    pop_cfun ();

  return new_fndecl;
}
/* Make a copy of the body of FN so that it can be inserted inline in
   another function.  */

static tree
copy_generic_body (copy_body_data *id)
{
  tree body;
  tree fndecl = id->src_fn;

  body = DECL_SAVED_TREE (fndecl);
  walk_tree (&body, copy_body_r, id, NULL);

  return body;
}
static tree
copy_body (copy_body_data *id, gcov_type count, int frequency,
           basic_block entry_block_map, basic_block exit_block_map)
{
  tree fndecl = id->src_fn;
  tree body;

  /* If this body has a CFG, walk CFG and copy.  */
  gcc_assert (ENTRY_BLOCK_PTR_FOR_FUNCTION (DECL_STRUCT_FUNCTION (fndecl)));
  body = copy_cfg_body (id, count, frequency, entry_block_map, exit_block_map);

  return body;
}
/* Return true if VALUE is an ADDR_EXPR of an automatic variable
   defined in function FN, or of a data member thereof.  */

static bool
self_inlining_addr_expr (tree value, tree fn)
{
  tree var;

  if (TREE_CODE (value) != ADDR_EXPR)
    return false;

  var = get_base_address (TREE_OPERAND (value, 0));

  return var && lang_hooks.tree_inlining.auto_var_in_fn_p (var, fn);
}
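
/* Example (illustrative only): when a recursive function passes
   `&local' to itself and is inlined into itself, `local' belongs to
   the very function being inlined, so the ADDR_EXPR cannot be
   propagated as a constant; self_inlining_addr_expr detects exactly
   this situation.  */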
static void
setup_one_parameter (copy_body_data *id, tree p, tree value, tree fn,
                     basic_block bb, tree *vars)
{
  tree init_stmt;
  tree var;
  tree var_sub;

  /* If the parameter is never assigned to, we may not need to
     create a new variable here at all.  Instead, we may be able
     to just use the argument value.  */
  if (TREE_READONLY (p)
      && !TREE_ADDRESSABLE (p)
      && value && !TREE_SIDE_EFFECTS (value))
    {
      /* We may produce non-gimple trees by adding NOPs or introduce
         invalid sharing when the operand is not really constant.
         It is not a big deal to prohibit constant propagation here as
         we will constant propagate in the DOM1 pass anyway.  */
      if (is_gimple_min_invariant (value)
          && lang_hooks.types_compatible_p (TREE_TYPE (value), TREE_TYPE (p))
          /* We have to be very careful about ADDR_EXPR.  Make sure
             the base variable isn't a local variable of the inlined
             function, e.g., when doing recursive inlining, direct or
             mutually-recursive or whatever, which is why we don't
             just test whether fn == current_function_decl.  */
          && ! self_inlining_addr_expr (value, fn))
        {
          insert_decl_map (id, p, value);
          return;
        }
    }

  /* Make an equivalent VAR_DECL.  Note that we must NOT remap the type
     here since the type of this decl must be visible to the calling
     function.  */
  var = copy_decl_to_var (p, id);

  /* See if the frontend wants to pass this by invisible reference.  If
     so, our new VAR_DECL will have REFERENCE_TYPE, and we need to
     replace uses of the PARM_DECL with dereferences.  */
  if (TREE_TYPE (var) != TREE_TYPE (p)
      && POINTER_TYPE_P (TREE_TYPE (var))
      && TREE_TYPE (TREE_TYPE (var)) == TREE_TYPE (p))
    {
      insert_decl_map (id, var, var);
      var_sub = build_fold_indirect_ref (var);
    }
  else
    var_sub = var;

  /* Register the VAR_DECL as the equivalent for the PARM_DECL;
     that way, when the PARM_DECL is encountered, it will be
     automatically replaced by the VAR_DECL.  */
  insert_decl_map (id, p, var_sub);

  /* Declare this new variable.  */
  TREE_CHAIN (var) = *vars;
  *vars = var;

  /* Make gimplifier happy about this variable.  */
  DECL_SEEN_IN_BIND_EXPR_P (var) = 1;

  /* Even if P was TREE_READONLY, the new VAR should not be.
     In the original code, we would have constructed a
     temporary, and then the function body would have never
     changed the value of P.  However, now, we will be
     constructing VAR directly.  The constructor body may
     change its value multiple times as it is being
     constructed.  Therefore, it must not be TREE_READONLY;
     the back-end assumes that TREE_READONLY variables are
     assigned to only once.  */
  if (TYPE_NEEDS_CONSTRUCTING (TREE_TYPE (p)))
    TREE_READONLY (var) = 0;

  /* Initialize this VAR_DECL from the equivalent argument.  Convert
     the argument to the proper type in case it was promoted.  */
  if (value)
    {
      tree rhs = fold_convert (TREE_TYPE (var), value);
      block_stmt_iterator bsi = bsi_last (bb);

      if (rhs == error_mark_node)
        return;

      STRIP_USELESS_TYPE_CONVERSION (rhs);

      /* We want to use MODIFY_EXPR, not INIT_EXPR here so that we
         keep our trees in gimple form.  */
      init_stmt = build2 (MODIFY_EXPR, TREE_TYPE (var), var, rhs);

      /* If we did not create a gimple value and we did not create a gimple
         cast of a gimple value, then we will need to gimplify INIT_STMTS
         at the end.  Note that is_gimple_cast only checks the outer
         tree code, not its operand.  Thus the explicit check that its
         operand is a gimple value.  */
      if (!is_gimple_val (rhs)
          && (!is_gimple_cast (rhs)
              || !is_gimple_val (TREE_OPERAND (rhs, 0))))
        gimplify_stmt (&init_stmt);

      /* If VAR represents a zero-sized variable, it's possible that the
         assignment statement may result in no gimple statements.  */
      if (init_stmt)
        bsi_insert_after (&bsi, init_stmt, BSI_NEW_STMT);
    }
}
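
/* Example (illustrative only): for a call `f (4)' where `f' declares
   a read-only, non-addressable parameter `p', setup_one_parameter
   simply maps `p' to the constant 4 and emits no initialization
   statement at all; otherwise it emits the gimple equivalent of
   `p.N = 4;' for a fresh VAR_DECL `p.N'.  */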
/* Generate code to initialize the parameters of the function at the
   top of the stack in ID from the ARGS (presented as a TREE_LIST).  */

static void
initialize_inlined_parameters (copy_body_data *id, tree args, tree static_chain,
                               tree fn, basic_block bb)
{
  tree parms;
  tree a;
  tree p;
  tree vars = NULL_TREE;
  int argnum = 0;

  /* Figure out what the parameters are.  */
  parms = DECL_ARGUMENTS (fn);

  /* Loop through the parameter declarations, replacing each with an
     equivalent VAR_DECL, appropriately initialized.  */
  for (p = parms, a = args; p;
       a = a ? TREE_CHAIN (a) : a, p = TREE_CHAIN (p))
    {
      tree value;

      ++argnum;

      /* Find the initializer.  */
      value = lang_hooks.tree_inlining.convert_parm_for_inlining
              (p, a ? TREE_VALUE (a) : NULL_TREE, fn, argnum);

      setup_one_parameter (id, p, value, fn, bb, &vars);
    }

  /* Initialize the static chain.  */
  p = DECL_STRUCT_FUNCTION (fn)->static_chain_decl;
  gcc_assert (fn != current_function_decl);
  if (p)
    {
      /* No static chain?  Seems like a bug in tree-nested.c.  */
      gcc_assert (static_chain);

      setup_one_parameter (id, p, static_chain, fn, bb, &vars);
    }

  declare_inline_vars (id->block, vars);
}
/* Declare a return variable to replace the RESULT_DECL for the
   function we are calling.  An appropriate DECL_STMT is returned.
   The USE_STMT is filled to contain a use of the declaration to
   indicate the return value of the function.

   RETURN_SLOT_ADDR, if non-null, was a fake parameter that
   took the address of the result.  MODIFY_DEST, if non-null, was the LHS of
   the MODIFY_EXPR to which this call is the RHS.

   The return value is a (possibly null) value that is the result of the
   function as seen by the callee.  *USE_P is a (possibly null) value that
   holds the result as seen by the caller.  */

static tree
declare_return_variable (copy_body_data *id, tree return_slot_addr,
                         tree modify_dest, tree *use_p)
{
  tree callee = id->src_fn;
  tree caller = id->dst_fn;
  tree result = DECL_RESULT (callee);
  tree callee_type = TREE_TYPE (result);
  tree caller_type = TREE_TYPE (TREE_TYPE (callee));
  tree var, use;

  /* We don't need to do anything for functions that don't return
     anything.  */
  if (!result || VOID_TYPE_P (callee_type))
    {
      *use_p = NULL_TREE;
      return NULL_TREE;
    }

  /* If there was a return slot, then the return value is the
     dereferenced address of that object.  */
  if (return_slot_addr)
    {
      /* The front end shouldn't have used both return_slot_addr and
         a modify expression.  */
      gcc_assert (!modify_dest);
      if (DECL_BY_REFERENCE (result))
        var = return_slot_addr;
      else
        var = build_fold_indirect_ref (return_slot_addr);
      if (TREE_CODE (TREE_TYPE (result)) == COMPLEX_TYPE
          && !DECL_COMPLEX_GIMPLE_REG_P (result)
          && DECL_P (var))
        DECL_COMPLEX_GIMPLE_REG_P (var) = 0;
      use = NULL;
      goto done;
    }

  /* All types requiring non-trivial constructors should have been handled.  */
  gcc_assert (!TREE_ADDRESSABLE (callee_type));

  /* Attempt to avoid creating a new temporary variable.  */
  if (modify_dest)
    {
      bool use_it = false;

      /* We can't use MODIFY_DEST if there's type promotion involved.  */
      if (!lang_hooks.types_compatible_p (caller_type, callee_type))
        use_it = false;

      /* ??? If we're assigning to a variable sized type, then we must
         reuse the destination variable, because we've no good way to
         create variable sized temporaries at this point.  */
      else if (TREE_CODE (TYPE_SIZE_UNIT (caller_type)) != INTEGER_CST)
        use_it = true;

      /* If the callee cannot possibly modify MODIFY_DEST, then we can
         reuse it as the result of the call directly.  Don't do this if
         it would promote MODIFY_DEST to addressable.  */
      else if (TREE_ADDRESSABLE (result))
        use_it = false;
      else
        {
          tree base_m = get_base_address (modify_dest);

          /* If the base isn't a decl, then it's a pointer, and we don't
             know where that's going to go.  */
          if (!DECL_P (base_m))
            use_it = false;
          else if (is_global_var (base_m))
            use_it = false;
          else if (TREE_CODE (TREE_TYPE (result)) == COMPLEX_TYPE
                   && !DECL_COMPLEX_GIMPLE_REG_P (result)
                   && DECL_COMPLEX_GIMPLE_REG_P (base_m))
            use_it = false;
          else if (!TREE_ADDRESSABLE (base_m))
            use_it = true;
        }

      if (use_it)
        {
          var = modify_dest;
          use = NULL;
          goto done;
        }
    }

  gcc_assert (TREE_CODE (TYPE_SIZE_UNIT (callee_type)) == INTEGER_CST);

  var = copy_result_decl_to_var (result, id);

  DECL_SEEN_IN_BIND_EXPR_P (var) = 1;
  DECL_STRUCT_FUNCTION (caller)->unexpanded_var_list
    = tree_cons (NULL_TREE, var,
                 DECL_STRUCT_FUNCTION (caller)->unexpanded_var_list);

  /* Do not have the rest of GCC warn about this variable as it should
     not be visible to the user.  */
  TREE_NO_WARNING (var) = 1;

  declare_inline_vars (id->block, var);

  /* Build the use expr.  If the return type of the function was
     promoted, convert it back to the expected type.  */
  use = var;
  if (!lang_hooks.types_compatible_p (TREE_TYPE (var), caller_type))
    use = fold_convert (caller_type, var);

  STRIP_USELESS_TYPE_CONVERSION (use);

  if (DECL_BY_REFERENCE (result))
    var = build_fold_addr_expr (var);

 done:
  /* Register the VAR_DECL as the equivalent for the RESULT_DECL; that
     way, when the RESULT_DECL is encountered, it will be
     automatically replaced by the VAR_DECL.  */
  insert_decl_map (id, result, var);

  /* Remember this so we can ignore it in remap_decls.  */
  id->retvar = var;

  *use_p = use;
  return var;
}
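
/* Example (illustrative only): for `s = build_struct ()' where the
   frontend passed a return slot, the callee's RESULT_DECL is mapped
   to `*slot_addr'; with no return slot but an assignment `x = f ()',
   the RESULT_DECL can often be mapped to `x' itself, avoiding a
   temporary entirely; see the modify_dest logic above.  */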
/* Returns nonzero if a function can be inlined as a tree.  */

bool
tree_inlinable_function_p (tree fn)
{
  return inlinable_function_p (fn);
}

static const char *inline_forbidden_reason;
static tree
inline_forbidden_p_1 (tree *nodep, int *walk_subtrees ATTRIBUTE_UNUSED,
                      void *fnp)
{
  tree node = *nodep;
  tree fn = (tree) fnp;
  tree t;

  switch (TREE_CODE (node))
    {
    case CALL_EXPR:
      /* Refuse to inline an alloca call unless the user explicitly
         forced it, as this may drastically change the program's memory
         overhead when the function using alloca is called in a loop.
         In the GCC benchmark in SPEC2000, inlining into schedule_block
         caused it to require 2GB of RAM instead of 256MB.  */
      if (alloca_call_p (node)
          && !lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn)))
        {
          inline_forbidden_reason
            = G_("function %q+F can never be inlined because it uses "
                 "alloca (override using the always_inline attribute)");
          return node;
        }
      t = get_callee_fndecl (node);
      if (! t)
        break;

      /* We cannot inline functions that call setjmp.  */
      if (setjmp_call_p (t))
        {
          inline_forbidden_reason
            = G_("function %q+F can never be inlined because it uses setjmp");
          return node;
        }

      if (DECL_BUILT_IN_CLASS (t) == BUILT_IN_NORMAL)
        switch (DECL_FUNCTION_CODE (t))
          {
            /* We cannot inline functions that take a variable number of
               arguments.  */
          case BUILT_IN_VA_START:
          case BUILT_IN_STDARG_START:
          case BUILT_IN_NEXT_ARG:
          case BUILT_IN_VA_END:
            inline_forbidden_reason
              = G_("function %q+F can never be inlined because it "
                   "uses variable argument lists");
            return node;

          case BUILT_IN_LONGJMP:
            /* We can't inline functions that call __builtin_longjmp at
               all.  The non-local goto machinery really requires the
               destination be in a different function.  If we allow the
               function calling __builtin_longjmp to be inlined into the
               function calling __builtin_setjmp, Things will Go Awry.  */
            inline_forbidden_reason
              = G_("function %q+F can never be inlined because "
                   "it uses setjmp-longjmp exception handling");
            return node;

          case BUILT_IN_NONLOCAL_GOTO:
            /* Similarly.  */
            inline_forbidden_reason
              = G_("function %q+F can never be inlined because "
                   "it uses non-local goto");
            return node;

          case BUILT_IN_RETURN:
          case BUILT_IN_APPLY_ARGS:
            /* If a __builtin_apply_args caller would be inlined,
               it would be saving arguments of the function it has
               been inlined into.  Similarly __builtin_return would
               return from the function the inline has been inlined into.  */
            inline_forbidden_reason
              = G_("function %q+F can never be inlined because "
                   "it uses __builtin_return or __builtin_apply_args");
            return node;

          default:
            break;
          }
      break;

    case GOTO_EXPR:
      t = TREE_OPERAND (node, 0);

      /* We will not inline a function which uses computed goto.  The
         addresses of its local labels, which may be tucked into
         global storage, are of course not constant across
         instantiations, which causes unexpected behavior.  */
      if (TREE_CODE (t) != LABEL_DECL)
        {
          inline_forbidden_reason
            = G_("function %q+F can never be inlined "
                 "because it contains a computed goto");
          return node;
        }
      break;

    case LABEL_EXPR:
      t = TREE_OPERAND (node, 0);
      if (DECL_NONLOCAL (t))
        {
          /* We cannot inline a function that receives a non-local goto
             because we cannot remap the destination label used in the
             function that is performing the non-local goto.  */
          inline_forbidden_reason
            = G_("function %q+F can never be inlined "
                 "because it receives a non-local goto");
          return node;
        }
      break;

    case RECORD_TYPE:
    case UNION_TYPE:
      /* We cannot inline a function of the form

           void F (int i) { struct S { int ar[i]; } s; }

         Attempting to do so produces a catch-22.
         If walk_tree examines the TYPE_FIELDS chain of RECORD_TYPE/
         UNION_TYPE nodes, then it goes into infinite recursion on a
         structure containing a pointer to its own type.  If it doesn't,
         then the type node for S doesn't get adjusted properly when
         F is inlined.

         ??? This is likely no longer true, but it's too late in the 4.0
         cycle to try to find out.  This should be checked for 4.1.  */
      for (t = TYPE_FIELDS (node); t; t = TREE_CHAIN (t))
        if (variably_modified_type_p (TREE_TYPE (t), NULL))
          {
            inline_forbidden_reason
              = G_("function %q+F can never be inlined "
                   "because it uses variable sized variables");
            return node;
          }

    default:
      break;
    }

  return NULL_TREE;
}
/* Return subexpression representing possible alloca call, if any.  */
static tree
inline_forbidden_p (tree fndecl)
{
  location_t saved_loc = input_location;
  block_stmt_iterator bsi;
  basic_block bb;
  tree ret = NULL_TREE;

  FOR_EACH_BB_FN (bb, DECL_STRUCT_FUNCTION (fndecl))
    for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
      {
        ret = walk_tree_without_duplicates (bsi_stmt_ptr (bsi),
                                            inline_forbidden_p_1, fndecl);
        if (ret)
          goto egress;
      }

egress:
  input_location = saved_loc;
  return ret;
}
/* Returns nonzero if FN is a function that does not have any
   fundamental inline blocking properties.  */

static bool
inlinable_function_p (tree fn)
{
  bool inlinable = true;

  /* If we've already decided this function shouldn't be inlined,
     there's no need to check again.  */
  if (DECL_UNINLINABLE (fn))
    return false;

  /* See if there is any language-specific reason it cannot be
     inlined.  (It is important that this hook be called early because
     in C++ it may result in template instantiation.)
     If the function is not inlinable for language-specific reasons,
     it is left up to the langhook to explain why.  */
  inlinable = !lang_hooks.tree_inlining.cannot_inline_tree_fn (&fn);

  /* If we don't have the function body available, we can't inline it.
     However, this should not be recorded since we also get here for
     forward declared inline functions.  Therefore, return at once.  */
  if (!DECL_SAVED_TREE (fn))
    return false;

  /* If we're not inlining at all, then we cannot inline this function.  */
  else if (!flag_inline_trees)
    inlinable = false;

  /* Only try to inline functions if DECL_INLINE is set.  This should be
     true for all functions declared `inline', and for all other functions
     as well with -finline-functions.

     Don't think of disregarding DECL_INLINE when flag_inline_trees == 2;
     it's the front-end that must set DECL_INLINE in this case, because
     dwarf2out loses if a function that does not have DECL_INLINE set is
     inlined anyway.  That is why we have both DECL_INLINE and
     DECL_DECLARED_INLINE_P.  */
  /* FIXME: When flag_inline_trees dies, the check for flag_unit_at_a_time
     here should be redundant.  */
  else if (!DECL_INLINE (fn) && !flag_unit_at_a_time)
    inlinable = false;

  else if (inline_forbidden_p (fn))
    {
      /* See if we should warn about uninlinable functions.  Previously,
         some of these warnings would be issued while trying to expand
         the function inline, but that would cause multiple warnings
         about functions that would for example call alloca.  But since
         this is a property of the function, just one warning is enough.
         As a bonus we can now give more details about the reason why a
         function is not inlinable.
         We only warn for functions declared `inline' by the user.  */
      bool do_warning = (warn_inline
                         && DECL_INLINE (fn)
                         && DECL_DECLARED_INLINE_P (fn)
                         && !DECL_IN_SYSTEM_HEADER (fn));

      if (lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn)))
        sorry (inline_forbidden_reason, fn);
      else if (do_warning)
        warning (OPT_Winline, inline_forbidden_reason, fn);

      inlinable = false;
    }

  /* Squirrel away the result so that we don't have to check again.  */
  DECL_UNINLINABLE (fn) = !inlinable;

  return inlinable;
}
/* Estimate the cost of a memory move.  Use machine dependent
   word size and take possible memcpy call into account.  */

static int
estimate_move_cost (tree type)
{
  HOST_WIDE_INT size;

  size = int_size_in_bytes (type);

  if (size < 0 || size > MOVE_MAX_PIECES * MOVE_RATIO)
    /* Cost of a memcpy call, 3 arguments and the call.  */
    return 4;
  else
    return ((size + MOVE_MAX_PIECES - 1) / MOVE_MAX_PIECES);
}
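
/* A worked example (illustrative, target-dependent numbers): on a
   target with MOVE_MAX_PIECES == 8 and MOVE_RATIO == 3, a 64-byte
   struct exceeds 8 * 3 == 24 bytes, so its move is costed as a memcpy
   call (4); a 16-byte struct costs (16 + 8 - 1) / 8 == 2 word moves.  */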
/* Used by estimate_num_insns.  Estimate number of instructions seen
   by given statement.  */

static tree
estimate_num_insns_1 (tree *tp, int *walk_subtrees, void *data)
{
  int *count = (int *) data;
  tree x = *tp;

  if (IS_TYPE_OR_DECL_P (x))
    {
      *walk_subtrees = 0;
      return NULL;
    }
  /* Assume that constants and references count as nothing.  Their cost
     should be dominated by the operations among them, which we count
     later, and they are a common target of CSE and similar
     optimizations.  */
  else if (CONSTANT_CLASS_P (x) || REFERENCE_CLASS_P (x))
    return NULL;

  switch (TREE_CODE (x))
    {
    /* Containers have no cost.  */
    case TREE_LIST:
    case TREE_VEC:
    case BLOCK:
    case COMPONENT_REF:
    case BIT_FIELD_REF:
    case INDIRECT_REF:
    case ALIGN_INDIRECT_REF:
    case MISALIGNED_INDIRECT_REF:
    case ARRAY_REF:
    case ARRAY_RANGE_REF:
    case OBJ_TYPE_REF:
    case EXC_PTR_EXPR: /* ??? */
    case FILTER_EXPR: /* ??? */
    case COMPOUND_EXPR:
    case BIND_EXPR:
    case WITH_CLEANUP_EXPR:
    case NOP_EXPR:
    case VIEW_CONVERT_EXPR:
    case SAVE_EXPR:
    case ADDR_EXPR:
    case COMPLEX_EXPR:
    case RANGE_EXPR:
    case CASE_LABEL_EXPR:
    case SSA_NAME:
    case CATCH_EXPR:
    case EH_FILTER_EXPR:
    case STATEMENT_LIST:
    case ERROR_MARK:
    case NON_LVALUE_EXPR:
    case FDESC_EXPR:
    case VA_ARG_EXPR:
    case TRY_CATCH_EXPR:
    case TRY_FINALLY_EXPR:
    case LABEL_EXPR:
    case GOTO_EXPR:
    case RETURN_EXPR:
    case EXIT_EXPR:
    case LOOP_EXPR:
    case PHI_NODE:
    case WITH_SIZE_EXPR:
    case OMP_CLAUSE:
    case OMP_RETURN:
    case OMP_CONTINUE:
      break;

    /* We don't account constants for now.  Assume that the cost is amortized
       by operations that do use them.  We may re-consider this decision once
       we are able to optimize the tree before estimating its size and break
       out static initializers.  */
    case IDENTIFIER_NODE:
    case INTEGER_CST:
    case REAL_CST:
    case COMPLEX_CST:
    case VECTOR_CST:
    case STRING_CST:
      *walk_subtrees = 0;
      return NULL;

    /* Try to estimate the cost of assignments.  We have three cases to
       deal with:
       1) Simple assignments to registers;
       2) Stores to things that must live in memory.  This includes
          "normal" stores to scalars, but also assignments of large
          structures, or constructors of big arrays;
       3) TARGET_EXPRs.

       Let us look at the first two cases, assuming we have "a = b + C":
       <modify_expr <var_decl "a"> <plus_expr <var_decl "b"> <constant C>>
       If "a" is a GIMPLE register, the assignment to it is free on almost
       any target, because "a" usually ends up in a real register.  Hence
       the only cost of this expression comes from the PLUS_EXPR, and we
       can ignore the MODIFY_EXPR.
       If "a" is not a GIMPLE register, the assignment to "a" will most
       likely be a real store, so the cost of the MODIFY_EXPR is the cost
       of moving something into "a", which we compute using the function
       estimate_move_cost.

       The third case deals with TARGET_EXPRs, for which the semantics are
       that a temporary is assigned, unless the TARGET_EXPR itself is being
       assigned to something else.  In the latter case we do not need the
       temporary.  E.g. in <modify_expr <var_decl "a"> <target_expr>>, the
       MODIFY_EXPR is free.  */
    case INIT_EXPR:
    case MODIFY_EXPR:
      /* Is the right-hand side a TARGET_EXPR?  */
      if (TREE_CODE (TREE_OPERAND (x, 1)) == TARGET_EXPR)
        break;
      /* ... fall through ...  */

    case TARGET_EXPR:
      x = TREE_OPERAND (x, 0);
      /* Is this an assignment to a register?  */
      if (is_gimple_reg (x))
        break;
      /* Otherwise it's a store, so fall through to compute the move cost.  */

    case CONSTRUCTOR:
      *count += estimate_move_cost (TREE_TYPE (x));
      break;

    /* Assign cost of 1 to usual operations.
       ??? We may consider mapping RTL costs to this.  */
    case COND_EXPR:
    case VEC_COND_EXPR:

    case PLUS_EXPR:
    case MINUS_EXPR:
    case MULT_EXPR:

    case FIX_TRUNC_EXPR:

    case NEGATE_EXPR:
    case FLOAT_EXPR:
    case MIN_EXPR:
    case MAX_EXPR:
    case ABS_EXPR:

    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
    case VEC_LSHIFT_EXPR:
    case VEC_RSHIFT_EXPR:

    case BIT_IOR_EXPR:
    case BIT_XOR_EXPR:
    case BIT_AND_EXPR:
    case BIT_NOT_EXPR:

    case TRUTH_ANDIF_EXPR:
    case TRUTH_ORIF_EXPR:
    case TRUTH_AND_EXPR:
    case TRUTH_OR_EXPR:
    case TRUTH_XOR_EXPR:
    case TRUTH_NOT_EXPR:

    case LT_EXPR:
    case LE_EXPR:
    case GT_EXPR:
    case GE_EXPR:
    case EQ_EXPR:
    case NE_EXPR:
    case ORDERED_EXPR:
    case UNORDERED_EXPR:

    case UNLT_EXPR:
    case UNLE_EXPR:
    case UNGT_EXPR:
    case UNGE_EXPR:
    case UNEQ_EXPR:
    case LTGT_EXPR:

    case CONVERT_EXPR:

    case CONJ_EXPR:

    case PREDECREMENT_EXPR:
    case PREINCREMENT_EXPR:
    case POSTDECREMENT_EXPR:
    case POSTINCREMENT_EXPR:

    case SWITCH_EXPR:

    case ASM_EXPR:

    case REALIGN_LOAD_EXPR:

    case REDUC_MAX_EXPR:
    case REDUC_MIN_EXPR:
    case REDUC_PLUS_EXPR:
    case WIDEN_SUM_EXPR:
    case DOT_PROD_EXPR:
    case VEC_WIDEN_MULT_HI_EXPR:
    case VEC_WIDEN_MULT_LO_EXPR:
    case VEC_UNPACK_HI_EXPR:
    case VEC_UNPACK_LO_EXPR:
    case VEC_PACK_MOD_EXPR:
    case VEC_PACK_SAT_EXPR:

    case WIDEN_MULT_EXPR:

    case VEC_EXTRACT_EVEN_EXPR:
    case VEC_EXTRACT_ODD_EXPR:
    case VEC_INTERLEAVE_HIGH_EXPR:
    case VEC_INTERLEAVE_LOW_EXPR:

    case RESX_EXPR:
      *count += 1;
      break;

    /* A few special cases of expensive operations.  This is useful
       to avoid inlining on functions having too many of these.  */
    case TRUNC_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case EXACT_DIV_EXPR:
    case TRUNC_MOD_EXPR:
    case CEIL_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case ROUND_MOD_EXPR:
    case RDIV_EXPR:
      *count += 10;
      break;
    case CALL_EXPR:
      {
        tree decl = get_callee_fndecl (x);
        tree arg;

        if (decl && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
          switch (DECL_FUNCTION_CODE (decl))
            {
            case BUILT_IN_CONSTANT_P:
              *walk_subtrees = 0;
              return NULL_TREE;
            case BUILT_IN_EXPECT:
              return NULL_TREE;
            default:
              break;
            }

        /* Our cost must be kept in sync with cgraph_estimate_size_after_inlining
           which uses the function declaration to figure out the arguments.  */
        if (!decl)
          {
            for (arg = TREE_OPERAND (x, 1); arg; arg = TREE_CHAIN (arg))
              *count += estimate_move_cost (TREE_TYPE (TREE_VALUE (arg)));
          }
        else
          {
            for (arg = DECL_ARGUMENTS (decl); arg; arg = TREE_CHAIN (arg))
              *count += estimate_move_cost (TREE_TYPE (arg));
          }

        *count += PARAM_VALUE (PARAM_INLINE_CALL_COST);
        break;
      }

    case OMP_PARALLEL:
    case OMP_FOR:
    case OMP_SECTIONS:
    case OMP_SINGLE:
    case OMP_SECTION:
    case OMP_MASTER:
    case OMP_ORDERED:
    case OMP_CRITICAL:
    case OMP_ATOMIC:
      /* OpenMP directives are generally very expensive.  */
      *count += 40;
      break;

    default:
      gcc_unreachable ();
    }
  return NULL;
}
1850 /* Estimate number of instructions that will be created by expanding EXPR. */
1853 estimate_num_insns (tree expr)
1855 int num = 0;
1856 struct pointer_set_t *visited_nodes;
1857 basic_block bb;
1858 block_stmt_iterator bsi;
1859 struct function *my_function;
1861 /* If we're given an entire function, walk the CFG. */
1862 if (TREE_CODE (expr) == FUNCTION_DECL)
1864 my_function = DECL_STRUCT_FUNCTION (expr);
1865 gcc_assert (my_function && my_function->cfg);
1866 visited_nodes = pointer_set_create ();
1867 FOR_EACH_BB_FN (bb, my_function)
1869 for (bsi = bsi_start (bb);
1870 !bsi_end_p (bsi);
1871 bsi_next (&bsi))
1873 walk_tree (bsi_stmt_ptr (bsi), estimate_num_insns_1,
1874 &num, visited_nodes);
1877 pointer_set_destroy (visited_nodes);
1879 else
1880 walk_tree_without_duplicates (&expr, estimate_num_insns_1, &num);
1882 return num;
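/* A usage sketch; the size cap shown is an assumed, illustrative
   parameter, not one this file defines:

       if (estimate_num_insns (fndecl)
           > PARAM_VALUE (PARAM_MAX_INLINE_INSNS_SINGLE))
         ...reject the inline candidate...

   A FUNCTION_DECL argument walks every statement of the CFG once;
   any other tree is walked directly, skipping duplicate subtrees. */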
1885 typedef struct function *function_p;
1887 DEF_VEC_P(function_p);
1888 DEF_VEC_ALLOC_P(function_p,heap);
1890 /* Initialized with NOGC, making this poisonous to the garbage collector. */
1891 static VEC(function_p,heap) *cfun_stack;
1893 void
1894 push_cfun (struct function *new_cfun)
1896 VEC_safe_push (function_p, heap, cfun_stack, cfun);
1897 cfun = new_cfun;
1900 void
1901 pop_cfun (void)
1903 cfun = VEC_pop (function_p, cfun_stack);
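/* A minimal usage sketch (assuming OTHER_FN has had its struct
   function allocated); the two calls are meant to nest strictly:

       push_cfun (DECL_STRUCT_FUNCTION (other_fn));
       ... code that reads or writes cfun ...
       pop_cfun ();

   so that cfun is restored to its previous value afterwards. */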
1906 /* Install a new lexical TREE_BLOCK underneath 'current_block'. */
1907 static void
1908 add_lexical_block (tree current_block, tree new_block)
1910 tree *blk_p;
1912 /* Walk to the last sub-block. */
1913 for (blk_p = &BLOCK_SUBBLOCKS (current_block);
1914 *blk_p;
1915 blk_p = &TREE_CHAIN (*blk_p))
1917 *blk_p = new_block;
1918 BLOCK_SUPERCONTEXT (new_block) = current_block;
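/* For example, if CURRENT_BLOCK already has sub-blocks B1 -> B2, the
   loop above walks to the end of that chain so it becomes
   B1 -> B2 -> NEW_BLOCK, and NEW_BLOCK's BLOCK_SUPERCONTEXT points
   back at CURRENT_BLOCK. */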
1921 /* If *TP is a CALL_EXPR, replace it with its inline expansion. */
1923 static bool
1924 expand_call_inline (basic_block bb, tree stmt, tree *tp, void *data)
1926 copy_body_data *id;
1927 tree t;
1928 tree use_retvar;
1929 tree fn;
1930 splay_tree st;
1931 tree args;
1932 tree return_slot_addr;
1933 tree modify_dest;
1934 location_t saved_location;
1935 struct cgraph_edge *cg_edge;
1936 const char *reason;
1937 basic_block return_block;
1938 edge e;
1939 block_stmt_iterator bsi, stmt_bsi;
1940 bool successfully_inlined = FALSE;
1941 bool purge_dead_abnormal_edges;
1942 tree t_step;
1943 tree var;
1945 /* See what we've got. */
1946 id = (copy_body_data *) data;
1947 t = *tp;
1949 /* Set input_location here so we get the right instantiation context
1950 if we call instantiate_decl from inlinable_function_p. */
1951 saved_location = input_location;
1952 if (EXPR_HAS_LOCATION (t))
1953 input_location = EXPR_LOCATION (t);
1955 /* From here on, we're only interested in CALL_EXPRs. */
1956 if (TREE_CODE (t) != CALL_EXPR)
1957 goto egress;
1959 /* First, see if we can figure out what function is being called.
1960 If we cannot, then there is no hope of inlining the function. */
1961 fn = get_callee_fndecl (t);
1962 if (!fn)
1963 goto egress;
1965 /* Turn forward declarations into real ones. */
1966 fn = cgraph_node (fn)->decl;
1968 /* If fn is a declaration of a function in a nested scope that was
1969 globally declared inline, we don't set its DECL_INITIAL.
1970 However, we can't blindly follow DECL_ABSTRACT_ORIGIN because the
1971 C++ front end uses it for cdtors to refer to their internal
1972 declarations, which are not real functions. Fortunately those
1973 don't have trees to be saved, so we can tell by checking their
1974 DECL_SAVED_TREE. */
1975 if (! DECL_INITIAL (fn)
1976 && DECL_ABSTRACT_ORIGIN (fn)
1977 && DECL_SAVED_TREE (DECL_ABSTRACT_ORIGIN (fn)))
1978 fn = DECL_ABSTRACT_ORIGIN (fn);
1980 /* Objective-C and Fortran still call tree_rest_of_compilation directly.
1981 Remove this check once that is fixed. */
1982 if (!id->dst_node->analyzed)
1983 goto egress;
1985 cg_edge = cgraph_edge (id->dst_node, stmt);
1987 /* Constant propagation of arguments during previous inlining
1988 may create a new direct call. Produce an edge for it. */
1989 if (!cg_edge)
1991 struct cgraph_node *dest = cgraph_node (fn);
1993 /* We have a missing edge in the callgraph. This can happen
1994 when previous inlining turned an indirect call into a direct call by
1995 constant propagating arguments. In all other cases we hit a bug
1996 (incorrect node sharing is the most common reason for missing edges). */
1997 gcc_assert (dest->needed || !flag_unit_at_a_time);
1998 cgraph_create_edge (id->dst_node, dest, stmt,
1999 bb->count, bb->loop_depth)->inline_failed
2000 = N_("originally indirect function call not considered for inlining");
2001 goto egress;
2004 /* Don't try to inline functions that are not well-suited to
2005 inlining. */
2006 if (!cgraph_inline_p (cg_edge, &reason))
2008 if (lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn))
2009 /* Avoid warnings during early inline pass. */
2010 && (!flag_unit_at_a_time || cgraph_global_info_ready))
2012 sorry ("inlining failed in call to %q+F: %s", fn, reason);
2013 sorry ("called from here");
2015 else if (warn_inline && DECL_DECLARED_INLINE_P (fn)
2016 && !DECL_IN_SYSTEM_HEADER (fn)
2017 && strlen (reason)
2018 && !lookup_attribute ("noinline", DECL_ATTRIBUTES (fn))
2019 /* Avoid warnings during early inline pass. */
2020 && (!flag_unit_at_a_time || cgraph_global_info_ready))
2022 warning (OPT_Winline, "inlining failed in call to %q+F: %s",
2023 fn, reason);
2024 warning (OPT_Winline, "called from here");
2026 goto egress;
2028 fn = cg_edge->callee->decl;
2030 #ifdef ENABLE_CHECKING
2031 if (cg_edge->callee->decl != id->dst_node->decl)
2032 verify_cgraph_node (cg_edge->callee);
2033 #endif
2035 /* We will be inlining this callee. */
2036 id->eh_region = lookup_stmt_eh_region (stmt);
2038 /* Split the block holding the CALL_EXPR. */
2039 e = split_block (bb, stmt);
2040 bb = e->src;
2041 return_block = e->dest;
2042 remove_edge (e);
2044 /* split_block splits after the statement; work around this by
2045 moving the call into the second block manually. Not pretty,
2046 but seems easier than doing the CFG manipulation by hand
2047 when the CALL_EXPR is in the last statement of BB. */
2048 stmt_bsi = bsi_last (bb);
2049 bsi_remove (&stmt_bsi, false);
2051 /* If the CALL_EXPR was in the last statement of BB, it may have
2052 been the source of abnormal edges. In this case, schedule
2053 the removal of dead abnormal edges. */
2054 bsi = bsi_start (return_block);
2055 if (bsi_end_p (bsi))
2057 bsi_insert_after (&bsi, stmt, BSI_NEW_STMT);
2058 purge_dead_abnormal_edges = true;
2060 else
2062 bsi_insert_before (&bsi, stmt, BSI_NEW_STMT);
2063 purge_dead_abnormal_edges = false;
2066 stmt_bsi = bsi_start (return_block);
2068 /* Build a block containing code to initialize the arguments, the
2069 actual inline expansion of the body, and a label for the return
2070 statements within the function to jump to. The type of the
2071 statement expression is the return type of the function call. */
2072 id->block = make_node (BLOCK);
2073 BLOCK_ABSTRACT_ORIGIN (id->block) = fn;
2074 BLOCK_SOURCE_LOCATION (id->block) = input_location;
2075 add_lexical_block (TREE_BLOCK (stmt), id->block);
2077 /* Local declarations will be replaced by their equivalents in this
2078 map. */
2079 st = id->decl_map;
2080 id->decl_map = splay_tree_new (splay_tree_compare_pointers,
2081 NULL, NULL);
2083 /* Initialize the parameters. */
2084 args = TREE_OPERAND (t, 1);
2086 /* Record the function we are about to inline. */
2087 id->src_fn = fn;
2088 id->src_node = cg_edge->callee;
2090 initialize_inlined_parameters (id, args, TREE_OPERAND (t, 2), fn, bb);
2092 if (DECL_INITIAL (fn))
2093 add_lexical_block (id->block, remap_blocks (DECL_INITIAL (fn), id));
2095 /* Return statements in the function body will be replaced by jumps
2096 to the RET_LABEL. */
2098 gcc_assert (DECL_INITIAL (fn));
2099 gcc_assert (TREE_CODE (DECL_INITIAL (fn)) == BLOCK);
2101 /* Find the lhs to which the result of this call is assigned. */
2102 return_slot_addr = NULL;
2103 if (TREE_CODE (stmt) == MODIFY_EXPR)
2105 modify_dest = TREE_OPERAND (stmt, 0);
2107 /* The function we are inlining might not return a value, in which
2108 case the appropriate diagnostic is that the function does not
2109 return a value. The optimizers would otherwise see that the
2110 variable to which the value is assigned was never initialized
2111 and warn about it; we do not want that second warning about the
2112 uninitialized variable. */
2113 if (DECL_P (modify_dest))
2114 TREE_NO_WARNING (modify_dest) = 1;
2115 if (CALL_EXPR_RETURN_SLOT_OPT (t))
2117 return_slot_addr = build_fold_addr_expr (modify_dest);
2118 STRIP_USELESS_TYPE_CONVERSION (return_slot_addr);
2119 modify_dest = NULL;
2122 else
2123 modify_dest = NULL;
2125 /* Declare the return variable for the function. */
2126 declare_return_variable (id, return_slot_addr,
2127 modify_dest, &use_retvar);
2129 /* This is it. Duplicate the callee body. Assume callee is
2130 pre-gimplified. Note that we must not alter the caller
2131 function in any way before this point, as this CALL_EXPR may be
2132 a self-referential call; if we're calling ourselves, we need to
2133 duplicate our body before altering anything. */
2134 copy_body (id, bb->count, bb->frequency, bb, return_block);
2136 /* Add the local variables of this inlined callee to the caller. */
2137 t_step = id->src_cfun->unexpanded_var_list;
2138 for (; t_step; t_step = TREE_CHAIN (t_step))
2140 var = TREE_VALUE (t_step);
2141 if (TREE_STATIC (var) && !TREE_ASM_WRITTEN (var))
2142 cfun->unexpanded_var_list = tree_cons (NULL_TREE, var,
2143 cfun->unexpanded_var_list);
2144 else
2145 cfun->unexpanded_var_list = tree_cons (NULL_TREE, remap_decl (var, id),
2146 cfun->unexpanded_var_list);
2149 /* Clean up. */
2150 splay_tree_delete (id->decl_map);
2151 id->decl_map = st;
2153 /* If the inlined function returns a result that we care about,
2154 clobber the CALL_EXPR with a reference to the return variable. */
2155 if (use_retvar && (TREE_CODE (bsi_stmt (stmt_bsi)) != CALL_EXPR))
2157 *tp = use_retvar;
2158 maybe_clean_or_replace_eh_stmt (stmt, stmt);
2160 else
2161 /* We're modifying a BSI owned by gimple_expand_calls_inline ();
2162 bsi_remove () will leave the iterator in a sane state. */
2163 bsi_remove (&stmt_bsi, true);
2165 if (purge_dead_abnormal_edges)
2166 tree_purge_dead_abnormal_call_edges (return_block);
2168 /* If the value of the new expression is ignored, that's OK. We
2169 don't warn about this for CALL_EXPRs, so we shouldn't warn about
2170 the equivalent inlined version either. */
2171 TREE_USED (*tp) = 1;
2173 /* Output the inlining info for this abstract function, since it has been
2174 inlined. If we don't do this now, we can lose the information about the
2175 variables in the function when the blocks get blown away as soon as we
2176 remove the cgraph node. */
2177 (*debug_hooks->outlining_inline_function) (cg_edge->callee->decl);
2179 /* Update callgraph if needed. */
2180 cgraph_remove_node (cg_edge->callee);
2182 id->block = NULL_TREE;
2183 successfully_inlined = TRUE;
2185 egress:
2186 input_location = saved_location;
2187 return successfully_inlined;
2190 /* Expand call statements reachable from basic block BB.
2191 We can only have CALL_EXPRs as the "toplevel" tree code or nested
2192 in a MODIFY_EXPR. See tree-gimple.c:get_call_expr_in(). We
2193 unfortunately cannot use that function here because we need a pointer
2194 to the CALL_EXPR, not the tree itself. */
2196 static bool
2197 gimple_expand_calls_inline (basic_block bb, copy_body_data *id)
2199 block_stmt_iterator bsi;
2201 /* Register specific tree functions. */
2202 tree_register_cfg_hooks ();
2203 for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
2205 tree *expr_p = bsi_stmt_ptr (bsi);
2206 tree stmt = *expr_p;
2208 if (TREE_CODE (*expr_p) == MODIFY_EXPR)
2209 expr_p = &TREE_OPERAND (*expr_p, 1);
2210 if (TREE_CODE (*expr_p) == WITH_SIZE_EXPR)
2211 expr_p = &TREE_OPERAND (*expr_p, 0);
2212 if (TREE_CODE (*expr_p) == CALL_EXPR)
2213 if (expand_call_inline (bb, stmt, expr_p, id))
2214 return true;
2216 return false;
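/* The unwrapping above handles the only statement shapes GIMPLE
   allows for calls, e.g.:

       foo (a);          CALL_EXPR at the top level
       x = foo (a);      CALL_EXPR on the RHS of a MODIFY_EXPR

   possibly with a WITH_SIZE_EXPR wrapped around the call itself. */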
2219 /* Expand calls to inline functions in the body of FN. */
2221 void
2222 optimize_inline_calls (tree fn)
2224 copy_body_data id;
2225 tree prev_fn;
2226 basic_block bb;
2227 /* There is no point in performing inlining if errors have already
2228 occurred -- and we might crash if we try to inline invalid
2229 code. */
2230 if (errorcount || sorrycount)
2231 return;
2233 /* Clear out ID. */
2234 memset (&id, 0, sizeof (id));
2236 id.src_node = id.dst_node = cgraph_node (fn);
2237 id.dst_fn = fn;
2238 /* Or, if we are in the middle of compiling some other function
that isn't finished yet, inline into that one instead. */
2239 prev_fn = NULL_TREE;
2240 if (current_function_decl)
2242 id.dst_fn = current_function_decl;
2243 prev_fn = current_function_decl;
2246 id.copy_decl = copy_decl_maybe_to_var;
2247 id.transform_call_graph_edges = CB_CGE_DUPLICATE;
2248 id.transform_new_cfg = false;
2249 id.transform_return_to_modify = true;
2250 id.transform_lang_insert_block = false;
2252 push_gimplify_context ();
2254 /* Reach the trees by walking over the CFG, and note the
2255 enclosing basic-blocks in the call edges. */
2256 /* We walk the blocks going forward, because inlined function bodies
2257 will split id->current_basic_block, and the new blocks will
2258 follow it; we'll trudge through them, processing their CALL_EXPRs
2259 along the way. */
2260 FOR_EACH_BB (bb)
2261 gimple_expand_calls_inline (bb, &id);
2263 pop_gimplify_context (NULL);
2264 /* Renumber the (code) basic_blocks consecutively. */
2265 compact_blocks ();
2266 /* Renumber the lexical scoping (non-code) blocks consecutively. */
2267 number_blocks (fn);
2269 #ifdef ENABLE_CHECKING
2271 struct cgraph_edge *e;
2273 verify_cgraph_node (id.dst_node);
2275 /* Double check that we inlined everything we are supposed to inline. */
2276 for (e = id.dst_node->callees; e; e = e->next_callee)
2277 gcc_assert (e->inline_failed);
2279 #endif
2280 /* We need to rescale frequencies again to peak at REG_BR_PROB_BASE,
2281 as the inlining of loops might increase the maximum. */
2282 if (ENTRY_BLOCK_PTR->count)
2283 counts_to_freqs ();
2284 fold_cond_expr_cond ();
2287 /* FN is a function that has a complete body, and CLONE is a function whose
2288 body is to be set to a copy of FN, mapping argument declarations according
2289 to the ARG_MAP splay_tree. */
2291 void
2292 clone_body (tree clone, tree fn, void *arg_map)
2294 copy_body_data id;
2296 /* Clone the body, as if we were making an inline call. But, remap
2297 the parameters in the callee to the parameters of the caller. */
2298 memset (&id, 0, sizeof (id));
2299 id.src_fn = fn;
2300 id.dst_fn = clone;
2301 id.src_cfun = DECL_STRUCT_FUNCTION (fn);
2302 id.decl_map = (splay_tree)arg_map;
2304 id.copy_decl = copy_decl_no_change;
2305 id.transform_call_graph_edges = CB_CGE_DUPLICATE;
2306 id.transform_new_cfg = true;
2307 id.transform_return_to_modify = false;
2308 id.transform_lang_insert_block = true;
2310 /* We're not inside any EH region. */
2311 id.eh_region = -1;
2313 /* Actually copy the body. */
2314 append_to_statement_list_force (copy_generic_body (&id), &DECL_SAVED_TREE (clone));
2317 /* Passed to walk_tree. Copies the node pointed to, if appropriate. */
2319 tree
2320 copy_tree_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
2322 enum tree_code code = TREE_CODE (*tp);
2324 /* We make copies of most nodes. */
2325 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code))
2326 || code == TREE_LIST
2327 || code == TREE_VEC
2328 || code == TYPE_DECL
2329 || code == OMP_CLAUSE)
2331 /* Because the chain gets clobbered when we make a copy, we save it
2332 here. */
2333 tree chain = TREE_CHAIN (*tp);
2334 tree new;
2336 /* Copy the node. */
2337 new = copy_node (*tp);
2339 /* Propagate mudflap marked-ness. */
2340 if (flag_mudflap && mf_marked_p (*tp))
2341 mf_mark (new);
2343 *tp = new;
2345 /* Now, restore the chain, if appropriate. That will cause
2346 walk_tree to walk into the chain as well. */
2347 if (code == PARM_DECL
2348 || code == TREE_LIST
2349 || code == OMP_CLAUSE)
2350 TREE_CHAIN (*tp) = chain;
2352 /* For now, we don't update BLOCKs when we make copies. So, we
2353 have to nullify all BIND_EXPRs. */
2354 if (TREE_CODE (*tp) == BIND_EXPR)
2355 BIND_EXPR_BLOCK (*tp) = NULL_TREE;
2357 else if (code == CONSTRUCTOR)
2359 /* CONSTRUCTOR nodes need special handling because
2360 we need to duplicate the vector of elements. */
2361 tree new;
2363 new = copy_node (*tp);
2365 /* Propagate mudflap marked-ness. */
2366 if (flag_mudflap && mf_marked_p (*tp))
2367 mf_mark (new);
2369 CONSTRUCTOR_ELTS (new) = VEC_copy (constructor_elt, gc,
2370 CONSTRUCTOR_ELTS (*tp));
2371 *tp = new;
2373 else if (TREE_CODE_CLASS (code) == tcc_type)
2374 *walk_subtrees = 0;
2375 else if (TREE_CODE_CLASS (code) == tcc_declaration)
2376 *walk_subtrees = 0;
2377 else if (TREE_CODE_CLASS (code) == tcc_constant)
2378 *walk_subtrees = 0;
2379 else
2380 gcc_assert (code != STATEMENT_LIST);
2381 return NULL_TREE;
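/* A sketch of the intended use, via the generic tree walker:

       tree copy = orig;
       walk_tree (&copy, copy_tree_r, NULL, NULL);

   afterwards COPY's expression nodes are fresh, while declarations,
   constants and types are still shared with ORIG. */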
2384 /* The SAVE_EXPR pointed to by TP is being copied. If ST contains
2385 information indicating to what new SAVE_EXPR this one should be
2386 mapped, use that one. Otherwise, create a new node and enter it
2387 in ST, so later uses are mapped to the same copy. */
2389 static void
2390 remap_save_expr (tree *tp, void *st_, int *walk_subtrees)
2392 splay_tree st = (splay_tree) st_;
2393 splay_tree_node n;
2394 tree t;
2396 /* See if we already encountered this SAVE_EXPR. */
2397 n = splay_tree_lookup (st, (splay_tree_key) *tp);
2399 /* If we didn't already remap this SAVE_EXPR, do so now. */
2400 if (!n)
2402 t = copy_node (*tp);
2404 /* Remember this SAVE_EXPR. */
2405 splay_tree_insert (st, (splay_tree_key) *tp, (splay_tree_value) t);
2406 /* Make sure we don't remap an already-remapped SAVE_EXPR. */
2407 splay_tree_insert (st, (splay_tree_key) t, (splay_tree_value) t);
2409 else
2411 /* We've already walked into this SAVE_EXPR; don't do it again. */
2412 *walk_subtrees = 0;
2413 t = (tree) n->value;
2416 /* Replace this SAVE_EXPR with the copy. */
2417 *tp = t;
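/* SAVE_EXPRs must stay shared within the copy: if the original body
   uses one SAVE_EXPR <a + b> in two places, both uses in the copy
   must name the single new node made above. That is also why the
   copy is entered into ST under its own key as well: hitting the
   copy again must not produce a third node. */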
2420 /* Called via walk_tree. If *TP points to a LABEL_EXPR for a local label,
2421 copies the label's declaration and enters it in the splay_tree in DATA
2422 (which is really a `copy_body_data *'). */
2424 static tree
2425 mark_local_for_remap_r (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED,
2426 void *data)
2428 copy_body_data *id = (copy_body_data *) data;
2430 /* Don't walk into types. */
2431 if (TYPE_P (*tp))
2432 *walk_subtrees = 0;
2434 else if (TREE_CODE (*tp) == LABEL_EXPR)
2436 tree decl = TREE_OPERAND (*tp, 0);
2438 /* Copy the decl and remember the copy. */
2439 insert_decl_map (id, decl, id->copy_decl (decl, id));
2442 return NULL_TREE;
2445 /* Perform any modifications to EXPR required when it is unsaved. Does
2446 not recurse into EXPR's subtrees. */
2448 static void
2449 unsave_expr_1 (tree expr)
2451 switch (TREE_CODE (expr))
2453 case TARGET_EXPR:
2454 /* Don't mess with a TARGET_EXPR that hasn't been expanded.
2455 It's OK for this to happen if it was part of a subtree that
2456 isn't immediately expanded, such as operand 2 of another
2457 TARGET_EXPR. */
2458 if (TREE_OPERAND (expr, 1))
2459 break;
2461 TREE_OPERAND (expr, 1) = TREE_OPERAND (expr, 3);
2462 TREE_OPERAND (expr, 3) = NULL_TREE;
2463 break;
2465 default:
2466 break;
2470 /* Called via walk_tree when an expression is unsaved. Using the
2471 decl_map splay_tree of the copy_body_data pointed to by DATA,
2472 remaps all local declarations to appropriate replacements. */
2474 static tree
2475 unsave_r (tree *tp, int *walk_subtrees, void *data)
2477 copy_body_data *id = (copy_body_data *) data;
2478 splay_tree st = id->decl_map;
2479 splay_tree_node n;
2481 /* Only a local declaration (variable or label). */
2482 if ((TREE_CODE (*tp) == VAR_DECL && !TREE_STATIC (*tp))
2483 || TREE_CODE (*tp) == LABEL_DECL)
2485 /* Lookup the declaration. */
2486 n = splay_tree_lookup (st, (splay_tree_key) *tp);
2488 /* If it's there, remap it. */
2489 if (n)
2490 *tp = (tree) n->value;
2493 else if (TREE_CODE (*tp) == STATEMENT_LIST)
2494 copy_statement_list (tp);
2495 else if (TREE_CODE (*tp) == BIND_EXPR)
2496 copy_bind_expr (tp, walk_subtrees, id);
2497 else if (TREE_CODE (*tp) == SAVE_EXPR)
2498 remap_save_expr (tp, st, walk_subtrees);
2499 else
2501 copy_tree_r (tp, walk_subtrees, NULL);
2503 /* Do whatever unsaving is required. */
2504 unsave_expr_1 (*tp);
2507 /* Keep iterating. */
2508 return NULL_TREE;
2511 /* Copies everything in EXPR and replaces variables, labels
2512 and SAVE_EXPRs local to EXPR. */
2514 tree
2515 unsave_expr_now (tree expr)
2517 copy_body_data id;
2519 /* There's nothing to do for NULL_TREE. */
2520 if (expr == 0)
2521 return expr;
2523 /* Set up ID. */
2524 memset (&id, 0, sizeof (id));
2525 id.src_fn = current_function_decl;
2526 id.dst_fn = current_function_decl;
2527 id.decl_map = splay_tree_new (splay_tree_compare_pointers, NULL, NULL);
2529 id.copy_decl = copy_decl_no_change;
2530 id.transform_call_graph_edges = CB_CGE_DUPLICATE;
2531 id.transform_new_cfg = false;
2532 id.transform_return_to_modify = false;
2533 id.transform_lang_insert_block = false;
2535 /* Walk the tree once to find local labels. */
2536 walk_tree_without_duplicates (&expr, mark_local_for_remap_r, &id);
2538 /* Walk the tree again, copying, remapping, and unsaving. */
2539 walk_tree (&expr, unsave_r, &id, NULL);
2541 /* Clean up. */
2542 splay_tree_delete (id.decl_map);
2544 return expr;
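/* A usage sketch, for a tree that is about to be expanded twice:

       tree copy = unsave_expr_now (orig);

   COPY gets fresh expression nodes, with the variables, labels and
   SAVE_EXPRs local to ORIG remapped to fresh copies, so the two
   trees can then be rewritten independently. */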
2547 /* Allow someone to determine if SEARCH is a child of TOP from gdb. */
2549 static tree
2550 debug_find_tree_1 (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED, void *data)
2552 if (*tp == data)
2553 return (tree) data;
2554 else
2555 return NULL;
2558 bool
2559 debug_find_tree (tree top, tree search)
2561 return walk_tree_without_duplicates (&top, debug_find_tree_1, search) != 0;
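/* For interactive use, e.g.:

       (gdb) call debug_find_tree (top, search)

   which evaluates to true iff SEARCH occurs somewhere in the tree
   rooted at TOP. */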
2565 /* Declare the variables created by the inliner. Add all the variables
2566 in VARS to BLOCK. */
2568 static void
2569 declare_inline_vars (tree block, tree vars)
2571 tree t;
2572 for (t = vars; t; t = TREE_CHAIN (t))
2574 DECL_SEEN_IN_BIND_EXPR_P (t) = 1;
2575 gcc_assert (!TREE_STATIC (t) && !TREE_ASM_WRITTEN (t));
2576 cfun->unexpanded_var_list =
2577 tree_cons (NULL_TREE, t,
2578 cfun->unexpanded_var_list);
2581 if (block)
2582 BLOCK_VARS (block) = chainon (BLOCK_VARS (block), vars);
2586 /* Finish up the copy COPY of DECL. DECL originally lived in the
2587 function ID->src_fn, but the copy will live in ID->dst_fn; fix up
2588 its debug flags, abstract origin, RTL and context accordingly. */
2590 static tree
2591 copy_decl_for_dup_finish (copy_body_data *id, tree decl, tree copy)
2593 /* Don't generate debug information for the copy if we wouldn't have
2594 generated it for the original either. */
2595 DECL_ARTIFICIAL (copy) = DECL_ARTIFICIAL (decl);
2596 DECL_IGNORED_P (copy) = DECL_IGNORED_P (decl);
2598 /* Set the DECL_ABSTRACT_ORIGIN so the debugging routines know what
2599 declaration inspired this copy. */
2600 DECL_ABSTRACT_ORIGIN (copy) = DECL_ORIGIN (decl);
2602 /* The new variable/label has no RTL yet. */
2603 if (CODE_CONTAINS_STRUCT (TREE_CODE (copy), TS_DECL_WRTL)
2604 && !TREE_STATIC (copy) && !DECL_EXTERNAL (copy))
2605 SET_DECL_RTL (copy, NULL_RTX);
2607 /* These args would always appear unused, if not for this. */
2608 TREE_USED (copy) = 1;
2610 /* Set the context for the new declaration. */
2611 if (!DECL_CONTEXT (decl))
2612 /* Globals stay global. */
2614 else if (DECL_CONTEXT (decl) != id->src_fn)
2615 /* Things that weren't in the scope of the function we're inlining
2616 from aren't in the scope we're inlining to, either. */
2618 else if (TREE_STATIC (decl))
2619 /* Function-scoped static variables should stay in the original
2620 function. */
2622 else
2623 /* Ordinary automatic local variables are now in the scope of the
2624 new function. */
2625 DECL_CONTEXT (copy) = id->dst_fn;
2627 return copy;
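/* Make a VAR_DECL copy of the PARM_DECL or RESULT_DECL DECL, for use
   when inlining turns a formal parameter or the return object into
   an ordinary local variable of the destination function. */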
2630 static tree
2631 copy_decl_to_var (tree decl, copy_body_data *id)
2633 tree copy, type;
2635 gcc_assert (TREE_CODE (decl) == PARM_DECL
2636 || TREE_CODE (decl) == RESULT_DECL);
2638 type = TREE_TYPE (decl);
2640 copy = build_decl (VAR_DECL, DECL_NAME (decl), type);
2641 TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (decl);
2642 TREE_READONLY (copy) = TREE_READONLY (decl);
2643 TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (decl);
2644 DECL_COMPLEX_GIMPLE_REG_P (copy) = DECL_COMPLEX_GIMPLE_REG_P (decl);
2646 return copy_decl_for_dup_finish (id, decl, copy);
2649 /* Like copy_decl_to_var, but create a return slot object instead of a
2650 pointer variable for return by invisible reference. */
2652 static tree
2653 copy_result_decl_to_var (tree decl, copy_body_data *id)
2655 tree copy, type;
2657 gcc_assert (TREE_CODE (decl) == PARM_DECL
2658 || TREE_CODE (decl) == RESULT_DECL);
2660 type = TREE_TYPE (decl);
2661 if (DECL_BY_REFERENCE (decl))
2662 type = TREE_TYPE (type);
2664 copy = build_decl (VAR_DECL, DECL_NAME (decl), type);
2665 TREE_READONLY (copy) = TREE_READONLY (decl);
2666 TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (decl);
2667 if (!DECL_BY_REFERENCE (decl))
2669 TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (decl);
2670 DECL_COMPLEX_GIMPLE_REG_P (copy) = DECL_COMPLEX_GIMPLE_REG_P (decl);
2673 return copy_decl_for_dup_finish (id, decl, copy);
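/* Copy DECL without changing its kind of declaration; only the
   bookkeeping (abstract origin, label bits, context) is redone for
   the destination function. */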
2677 static tree
2678 copy_decl_no_change (tree decl, copy_body_data *id)
2680 tree copy;
2682 copy = copy_node (decl);
2684 /* The COPY is not abstract; it will be generated in DST_FN. */
2685 DECL_ABSTRACT (copy) = 0;
2686 lang_hooks.dup_lang_specific_decl (copy);
2688 /* TREE_ADDRESSABLE isn't used to indicate that a label's address has
2689 been taken; it's for internal bookkeeping in expand_goto_internal. */
2690 if (TREE_CODE (copy) == LABEL_DECL)
2692 TREE_ADDRESSABLE (copy) = 0;
2693 LABEL_DECL_UID (copy) = -1;
2696 return copy_decl_for_dup_finish (id, decl, copy);
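/* Dispatcher suitable for the copy_decl hook when inlining:
   parameters and results become VAR_DECLs, everything else is
   copied unchanged. */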
2699 static tree
2700 copy_decl_maybe_to_var (tree decl, copy_body_data *id)
2702 if (TREE_CODE (decl) == PARM_DECL || TREE_CODE (decl) == RESULT_DECL)
2703 return copy_decl_to_var (decl, id);
2704 else
2705 return copy_decl_no_change (decl, id);
2708 /* Return a copy of the function's argument tree. */
2709 static tree
2710 copy_arguments_for_versioning (tree orig_parm, copy_body_data * id)
2712 tree *arg_copy, *parg;
2714 arg_copy = &orig_parm;
2715 for (parg = arg_copy; *parg; parg = &TREE_CHAIN (*parg))
2717 tree new = remap_decl (*parg, id);
2718 lang_hooks.dup_lang_specific_decl (new);
2719 TREE_CHAIN (new) = TREE_CHAIN (*parg);
2720 *parg = new;
2722 return orig_parm;
2725 /* Return a copy of the function's static chain. */
2726 static tree
2727 copy_static_chain (tree static_chain, copy_body_data * id)
2729 tree *chain_copy, *pvar;
2731 chain_copy = &static_chain;
2732 for (pvar = chain_copy; *pvar; pvar = &TREE_CHAIN (*pvar))
2734 tree new = remap_decl (*pvar, id);
2735 lang_hooks.dup_lang_specific_decl (new);
2736 TREE_CHAIN (new) = TREE_CHAIN (*pvar);
2737 *pvar = new;
2739 return static_chain;
2742 /* Return true if the function is allowed to be versioned.
2743 This is a guard for the versioning functionality. */
2744 bool
2745 tree_versionable_function_p (tree fndecl)
2747 if (fndecl == NULL_TREE)
2748 return false;
2749 /* ??? There are cases where a function is
2750 uninlinable but can be versioned. */
2751 if (!tree_inlinable_function_p (fndecl))
2752 return false;
2754 return true;
2757 /* Create a copy of a function's tree.
2758 OLD_DECL and NEW_DECL are FUNCTION_DECL tree nodes
2759 of the original function and the new copied function,
2760 respectively. If we want to replace a DECL
2761 tree with another tree while duplicating the function's
2762 body, TREE_MAP represents the mapping between these
2763 trees. If UPDATE_CLONES is set, the call_stmt fields
2764 of edges of clones of the function will be updated. */
2765 void
2766 tree_function_versioning (tree old_decl, tree new_decl, varray_type tree_map,
2767 bool update_clones)
2769 struct cgraph_node *old_version_node;
2770 struct cgraph_node *new_version_node;
2771 copy_body_data id;
2772 tree p, new_fndecl;
2773 unsigned i;
2774 struct ipa_replace_map *replace_info;
2775 basic_block old_entry_block;
2776 tree t_step;
2778 gcc_assert (TREE_CODE (old_decl) == FUNCTION_DECL
2779 && TREE_CODE (new_decl) == FUNCTION_DECL);
2780 DECL_POSSIBLY_INLINED (old_decl) = 1;
2782 old_version_node = cgraph_node (old_decl);
2783 new_version_node = cgraph_node (new_decl);
2785 allocate_struct_function (new_decl);
2786 /* Cfun points to the newly allocated function struct at this point. */
2787 cfun->function_end_locus = DECL_SOURCE_LOCATION (new_decl);
2789 DECL_ARTIFICIAL (new_decl) = 1;
2790 DECL_ABSTRACT_ORIGIN (new_decl) = DECL_ORIGIN (old_decl);
2792 /* Generate a new name for the new version. */
2793 if (!update_clones)
2795 DECL_NAME (new_decl) = create_tmp_var_name (NULL);
2796 SET_DECL_ASSEMBLER_NAME (new_decl, DECL_NAME (new_decl));
2797 SET_DECL_RTL (new_decl, NULL_RTX);
2800 /* Prepare the data structures for the tree copy. */
2801 memset (&id, 0, sizeof (id));
2803 id.decl_map = splay_tree_new (splay_tree_compare_pointers, NULL, NULL);
2804 id.src_fn = old_decl;
2805 id.dst_fn = new_decl;
2806 id.src_node = old_version_node;
2807 id.dst_node = new_version_node;
2808 id.src_cfun = DECL_STRUCT_FUNCTION (old_decl);
2810 id.copy_decl = copy_decl_no_change;
2811 id.transform_call_graph_edges
2812 = update_clones ? CB_CGE_MOVE_CLONES : CB_CGE_MOVE;
2813 id.transform_new_cfg = true;
2814 id.transform_return_to_modify = false;
2815 id.transform_lang_insert_block = false;
2817 current_function_decl = new_decl;
2819 /* Copy the function's static chain. */
2820 p = DECL_STRUCT_FUNCTION (old_decl)->static_chain_decl;
2821 if (p)
2822 DECL_STRUCT_FUNCTION (new_decl)->static_chain_decl =
2823 copy_static_chain (DECL_STRUCT_FUNCTION (old_decl)->static_chain_decl,
2824 &id);
2825 /* Copy the function's arguments. */
2826 if (DECL_ARGUMENTS (old_decl) != NULL_TREE)
2827 DECL_ARGUMENTS (new_decl) =
2828 copy_arguments_for_versioning (DECL_ARGUMENTS (old_decl), &id);
2830 /* If there's a tree_map, prepare for substitution. */
2831 if (tree_map)
2832 for (i = 0; i < VARRAY_ACTIVE_SIZE (tree_map); i++)
2834 replace_info = VARRAY_GENERIC_PTR (tree_map, i);
2835 if (replace_info->replace_p)
2836 insert_decl_map (&id, replace_info->old_tree,
2837 replace_info->new_tree);
2840 DECL_INITIAL (new_decl) = remap_blocks (DECL_INITIAL (id.src_fn), &id);
2842 /* Renumber the lexical scoping (non-code) blocks consecutively. */
2843 number_blocks (id.dst_fn);
2845 if (DECL_STRUCT_FUNCTION (old_decl)->unexpanded_var_list != NULL_TREE)
2846 /* Add local vars. */
2847 for (t_step = DECL_STRUCT_FUNCTION (old_decl)->unexpanded_var_list;
2848 t_step; t_step = TREE_CHAIN (t_step))
2850 tree var = TREE_VALUE (t_step);
2851 if (TREE_STATIC (var) && !TREE_ASM_WRITTEN (var))
2852 cfun->unexpanded_var_list = tree_cons (NULL_TREE, var,
2853 cfun->unexpanded_var_list);
2854 else
2855 cfun->unexpanded_var_list =
2856 tree_cons (NULL_TREE, remap_decl (var, &id),
2857 cfun->unexpanded_var_list);
2860 /* Copy the function's body. */
2861 old_entry_block = ENTRY_BLOCK_PTR_FOR_FUNCTION
2862 (DECL_STRUCT_FUNCTION (old_decl));
2863 new_fndecl = copy_body (&id,
2864 old_entry_block->count,
2865 old_entry_block->frequency, NULL, NULL);
2867 DECL_SAVED_TREE (new_decl) = DECL_SAVED_TREE (new_fndecl);
2869 DECL_STRUCT_FUNCTION (new_decl)->cfg =
2870 DECL_STRUCT_FUNCTION (new_fndecl)->cfg;
2871 DECL_STRUCT_FUNCTION (new_decl)->eh = DECL_STRUCT_FUNCTION (new_fndecl)->eh;
2872 DECL_STRUCT_FUNCTION (new_decl)->ib_boundaries_block =
2873 DECL_STRUCT_FUNCTION (new_fndecl)->ib_boundaries_block;
2874 DECL_STRUCT_FUNCTION (new_decl)->last_label_uid =
2875 DECL_STRUCT_FUNCTION (new_fndecl)->last_label_uid;
2877 if (DECL_RESULT (old_decl) != NULL_TREE)
2879 tree *res_decl = &DECL_RESULT (old_decl);
2880 DECL_RESULT (new_decl) = remap_decl (*res_decl, &id);
2881 lang_hooks.dup_lang_specific_decl (DECL_RESULT (new_decl));
2884 current_function_decl = NULL;
2885 /* Renumber the lexical scoping (non-code) blocks consecutively. */
2886 number_blocks (new_decl);
2888 /* Clean up. */
2889 splay_tree_delete (id.decl_map);
2890 fold_cond_expr_cond ();
2891 return;
2894 /* Duplicate a type, fields and all. */
2896 tree
2897 build_duplicate_type (tree type)
2899 struct copy_body_data id;
2901 memset (&id, 0, sizeof (id));
2902 id.src_fn = current_function_decl;
2903 id.dst_fn = current_function_decl;
2904 id.src_cfun = cfun;
2905 id.decl_map = splay_tree_new (splay_tree_compare_pointers, NULL, NULL);
2907 type = remap_type_1 (type, &id);
2909 splay_tree_delete (id.decl_map);
2911 return type;
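/* A usage sketch (the caller shown is hypothetical):

       tree dup = build_duplicate_type (orig_type);

   DUP's FIELD_DECLs and nested types are fresh copies, so they can
   be adjusted without disturbing ORIG_TYPE. */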