/* Tree inlining.
   Copyright 2001, 2002, 2003, 2004, 2005 Free Software Foundation, Inc.
   Contributed by Alexandre Oliva <aoliva@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to
the Free Software Foundation, 51 Franklin Street, Fifth Floor,
Boston, MA 02110-1301, USA.  */
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tm.h"
26 #include "toplev.h"
27 #include "tree.h"
28 #include "tree-inline.h"
29 #include "rtl.h"
30 #include "expr.h"
31 #include "flags.h"
32 #include "params.h"
33 #include "input.h"
34 #include "insn-config.h"
35 #include "varray.h"
36 #include "hashtab.h"
37 #include "splay-tree.h"
38 #include "langhooks.h"
39 #include "basic-block.h"
40 #include "tree-iterator.h"
41 #include "cgraph.h"
42 #include "intl.h"
43 #include "tree-mudflap.h"
44 #include "tree-flow.h"
45 #include "function.h"
46 #include "ggc.h"
47 #include "tree-flow.h"
48 #include "diagnostic.h"
49 #include "except.h"
50 #include "debug.h"
51 #include "pointer-set.h"
52 #include "ipa-prop.h"
54 /* I'm not real happy about this, but we need to handle gimple and
55 non-gimple trees. */
56 #include "tree-gimple.h"
/* Inlining, Saving, Cloning

   Inlining: a function body is duplicated, but the PARM_DECLs are
   remapped into VAR_DECLs, and non-void RETURN_EXPRs become
   MODIFY_EXPRs that store to a dedicated returned-value variable.
   The duplicated eh_region info of the copy will later be appended
   to the info for the caller; the eh_region info in copied throwing
   statements and RESX_EXPRs is adjusted accordingly.

   Saving: make a semantically-identical copy of the function body.
   Necessary when we want to generate code for the body (a destructive
   operation), but we expect to need this body in the future (e.g. for
   inlining into another function).

   Cloning: (only in C++) We have one body for a con/de/structor, and
   multiple function decls, each with a unique parameter list.
   Duplicate the body, using the given splay tree; some parameters
   will become constants (like 0 or 1).

   All of these will simultaneously look up any callgraph edges.  If
   we're going to inline the duplicated function body, and the given
   function has some cloned callgraph nodes (one for each place this
   function will be inlined) those callgraph edges will be duplicated.
   If we're saving or cloning the body, those callgraph edges will be
   updated to point into the new body.  (Note that the original
   callgraph node and edge list will not be altered.)

   See the CALL_EXPR handling case in copy_body_r ().  */
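/* As a rough illustration of the inlining transformation (the precise
   trees are built by copy_body_r and declare_return_variable below,
   and the variable names here are made up), a call such as

       int square (int x) { return x * x; }
       ...
       y = square (n);

   becomes, schematically,

       x.1 = n;
       y = x.1 * x.1;

   where x.1 is the VAR_DECL that the callee's PARM_DECL was remapped
   to, and the RETURN_EXPR has been rewritten as an assignment to the
   returned-value variable.  */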
/* 0 if we should not perform inlining.
   1 if we should expand function calls inline at the tree level.
   2 if we should consider *all* functions to be inline
   candidates.  */

int flag_inline_trees = 0;

/* To Do:

   o In order to make inlining-on-trees work, we pessimized
     function-local static constants.  In particular, they are now
     always output, even when not addressed.  Fix this by treating
     function-local static constants just like global static
     constants; the back-end already knows not to output them if they
     are not needed.

   o Provide heuristics to clamp inlining of recursive template
     calls?  */
/* Data required for function inlining.  */

typedef struct inline_data
{
  /* FUNCTION_DECL for function being inlined.  */
  tree callee;
  /* FUNCTION_DECL for function being inlined into.  */
  tree caller;
  /* struct function for function being inlined.  Usually this is the same
     as DECL_STRUCT_FUNCTION (callee), but can be different if saved_cfg
     and saved_eh are in use.  */
  struct function *callee_cfun;
  /* The VAR_DECL for the return value.  */
  tree retvar;
  /* The map from local declarations in the inlined function to
     equivalents in the function into which it is being inlined.  */
  splay_tree decl_map;
  /* We use the same mechanism to build clones that we do to perform
     inlining.  However, there are a few places where we need to
     distinguish between those two situations.  This flag is true if
     we are cloning, rather than inlining.  */
  bool cloning_p;
  /* Similarly for saving function body.  */
  bool saving_p;
  /* Versioning function is slightly different from inlining.  */
  bool versioning_p;
  /* Callgraph node of function we are inlining into.  */
  struct cgraph_node *node;
  /* Callgraph node of currently inlined function.  */
  struct cgraph_node *current_node;
  /* Current BLOCK.  */
  tree block;
  varray_type ipa_info;
  /* Exception region the inlined call lies in.  */
  int eh_region;
  /* Take region number in the function being copied, add this value and
     get eh region number of the duplicate in the function we inline into.  */
  int eh_region_offset;
} inline_data;
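/* For example, while inlining the SQUARE function sketched above,
   decl_map would map the callee's PARM_DECL x to the caller's fresh
   VAR_DECL x.1 (plus an identity mapping for x.1 itself; see
   insert_decl_map below).  */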
/* Prototypes.  */

static tree declare_return_variable (inline_data *, tree, tree, tree *);
static tree copy_body_r (tree *, int *, void *);
static tree copy_generic_body (inline_data *);
static bool inlinable_function_p (tree);
static tree remap_decl (tree, inline_data *);
static tree remap_type (tree, inline_data *);
static void remap_block (tree *, inline_data *);
static tree remap_decls (tree, inline_data *);
static void copy_bind_expr (tree *, int *, inline_data *);
static tree mark_local_for_remap_r (tree *, int *, void *);
static void unsave_expr_1 (tree);
static tree unsave_r (tree *, int *, void *);
static void declare_inline_vars (tree, tree);
static void remap_save_expr (tree *, void *, int *);
static bool replace_ref_tree (inline_data *, tree *);
static inline bool inlining_p (inline_data *);
static void add_lexical_block (tree current_block, tree new_block);

/* Insert a tree->tree mapping for ID.  Although the name suggests
   that the trees should be variables, it is used for more than that.  */
static void
insert_decl_map (inline_data *id, tree key, tree value)
{
  splay_tree_insert (id->decl_map, (splay_tree_key) key,
		     (splay_tree_value) value);

  /* Always insert an identity map as well.  If we see this same new
     node again, we won't want to duplicate it a second time.  */
  if (key != value)
    splay_tree_insert (id->decl_map, (splay_tree_key) value,
		       (splay_tree_value) value);
}
/* Remap DECL during the copying of the BLOCK tree for the function.  */

static tree
remap_decl (tree decl, inline_data *id)
{
  splay_tree_node n;
  tree fn;

  /* We only remap local variables in the current function.  */
  fn = id->callee;

  /* See if we have remapped this declaration.  */
  n = splay_tree_lookup (id->decl_map, (splay_tree_key) decl);

  /* If we didn't already have an equivalent for this declaration,
     create one now.  */
  if (!n)
    {
      /* Make a copy of the variable or label.  */
      tree t;
      t = copy_decl_for_dup (decl, fn, id->caller, id->versioning_p);

      /* Remember it, so that if we encounter this local entity again
	 we can reuse this copy.  Do this early because remap_type may
	 need this decl for TYPE_STUB_DECL.  */
      insert_decl_map (id, decl, t);

      /* Remap types, if necessary.  */
      TREE_TYPE (t) = remap_type (TREE_TYPE (t), id);
      if (TREE_CODE (t) == TYPE_DECL)
	DECL_ORIGINAL_TYPE (t) = remap_type (DECL_ORIGINAL_TYPE (t), id);

      /* Remap sizes as necessary.  */
      walk_tree (&DECL_SIZE (t), copy_body_r, id, NULL);
      walk_tree (&DECL_SIZE_UNIT (t), copy_body_r, id, NULL);

      /* If fields, do likewise for offset and qualifier.  */
      if (TREE_CODE (t) == FIELD_DECL)
	{
	  walk_tree (&DECL_FIELD_OFFSET (t), copy_body_r, id, NULL);
	  if (TREE_CODE (DECL_CONTEXT (t)) == QUAL_UNION_TYPE)
	    walk_tree (&DECL_QUALIFIER (t), copy_body_r, id, NULL);
	}

#if 0
      /* FIXME handle anon aggrs.  */
      if (! DECL_NAME (t) && TREE_TYPE (t)
	  && lang_hooks.tree_inlining.anon_aggr_type_p (TREE_TYPE (t)))
	{
	  /* For a VAR_DECL of anonymous type, we must also copy the
	     member VAR_DECLS here and rechain the DECL_ANON_UNION_ELEMS.  */
	  tree members = NULL;
	  tree src;

	  for (src = DECL_ANON_UNION_ELEMS (t); src;
	       src = TREE_CHAIN (src))
	    {
	      tree member = remap_decl (TREE_VALUE (src), id);

	      gcc_assert (!TREE_PURPOSE (src));
	      members = tree_cons (NULL, member, members);
	    }
	  DECL_ANON_UNION_ELEMS (t) = nreverse (members);
	}
#endif

      /* Remember it, so that if we encounter this local entity
	 again we can reuse this copy.  */
      insert_decl_map (id, decl, t);
      return t;
    }

  return unshare_expr ((tree) n->value);
}
static tree
remap_type_1 (tree type, inline_data *id)
{
  tree new, t;

  /* We do need a copy; build and register it now.  If this is a pointer or
     reference type, remap the designated type and make a new pointer or
     reference type.  */
  if (TREE_CODE (type) == POINTER_TYPE)
    {
      new = build_pointer_type_for_mode (remap_type (TREE_TYPE (type), id),
					 TYPE_MODE (type),
					 TYPE_REF_CAN_ALIAS_ALL (type));
      insert_decl_map (id, type, new);
      return new;
    }
  else if (TREE_CODE (type) == REFERENCE_TYPE)
    {
      new = build_reference_type_for_mode (remap_type (TREE_TYPE (type), id),
					   TYPE_MODE (type),
					   TYPE_REF_CAN_ALIAS_ALL (type));
      insert_decl_map (id, type, new);
      return new;
    }
  else
    new = copy_node (type);

  insert_decl_map (id, type, new);

  /* This is a new type, not a copy of an old type.  Need to reassociate
     variants.  We can handle everything except the main variant lazily.  */
  t = TYPE_MAIN_VARIANT (type);
  if (type != t)
    {
      t = remap_type (t, id);
      TYPE_MAIN_VARIANT (new) = t;
      TYPE_NEXT_VARIANT (new) = TYPE_MAIN_VARIANT (t);
      TYPE_NEXT_VARIANT (t) = new;
    }
  else
    {
      TYPE_MAIN_VARIANT (new) = new;
      TYPE_NEXT_VARIANT (new) = NULL;
    }

  if (TYPE_STUB_DECL (type))
    TYPE_STUB_DECL (new) = remap_decl (TYPE_STUB_DECL (type), id);

  /* Lazily create pointer and reference types.  */
  TYPE_POINTER_TO (new) = NULL;
  TYPE_REFERENCE_TO (new) = NULL;

  switch (TREE_CODE (new))
    {
    case INTEGER_TYPE:
    case REAL_TYPE:
    case ENUMERAL_TYPE:
    case BOOLEAN_TYPE:
    case CHAR_TYPE:
      t = TYPE_MIN_VALUE (new);
      if (t && TREE_CODE (t) != INTEGER_CST)
	walk_tree (&TYPE_MIN_VALUE (new), copy_body_r, id, NULL);

      t = TYPE_MAX_VALUE (new);
      if (t && TREE_CODE (t) != INTEGER_CST)
	walk_tree (&TYPE_MAX_VALUE (new), copy_body_r, id, NULL);
      return new;

    case FUNCTION_TYPE:
      TREE_TYPE (new) = remap_type (TREE_TYPE (new), id);
      walk_tree (&TYPE_ARG_TYPES (new), copy_body_r, id, NULL);
      return new;

    case ARRAY_TYPE:
      TREE_TYPE (new) = remap_type (TREE_TYPE (new), id);
      TYPE_DOMAIN (new) = remap_type (TYPE_DOMAIN (new), id);
      break;

    case RECORD_TYPE:
    case UNION_TYPE:
    case QUAL_UNION_TYPE:
      {
	tree f, nf = NULL;

	for (f = TYPE_FIELDS (new); f ; f = TREE_CHAIN (f))
	  {
	    t = remap_decl (f, id);
	    DECL_CONTEXT (t) = new;
	    TREE_CHAIN (t) = nf;
	    nf = t;
	  }
	TYPE_FIELDS (new) = nreverse (nf);
      }
      break;

    case OFFSET_TYPE:
    default:
      /* Shouldn't have been thought variable sized.  */
      gcc_unreachable ();
    }

  walk_tree (&TYPE_SIZE (new), copy_body_r, id, NULL);
  walk_tree (&TYPE_SIZE_UNIT (new), copy_body_r, id, NULL);

  return new;
}
static tree
remap_type (tree type, inline_data *id)
{
  splay_tree_node node;

  if (type == NULL)
    return type;

  /* See if we have remapped this type.  */
  node = splay_tree_lookup (id->decl_map, (splay_tree_key) type);
  if (node)
    return (tree) node->value;

  /* The type only needs remapping if it's variably modified.  */
  if (! variably_modified_type_p (type, id->callee))
    {
      insert_decl_map (id, type, type);
      return type;
    }

  return remap_type_1 (type, id);
}
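/* For instance, given

       void f (int n) { int a[n]; ... }

   the domain of A's type refers to the PARM_DECL n, so the type is
   variably modified and must be remapped when F is inlined, whereas a
   plain int[10] can be shared with the original function.  */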
static tree
remap_decls (tree decls, inline_data *id)
{
  tree old_var;
  tree new_decls = NULL_TREE;

  /* Remap its variables.  */
  for (old_var = decls; old_var; old_var = TREE_CHAIN (old_var))
    {
      tree new_var;

      /* We cannot remap local static declarations: we can't duplicate
	 them, and doing so would break the one-decl rule.  Instead,
	 link the original declaration into cfun's unexpanded_var_list
	 directly.  */
      if (!lang_hooks.tree_inlining.auto_var_in_fn_p (old_var, id->callee)
	  && !DECL_EXTERNAL (old_var))
	{
	  cfun->unexpanded_var_list = tree_cons (NULL_TREE, old_var,
						 cfun->unexpanded_var_list);
	  continue;
	}

      /* Remap the variable.  */
      new_var = remap_decl (old_var, id);

      /* If we didn't remap this variable, we can't mess with its
	 TREE_CHAIN.  If we remapped this variable to the return slot, it's
	 already declared somewhere else, so don't declare it here.  */
      if (!new_var || new_var == id->retvar)
	;
      else
	{
	  gcc_assert (DECL_P (new_var));
	  TREE_CHAIN (new_var) = new_decls;
	  new_decls = new_var;
	}
    }

  return nreverse (new_decls);
}
/* Copy the BLOCK to contain remapped versions of the variables
   therein.  And hook the new block into the block-tree.  */

static void
remap_block (tree *block, inline_data *id)
{
  tree old_block;
  tree new_block;
  tree fn;

  /* Make the new block.  */
  old_block = *block;
  new_block = make_node (BLOCK);
  TREE_USED (new_block) = TREE_USED (old_block);
  BLOCK_ABSTRACT_ORIGIN (new_block) = old_block;
  BLOCK_SOURCE_LOCATION (new_block) = BLOCK_SOURCE_LOCATION (old_block);
  *block = new_block;

  /* Remap its variables.  */
  BLOCK_VARS (new_block) = remap_decls (BLOCK_VARS (old_block), id);

  fn = id->caller;
  if (id->cloning_p)
    /* We're building a clone; DECL_INITIAL is still
       error_mark_node, and current_binding_level is the parm
       binding level.  */
    lang_hooks.decls.insert_block (new_block);
  /* Remember the remapped block.  */
  insert_decl_map (id, old_block, new_block);
}
/* Copy the whole block tree and root it in id->block.  */
static tree
remap_blocks (tree block, inline_data *id)
{
  tree t;
  tree new = block;

  if (!block)
    return NULL;

  remap_block (&new, id);
  gcc_assert (new != block);
  for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
    add_lexical_block (new, remap_blocks (t, id));
  return new;
}
static void
copy_statement_list (tree *tp)
{
  tree_stmt_iterator oi, ni;
  tree new;

  new = alloc_stmt_list ();
  ni = tsi_start (new);
  oi = tsi_start (*tp);
  *tp = new;

  for (; !tsi_end_p (oi); tsi_next (&oi))
    tsi_link_after (&ni, tsi_stmt (oi), TSI_NEW_STMT);
}
static void
copy_bind_expr (tree *tp, int *walk_subtrees, inline_data *id)
{
  tree block = BIND_EXPR_BLOCK (*tp);
  /* Copy (and replace) the statement.  */
  copy_tree_r (tp, walk_subtrees, NULL);
  if (block)
    {
      remap_block (&block, id);
      BIND_EXPR_BLOCK (*tp) = block;
    }

  if (BIND_EXPR_VARS (*tp))
    /* This will remap a lot of the same decls again, but this should be
       harmless.  */
    BIND_EXPR_VARS (*tp) = remap_decls (BIND_EXPR_VARS (*tp), id);
}
/* Called from copy_body via walk_tree.  DATA is really an
   `inline_data *'.  */

static tree
copy_body_r (tree *tp, int *walk_subtrees, void *data)
{
  inline_data *id = (inline_data *) data;
  tree fn = id->callee;
  tree new_block;

  /* Begin by recognizing trees that we'll completely rewrite for the
     inlining context.  Our output for these trees is completely
     different from our input (e.g. RETURN_EXPR is deleted, and morphs
     into an edge).  Further down, we'll handle trees that get
     duplicated and/or tweaked.  */

  /* If this is a RETURN_EXPR, rewrite it into an assignment to the
     return variable; the transfer of control to the exit block is
     represented by a CFG edge added later.  */
  if (TREE_CODE (*tp) == RETURN_EXPR && inlining_p (id))
    {
      tree assignment = TREE_OPERAND (*tp, 0);

      /* If we're returning something, just turn that into an
	 assignment into the equivalent of the original RESULT_DECL.
	 If the "assignment" is just the result decl, the result
	 decl has already been set (e.g. a recent "foo (&result_decl,
	 ...)"); just toss the entire RETURN_EXPR.  */
      if (assignment && TREE_CODE (assignment) == MODIFY_EXPR)
	{
	  /* Replace the RETURN_EXPR with (a copy of) the
	     MODIFY_EXPR hanging underneath.  */
	  *tp = copy_node (assignment);
	}
      else /* Else the RETURN_EXPR returns no value.  */
	{
	  *tp = NULL;
	  return (void *)1;
	}
    }

  /* Local variables and labels need to be replaced by equivalent
     variables.  We don't want to copy static variables; there's only
     one of those, no matter how many times we inline the containing
     function.  Similarly for globals from an outer function.  */
  else if (lang_hooks.tree_inlining.auto_var_in_fn_p (*tp, fn))
    {
      tree new_decl;

      /* Remap the declaration.  */
      new_decl = remap_decl (*tp, id);
      gcc_assert (new_decl);
      /* Replace this variable with the copy.  */
      STRIP_TYPE_NOPS (new_decl);
      *tp = new_decl;
      *walk_subtrees = 0;
    }
  else if (TREE_CODE (*tp) == STATEMENT_LIST)
    copy_statement_list (tp);
  else if (TREE_CODE (*tp) == SAVE_EXPR)
    remap_save_expr (tp, id->decl_map, walk_subtrees);
  else if (TREE_CODE (*tp) == LABEL_DECL
	   && (! DECL_CONTEXT (*tp)
	       || decl_function_context (*tp) == id->callee))
    /* These may need to be remapped for EH handling.  */
    *tp = remap_decl (*tp, id);
  else if (TREE_CODE (*tp) == BIND_EXPR)
    copy_bind_expr (tp, walk_subtrees, id);
  /* Types may need remapping as well.  */
  else if (TYPE_P (*tp))
    *tp = remap_type (*tp, id);

  /* If this is a constant, we have to copy the node iff the type will be
     remapped.  copy_tree_r will not copy a constant.  */
  else if (CONSTANT_CLASS_P (*tp))
    {
      tree new_type = remap_type (TREE_TYPE (*tp), id);

      if (new_type == TREE_TYPE (*tp))
	*walk_subtrees = 0;

      else if (TREE_CODE (*tp) == INTEGER_CST)
	*tp = build_int_cst_wide (new_type, TREE_INT_CST_LOW (*tp),
				  TREE_INT_CST_HIGH (*tp));
      else
	{
	  *tp = copy_node (*tp);
	  TREE_TYPE (*tp) = new_type;
	}
    }

  /* Otherwise, just copy the node.  Note that copy_tree_r already
     knows not to copy VAR_DECLs, etc., so this is safe.  */
  else
    {
      /* Here we handle trees that are not completely rewritten.
	 First we detect some inlining-induced bogosities for
	 discarding.  */
      if (TREE_CODE (*tp) == MODIFY_EXPR
	  && TREE_OPERAND (*tp, 0) == TREE_OPERAND (*tp, 1)
	  && (lang_hooks.tree_inlining.auto_var_in_fn_p
	      (TREE_OPERAND (*tp, 0), fn)))
	{
	  /* Some assignments VAR = VAR; don't generate any rtl code
	     and thus don't count as variable modification.  Avoid
	     keeping bogosities like 0 = 0.  */
	  tree decl = TREE_OPERAND (*tp, 0), value;
	  splay_tree_node n;

	  n = splay_tree_lookup (id->decl_map, (splay_tree_key) decl);
	  if (n)
	    {
	      value = (tree) n->value;
	      STRIP_TYPE_NOPS (value);
	      if (TREE_CONSTANT (value) || TREE_READONLY_DECL_P (value))
		{
		  *tp = build_empty_stmt ();
		  return copy_body_r (tp, walk_subtrees, data);
		}
	    }
	}
      else if (TREE_CODE (*tp) == INDIRECT_REF
	       && !id->versioning_p)
	{
	  /* Get rid of *& from inline substitutions that can happen when a
	     pointer argument is an ADDR_EXPR.  */
	  tree decl = TREE_OPERAND (*tp, 0);
	  splay_tree_node n;

	  n = splay_tree_lookup (id->decl_map, (splay_tree_key) decl);
	  if (n)
	    {
	      /* If we happen to get an ADDR_EXPR in n->value, strip
		 it manually here as we'll eventually get ADDR_EXPRs
		 which lie about their types pointed to.  In this case
		 build_fold_indirect_ref wouldn't strip the INDIRECT_REF,
		 but we absolutely rely on that.  As fold_indirect_ref
		 does other useful transformations, try that first, though.  */
	      tree type = TREE_TYPE (TREE_TYPE ((tree)n->value));
	      *tp = fold_indirect_ref_1 (type, (tree)n->value);
	      if (! *tp)
		{
		  if (TREE_CODE ((tree)n->value) == ADDR_EXPR)
		    *tp = TREE_OPERAND ((tree)n->value, 0);
		  else
		    *tp = build1 (INDIRECT_REF, type, (tree)n->value);
		}
	      *walk_subtrees = 0;
	      return NULL;
	    }
	}

      /* Here is the "usual case".  Copy this tree node, and then
	 tweak some special cases.  */
      copy_tree_r (tp, walk_subtrees, id->versioning_p ? data : NULL);

      /* If EXPR has a block defined, map it to the newly constructed
	 block.  When inlining we want EXPRs without a block to appear
	 in the block of the function call.  */
      if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (TREE_CODE (*tp))))
	{
	  new_block = id->block;
	  if (TREE_BLOCK (*tp))
	    {
	      splay_tree_node n;
	      n = splay_tree_lookup (id->decl_map,
				     (splay_tree_key) TREE_BLOCK (*tp));
	      gcc_assert (n);
	      new_block = (tree) n->value;
	    }
	  TREE_BLOCK (*tp) = new_block;
	}

      if (TREE_CODE (*tp) == RESX_EXPR && id->eh_region_offset)
	TREE_OPERAND (*tp, 0) =
	  build_int_cst
	    (NULL_TREE,
	     id->eh_region_offset + TREE_INT_CST_LOW (TREE_OPERAND (*tp, 0)));

      TREE_TYPE (*tp) = remap_type (TREE_TYPE (*tp), id);

      /* The copied TARGET_EXPR has never been expanded, even if the
	 original node was expanded already.  */
      if (TREE_CODE (*tp) == TARGET_EXPR && TREE_OPERAND (*tp, 3))
	{
	  TREE_OPERAND (*tp, 1) = TREE_OPERAND (*tp, 3);
	  TREE_OPERAND (*tp, 3) = NULL_TREE;
	}

      /* Variable substitution need not be simple.  In particular, the
	 INDIRECT_REF substitution above.  Make sure that TREE_CONSTANT
	 and friends are up-to-date.  */
      else if (TREE_CODE (*tp) == ADDR_EXPR)
	{
	  walk_tree (&TREE_OPERAND (*tp, 0), copy_body_r, id, NULL);
	  recompute_tree_invarant_for_addr_expr (*tp);
	  *walk_subtrees = 0;
	}
    }

  /* Keep iterating.  */
  return NULL_TREE;
}
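/* As an example of the INDIRECT_REF cleanup above: when inlining

       void set (int *p) { *p = 0; }
       ...
       set (&v);

   P is mapped to &v, so the body's *p would otherwise become *&v;
   fold_indirect_ref_1 (or the manual ADDR_EXPR stripping) reduces it
   to a plain use of V.  */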
/* Copy basic block, scale profile accordingly.  Edges will be taken care of
   later.  */

static basic_block
copy_bb (inline_data *id, basic_block bb, int frequency_scale, int count_scale)
{
  block_stmt_iterator bsi, copy_bsi;
  basic_block copy_basic_block;

  /* create_basic_block() will append every new block to
     basic_block_info automatically.  */
  copy_basic_block = create_basic_block (NULL, (void *) 0, bb->prev_bb->aux);
  copy_basic_block->count = bb->count * count_scale / REG_BR_PROB_BASE;
  copy_basic_block->frequency = (bb->frequency
				 * frequency_scale / REG_BR_PROB_BASE);
  copy_bsi = bsi_start (copy_basic_block);

  for (bsi = bsi_start (bb);
       !bsi_end_p (bsi); bsi_next (&bsi))
    {
      tree stmt = bsi_stmt (bsi);
      tree orig_stmt = stmt;

      walk_tree (&stmt, copy_body_r, id, NULL);

      /* RETURN_EXPR might be removed,
	 this is signalled by making stmt pointer NULL.  */
      if (stmt)
	{
	  tree call, decl;
	  bsi_insert_after (&copy_bsi, stmt, BSI_NEW_STMT);
	  call = get_call_expr_in (stmt);
	  /* We're duplicating a CALL_EXPR.  Find any corresponding
	     callgraph edges and update or duplicate them.  */
	  if (call && (decl = get_callee_fndecl (call)))
	    {
	      if (id->saving_p)
		{
		  struct cgraph_node *node;
		  struct cgraph_edge *edge;

		  /* We're saving a copy of the body, so we'll update the
		     callgraph nodes in place.  Note that we avoid
		     altering the original callgraph node; we begin with
		     the first clone.  */
		  for (node = id->node->next_clone;
		       node;
		       node = node->next_clone)
		    {
		      edge = cgraph_edge (node, orig_stmt);
		      gcc_assert (edge);
		      edge->call_stmt = stmt;
		    }
		}
	      else
		{
		  struct cgraph_edge *edge;

		  /* We're cloning or inlining this body; duplicate the
		     associated callgraph nodes.  */
		  if (!id->versioning_p)
		    {
		      edge = cgraph_edge (id->current_node, orig_stmt);
		      if (edge)
			cgraph_clone_edge (edge, id->node, stmt,
					   REG_BR_PROB_BASE, 1, true);
		    }
		}
	      if (id->versioning_p)
		{
		  /* Update the call_expr on the edges from the new version
		     to its callees.  */
		  struct cgraph_edge *edge;
		  edge = cgraph_edge (id->node, orig_stmt);
		  if (edge)
		    edge->call_stmt = stmt;
		}
	    }
	  /* If you think we can abort here, you are wrong.
	     There is no region 0 in tree land.  */
	  gcc_assert (lookup_stmt_eh_region_fn (id->callee_cfun, orig_stmt)
		      != 0);

	  if (tree_could_throw_p (stmt))
	    {
	      int region = lookup_stmt_eh_region_fn (id->callee_cfun, orig_stmt);
	      /* Add an entry for the copied tree in the EH hashtable.
		 When saving or cloning or versioning, use the hashtable in
		 cfun, and just copy the EH number.  When inlining, use the
		 hashtable in the caller, and adjust the region number.  */
	      if (region > 0)
		add_stmt_to_eh_region (stmt, region + id->eh_region_offset);

	      /* If this tree doesn't have a region associated with it,
		 and there is a "current region,"
		 then associate this tree with the current region
		 and add edges associated with this region.  */
	      if ((lookup_stmt_eh_region_fn (id->callee_cfun,
					     orig_stmt) <= 0
		   && id->eh_region > 0)
		  && tree_could_throw_p (stmt))
		add_stmt_to_eh_region (stmt, id->eh_region);
	    }
	}
    }
  return copy_basic_block;
}
/* Copy edges from BB into its copy constructed earlier, scale profile
   accordingly.  Edges will be taken care of later.  Assume aux
   pointers to point to the copies of each BB.  */
static void
copy_edges_for_bb (basic_block bb, int count_scale)
{
  basic_block new_bb = bb->aux;
  edge_iterator ei;
  edge old_edge;
  block_stmt_iterator bsi;
  int flags;

  /* Use the indices from the original blocks to create edges for the
     new ones.  */
  FOR_EACH_EDGE (old_edge, ei, bb->succs)
    if (!(old_edge->flags & EDGE_EH))
      {
	edge new;

	flags = old_edge->flags;

	/* Return edges do get a FALLTHRU flag when they get inlined.  */
	if (old_edge->dest->index == EXIT_BLOCK && !old_edge->flags
	    && old_edge->dest->aux != EXIT_BLOCK_PTR)
	  flags |= EDGE_FALLTHRU;
	new = make_edge (new_bb, old_edge->dest->aux, flags);
	new->count = old_edge->count * count_scale / REG_BR_PROB_BASE;
	new->probability = old_edge->probability;
      }

  if (bb->index == ENTRY_BLOCK || bb->index == EXIT_BLOCK)
    return;

  for (bsi = bsi_start (new_bb); !bsi_end_p (bsi);)
    {
      tree copy_stmt;

      copy_stmt = bsi_stmt (bsi);
      update_stmt (copy_stmt);
      /* Do this before the possible split_block.  */
      bsi_next (&bsi);

      /* If this tree could throw an exception, there are two
	 cases where we need to add abnormal edge(s): the
	 tree wasn't in a region and there is a "current
	 region" in the caller; or the original tree had
	 EH edges.  In both cases split the block after the tree,
	 and add abnormal edge(s) as needed; we need both
	 those from the callee and the caller.
	 We check whether the copy can throw, because the const
	 propagation can change an INDIRECT_REF which throws
	 into a COMPONENT_REF which doesn't.  If the copy
	 can throw, the original could also throw.  */

      if (tree_can_throw_internal (copy_stmt))
	{
	  if (!bsi_end_p (bsi))
	    /* Note that bb's predecessor edges aren't necessarily
	       right at this point; split_block doesn't care.  */
	    {
	      edge e = split_block (new_bb, copy_stmt);
	      new_bb = e->dest;
	      bsi = bsi_start (new_bb);
	    }

	  make_eh_edges (copy_stmt);
	}
    }
}
/* Wrapper for remap_decl so it can be used as a callback.  */
static tree
remap_decl_1 (tree decl, void *data)
{
  return remap_decl (decl, data);
}
899 /* Make a copy of the body of FN so that it can be inserted inline in
900 another function. Walks FN via CFG, returns new fndecl. */
902 static tree
903 copy_cfg_body (inline_data * id, gcov_type count, int frequency,
904 basic_block entry_block_map, basic_block exit_block_map)
906 tree callee_fndecl = id->callee;
907 /* Original cfun for the callee, doesn't change. */
908 struct function *callee_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
909 /* Copy, built by this function. */
910 struct function *new_cfun;
911 /* Place to copy from; when a copy of the function was saved off earlier,
912 use that instead of the main copy. */
913 struct function *cfun_to_copy =
914 (struct function *) ggc_alloc_cleared (sizeof (struct function));
915 basic_block bb;
916 tree new_fndecl = NULL;
917 bool saving_or_cloning;
918 int count_scale, frequency_scale;
920 if (ENTRY_BLOCK_PTR_FOR_FUNCTION (callee_cfun)->count)
921 count_scale = (REG_BR_PROB_BASE * count
922 / ENTRY_BLOCK_PTR_FOR_FUNCTION (callee_cfun)->count);
923 else
924 count_scale = 1;
926 if (ENTRY_BLOCK_PTR_FOR_FUNCTION (callee_cfun)->frequency)
927 frequency_scale = (REG_BR_PROB_BASE * frequency
929 ENTRY_BLOCK_PTR_FOR_FUNCTION (callee_cfun)->frequency);
930 else
931 frequency_scale = count_scale;
933 /* Register specific tree functions. */
934 tree_register_cfg_hooks ();
936 /* Must have a CFG here at this point. */
937 gcc_assert (ENTRY_BLOCK_PTR_FOR_FUNCTION
938 (DECL_STRUCT_FUNCTION (callee_fndecl)));
940 *cfun_to_copy = *DECL_STRUCT_FUNCTION (callee_fndecl);
942 /* If there is a saved_cfg+saved_args lurking in the
943 struct function, a copy of the callee body was saved there, and
944 the 'struct cgraph edge' nodes have been fudged to point into the
945 saved body. Accordingly, we want to copy that saved body so the
946 callgraph edges will be recognized and cloned properly. */
947 if (cfun_to_copy->saved_cfg)
949 cfun_to_copy->cfg = cfun_to_copy->saved_cfg;
950 cfun_to_copy->eh = cfun_to_copy->saved_eh;
952 id->callee_cfun = cfun_to_copy;
954 /* If saving or cloning a function body, create new basic_block_info
955 and label_to_block_maps. Otherwise, we're duplicating a function
956 body for inlining; insert our new blocks and labels into the
957 existing varrays. */
958 saving_or_cloning = (id->saving_p || id->cloning_p || id->versioning_p);
959 if (saving_or_cloning)
961 new_cfun =
962 (struct function *) ggc_alloc_cleared (sizeof (struct function));
963 *new_cfun = *DECL_STRUCT_FUNCTION (callee_fndecl);
964 new_cfun->cfg = NULL;
965 new_cfun->decl = new_fndecl = copy_node (callee_fndecl);
966 new_cfun->ib_boundaries_block = (varray_type) 0;
967 DECL_STRUCT_FUNCTION (new_fndecl) = new_cfun;
968 push_cfun (new_cfun);
969 init_empty_tree_cfg ();
971 ENTRY_BLOCK_PTR->count =
972 (ENTRY_BLOCK_PTR_FOR_FUNCTION (callee_cfun)->count * count_scale /
973 REG_BR_PROB_BASE);
974 ENTRY_BLOCK_PTR->frequency =
975 (ENTRY_BLOCK_PTR_FOR_FUNCTION (callee_cfun)->frequency *
976 frequency_scale / REG_BR_PROB_BASE);
977 EXIT_BLOCK_PTR->count =
978 (EXIT_BLOCK_PTR_FOR_FUNCTION (callee_cfun)->count * count_scale /
979 REG_BR_PROB_BASE);
980 EXIT_BLOCK_PTR->frequency =
981 (EXIT_BLOCK_PTR_FOR_FUNCTION (callee_cfun)->frequency *
982 frequency_scale / REG_BR_PROB_BASE);
984 entry_block_map = ENTRY_BLOCK_PTR;
985 exit_block_map = EXIT_BLOCK_PTR;
988 ENTRY_BLOCK_PTR_FOR_FUNCTION (cfun_to_copy)->aux = entry_block_map;
989 EXIT_BLOCK_PTR_FOR_FUNCTION (cfun_to_copy)->aux = exit_block_map;
992 /* Duplicate any exception-handling regions. */
993 if (cfun->eh)
995 if (saving_or_cloning)
996 init_eh_for_function ();
997 id->eh_region_offset = duplicate_eh_regions (cfun_to_copy,
998 remap_decl_1,
999 id, id->eh_region);
1000 gcc_assert (inlining_p (id) || !id->eh_region_offset);
1002 /* Use aux pointers to map the original blocks to copy. */
1003 FOR_EACH_BB_FN (bb, cfun_to_copy)
1004 bb->aux = copy_bb (id, bb, frequency_scale, count_scale);
1005 /* Now that we've duplicated the blocks, duplicate their edges. */
1006 FOR_ALL_BB_FN (bb, cfun_to_copy)
1007 copy_edges_for_bb (bb, count_scale);
1008 FOR_ALL_BB_FN (bb, cfun_to_copy)
1009 bb->aux = NULL;
1011 if (saving_or_cloning)
1012 pop_cfun ();
1014 return new_fndecl;
/* Make a copy of the body of FN so that it can be inserted inline in
   another function.  */

static tree
copy_generic_body (inline_data *id)
{
  tree body;
  tree fndecl = id->callee;

  body = DECL_SAVED_TREE (fndecl);
  walk_tree (&body, copy_body_r, id, NULL);

  return body;
}

static tree
copy_body (inline_data *id, gcov_type count, int frequency,
	   basic_block entry_block_map, basic_block exit_block_map)
{
  tree fndecl = id->callee;
  tree body;

  /* If this body has a CFG, walk CFG and copy.  */
  gcc_assert (ENTRY_BLOCK_PTR_FOR_FUNCTION (DECL_STRUCT_FUNCTION (fndecl)));
  body = copy_cfg_body (id, count, frequency, entry_block_map, exit_block_map);

  return body;
}
/* Return true if VALUE is an ADDR_EXPR of an automatic variable
   defined in function FN, or of a data member thereof.  */

static bool
self_inlining_addr_expr (tree value, tree fn)
{
  tree var;

  if (TREE_CODE (value) != ADDR_EXPR)
    return false;

  var = get_base_address (TREE_OPERAND (value, 0));

  return var && lang_hooks.tree_inlining.auto_var_in_fn_p (var, fn);
}
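/* E.g. when considering the (hypothetical) recursive function

       int g (int *p) { int v = 1; return p ? *p : g (&v); }

   the argument &v in the self-call names the callee's own local V,
   which has not been remapped yet; substituting it directly into the
   inlined body would be wrong, which is what the caller of this
   predicate (setup_one_parameter) guards against.  */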
static void
setup_one_parameter (inline_data *id, tree p, tree value, tree fn,
		     basic_block bb, tree *vars)
{
  tree init_stmt;
  tree var;
  tree var_sub;

  /* If the parameter is never assigned to, we may not need to
     create a new variable here at all.  Instead, we may be able
     to just use the argument value.  */
  if (TREE_READONLY (p)
      && !TREE_ADDRESSABLE (p)
      && value && !TREE_SIDE_EFFECTS (value))
    {
      /* We may produce non-gimple trees by adding NOPs or introduce
	 invalid sharing when the operand is not really constant.
	 It is not a big deal to prohibit constant propagation here as
	 we will constant propagate in the DOM1 pass anyway.  */
      if (is_gimple_min_invariant (value)
	  && lang_hooks.types_compatible_p (TREE_TYPE (value), TREE_TYPE (p))
	  /* We have to be very careful about ADDR_EXPR.  Make sure
	     the base variable isn't a local variable of the inlined
	     function, e.g., when doing recursive inlining, direct or
	     mutually-recursive or whatever, which is why we don't
	     just test whether fn == current_function_decl.  */
	  && ! self_inlining_addr_expr (value, fn))
	{
	  insert_decl_map (id, p, value);
	  return;
	}
    }

  /* Make an equivalent VAR_DECL.  Note that we must NOT remap the type
     here since the type of this decl must be visible to the calling
     function.  */
  var = copy_decl_for_dup (p, fn, id->caller, /*versioning=*/false);

  /* See if the frontend wants to pass this by invisible reference.  If
     so, our new VAR_DECL will have REFERENCE_TYPE, and we need to
     replace uses of the PARM_DECL with dereferences.  */
  if (TREE_TYPE (var) != TREE_TYPE (p)
      && POINTER_TYPE_P (TREE_TYPE (var))
      && TREE_TYPE (TREE_TYPE (var)) == TREE_TYPE (p))
    {
      insert_decl_map (id, var, var);
      var_sub = build_fold_indirect_ref (var);
    }
  else
    var_sub = var;

  /* Register the VAR_DECL as the equivalent for the PARM_DECL;
     that way, when the PARM_DECL is encountered, it will be
     automatically replaced by the VAR_DECL.  */
  insert_decl_map (id, p, var_sub);

  /* Declare this new variable.  */
  TREE_CHAIN (var) = *vars;
  *vars = var;

  /* Make gimplifier happy about this variable.  */
  DECL_SEEN_IN_BIND_EXPR_P (var) = 1;

  /* Even if P was TREE_READONLY, the new VAR should not be.
     In the original code, we would have constructed a
     temporary, and then the function body would have never
     changed the value of P.  However, now, we will be
     constructing VAR directly.  The constructor body may
     change its value multiple times as it is being
     constructed.  Therefore, it must not be TREE_READONLY;
     the back-end assumes that a TREE_READONLY variable is
     assigned to only once.  */
  if (TYPE_NEEDS_CONSTRUCTING (TREE_TYPE (p)))
    TREE_READONLY (var) = 0;

  /* Initialize this VAR_DECL from the equivalent argument.  Convert
     the argument to the proper type in case it was promoted.  */
  if (value)
    {
      tree rhs = fold_convert (TREE_TYPE (var), value);
      block_stmt_iterator bsi = bsi_last (bb);

      if (rhs == error_mark_node)
	return;

      /* We want to use MODIFY_EXPR, not INIT_EXPR here so that we
	 keep our trees in gimple form.  */
      init_stmt = build (MODIFY_EXPR, TREE_TYPE (var), var, rhs);

      /* If we did not create a gimple value and we did not create a gimple
	 cast of a gimple value, then we will need to gimplify INIT_STMTS
	 at the end.  Note that is_gimple_cast only checks the outer
	 tree code, not its operand.  Thus the explicit check that its
	 operand is a gimple value.  */
      if (!is_gimple_val (rhs)
	  && (!is_gimple_cast (rhs)
	      || !is_gimple_val (TREE_OPERAND (rhs, 0))))
	gimplify_stmt (&init_stmt);
      bsi_insert_after (&bsi, init_stmt, BSI_NEW_STMT);
    }
}
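/* To make the above concrete: for a call foo (5) where FOO's
   parameter P is const, never addressed and of matching type, P is
   simply mapped to the constant 5 and no statement is emitted;
   otherwise a fresh local (say p.2; the name is invented by the
   compiler) is declared and p.2 = 5 is inserted at the end of BB.  */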
/* Generate code to initialize the parameters of the function at the
   top of the stack in ID from the ARGS (presented as a TREE_LIST).  */

static void
initialize_inlined_parameters (inline_data *id, tree args, tree static_chain,
			       tree fn, basic_block bb)
{
  tree parms;
  tree a;
  tree p;
  tree vars = NULL_TREE;
  int argnum = 0;

  /* Figure out what the parameters are.  */
  parms = DECL_ARGUMENTS (fn);
  if (fn == current_function_decl)
    parms = cfun->saved_args;

  /* Loop through the parameter declarations, replacing each with an
     equivalent VAR_DECL, appropriately initialized.  */
  for (p = parms, a = args; p;
       a = a ? TREE_CHAIN (a) : a, p = TREE_CHAIN (p))
    {
      tree value;

      ++argnum;

      /* Find the initializer.  */
      value = lang_hooks.tree_inlining.convert_parm_for_inlining
	      (p, a ? TREE_VALUE (a) : NULL_TREE, fn, argnum);

      setup_one_parameter (id, p, value, fn, bb, &vars);
    }

  /* Initialize the static chain.  */
  p = DECL_STRUCT_FUNCTION (fn)->static_chain_decl;
  if (fn == current_function_decl)
    p = DECL_STRUCT_FUNCTION (fn)->saved_static_chain_decl;
  if (p)
    {
      /* No static chain?  Seems like a bug in tree-nested.c.  */
      gcc_assert (static_chain);

      setup_one_parameter (id, p, static_chain, fn, bb, &vars);
    }

  declare_inline_vars (id->block, vars);
}
/* Declare a return variable to replace the RESULT_DECL for the
   function we are calling.  An appropriate DECL_STMT is returned.
   The USE_STMT is filled to contain a use of the declaration to
   indicate the return value of the function.

   RETURN_SLOT_ADDR, if non-null, was a fake parameter that
   took the address of the result.  MODIFY_DEST, if non-null, was the LHS of
   the MODIFY_EXPR to which this call is the RHS.

   The return value is a (possibly null) value that is the result of the
   function as seen by the callee.  *USE_P is a (possibly null) value that
   holds the result as seen by the caller.  */

static tree
declare_return_variable (inline_data *id, tree return_slot_addr,
			 tree modify_dest, tree *use_p)
{
  tree callee = id->callee;
  tree caller = id->caller;
  tree result = DECL_RESULT (callee);
  tree callee_type = TREE_TYPE (result);
  tree caller_type = TREE_TYPE (TREE_TYPE (callee));
  tree var, use;

  /* We don't need to do anything for functions that don't return
     anything.  */
  if (!result || VOID_TYPE_P (callee_type))
    {
      *use_p = NULL_TREE;
      return NULL_TREE;
    }

  /* If there was a return slot, then the return value is the
     dereferenced address of that object.  */
  if (return_slot_addr)
    {
      /* The front end shouldn't have used both return_slot_addr and
	 a modify expression.  */
      gcc_assert (!modify_dest);
      if (DECL_BY_REFERENCE (result))
	var = return_slot_addr;
      else
	var = build_fold_indirect_ref (return_slot_addr);
      use = NULL;
      goto done;
    }

  /* All types requiring non-trivial constructors should have been handled.  */
  gcc_assert (!TREE_ADDRESSABLE (callee_type));

  /* Attempt to avoid creating a new temporary variable.  */
  if (modify_dest)
    {
      bool use_it = false;

      /* We can't use MODIFY_DEST if there's type promotion involved.  */
      if (!lang_hooks.types_compatible_p (caller_type, callee_type))
	use_it = false;

      /* ??? If we're assigning to a variable sized type, then we must
	 reuse the destination variable, because we've no good way to
	 create variable sized temporaries at this point.  */
      else if (TREE_CODE (TYPE_SIZE_UNIT (caller_type)) != INTEGER_CST)
	use_it = true;

      /* If the callee cannot possibly modify MODIFY_DEST, then we can
	 reuse it as the result of the call directly.  Don't do this if
	 it would promote MODIFY_DEST to addressable.  */
      else if (TREE_ADDRESSABLE (result))
	use_it = false;
      else
	{
	  tree base_m = get_base_address (modify_dest);

	  /* If the base isn't a decl, then it's a pointer, and we don't
	     know where that's going to go.  */
	  if (!DECL_P (base_m))
	    use_it = false;
	  else if (is_global_var (base_m))
	    use_it = false;
	  else if (!TREE_ADDRESSABLE (base_m))
	    use_it = true;
	}

      if (use_it)
	{
	  var = modify_dest;
	  use = NULL;
	  goto done;
	}
    }

  gcc_assert (TREE_CODE (TYPE_SIZE_UNIT (callee_type)) == INTEGER_CST);

  var = copy_decl_for_dup (result, callee, caller, /*versioning=*/false);

  DECL_SEEN_IN_BIND_EXPR_P (var) = 1;
  DECL_STRUCT_FUNCTION (caller)->unexpanded_var_list
    = tree_cons (NULL_TREE, var,
		 DECL_STRUCT_FUNCTION (caller)->unexpanded_var_list);

  /* Do not have the rest of GCC warn about this variable as it should
     not be visible to the user.  */
  TREE_NO_WARNING (var) = 1;

  /* Build the use expr.  If the return type of the function was
     promoted, convert it back to the expected type.  */
  use = var;
  if (!lang_hooks.types_compatible_p (TREE_TYPE (var), caller_type))
    use = fold_convert (caller_type, var);

 done:
  /* Register the VAR_DECL as the equivalent for the RESULT_DECL; that
     way, when the RESULT_DECL is encountered, it will be
     automatically replaced by the VAR_DECL.  */
  insert_decl_map (id, result, var);

  /* Remember this so we can ignore it in remap_decls.  */
  id->retvar = var;

  *use_p = use;
  return var;
}
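/* Illustration: for an aggregate call d = f () that uses a return
   slot, RETURN_SLOT_ADDR is &d and the RESULT_DECL maps to *&d,
   i.e. to D itself; for a scalar y = f (), MODIFY_DEST is Y and,
   when the checks above allow it, Y doubles as the return variable
   so no temporary is created.  */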
/* Returns nonzero if a function can be inlined as a tree.  */

bool
tree_inlinable_function_p (tree fn)
{
  return inlinable_function_p (fn);
}

static const char *inline_forbidden_reason;
static tree
inline_forbidden_p_1 (tree *nodep, int *walk_subtrees ATTRIBUTE_UNUSED,
		      void *fnp)
{
  tree node = *nodep;
  tree fn = (tree) fnp;
  tree t;

  switch (TREE_CODE (node))
    {
    case CALL_EXPR:
      /* Refuse to inline an alloca call unless the user has explicitly
	 forced it, as this may change the program's memory overhead
	 drastically when the function using alloca is called in a loop.
	 In GCC as present in SPEC2000, inlining into schedule_block
	 caused it to require 2GB of RAM instead of 256MB.  */
      if (alloca_call_p (node)
	  && !lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn)))
	{
	  inline_forbidden_reason
	    = G_("function %q+F can never be inlined because it uses "
		 "alloca (override using the always_inline attribute)");
	  return node;
	}
      t = get_callee_fndecl (node);
      if (! t)
	break;

      /* We cannot inline functions that call setjmp.  */
      if (setjmp_call_p (t))
	{
	  inline_forbidden_reason
	    = G_("function %q+F can never be inlined because it uses setjmp");
	  return node;
	}

      if (DECL_BUILT_IN_CLASS (t) == BUILT_IN_NORMAL)
	switch (DECL_FUNCTION_CODE (t))
	  {
	    /* We cannot inline functions that take a variable number of
	       arguments.  */
	  case BUILT_IN_VA_START:
	  case BUILT_IN_STDARG_START:
	  case BUILT_IN_NEXT_ARG:
	  case BUILT_IN_VA_END:
	    inline_forbidden_reason
	      = G_("function %q+F can never be inlined because it "
		   "uses variable argument lists");
	    return node;

	  case BUILT_IN_LONGJMP:
	    /* We can't inline functions that call __builtin_longjmp at
	       all.  The non-local goto machinery really requires the
	       destination be in a different function.  If we allow the
	       function calling __builtin_longjmp to be inlined into the
	       function calling __builtin_setjmp, Things will Go Awry.  */
	    inline_forbidden_reason
	      = G_("function %q+F can never be inlined because "
		   "it uses setjmp-longjmp exception handling");
	    return node;

	  case BUILT_IN_NONLOCAL_GOTO:
	    /* Similarly.  */
	    inline_forbidden_reason
	      = G_("function %q+F can never be inlined because "
		   "it uses non-local goto");
	    return node;

	  case BUILT_IN_RETURN:
	  case BUILT_IN_APPLY_ARGS:
	    /* If a __builtin_apply_args caller would be inlined,
	       it would be saving arguments of the function it has
	       been inlined into.  Similarly __builtin_return would
	       return from the function the inline has been inlined into.  */
	    inline_forbidden_reason
	      = G_("function %q+F can never be inlined because "
		   "it uses __builtin_return or __builtin_apply_args");
	    return node;

	  default:
	    break;
	  }
      break;

    case GOTO_EXPR:
      t = TREE_OPERAND (node, 0);

      /* We will not inline a function which uses computed goto.  The
	 addresses of its local labels, which may be tucked into
	 global storage, are of course not constant across
	 instantiations, which causes unexpected behavior.  */
      if (TREE_CODE (t) != LABEL_DECL)
	{
	  inline_forbidden_reason
	    = G_("function %q+F can never be inlined "
		 "because it contains a computed goto");
	  return node;
	}
      break;

    case LABEL_EXPR:
      t = TREE_OPERAND (node, 0);
      if (DECL_NONLOCAL (t))
	{
	  /* We cannot inline a function that receives a non-local goto
	     because we cannot remap the destination label used in the
	     function that is performing the non-local goto.  */
	  inline_forbidden_reason
	    = G_("function %q+F can never be inlined "
		 "because it receives a non-local goto");
	  return node;
	}
      break;

    case RECORD_TYPE:
    case UNION_TYPE:
      /* We cannot inline a function of the form

	   void F (int i) { struct S { int ar[i]; } s; }

	 Attempting to do so produces a catch-22.
	 If walk_tree examines the TYPE_FIELDS chain of RECORD_TYPE/
	 UNION_TYPE nodes, then it goes into infinite recursion on a
	 structure containing a pointer to its own type.  If it doesn't,
	 then the type node for S doesn't get adjusted properly when
	 F is inlined.

	 ??? This is likely no longer true, but it's too late in the 4.0
	 cycle to try to find out.  This should be checked for 4.1.  */
      for (t = TYPE_FIELDS (node); t; t = TREE_CHAIN (t))
	if (variably_modified_type_p (TREE_TYPE (t), NULL))
	  {
	    inline_forbidden_reason
	      = G_("function %q+F can never be inlined "
		   "because it uses variable sized variables");
	    return node;
	  }

    default:
      break;
    }

  return NULL_TREE;
}
/* Return subexpression representing possible alloca call, if any.  */
static tree
inline_forbidden_p (tree fndecl)
{
  location_t saved_loc = input_location;
  block_stmt_iterator bsi;
  basic_block bb;
  tree ret = NULL_TREE;

  FOR_EACH_BB_FN (bb, DECL_STRUCT_FUNCTION (fndecl))
    for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
      {
	ret = walk_tree_without_duplicates (bsi_stmt_ptr (bsi),
					    inline_forbidden_p_1, fndecl);
	if (ret)
	  goto egress;
      }

egress:
  input_location = saved_loc;
  return ret;
}
/* Returns nonzero if FN is a function that does not have any
   fundamental inline blocking properties.  */

static bool
inlinable_function_p (tree fn)
{
  bool inlinable = true;

  /* If we've already decided this function shouldn't be inlined,
     there's no need to check again.  */
  if (DECL_UNINLINABLE (fn))
    return false;

  /* See if there is any language-specific reason it cannot be
     inlined.  (It is important that this hook be called early because
     in C++ it may result in template instantiation.)
     If the function is not inlinable for language-specific reasons,
     it is left up to the langhook to explain why.  */
  inlinable = !lang_hooks.tree_inlining.cannot_inline_tree_fn (&fn);

  /* If we don't have the function body available, we can't inline it.
     However, this should not be recorded since we also get here for
     forward declared inline functions.  Therefore, return at once.  */
  if (!DECL_SAVED_TREE (fn))
    return false;

  /* If we're not inlining at all, then we cannot inline this function.  */
  else if (!flag_inline_trees)
    inlinable = false;

  /* Only try to inline functions if DECL_INLINE is set.  This should be
     true for all functions declared `inline', and for all other functions
     as well with -finline-functions.

     Don't think of disregarding DECL_INLINE when flag_inline_trees == 2;
     it's the front-end that must set DECL_INLINE in this case, because
     dwarf2out loses if a function that does not have DECL_INLINE set is
     inlined anyway.  That is why we have both DECL_INLINE and
     DECL_DECLARED_INLINE_P.  */
  /* FIXME: When flag_inline_trees dies, the check for flag_unit_at_a_time
     here should be redundant.  */
  else if (!DECL_INLINE (fn) && !flag_unit_at_a_time)
    inlinable = false;

  else if (inline_forbidden_p (fn))
    {
      /* See if we should warn about uninlinable functions.  Previously,
	 some of these warnings would be issued while trying to expand
	 the function inline, but that would cause multiple warnings
	 about functions that would for example call alloca.  But since
	 this is a property of the function, just one warning is enough.
	 As a bonus we can now give more details about the reason why a
	 function is not inlinable.
	 We only warn for functions declared `inline' by the user.  */
      bool do_warning = (warn_inline
			 && DECL_INLINE (fn)
			 && DECL_DECLARED_INLINE_P (fn)
			 && !DECL_IN_SYSTEM_HEADER (fn));

      if (lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn)))
	sorry (inline_forbidden_reason, fn);
      else if (do_warning)
	warning (OPT_Winline, inline_forbidden_reason, fn);

      inlinable = false;
    }

  /* Squirrel away the result so that we don't have to check again.  */
  DECL_UNINLINABLE (fn) = !inlinable;

  return inlinable;
}
/* Estimate the cost of a memory move.  Use machine dependent
   word size and take possible memcpy call into account.  */

static int
estimate_move_cost (tree type)
{
  HOST_WIDE_INT size;

  size = int_size_in_bytes (type);

  if (size < 0 || size > MOVE_MAX_PIECES * MOVE_RATIO)
    /* Cost of a memcpy call, 3 arguments and the call.  */
    return 4;
  else
    return ((size + MOVE_MAX_PIECES - 1) / MOVE_MAX_PIECES);
}
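/* A worked example: on a hypothetical target with MOVE_MAX_PIECES == 8
   and MOVE_RATIO == 3, an 8-byte scalar costs (8 + 7) / 8 == 1, a
   24-byte struct costs (24 + 7) / 8 == 3, and anything larger is
   assumed to become a memcpy call with a flat cost of 4.  */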
1605 /* Used by estimate_num_insns. Estimate number of instructions seen
1606 by given statement. */
1608 static tree
1609 estimate_num_insns_1 (tree *tp, int *walk_subtrees, void *data)
1611 int *count = data;
1612 tree x = *tp;
1614 if (IS_TYPE_OR_DECL_P (x))
1616 *walk_subtrees = 0;
1617 return NULL;
1619 /* Assume that constants and references counts nothing. These should
1620 be majorized by amount of operations among them we count later
1621 and are common target of CSE and similar optimizations. */
1622 else if (CONSTANT_CLASS_P (x) || REFERENCE_CLASS_P (x))
1623 return NULL;
1625 switch (TREE_CODE (x))
1627 /* Containers have no cost. */
1628 case TREE_LIST:
1629 case TREE_VEC:
1630 case BLOCK:
1631 case COMPONENT_REF:
1632 case BIT_FIELD_REF:
1633 case INDIRECT_REF:
1634 case ALIGN_INDIRECT_REF:
1635 case MISALIGNED_INDIRECT_REF:
1636 case ARRAY_REF:
1637 case ARRAY_RANGE_REF:
1638 case OBJ_TYPE_REF:
1639 case EXC_PTR_EXPR: /* ??? */
1640 case FILTER_EXPR: /* ??? */
1641 case COMPOUND_EXPR:
1642 case BIND_EXPR:
1643 case WITH_CLEANUP_EXPR:
1644 case NOP_EXPR:
1645 case VIEW_CONVERT_EXPR:
1646 case SAVE_EXPR:
1647 case ADDR_EXPR:
1648 case COMPLEX_EXPR:
1649 case RANGE_EXPR:
1650 case CASE_LABEL_EXPR:
1651 case SSA_NAME:
1652 case CATCH_EXPR:
1653 case EH_FILTER_EXPR:
1654 case STATEMENT_LIST:
1655 case ERROR_MARK:
1656 case NON_LVALUE_EXPR:
1657 case FDESC_EXPR:
1658 case VA_ARG_EXPR:
1659 case TRY_CATCH_EXPR:
1660 case TRY_FINALLY_EXPR:
1661 case LABEL_EXPR:
1662 case GOTO_EXPR:
1663 case RETURN_EXPR:
1664 case EXIT_EXPR:
1665 case LOOP_EXPR:
1666 case PHI_NODE:
1667 case WITH_SIZE_EXPR:
1668 break;
1670 /* We don't account constants for now. Assume that the cost is amortized
1671 by operations that do use them. We may re-consider this decision once
1672 we are able to optimize the tree before estimating its size and break
1673 out static initializers. */
1674 case IDENTIFIER_NODE:
1675 case INTEGER_CST:
1676 case REAL_CST:
1677 case COMPLEX_CST:
1678 case VECTOR_CST:
1679 case STRING_CST:
1680 *walk_subtrees = 0;
1681 return NULL;
1683 /* Try to estimate the cost of assignments. We have three cases to
1684 deal with:
1685 1) Simple assignments to registers;
1686 2) Stores to things that must live in memory. This includes
1687 "normal" stores to scalars, but also assignments of large
1688 structures, or constructors of big arrays;
1689 3) TARGET_EXPRs.
1691 Let us look at the first two cases, assuming we have "a = b + C":
1692 <modify_expr <var_decl "a"> <plus_expr <var_decl "b"> <constant C>>
1693 If "a" is a GIMPLE register, the assignment to it is free on almost
1694 any target, because "a" usually ends up in a real register. Hence
1695 the only cost of this expression comes from the PLUS_EXPR, and we
1696 can ignore the MODIFY_EXPR.
1697 If "a" is not a GIMPLE register, the assignment to "a" will most
1698 likely be a real store, so the cost of the MODIFY_EXPR is the cost
1699 of moving something into "a", which we compute using the function
1700 estimate_move_cost.
1702 The third case deals with TARGET_EXPRs, for which the semantics are
1703 that a temporary is assigned, unless the TARGET_EXPR itself is being
1704 assigned to something else. In the latter case we do not need the
1705 temporary. E.g. in <modify_expr <var_decl "a"> <target_expr>>, the
1706 MODIFY_EXPR is free. */
1707 case INIT_EXPR:
1708 case MODIFY_EXPR:
1709 /* Is the right and side a TARGET_EXPR? */
1710 if (TREE_CODE (TREE_OPERAND (x, 1)) == TARGET_EXPR)
1711 break;
1712 /* ... fall through ... */
1714 case TARGET_EXPR:
1715 x = TREE_OPERAND (x, 0);
1716 /* Is this an assignments to a register? */
1717 if (is_gimple_reg (x))
1718 break;
1719 /* Otherwise it's a store, so fall through to compute the move cost. */
1721 case CONSTRUCTOR:
1722 *count += estimate_move_cost (TREE_TYPE (x));
1723 break;
1725 /* Assign cost of 1 to usual operations.
1726 ??? We may consider mapping RTL costs to this. */
1727 case COND_EXPR:
1728 case VEC_COND_EXPR:
1730 case PLUS_EXPR:
1731 case MINUS_EXPR:
1732 case MULT_EXPR:
1734 case FIX_TRUNC_EXPR:
1735 case FIX_CEIL_EXPR:
1736 case FIX_FLOOR_EXPR:
1737 case FIX_ROUND_EXPR:
1739 case NEGATE_EXPR:
1740 case FLOAT_EXPR:
1741 case MIN_EXPR:
1742 case MAX_EXPR:
1743 case ABS_EXPR:
1745 case LSHIFT_EXPR:
1746 case RSHIFT_EXPR:
1747 case LROTATE_EXPR:
1748 case RROTATE_EXPR:
1749 case VEC_LSHIFT_EXPR:
1750 case VEC_RSHIFT_EXPR:
1752 case BIT_IOR_EXPR:
1753 case BIT_XOR_EXPR:
1754 case BIT_AND_EXPR:
1755 case BIT_NOT_EXPR:
1757 case TRUTH_ANDIF_EXPR:
1758 case TRUTH_ORIF_EXPR:
1759 case TRUTH_AND_EXPR:
1760 case TRUTH_OR_EXPR:
1761 case TRUTH_XOR_EXPR:
1762 case TRUTH_NOT_EXPR:
1764 case LT_EXPR:
1765 case LE_EXPR:
1766 case GT_EXPR:
1767 case GE_EXPR:
1768 case EQ_EXPR:
1769 case NE_EXPR:
1770 case ORDERED_EXPR:
1771 case UNORDERED_EXPR:
1773 case UNLT_EXPR:
1774 case UNLE_EXPR:
1775 case UNGT_EXPR:
1776 case UNGE_EXPR:
1777 case UNEQ_EXPR:
1778 case LTGT_EXPR:
1780 case CONVERT_EXPR:
1782 case CONJ_EXPR:
1784 case PREDECREMENT_EXPR:
1785 case PREINCREMENT_EXPR:
1786 case POSTDECREMENT_EXPR:
1787 case POSTINCREMENT_EXPR:
1789 case SWITCH_EXPR:
1791 case ASM_EXPR:
1793 case REALIGN_LOAD_EXPR:
1795 case REDUC_MAX_EXPR:
1796 case REDUC_MIN_EXPR:
1797 case REDUC_PLUS_EXPR:
1799 case RESX_EXPR:
1800 *count += 1;
1801 break;
1803 /* Few special cases of expensive operations. This is useful
1804 to avoid inlining on functions having too many of these. */
1805 case TRUNC_DIV_EXPR:
1806 case CEIL_DIV_EXPR:
1807 case FLOOR_DIV_EXPR:
1808 case ROUND_DIV_EXPR:
1809 case EXACT_DIV_EXPR:
1810 case TRUNC_MOD_EXPR:
1811 case CEIL_MOD_EXPR:
1812 case FLOOR_MOD_EXPR:
1813 case ROUND_MOD_EXPR:
1814 case RDIV_EXPR:
1815 *count += 10;
1816 break;
1817 case CALL_EXPR:
1819 tree decl = get_callee_fndecl (x);
1820 tree arg;
1822 if (decl && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
1823 switch (DECL_FUNCTION_CODE (decl))
1825 case BUILT_IN_CONSTANT_P:
1826 *walk_subtrees = 0;
1827 return NULL_TREE;
1828 case BUILT_IN_EXPECT:
1829 return NULL_TREE;
1830 default:
1831 break;
1834 /* Our cost must be kept in sync with cgraph_estimate_size_after_inlining,
1835 which uses the function declaration to figure out the arguments.  */
1836 if (!decl)
1838 for (arg = TREE_OPERAND (x, 1); arg; arg = TREE_CHAIN (arg))
1839 *count += estimate_move_cost (TREE_TYPE (TREE_VALUE (arg)));
1841 else
1843 for (arg = DECL_ARGUMENTS (decl); arg; arg = TREE_CHAIN (arg))
1844 *count += estimate_move_cost (TREE_TYPE (arg));
1847 *count += PARAM_VALUE (PARAM_INLINE_CALL_COST);
1848 break;
1850 default:
1851 gcc_unreachable ();
1853 return NULL;
1856 /* Estimate number of instructions that will be created by expanding EXPR. */
1858 int
1859 estimate_num_insns (tree expr)
1861 int num = 0;
1862 struct pointer_set_t *visited_nodes;
1863 basic_block bb;
1864 block_stmt_iterator bsi;
1865 struct function *my_function;
1867 /* If we're given an entire function, walk the CFG. */
1868 if (TREE_CODE (expr) == FUNCTION_DECL)
1870 my_function = DECL_STRUCT_FUNCTION (expr);
1871 gcc_assert (my_function && my_function->cfg);
1872 visited_nodes = pointer_set_create ();
1873 FOR_EACH_BB_FN (bb, my_function)
1875 for (bsi = bsi_start (bb);
1876 !bsi_end_p (bsi);
1877 bsi_next (&bsi))
1879 walk_tree (bsi_stmt_ptr (bsi), estimate_num_insns_1,
1880 &num, visited_nodes);
1883 pointer_set_destroy (visited_nodes);
1885 else
1886 walk_tree_without_duplicates (&expr, estimate_num_insns_1, &num);
1888 return num;
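/* A minimal usage sketch; the caller shown is hypothetical, but the
   heuristics in ipa-inline.c compare estimates like this against the
   max-inline-insns-* parameters.  */
#if 0
  if (estimate_num_insns (fndecl)
      <= PARAM_VALUE (PARAM_MAX_INLINE_INSNS_SINGLE))
    ;  /* FNDECL is small enough to be considered for inlining.  */
#endif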
1891 typedef struct function *function_p;
1893 DEF_VEC_P(function_p);
1894 DEF_VEC_ALLOC_P(function_p,heap);
1896 /* Initialized with NOGC, making this poisonous to the garbage collector. */
1897 static VEC(function_p,heap) *cfun_stack;
1899 void
1900 push_cfun (struct function *new_cfun)
1902 VEC_safe_push (function_p, heap, cfun_stack, cfun);
1903 cfun = new_cfun;
1906 void
1907 pop_cfun (void)
1909 cfun = VEC_pop (function_p, cfun_stack);
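/* A pairing sketch: push_cfun and pop_cfun must nest, so that cfun is
   restored to the enclosing function afterwards.  OTHER_FNDECL is a
   stand-in name.  */
#if 0
  push_cfun (DECL_STRUCT_FUNCTION (other_fndecl));
  /* ... inspect or modify that function's CFG through cfun ... */
  pop_cfun ();
#endif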
1912 /* Install new lexical TREE_BLOCK underneath 'current_block'. */
1913 static void
1914 add_lexical_block (tree current_block, tree new_block)
1916 tree *blk_p;
1918 /* Walk to the last sub-block. */
1919 for (blk_p = &BLOCK_SUBBLOCKS (current_block);
1920 *blk_p;
1921 blk_p = &TREE_CHAIN (*blk_p))
1923 *blk_p = new_block;
1924 BLOCK_SUPERCONTEXT (new_block) = current_block;
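/* For illustration: if CURRENT_BLOCK already has sub-blocks B1 -> B2,
   the walk above appends at the tail, giving B1 -> B2 -> NEW_BLOCK,
   and NEW_BLOCK's BLOCK_SUPERCONTEXT points back at CURRENT_BLOCK.  */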
1927 /* If *TP is a CALL_EXPR, replace it with its inline expansion. */
1929 static bool
1930 expand_call_inline (basic_block bb, tree stmt, tree *tp, void *data)
1932 inline_data *id;
1933 tree t;
1934 tree use_retvar;
1935 tree fn;
1936 splay_tree st;
1937 tree args;
1938 tree return_slot_addr;
1939 tree modify_dest;
1940 location_t saved_location;
1941 struct cgraph_edge *cg_edge;
1942 const char *reason;
1943 basic_block return_block;
1944 edge e;
1945 block_stmt_iterator bsi, stmt_bsi;
1946 bool successfully_inlined = FALSE;
1947 tree t_step;
1948 tree var;
1949 struct cgraph_node *old_node;
1950 tree decl;
1952 /* See what we've got. */
1953 id = (inline_data *) data;
1954 t = *tp;
1956 /* Set input_location here so we get the right instantiation context
1957 if we call instantiate_decl from inlinable_function_p. */
1958 saved_location = input_location;
1959 if (EXPR_HAS_LOCATION (t))
1960 input_location = EXPR_LOCATION (t);
1962 /* From here on, we're only interested in CALL_EXPRs. */
1963 if (TREE_CODE (t) != CALL_EXPR)
1964 goto egress;
1966 /* First, see if we can figure out what function is being called.
1967 If we cannot, then there is no hope of inlining the function. */
1968 fn = get_callee_fndecl (t);
1969 if (!fn)
1970 goto egress;
1972 /* Turn forward declarations into real ones. */
1973 fn = cgraph_node (fn)->decl;
1975 /* If fn is a declaration of a function in a nested scope that was
1976 globally declared inline, we don't set its DECL_INITIAL.
1977 However, we can't blindly follow DECL_ABSTRACT_ORIGIN because the
1978 C++ front-end uses it for cdtors to refer to their internal
1979 declarations, which are not real functions.  Fortunately those
1980 don't have trees to be saved, so we can tell by checking their
1981 DECL_SAVED_TREE. */
1982 if (! DECL_INITIAL (fn)
1983 && DECL_ABSTRACT_ORIGIN (fn)
1984 && DECL_SAVED_TREE (DECL_ABSTRACT_ORIGIN (fn)))
1985 fn = DECL_ABSTRACT_ORIGIN (fn);
1987 /* Objective-C and Fortran still call tree_rest_of_compilation directly.
1988 Remove this check once that is fixed.  */
1989 if (!id->current_node->analyzed)
1990 goto egress;
1992 cg_edge = cgraph_edge (id->current_node, stmt);
1994 /* Constant propagation of arguments done during previous inlining
1995 may create a new direct call.  Produce an edge for it.  */
1996 if (!cg_edge)
1998 struct cgraph_node *dest = cgraph_node (fn);
2000 /* We have a missing edge in the callgraph.  This can happen when
2001 previous inlining turned an indirect call into a direct call by
2002 constant propagating arguments.  In all other cases we hit a bug
2003 (incorrect node sharing is the most common reason for missing edges).  */
2004 gcc_assert (dest->needed || !flag_unit_at_a_time);
2005 cgraph_create_edge (id->node, dest, stmt,
2006 bb->count, bb->loop_depth)->inline_failed
2007 = N_("originally indirect function call not considered for inlining");
2008 goto egress;
2011 /* Don't try to inline functions that are not well-suited to
2012 inlining. */
2013 if (!cgraph_inline_p (cg_edge, &reason))
2015 if (lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn))
2016 /* Avoid warnings during early inline pass. */
2017 && (!flag_unit_at_a_time || cgraph_global_info_ready))
2019 sorry ("inlining failed in call to %q+F: %s", fn, reason);
2020 sorry ("called from here");
2022 else if (warn_inline && DECL_DECLARED_INLINE_P (fn)
2023 && !DECL_IN_SYSTEM_HEADER (fn)
2024 && strlen (reason)
2025 && !lookup_attribute ("noinline", DECL_ATTRIBUTES (fn))
2026 /* Avoid warnings during early inline pass. */
2027 && (!flag_unit_at_a_time || cgraph_global_info_ready))
2029 warning (OPT_Winline, "inlining failed in call to %q+F: %s",
2030 fn, reason);
2031 warning (OPT_Winline, "called from here");
2033 goto egress;
2036 #ifdef ENABLE_CHECKING
2037 if (cg_edge->callee->decl != id->node->decl)
2038 verify_cgraph_node (cg_edge->callee);
2039 #endif
2041 /* We will be inlining this callee. */
2043 id->eh_region = lookup_stmt_eh_region (stmt);
2045 /* Split the block holding the CALL_EXPR. */
2047 e = split_block (bb, stmt);
2048 bb = e->src;
2049 return_block = e->dest;
2050 remove_edge (e);
2052 /* split_block splits after the statement; work around this by moving
2053 the call into the second half_bb.  Not pretty, but it seems easier than
2054 doing the CFG manipulation by hand when the CALL_EXPR is the last
2055 statement of BB.  */
2056 stmt_bsi = bsi_last (bb);
2057 bsi = bsi_start (return_block);
2058 if (!bsi_end_p (bsi))
2059 bsi_move_before (&stmt_bsi, &bsi);
2060 else
2062 tree stmt = bsi_stmt (stmt_bsi);
2063 bsi_remove (&stmt_bsi);
2064 bsi_insert_after (&bsi, stmt, BSI_NEW_STMT);
2066 stmt_bsi = bsi_start (return_block);
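/* Schematically, at this point (a sketch):

     BB:            the statements that preceded the call; its outgoing
                    edge has been removed
     RETURN_BLOCK:  starts with the CALL_EXPR, followed by the
                    statements that came after the call

   The callee body is copied in between below, with BB as the entry
   point and RETURN_BLOCK as the place where the inlined RETURN_EXPRs
   land; the CALL_EXPR itself is later replaced by a reference to the
   return variable or removed.  */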
2068 /* Build a block containing code to initialize the arguments, the
2069 actual inline expansion of the body, and a label for the return
2070 statements within the function to jump to. The type of the
2071 statement expression is the return type of the function call. */
2072 id->block = make_node (BLOCK);
2073 BLOCK_ABSTRACT_ORIGIN (id->block) = fn;
2074 BLOCK_SOURCE_LOCATION (id->block) = input_location;
2075 add_lexical_block (TREE_BLOCK (stmt), id->block);
2077 /* Local declarations will be replaced by their equivalents in this
2078 map. */
2079 st = id->decl_map;
2080 id->decl_map = splay_tree_new (splay_tree_compare_pointers,
2081 NULL, NULL);
2083 /* Initialize the parameters. */
2084 args = TREE_OPERAND (t, 1);
2086 initialize_inlined_parameters (id, args, TREE_OPERAND (t, 2), fn, bb);
2088 /* Record the function we are about to inline. */
2089 id->callee = fn;
2091 if (DECL_STRUCT_FUNCTION (fn)->saved_blocks)
2092 add_lexical_block (id->block, remap_blocks (DECL_STRUCT_FUNCTION (fn)->saved_blocks, id));
2093 else if (DECL_INITIAL (fn))
2094 add_lexical_block (id->block, remap_blocks (DECL_INITIAL (fn), id));
2096 /* Return statements in the function body will be replaced by jumps
2097 to the RET_LABEL. */
2099 gcc_assert (DECL_INITIAL (fn));
2100 gcc_assert (TREE_CODE (DECL_INITIAL (fn)) == BLOCK);
2102 /* Find the lhs to which the result of this call is assigned. */
2103 return_slot_addr = NULL;
2104 if (TREE_CODE (stmt) == MODIFY_EXPR)
2106 modify_dest = TREE_OPERAND (stmt, 0);
2108 /* The function which we are inlining might not return a value,
2109 in which case we should issue a warning that the function
2110 does not return a value. In that case the optimizers will
2111 see that the variable to which the value is assigned was not
2112 initialized. We do not want to issue a warning about that
2113 uninitialized variable. */
2114 if (DECL_P (modify_dest))
2115 TREE_NO_WARNING (modify_dest) = 1;
2116 if (CALL_EXPR_RETURN_SLOT_OPT (t))
2118 return_slot_addr = build_fold_addr_expr (modify_dest);
2119 modify_dest = NULL;
2122 else
2123 modify_dest = NULL;
2125 /* Declare the return variable for the function. */
2126 decl = declare_return_variable (id, return_slot_addr,
2127 modify_dest, &use_retvar);
2128 /* Do this only if declare_return_variable created a new one. */
2129 if (decl && !return_slot_addr && decl != modify_dest)
2130 declare_inline_vars (id->block, decl);
2132 /* After we've initialized the parameters, we insert the body of the
2133 function itself. */
2134 old_node = id->current_node;
2136 /* Anoint the callee-to-be-duplicated as the "current_node." When
2137 CALL_EXPRs within callee are duplicated, the edges from callee to
2138 callee's callees (caller's grandchildren) will be cloned. */
2139 id->current_node = cg_edge->callee;
2141 /* This is it. Duplicate the callee body. Assume callee is
2142 pre-gimplified. Note that we must not alter the caller
2143 function in any way before this point, as this CALL_EXPR may be
2144 a self-referential call; if we're calling ourselves, we need to
2145 duplicate our body before altering anything. */
2146 copy_body (id, bb->count, bb->frequency, bb, return_block);
2147 id->current_node = old_node;
2149 /* Add the local variables of the inlined callee to the caller.  */
2150 t_step = id->callee_cfun->unexpanded_var_list;
2151 if (id->callee_cfun->saved_unexpanded_var_list)
2152 t_step = id->callee_cfun->saved_unexpanded_var_list;
2153 for (; t_step; t_step = TREE_CHAIN (t_step))
2155 var = TREE_VALUE (t_step);
2156 if (TREE_STATIC (var) && !TREE_ASM_WRITTEN (var))
2157 cfun->unexpanded_var_list = tree_cons (NULL_TREE, var,
2158 cfun->unexpanded_var_list);
2159 else
2160 cfun->unexpanded_var_list = tree_cons (NULL_TREE, remap_decl (var, id),
2161 cfun->unexpanded_var_list);
2164 /* Clean up. */
2165 splay_tree_delete (id->decl_map);
2166 id->decl_map = st;
2168 /* If the inlined function returns a result that we care about,
2169 clobber the CALL_EXPR with a reference to the return variable. */
2170 if (use_retvar && (TREE_CODE (bsi_stmt (stmt_bsi)) != CALL_EXPR))
2172 *tp = use_retvar;
2173 maybe_clean_or_replace_eh_stmt (stmt, stmt);
2175 else
2176 /* We're modifying a BSI owned by gimple_expand_calls_inline ();
2177 bsi_remove () will leave the iterator in a sane state.  */
2178 bsi_remove (&stmt_bsi);
2180 bsi_next (&bsi);
2181 if (bsi_end_p (bsi))
2182 tree_purge_dead_eh_edges (return_block);
2184 /* If the value of the new expression is ignored, that's OK. We
2185 don't warn about this for CALL_EXPRs, so we shouldn't warn about
2186 the equivalent inlined version either. */
2187 TREE_USED (*tp) = 1;
2189 /* Output the inlining info for this abstract function, since it has been
2190 inlined. If we don't do this now, we can lose the information about the
2191 variables in the function when the blocks get blown away as soon as we
2192 remove the cgraph node. */
2193 (*debug_hooks->outlining_inline_function) (cg_edge->callee->decl);
2195 /* Update callgraph if needed. */
2196 cgraph_remove_node (cg_edge->callee);
2198 /* Declare the 'auto' variables added with this inlined body. */
2199 record_vars (BLOCK_VARS (id->block));
2200 id->block = NULL_TREE;
2201 successfully_inlined = TRUE;
2203 egress:
2204 input_location = saved_location;
2205 return successfully_inlined;
2208 /* Expand call statements reachable from STMT_P.
2209 We can only have CALL_EXPRs as the "toplevel" tree code or nested
2210 in a MODIFY_EXPR.  See tree-gimple.c:get_call_expr_in().  We
2211 unfortunately cannot use that function here because we need a pointer
2212 to the CALL_EXPR, not the tree itself.  */
2214 static bool
2215 gimple_expand_calls_inline (basic_block bb, inline_data *id)
2217 block_stmt_iterator bsi;
2219 /* Register specific tree functions. */
2220 tree_register_cfg_hooks ();
2221 for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
2223 tree *expr_p = bsi_stmt_ptr (bsi);
2224 tree stmt = *expr_p;
2226 if (TREE_CODE (*expr_p) == MODIFY_EXPR)
2227 expr_p = &TREE_OPERAND (*expr_p, 1);
2228 if (TREE_CODE (*expr_p) == WITH_SIZE_EXPR)
2229 expr_p = &TREE_OPERAND (*expr_p, 0);
2230 if (TREE_CODE (*expr_p) == CALL_EXPR)
2231 if (expand_call_inline (bb, stmt, expr_p, id))
2232 return true;
2234 return false;
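/* The three statement shapes unwrapped above, for illustration:

     foo (x);                            CALL_EXPR at the top level
     y = foo (x);                        nested in a MODIFY_EXPR
     y = WITH_SIZE_EXPR <foo (x), n>;    variable-sized result

   In each case EXPR_P ends up pointing at the CALL_EXPR itself.  */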
2237 /* Expand calls to inline functions in the body of FN. */
2239 void
2240 optimize_inline_calls (tree fn)
2242 inline_data id;
2243 tree prev_fn;
2244 basic_block bb;
2245 /* There is no point in performing inlining if errors have already
2246 occurred -- and we might crash if we try to inline invalid
2247 code. */
2248 if (errorcount || sorrycount)
2249 return;
2251 /* Clear out ID. */
2252 memset (&id, 0, sizeof (id));
2254 id.current_node = id.node = cgraph_node (fn);
2255 id.caller = fn;
2256 /* Or any functions that aren't finished yet. */
2257 prev_fn = NULL_TREE;
2258 if (current_function_decl)
2260 id.caller = current_function_decl;
2261 prev_fn = current_function_decl;
2263 push_gimplify_context ();
2265 /* Reach the trees by walking over the CFG, and note the
2266 enclosing basic-blocks in the call edges. */
2267 /* We walk the blocks going forward, because inlined function bodies
2268 will split id->current_basic_block, and the new blocks will
2269 follow it; we'll trudge through them, processing their CALL_EXPRs
2270 along the way. */
2271 FOR_EACH_BB (bb)
2272 gimple_expand_calls_inline (bb, &id);
2275 pop_gimplify_context (NULL);
2276 /* Renumber the (code) basic_blocks consecutively. */
2277 compact_blocks ();
2278 /* Renumber the lexical scoping (non-code) blocks consecutively. */
2279 number_blocks (fn);
2281 #ifdef ENABLE_CHECKING
2283 struct cgraph_edge *e;
2285 verify_cgraph_node (id.node);
2287 /* Double check that we inlined everything we are supposed to inline. */
2288 for (e = id.node->callees; e; e = e->next_callee)
2289 gcc_assert (e->inline_failed);
2291 #endif
2292 /* We need to rescale frequencies again to peak at REG_BR_PROB_BASE
2293 as inlining loops might increase the maximum. */
2294 if (ENTRY_BLOCK_PTR->count)
2295 counts_to_freqs ();
2296 fold_cond_expr_cond ();
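/* Driver-side sketch: the inlining pass invokes this once per function
   body, along the lines of
     optimize_inline_calls (current_function_decl);  */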
2299 /* FN is a function that has a complete body, and CLONE is a function whose
2300 body is to be set to a copy of FN, mapping argument declarations according
2301 to the ARG_MAP splay_tree. */
2303 void
2304 clone_body (tree clone, tree fn, void *arg_map)
2306 inline_data id;
2308 /* Clone the body, as if we were making an inline call.  But, remap the
2309 parameters in the callee to the parameters of the caller.  */
2310 memset (&id, 0, sizeof (id));
2311 id.caller = clone;
2312 id.callee = fn;
2313 id.callee_cfun = DECL_STRUCT_FUNCTION (fn);
2314 id.decl_map = (splay_tree)arg_map;
2316 /* Cloning is treated slightly differently from inlining. Set
2317 CLONING_P so that it's clear which operation we're performing. */
2318 id.cloning_p = true;
2320 /* We're not inside any EH region. */
2321 id.eh_region = -1;
2323 /* Actually copy the body. */
2324 append_to_statement_list_force (copy_generic_body (&id), &DECL_SAVED_TREE (clone));
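/* A minimal caller sketch, assuming FN and CLONE have parallel
   DECL_ARGUMENTS chains; the C++ front end builds a map along these
   lines when cloning constructors and destructors.  */
#if 0
  splay_tree map = splay_tree_new (splay_tree_compare_pointers, NULL, NULL);
  tree p1, p2;
  for (p1 = DECL_ARGUMENTS (fn), p2 = DECL_ARGUMENTS (clone);
       p1 && p2;
       p1 = TREE_CHAIN (p1), p2 = TREE_CHAIN (p2))
    splay_tree_insert (map, (splay_tree_key) p1, (splay_tree_value) p2);
  clone_body (clone, fn, map);
#endif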
2330 /* Make and return a duplicate of the body in FN.  Put copies of
2331 DECL_ARGUMENTS in *ARG_COPY and of the static chain, if any, in *SC_COPY.  */
2333 void
2334 save_body (tree fn, tree *arg_copy, tree *sc_copy)
2336 inline_data id;
2337 tree newdecl, *parg;
2338 basic_block fn_entry_block;
2339 tree t_step;
2341 memset (&id, 0, sizeof (id));
2342 id.callee = fn;
2343 id.callee_cfun = DECL_STRUCT_FUNCTION (fn);
2344 id.caller = fn;
2345 id.node = cgraph_node (fn);
2346 id.saving_p = true;
2347 id.decl_map = splay_tree_new (splay_tree_compare_pointers, NULL, NULL);
2348 *arg_copy = DECL_ARGUMENTS (fn);
2350 for (parg = arg_copy; *parg; parg = &TREE_CHAIN (*parg))
2352 tree new = copy_node (*parg);
2354 lang_hooks.dup_lang_specific_decl (new);
2355 DECL_ABSTRACT_ORIGIN (new) = DECL_ORIGIN (*parg);
2356 insert_decl_map (&id, *parg, new);
2357 TREE_CHAIN (new) = TREE_CHAIN (*parg);
2358 *parg = new;
2361 *sc_copy = DECL_STRUCT_FUNCTION (fn)->static_chain_decl;
2362 if (*sc_copy)
2364 tree new = copy_node (*sc_copy);
2366 lang_hooks.dup_lang_specific_decl (new);
2367 DECL_ABSTRACT_ORIGIN (new) = DECL_ORIGIN (*sc_copy);
2368 insert_decl_map (&id, *sc_copy, new);
2369 TREE_CHAIN (new) = TREE_CHAIN (*sc_copy);
2370 *sc_copy = new;
2373 /* We're not inside any EH region. */
2374 id.eh_region = -1;
2376 insert_decl_map (&id, DECL_RESULT (fn), DECL_RESULT (fn));
2378 DECL_STRUCT_FUNCTION (fn)->saved_blocks
2379 = remap_blocks (DECL_INITIAL (fn), &id);
2380 for (t_step = id.callee_cfun->unexpanded_var_list;
2381 t_step;
2382 t_step = TREE_CHAIN (t_step))
2384 tree var = TREE_VALUE (t_step);
2385 if (TREE_STATIC (var) && !TREE_ASM_WRITTEN (var))
2386 cfun->saved_unexpanded_var_list
2387 = tree_cons (NULL_TREE, var, cfun->saved_unexpanded_var_list);
2388 else
2389 cfun->saved_unexpanded_var_list
2390 = tree_cons (NULL_TREE, remap_decl (var, &id),
2391 cfun->saved_unexpanded_var_list);
2394 /* Actually copy the body, including a new (struct function *) and CFG.
2395 EH info is also duplicated so its labels point into the copied
2396 CFG, not the original. */
2397 fn_entry_block = ENTRY_BLOCK_PTR_FOR_FUNCTION (DECL_STRUCT_FUNCTION (fn));
2398 newdecl = copy_body (&id, fn_entry_block->count, fn_entry_block->frequency,
2399 NULL, NULL);
2400 DECL_STRUCT_FUNCTION (fn)->saved_cfg = DECL_STRUCT_FUNCTION (newdecl)->cfg;
2401 DECL_STRUCT_FUNCTION (fn)->saved_eh = DECL_STRUCT_FUNCTION (newdecl)->eh;
2403 /* Clean up. */
2404 splay_tree_delete (id.decl_map);
2407 /* Passed to walk_tree. Copies the node pointed to, if appropriate. */
2409 tree
2410 copy_tree_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
2412 enum tree_code code = TREE_CODE (*tp);
2413 inline_data *id = (inline_data *) data;
2415 /* We make copies of most nodes. */
2416 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code))
2417 || code == TREE_LIST
2418 || code == TREE_VEC
2419 || code == TYPE_DECL)
2421 /* Because the chain gets clobbered when we make a copy, we save it
2422 here. */
2423 tree chain = TREE_CHAIN (*tp);
2424 tree new;
2426 if (id && id->versioning_p && replace_ref_tree (id, tp))
2428 *walk_subtrees = 0;
2429 return NULL_TREE;
2431 /* Copy the node. */
2432 new = copy_node (*tp);
2434 /* Propagate mudflap marked-ness. */
2435 if (flag_mudflap && mf_marked_p (*tp))
2436 mf_mark (new);
2438 *tp = new;
2440 /* Now, restore the chain, if appropriate. That will cause
2441 walk_tree to walk into the chain as well. */
2442 if (code == PARM_DECL || code == TREE_LIST)
2443 TREE_CHAIN (*tp) = chain;
2445 /* For now, we don't update BLOCKs when we make copies. So, we
2446 have to nullify all BIND_EXPRs. */
2447 if (TREE_CODE (*tp) == BIND_EXPR)
2448 BIND_EXPR_BLOCK (*tp) = NULL_TREE;
2450 else if (code == CONSTRUCTOR)
2452 /* CONSTRUCTOR nodes need special handling because
2453 we need to duplicate the vector of elements. */
2454 tree new;
2456 new = copy_node (*tp);
2458 /* Propagate mudflap marked-ness. */
2459 if (flag_mudflap && mf_marked_p (*tp))
2460 mf_mark (new);
2462 CONSTRUCTOR_ELTS (new) = VEC_copy (constructor_elt, gc,
2463 CONSTRUCTOR_ELTS (*tp));
2464 *tp = new;
2466 else if (TREE_CODE_CLASS (code) == tcc_type)
2467 *walk_subtrees = 0;
2468 else if (TREE_CODE_CLASS (code) == tcc_declaration)
2469 *walk_subtrees = 0;
2470 else if (TREE_CODE_CLASS (code) == tcc_constant)
2471 *walk_subtrees = 0;
2472 else
2473 gcc_assert (code != STATEMENT_LIST);
2474 return NULL_TREE;
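/* A minimal usage sketch: with a NULL DATA argument the versioning
   check above is skipped, so a plain deep copy can be made with:  */
#if 0
  walk_tree (&expr, copy_tree_r, NULL, NULL);
#endif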
2477 /* The SAVE_EXPR pointed to by TP is being copied.  If ST contains
2478 information indicating to what new SAVE_EXPR this one should be
2479 mapped, use that one.  Otherwise, create a new node and enter it in ST.  */
2482 static void
2483 remap_save_expr (tree *tp, void *st_, int *walk_subtrees)
2485 splay_tree st = (splay_tree) st_;
2486 splay_tree_node n;
2487 tree t;
2489 /* See if we already encountered this SAVE_EXPR. */
2490 n = splay_tree_lookup (st, (splay_tree_key) *tp);
2492 /* If we didn't already remap this SAVE_EXPR, do so now. */
2493 if (!n)
2495 t = copy_node (*tp);
2497 /* Remember this SAVE_EXPR. */
2498 splay_tree_insert (st, (splay_tree_key) *tp, (splay_tree_value) t);
2499 /* Make sure we don't remap an already-remapped SAVE_EXPR. */
2500 splay_tree_insert (st, (splay_tree_key) t, (splay_tree_value) t);
2502 else
2504 /* We've already walked into this SAVE_EXPR; don't do it again. */
2505 *walk_subtrees = 0;
2506 t = (tree) n->value;
2509 /* Replace this SAVE_EXPR with the copy. */
2510 *tp = t;
2513 /* Called via walk_tree.  If *TP points to a LABEL_EXPR for a local
2514 label, copies the declaration and enters it in the splay_tree in DATA
2515 (which is really an `inline_data *').  */
2517 static tree
2518 mark_local_for_remap_r (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED,
2519 void *data)
2521 inline_data *id = (inline_data *) data;
2523 /* Don't walk into types. */
2524 if (TYPE_P (*tp))
2525 *walk_subtrees = 0;
2527 else if (TREE_CODE (*tp) == LABEL_EXPR)
2529 tree decl = TREE_OPERAND (*tp, 0);
2531 /* Copy the decl and remember the copy. */
2532 insert_decl_map (id, decl,
2533 copy_decl_for_dup (decl, DECL_CONTEXT (decl),
2534 DECL_CONTEXT (decl), /*versioning=*/false));
2537 return NULL_TREE;
2540 /* Perform any modifications to EXPR required when it is unsaved. Does
2541 not recurse into EXPR's subtrees. */
2543 static void
2544 unsave_expr_1 (tree expr)
2546 switch (TREE_CODE (expr))
2548 case TARGET_EXPR:
2549 /* Don't mess with a TARGET_EXPR that hasn't been expanded.
2550 It's OK for this to happen if it was part of a subtree that
2551 isn't immediately expanded, such as operand 2 of another
2552 TARGET_EXPR. */
2553 if (TREE_OPERAND (expr, 1))
2554 break;
2556 TREE_OPERAND (expr, 1) = TREE_OPERAND (expr, 3);
2557 TREE_OPERAND (expr, 3) = NULL_TREE;
2558 break;
2560 default:
2561 break;
2565 /* Called via walk_tree when an expression is unsaved.  Using the
2566 decl_map splay tree in the inline_data pointed to by DATA, remaps all
2567 local declarations to appropriate replacements.  */
2569 static tree
2570 unsave_r (tree *tp, int *walk_subtrees, void *data)
2572 inline_data *id = (inline_data *) data;
2573 splay_tree st = id->decl_map;
2574 splay_tree_node n;
2576 /* Only a local declaration (variable or label). */
2577 if ((TREE_CODE (*tp) == VAR_DECL && !TREE_STATIC (*tp))
2578 || TREE_CODE (*tp) == LABEL_DECL)
2580 /* Lookup the declaration. */
2581 n = splay_tree_lookup (st, (splay_tree_key) *tp);
2583 /* If it's there, remap it. */
2584 if (n)
2585 *tp = (tree) n->value;
2588 else if (TREE_CODE (*tp) == STATEMENT_LIST)
2589 copy_statement_list (tp);
2590 else if (TREE_CODE (*tp) == BIND_EXPR)
2591 copy_bind_expr (tp, walk_subtrees, id);
2592 else if (TREE_CODE (*tp) == SAVE_EXPR)
2593 remap_save_expr (tp, st, walk_subtrees);
2594 else
2596 copy_tree_r (tp, walk_subtrees, NULL);
2598 /* Do whatever unsaving is required. */
2599 unsave_expr_1 (*tp);
2602 /* Keep iterating. */
2603 return NULL_TREE;
2606 /* Copies everything in EXPR and replaces variables, labels
2607 and SAVE_EXPRs local to EXPR. */
2609 tree
2610 unsave_expr_now (tree expr)
2612 inline_data id;
2614 /* There's nothing to do for NULL_TREE. */
2615 if (expr == 0)
2616 return expr;
2618 /* Set up ID. */
2619 memset (&id, 0, sizeof (id));
2620 id.callee = current_function_decl;
2621 id.caller = current_function_decl;
2622 id.decl_map = splay_tree_new (splay_tree_compare_pointers, NULL, NULL);
2624 /* Walk the tree once to find local labels. */
2625 walk_tree_without_duplicates (&expr, mark_local_for_remap_r, &id);
2627 /* Walk the tree again, copying, remapping, and unsaving. */
2628 walk_tree (&expr, unsave_r, &id, NULL);
2630 /* Clean up. */
2631 splay_tree_delete (id.decl_map);
2633 return expr;
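/* Usage sketch: take a fully unshared copy of a tree before consuming
   it destructively.  */
#if 0
  expr = unsave_expr_now (expr);
#endif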
2636 /* Allow someone to determine if SEARCH is a child of TOP from gdb. */
2638 static tree
2639 debug_find_tree_1 (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED, void *data)
2641 if (*tp == data)
2642 return (tree) data;
2643 else
2644 return NULL;
2647 bool
2648 debug_find_tree (tree top, tree search)
2650 return walk_tree_without_duplicates (&top, debug_find_tree_1, search) != 0;
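/* From the debugger, as a sketch:
     (gdb) call debug_find_tree (top, search)
   prints true iff SEARCH appears somewhere beneath TOP.  */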
2654 /* Declare the variables created by the inliner.  Add all the
2655 variables in VARS to BLOCK.  */
2657 static void
2658 declare_inline_vars (tree block, tree vars)
2660 tree t;
2661 for (t = vars; t; t = TREE_CHAIN (t))
2662 DECL_SEEN_IN_BIND_EXPR_P (t) = 1;
2664 if (block)
2665 BLOCK_VARS (block) = chainon (BLOCK_VARS (block), vars);
2669 /* Copy NODE (which must be a DECL). The DECL originally was in the FROM_FN,
2670 but now it will be in the TO_FN. VERSIONING means that this function
2671 is used by the versioning utility (not inlining or cloning). */
2673 tree
2674 copy_decl_for_dup (tree decl, tree from_fn, tree to_fn, bool versioning)
2676 tree copy;
2678 gcc_assert (DECL_P (decl));
2679 /* Copy the declaration. */
2680 if (!versioning
2681 && (TREE_CODE (decl) == PARM_DECL
2682 || TREE_CODE (decl) == RESULT_DECL))
2684 tree type = TREE_TYPE (decl);
2686 /* For a parameter or result, we must make an equivalent VAR_DECL,
2687 not a new PARM_DECL. */
2688 copy = build_decl (VAR_DECL, DECL_NAME (decl), type);
2689 TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (decl);
2690 TREE_READONLY (copy) = TREE_READONLY (decl);
2691 TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (decl);
2692 DECL_COMPLEX_GIMPLE_REG_P (copy) = DECL_COMPLEX_GIMPLE_REG_P (decl);
2694 else
2696 copy = copy_node (decl);
2697 /* The COPY is not abstract; it will be generated in TO_FN. */
2698 DECL_ABSTRACT (copy) = 0;
2699 lang_hooks.dup_lang_specific_decl (copy);
2701 /* TREE_ADDRESSABLE isn't used to indicate that a label's
2702 address has been taken; it's for internal bookkeeping in
2703 expand_goto_internal. */
2704 if (TREE_CODE (copy) == LABEL_DECL)
2706 TREE_ADDRESSABLE (copy) = 0;
2707 LABEL_DECL_UID (copy) = -1;
2711 /* Don't generate debug information for the copy if we wouldn't have
2712 generated it for the original either.  */
2713 DECL_ARTIFICIAL (copy) = DECL_ARTIFICIAL (decl);
2714 DECL_IGNORED_P (copy) = DECL_IGNORED_P (decl);
2716 /* Set the DECL_ABSTRACT_ORIGIN so the debugging routines know what
2717 declaration inspired this copy. */
2718 DECL_ABSTRACT_ORIGIN (copy) = DECL_ORIGIN (decl);
2720 /* The new variable/label has no RTL, yet. */
2721 if (CODE_CONTAINS_STRUCT (TREE_CODE (copy), TS_DECL_WRTL)
2722 && !TREE_STATIC (copy) && !DECL_EXTERNAL (copy))
2723 SET_DECL_RTL (copy, NULL_RTX);
2725 /* These args would always appear unused, if not for this. */
2726 TREE_USED (copy) = 1;
2728 /* Set the context for the new declaration. */
2729 if (!DECL_CONTEXT (decl))
2730 /* Globals stay global. */
2732 else if (DECL_CONTEXT (decl) != from_fn)
2733 /* Things that weren't in the scope of the function we're inlining
2734 from aren't in the scope we're inlining to, either. */
2736 else if (TREE_STATIC (decl))
2737 /* Function-scoped static variables should stay in the original
2738 function. */
2740 else
2741 /* Ordinary automatic local variables are now in the scope of the
2742 new function. */
2743 DECL_CONTEXT (copy) = to_fn;
2745 return copy;
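/* For illustration: when inlining "int f (int x)", the PARM_DECL "x"
   is rebuilt as an equivalent VAR_DECL in the caller (first branch
   above), whereas a local label or automatic variable is copied
   node-for-node and merely has its DECL_CONTEXT rewritten to TO_FN.  */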
2748 /* Return a copy of the function's argument tree. */
2749 static tree
2750 copy_arguments_for_versioning (tree orig_parm, inline_data * id)
2752 tree *arg_copy, *parg;
2754 arg_copy = &orig_parm;
2755 for (parg = arg_copy; *parg; parg = &TREE_CHAIN (*parg))
2757 tree new = remap_decl (*parg, id);
2758 lang_hooks.dup_lang_specific_decl (new);
2759 TREE_CHAIN (new) = TREE_CHAIN (*parg);
2760 *parg = new;
2762 return orig_parm;
2765 /* Return a copy of the function's static chain. */
2766 static tree
2767 copy_static_chain (tree static_chain, inline_data * id)
2769 tree *chain_copy, *pvar;
2771 chain_copy = &static_chain;
2772 for (pvar = chain_copy; *pvar; pvar = &TREE_CHAIN (*pvar))
2774 tree new = remap_decl (*pvar, id);
2775 lang_hooks.dup_lang_specific_decl (new);
2776 TREE_CHAIN (new) = TREE_CHAIN (*pvar);
2777 *pvar = new;
2779 return static_chain;
2782 /* Return true if the function is allowed to be versioned.
2783 This is a guard for the versioning functionality. */
2784 bool
2785 tree_versionable_function_p (tree fndecl)
2787 if (fndecl == NULL_TREE)
2788 return false;
2789 /* ??? There are cases where a function is
2790 uninlinable but can be versioned. */
2791 if (!tree_inlinable_function_p (fndecl))
2792 return false;
2794 return true;
2797 /* Create a copy of a function's tree.
2798 OLD_DECL and NEW_DECL are FUNCTION_DECL tree nodes
2799 of the original function and the new copied function
2800 respectively. In case we want to replace a DECL
2801 tree with another tree while duplicating the function's
2802 body, TREE_MAP represents the mapping between these
2803 trees. */
2804 void
2805 tree_function_versioning (tree old_decl, tree new_decl, varray_type tree_map)
2807 struct cgraph_node *old_version_node;
2808 struct cgraph_node *new_version_node;
2809 inline_data id;
2810 tree p, new_fndecl;
2811 unsigned i;
2812 struct ipa_replace_map *replace_info;
2813 basic_block old_entry_block;
2814 tree t_step;
2816 gcc_assert (TREE_CODE (old_decl) == FUNCTION_DECL
2817 && TREE_CODE (new_decl) == FUNCTION_DECL);
2818 DECL_POSSIBLY_INLINED (old_decl) = 1;
2820 old_version_node = cgraph_node (old_decl);
2821 new_version_node = cgraph_node (new_decl);
2823 allocate_struct_function (new_decl);
2824 /* Cfun points to the newly allocated function struct at this point.  */
2825 cfun->function_end_locus = DECL_SOURCE_LOCATION (new_decl);
2827 DECL_ARTIFICIAL (new_decl) = 1;
2828 DECL_ABSTRACT_ORIGIN (new_decl) = DECL_ORIGIN (old_decl);
2830 /* Generate a new name for the new version. */
2831 DECL_NAME (new_decl) =
2832 create_tmp_var_name (NULL);
2833 /* Create a new SYMBOL_REF rtx for the new name. */
2834 if (DECL_RTL (old_decl) != NULL)
2836 SET_DECL_RTL (new_decl, copy_rtx (DECL_RTL (old_decl)));
2837 XEXP (DECL_RTL (new_decl), 0) =
2838 gen_rtx_SYMBOL_REF (GET_MODE (XEXP (DECL_RTL (old_decl), 0)),
2839 IDENTIFIER_POINTER (DECL_NAME (new_decl)));
2842 /* Prepare the data structures for the tree copy. */
2843 memset (&id, 0, sizeof (id));
2845 /* The new version. */
2846 id.node = new_version_node;
2848 /* The old version. */
2849 id.current_node = cgraph_node (old_decl);
2851 id.versioning_p = true;
2852 id.decl_map = splay_tree_new (splay_tree_compare_pointers, NULL, NULL);
2853 id.caller = new_decl;
2854 id.callee = old_decl;
2855 id.callee_cfun = DECL_STRUCT_FUNCTION (old_decl);
2857 current_function_decl = new_decl;
2859 /* Copy the function's static chain. */
2860 p = DECL_STRUCT_FUNCTION (old_decl)->static_chain_decl;
2861 if (p)
2862 DECL_STRUCT_FUNCTION (new_decl)->static_chain_decl =
2863 copy_static_chain (DECL_STRUCT_FUNCTION (old_decl)->static_chain_decl,
2864 &id);
2865 /* Copy the function's arguments. */
2866 if (DECL_ARGUMENTS (old_decl) != NULL_TREE)
2867 DECL_ARGUMENTS (new_decl) =
2868 copy_arguments_for_versioning (DECL_ARGUMENTS (old_decl), &id);
2870 /* If there's a tree_map, prepare for substitution. */
2871 if (tree_map)
2872 for (i = 0; i < VARRAY_ACTIVE_SIZE (tree_map); i++)
2874 replace_info = VARRAY_GENERIC_PTR (tree_map, i);
2875 if (replace_info->replace_p && !replace_info->ref_p)
2876 insert_decl_map (&id, replace_info->old_tree,
2877 replace_info->new_tree);
2878 else if (replace_info->replace_p && replace_info->ref_p)
2879 id.ipa_info = tree_map;
2882 DECL_INITIAL (new_decl) = remap_blocks (DECL_INITIAL (id.callee), &id);
2884 /* Renumber the lexical scoping (non-code) blocks consecutively. */
2885 number_blocks (id.caller);
2887 if (DECL_STRUCT_FUNCTION (old_decl)->unexpanded_var_list != NULL_TREE)
2888 /* Add local vars. */
2889 for (t_step = DECL_STRUCT_FUNCTION (old_decl)->unexpanded_var_list;
2890 t_step; t_step = TREE_CHAIN (t_step))
2892 tree var = TREE_VALUE (t_step);
2893 if (TREE_STATIC (var) && !TREE_ASM_WRITTEN (var))
2894 cfun->unexpanded_var_list = tree_cons (NULL_TREE, var,
2895 cfun->unexpanded_var_list);
2896 else
2897 cfun->unexpanded_var_list =
2898 tree_cons (NULL_TREE, remap_decl (var, &id),
2899 cfun->unexpanded_var_list);
2902 /* Copy the function's body.  */
2903 old_entry_block = ENTRY_BLOCK_PTR_FOR_FUNCTION
2904 (DECL_STRUCT_FUNCTION (old_decl));
2905 new_fndecl = copy_body (&id,
2906 old_entry_block->count,
2907 old_entry_block->frequency, NULL, NULL);
2909 DECL_SAVED_TREE (new_decl) = DECL_SAVED_TREE (new_fndecl);
2911 DECL_STRUCT_FUNCTION (new_decl)->cfg =
2912 DECL_STRUCT_FUNCTION (new_fndecl)->cfg;
2913 DECL_STRUCT_FUNCTION (new_decl)->eh = DECL_STRUCT_FUNCTION (new_fndecl)->eh;
2914 DECL_STRUCT_FUNCTION (new_decl)->ib_boundaries_block =
2915 DECL_STRUCT_FUNCTION (new_fndecl)->ib_boundaries_block;
2916 DECL_STRUCT_FUNCTION (new_decl)->last_label_uid =
2917 DECL_STRUCT_FUNCTION (new_fndecl)->last_label_uid;
2919 if (DECL_RESULT (old_decl) != NULL_TREE)
2921 tree *res_decl = &DECL_RESULT (old_decl);
2922 DECL_RESULT (new_decl) = remap_decl (*res_decl, &id);
2923 lang_hooks.dup_lang_specific_decl (DECL_RESULT (new_decl));
2926 current_function_decl = NULL;
2927 /* Renumber the lexical scoping (non-code) blocks consecutively. */
2928 number_blocks (new_decl);
2930 /* Clean up. */
2931 splay_tree_delete (id.decl_map);
2932 fold_cond_expr_cond ();
2933 return;
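/* A minimal sketch of building TREE_MAP for a clone that replaces
   reads through a parameter; PARM and the constant are hypothetical,
   and the field names follow struct ipa_replace_map as used above.  */
#if 0
  varray_type tree_map;
  struct ipa_replace_map *m = xcalloc (1, sizeof (*m));
  m->old_tree = parm;
  m->new_tree = build_int_cst (integer_type_node, 42);
  m->replace_p = true;
  m->ref_p = false;
  VARRAY_GENERIC_PTR_INIT (tree_map, 1, "tree_map");
  VARRAY_PUSH_GENERIC_PTR (tree_map, m);
  tree_function_versioning (old_decl, new_decl, tree_map);
#endif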
2936 /* Replace an INDIRECT_REF of a given DECL tree with a given
2937 new tree.  ID->ipa_info keeps the old tree and the new tree.
2938 TP points to the INDIRECT_REF tree.  Return true if the trees
2939 were replaced.  */
2941 static bool
2942 replace_ref_tree (inline_data * id, tree * tp)
2944 bool replaced = false;
2945 tree new;
2947 if (id->ipa_info && VARRAY_ACTIVE_SIZE (id->ipa_info) > 0)
2949 unsigned i;
2951 for (i = 0; i < VARRAY_ACTIVE_SIZE (id->ipa_info); i++)
2953 struct ipa_replace_map *replace_info;
2954 replace_info = VARRAY_GENERIC_PTR (id->ipa_info, i);
2956 if (replace_info->replace_p && replace_info->ref_p)
2958 tree old_tree = replace_info->old_tree;
2959 tree new_tree = replace_info->new_tree;
2961 if (TREE_CODE (*tp) == INDIRECT_REF
2962 && TREE_OPERAND (*tp, 0) == old_tree)
2964 new = copy_node (new_tree);
2965 *tp = new;
2966 replaced = true;
2971 return replaced;
2974 /* Return true if we are inlining. */
2975 static inline bool
2976 inlining_p (inline_data * id)
2978 return (!id->saving_p && !id->cloning_p && !id->versioning_p);
2981 /* Duplicate a type, fields and all. */
2983 tree
2984 build_duplicate_type (tree type)
2986 inline_data id;
2988 memset (&id, 0, sizeof (id));
2989 id.callee = current_function_decl;
2990 id.caller = current_function_decl;
2991 id.callee_cfun = cfun;
2992 id.decl_map = splay_tree_new (splay_tree_compare_pointers, NULL, NULL);
2994 type = remap_type_1 (type, &id);
2996 splay_tree_delete (id.decl_map);
2998 return type;