/* Tree inlining.
   Copyright 2001, 2002, 2003, 2004, 2005 Free Software Foundation, Inc.
   Contributed by Alexandre Oliva <aoliva@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to
the Free Software Foundation, 51 Franklin Street, Fifth Floor,
Boston, MA 02110-1301, USA.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "toplev.h"
#include "tree.h"
#include "tree-inline.h"
#include "rtl.h"
#include "expr.h"
#include "flags.h"
#include "params.h"
#include "input.h"
#include "insn-config.h"
#include "varray.h"
#include "hashtab.h"
#include "splay-tree.h"
#include "langhooks.h"
#include "basic-block.h"
#include "tree-iterator.h"
#include "cgraph.h"
#include "intl.h"
#include "tree-mudflap.h"
#include "tree-flow.h"
#include "function.h"
#include "ggc.h"
#include "diagnostic.h"
#include "except.h"
#include "debug.h"
#include "pointer-set.h"
#include "ipa-prop.h"

/* I'm not really happy about this, but we need to handle gimple and
   non-gimple trees.  */
#include "tree-gimple.h"

/* Inlining, Saving, Cloning

   Inlining: a function body is duplicated, but the PARM_DECLs are
   remapped into VAR_DECLs, and non-void RETURN_EXPRs become
   MODIFY_EXPRs that store to a dedicated returned-value variable.
   The duplicated eh_region info of the copy will later be appended
   to the info for the caller; the eh_region info in copied throwing
   statements and RESX_EXPRs is adjusted accordingly.

   Saving: make a semantically-identical copy of the function body.
   Necessary when we want to generate code for the body (a destructive
   operation), but we expect to need this body in the future (e.g. for
   inlining into another function).

   Cloning: (only in C++) We have one body for a con/de/structor, and
   multiple function decls, each with a unique parameter list.
   Duplicate the body, using the given splay tree; some parameters
   will become constants (like 0 or 1).

   All of these will simultaneously look up any callgraph edges.  If
   we're going to inline the duplicated function body, and the given
   function has some cloned callgraph nodes (one for each place this
   function will be inlined) those callgraph edges will be duplicated.
   If we're saving or cloning the body, those callgraph edges will be
   updated to point into the new body.  (Note that the original
   callgraph node and edge list will not be altered.)

   See the CALL_EXPR handling case in copy_body_r ().  */

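/* For illustration, a rough sketch of the inlining transformation
   described above (the names i.0 and retval.1 are invented here; the
   actual copies are produced by remap_decl and declare_return_variable):

       int sq (int i) { return i * i; }
       ...
       x = sq (3);

   becomes, once the call site is expanded,

       int i.0;                  <- PARM_DECL remapped to a VAR_DECL
       i.0 = 3;
       retval.1 = i.0 * i.0;     <- RETURN_EXPR became a MODIFY_EXPR
       x = retval.1;  */
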
/* 0 if we should not perform inlining.
   1 if we should expand function calls inline at the tree level.
   2 if we should consider *all* functions to be inline
   candidates.  */

int flag_inline_trees = 0;

/* To Do:

   o In order to make inlining-on-trees work, we pessimized
     function-local static constants.  In particular, they are now
     always output, even when not addressed.  Fix this by treating
     function-local static constants just like global static
     constants; the back-end already knows not to output them if they
     are not needed.

   o Provide heuristics to clamp inlining of recursive template
     calls?  */

/* Data required for function inlining.  */

typedef struct inline_data
{
  /* FUNCTION_DECL for function being inlined.  */
  tree callee;
  /* FUNCTION_DECL for function being inlined into.  */
  tree caller;
  /* struct function for function being inlined.  Usually this is the same
     as DECL_STRUCT_FUNCTION (callee), but can be different if saved_cfg
     and saved_eh are in use.  */
  struct function *callee_cfun;
  /* The VAR_DECL for the return value.  */
  tree retvar;
  /* The map from local declarations in the inlined function to
     equivalents in the function into which it is being inlined.  */
  splay_tree decl_map;
  /* We use the same mechanism to build clones that we do to perform
     inlining.  However, there are a few places where we need to
     distinguish between those two situations.  This flag is true if
     we are cloning, rather than inlining.  */
  bool cloning_p;
  /* Similarly for saving function body.  */
  bool saving_p;
  /* Versioning function is slightly different from inlining.  */
  bool versioning_p;
  /* Callgraph node of function we are inlining into.  */
  struct cgraph_node *node;
  /* Callgraph node of currently inlined function.  */
  struct cgraph_node *current_node;
  /* Current BLOCK.  */
  tree block;
  varray_type ipa_info;
  /* Exception region the inlined call lies in.  */
  int eh_region;
  /* Take region number in the function being copied, add this value and
     get eh region number of the duplicate in the function we inline into.  */
  int eh_region_offset;
} inline_data;

/* Prototypes.  */

static tree declare_return_variable (inline_data *, tree, tree, tree *);
static tree copy_body_r (tree *, int *, void *);
static tree copy_generic_body (inline_data *);
static bool inlinable_function_p (tree);
static tree remap_decl (tree, inline_data *);
static tree remap_type (tree, inline_data *);
static void remap_block (tree *, inline_data *);
static tree remap_decls (tree, inline_data *);
static void copy_bind_expr (tree *, int *, inline_data *);
static tree mark_local_for_remap_r (tree *, int *, void *);
static void unsave_expr_1 (tree);
static tree unsave_r (tree *, int *, void *);
static void declare_inline_vars (tree, tree);
static void remap_save_expr (tree *, void *, int *);
static bool replace_ref_tree (inline_data *, tree *);
static inline bool inlining_p (inline_data *);
static void add_lexical_block (tree current_block, tree new_block);

/* Insert a tree->tree mapping into ID's decl map.  Although the name
   suggests that the trees should be variables, it is used for more
   than that.  */

static void
insert_decl_map (inline_data *id, tree key, tree value)
{
  splay_tree_insert (id->decl_map, (splay_tree_key) key,
                     (splay_tree_value) value);

  /* Always insert an identity map as well.  If we see this same new
     node again, we won't want to duplicate it a second time.  */
  if (key != value)
    splay_tree_insert (id->decl_map, (splay_tree_key) value,
                       (splay_tree_value) value);
}

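/* To make the mapping concrete: inlining "int sq (int i)" from the
   example near the top of this file would, roughly, leave the following
   pairs in id->decl_map (the suffixed names are invented for
   illustration):

       i  (PARM_DECL)  ->  i.0  (VAR_DECL)
       i.0             ->  i.0        <- the identity entry added above
       RESULT_DECL     ->  retval.1

   The identity entries are what keep copy_body_r from duplicating an
   already-new node when it is walked a second time.  */
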
/* Remap DECL during the copying of the BLOCK tree for the function.  */

static tree
remap_decl (tree decl, inline_data *id)
{
  splay_tree_node n;
  tree fn;

  /* We only remap local variables in the current function.  */
  fn = id->callee;

  /* See if we have remapped this declaration.  */
  n = splay_tree_lookup (id->decl_map, (splay_tree_key) decl);

  /* If we didn't already have an equivalent for this declaration,
     create one now.  */
  if (!n)
    {
      /* Make a copy of the variable or label.  */
      tree t;
      t = copy_decl_for_dup (decl, fn, id->caller, id->versioning_p);

      /* Remember it, so that if we encounter this local entity again
         we can reuse this copy.  Do this early because remap_type may
         need this decl for TYPE_STUB_DECL.  */
      insert_decl_map (id, decl, t);

      /* Remap types, if necessary.  */
      TREE_TYPE (t) = remap_type (TREE_TYPE (t), id);
      if (TREE_CODE (t) == TYPE_DECL)
        DECL_ORIGINAL_TYPE (t) = remap_type (DECL_ORIGINAL_TYPE (t), id);

      /* Remap sizes as necessary.  */
      walk_tree (&DECL_SIZE (t), copy_body_r, id, NULL);
      walk_tree (&DECL_SIZE_UNIT (t), copy_body_r, id, NULL);

      /* If fields, do likewise for offset and qualifier.  */
      if (TREE_CODE (t) == FIELD_DECL)
        {
          walk_tree (&DECL_FIELD_OFFSET (t), copy_body_r, id, NULL);
          if (TREE_CODE (DECL_CONTEXT (t)) == QUAL_UNION_TYPE)
            walk_tree (&DECL_QUALIFIER (t), copy_body_r, id, NULL);
        }

#if 0
      /* FIXME handle anon aggrs.  */
      if (! DECL_NAME (t) && TREE_TYPE (t)
          && lang_hooks.tree_inlining.anon_aggr_type_p (TREE_TYPE (t)))
        {
          /* For a VAR_DECL of anonymous type, we must also copy the
             member VAR_DECLS here and rechain the DECL_ANON_UNION_ELEMS.  */
          tree members = NULL;
          tree src;

          for (src = DECL_ANON_UNION_ELEMS (t); src;
               src = TREE_CHAIN (src))
            {
              tree member = remap_decl (TREE_VALUE (src), id);

              gcc_assert (!TREE_PURPOSE (src));
              members = tree_cons (NULL, member, members);
            }
          DECL_ANON_UNION_ELEMS (t) = nreverse (members);
        }
#endif

      /* Remember it, so that if we encounter this local entity
         again we can reuse this copy.  */
      insert_decl_map (id, decl, t);
      return t;
    }

  return unshare_expr ((tree) n->value);
}

static tree
remap_type_1 (tree type, inline_data *id)
{
  tree new, t;

  /* We do need a copy.  Build and register it now.  If this is a pointer or
     reference type, remap the designated type and make a new pointer or
     reference type.  */
  if (TREE_CODE (type) == POINTER_TYPE)
    {
      new = build_pointer_type_for_mode (remap_type (TREE_TYPE (type), id),
                                         TYPE_MODE (type),
                                         TYPE_REF_CAN_ALIAS_ALL (type));
      insert_decl_map (id, type, new);
      return new;
    }
  else if (TREE_CODE (type) == REFERENCE_TYPE)
    {
      new = build_reference_type_for_mode (remap_type (TREE_TYPE (type), id),
                                           TYPE_MODE (type),
                                           TYPE_REF_CAN_ALIAS_ALL (type));
      insert_decl_map (id, type, new);
      return new;
    }
  else
    new = copy_node (type);

  insert_decl_map (id, type, new);

  /* This is a new type, not a copy of an old type.  Need to reassociate
     variants.  We can handle everything except the main variant lazily.  */
  t = TYPE_MAIN_VARIANT (type);
  if (type != t)
    {
      t = remap_type (t, id);
      TYPE_MAIN_VARIANT (new) = t;
      TYPE_NEXT_VARIANT (new) = TYPE_MAIN_VARIANT (t);
      TYPE_NEXT_VARIANT (t) = new;
    }
  else
    {
      TYPE_MAIN_VARIANT (new) = new;
      TYPE_NEXT_VARIANT (new) = NULL;
    }

  if (TYPE_STUB_DECL (type))
    TYPE_STUB_DECL (new) = remap_decl (TYPE_STUB_DECL (type), id);

  /* Lazily create pointer and reference types.  */
  TYPE_POINTER_TO (new) = NULL;
  TYPE_REFERENCE_TO (new) = NULL;

  switch (TREE_CODE (new))
    {
    case INTEGER_TYPE:
    case REAL_TYPE:
    case ENUMERAL_TYPE:
    case BOOLEAN_TYPE:
    case CHAR_TYPE:
      t = TYPE_MIN_VALUE (new);
      if (t && TREE_CODE (t) != INTEGER_CST)
        walk_tree (&TYPE_MIN_VALUE (new), copy_body_r, id, NULL);

      t = TYPE_MAX_VALUE (new);
      if (t && TREE_CODE (t) != INTEGER_CST)
        walk_tree (&TYPE_MAX_VALUE (new), copy_body_r, id, NULL);
      return new;

    case FUNCTION_TYPE:
      TREE_TYPE (new) = remap_type (TREE_TYPE (new), id);
      walk_tree (&TYPE_ARG_TYPES (new), copy_body_r, id, NULL);
      return new;

    case ARRAY_TYPE:
      TREE_TYPE (new) = remap_type (TREE_TYPE (new), id);
      TYPE_DOMAIN (new) = remap_type (TYPE_DOMAIN (new), id);
      break;

    case RECORD_TYPE:
    case UNION_TYPE:
    case QUAL_UNION_TYPE:
      {
        tree f, nf = NULL;

        for (f = TYPE_FIELDS (new); f ; f = TREE_CHAIN (f))
          {
            t = remap_decl (f, id);
            DECL_CONTEXT (t) = new;
            TREE_CHAIN (t) = nf;
            nf = t;
          }
        TYPE_FIELDS (new) = nreverse (nf);
      }
      break;

    case OFFSET_TYPE:
    default:
      /* Shouldn't have been thought variable sized.  */
      gcc_unreachable ();
    }

  walk_tree (&TYPE_SIZE (new), copy_body_r, id, NULL);
  walk_tree (&TYPE_SIZE_UNIT (new), copy_body_r, id, NULL);

  return new;
}

static tree
remap_type (tree type, inline_data *id)
{
  splay_tree_node node;

  if (type == NULL)
    return type;

  /* See if we have remapped this type.  */
  node = splay_tree_lookup (id->decl_map, (splay_tree_key) type);
  if (node)
    return (tree) node->value;

  /* The type only needs remapping if it's variably modified.  */
  if (! variably_modified_type_p (type, id->callee))
    {
      insert_decl_map (id, type, type);
      return type;
    }

  return remap_type_1 (type, id);
}

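/* Example of a type that does need remapping: in

       void f (int n) { int a[n]; ... }

   the domain of a's ARRAY_TYPE refers to n (via a SAVE_EXPR), a local
   entity of f.  When f is inlined, n is remapped, so the array type and
   its size trees must be remapped too.  A type like int[10], by
   contrast, is not variably modified and maps to itself.  */
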
static tree
remap_decls (tree decls, inline_data *id)
{
  tree old_var;
  tree new_decls = NULL_TREE;

  /* Remap its variables.  */
  for (old_var = decls; old_var; old_var = TREE_CHAIN (old_var))
    {
      tree new_var;

      /* We cannot chain the local static declarations into the copied
         block's variable list, as we can't duplicate them or break the
         one-decl rule; link them into the caller's unexpanded_var_list
         instead.  */
      if (!lang_hooks.tree_inlining.auto_var_in_fn_p (old_var, id->callee)
          && !DECL_EXTERNAL (old_var))
        {
          cfun->unexpanded_var_list = tree_cons (NULL_TREE, old_var,
                                                 cfun->unexpanded_var_list);
          continue;
        }

      /* Remap the variable.  */
      new_var = remap_decl (old_var, id);

      /* If we didn't remap this variable, we can't mess with its
         TREE_CHAIN.  If we remapped this variable to the return slot, it's
         already declared somewhere else, so don't declare it here.  */
      if (!new_var || new_var == id->retvar)
        ;
      else
        {
          gcc_assert (DECL_P (new_var));
          TREE_CHAIN (new_var) = new_decls;
          new_decls = new_var;
        }
    }

  return nreverse (new_decls);
}

/* Copy the BLOCK to contain remapped versions of the variables
   therein.  And hook the new block into the block-tree.  */

static void
remap_block (tree *block, inline_data *id)
{
  tree old_block;
  tree new_block;
  tree fn;

  /* Make the new block.  */
  old_block = *block;
  new_block = make_node (BLOCK);
  TREE_USED (new_block) = TREE_USED (old_block);
  BLOCK_ABSTRACT_ORIGIN (new_block) = old_block;
  BLOCK_SOURCE_LOCATION (new_block) = BLOCK_SOURCE_LOCATION (old_block);
  *block = new_block;

  /* Remap its variables.  */
  BLOCK_VARS (new_block) = remap_decls (BLOCK_VARS (old_block), id);

  fn = id->caller;
  if (id->cloning_p)
    /* We're building a clone; DECL_INITIAL is still
       error_mark_node, and current_binding_level is the parm
       binding level.  */
    lang_hooks.decls.insert_block (new_block);
  /* Remember the remapped block.  */
  insert_decl_map (id, old_block, new_block);
}

/* Copy the whole block tree and root it in id->block.  */

static tree
remap_blocks (tree block, inline_data *id)
{
  tree t;
  tree new = block;

  if (!block)
    return NULL;

  remap_block (&new, id);
  gcc_assert (new != block);
  for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
    add_lexical_block (new, remap_blocks (t, id));
  return new;
}

static void
copy_statement_list (tree *tp)
{
  tree_stmt_iterator oi, ni;
  tree new;

  new = alloc_stmt_list ();
  ni = tsi_start (new);
  oi = tsi_start (*tp);
  *tp = new;

  for (; !tsi_end_p (oi); tsi_next (&oi))
    tsi_link_after (&ni, tsi_stmt (oi), TSI_NEW_STMT);
}

static void
copy_bind_expr (tree *tp, int *walk_subtrees, inline_data *id)
{
  tree block = BIND_EXPR_BLOCK (*tp);
  /* Copy (and replace) the statement.  */
  copy_tree_r (tp, walk_subtrees, NULL);
  if (block)
    {
      remap_block (&block, id);
      BIND_EXPR_BLOCK (*tp) = block;
    }

  if (BIND_EXPR_VARS (*tp))
    /* This will remap a lot of the same decls again, but this should be
       harmless.  */
    BIND_EXPR_VARS (*tp) = remap_decls (BIND_EXPR_VARS (*tp), id);
}

/* Called from copy_body_id via walk_tree.  DATA is really an
   `inline_data *'.  */

static tree
copy_body_r (tree *tp, int *walk_subtrees, void *data)
{
  inline_data *id = (inline_data *) data;
  tree fn = id->callee;
  tree new_block;

  /* Begin by recognizing trees that we'll completely rewrite for the
     inlining context.  Our output for these trees is completely
     different from our input (e.g. RETURN_EXPR is deleted, and morphs
     into an edge).  Further down, we'll handle trees that get
     duplicated and/or tweaked.  */

  /* If this is a RETURN_STMT, change it into an EXPR_STMT and a
     GOTO_STMT with the RET_LABEL as its target.  */
  if (TREE_CODE (*tp) == RETURN_EXPR && inlining_p (id))
    {
      tree assignment = TREE_OPERAND (*tp, 0);

      /* If we're returning something, just turn that into an
         assignment into the equivalent of the original RESULT_DECL.
         If the "assignment" is just the result decl, the result
         decl has already been set (e.g. a recent "foo (&result_decl,
         ...)"); just toss the entire RETURN_EXPR.  */
      if (assignment && TREE_CODE (assignment) == MODIFY_EXPR)
        /* Replace the RETURN_EXPR with (a copy of) the
           MODIFY_EXPR hanging underneath.  */
        *tp = copy_node (assignment);
      else /* Else the RETURN_EXPR returns no value.  */
        {
          *tp = NULL;
          return (void *)1;
        }
    }
  /* Local variables and labels need to be replaced by equivalent
     variables.  We don't want to copy static variables; there's only
     one of those, no matter how many times we inline the containing
     function.  Similarly for globals from an outer function.  */
  else if (lang_hooks.tree_inlining.auto_var_in_fn_p (*tp, fn))
    {
      tree new_decl;

      /* Remap the declaration.  */
      new_decl = remap_decl (*tp, id);
      gcc_assert (new_decl);
      /* Replace this variable with the copy.  */
      STRIP_TYPE_NOPS (new_decl);
      *tp = new_decl;
      *walk_subtrees = 0;
    }
  else if (TREE_CODE (*tp) == STATEMENT_LIST)
    copy_statement_list (tp);
  else if (TREE_CODE (*tp) == SAVE_EXPR)
    remap_save_expr (tp, id->decl_map, walk_subtrees);
  else if (TREE_CODE (*tp) == LABEL_DECL
           && (! DECL_CONTEXT (*tp)
               || decl_function_context (*tp) == id->callee))
    /* These may need to be remapped for EH handling.  */
    *tp = remap_decl (*tp, id);
  else if (TREE_CODE (*tp) == BIND_EXPR)
    copy_bind_expr (tp, walk_subtrees, id);
  /* Types may need remapping as well.  */
  else if (TYPE_P (*tp))
    *tp = remap_type (*tp, id);
  /* If this is a constant, we have to copy the node iff the type will be
     remapped.  copy_tree_r will not copy a constant.  */
  else if (CONSTANT_CLASS_P (*tp))
    {
      tree new_type = remap_type (TREE_TYPE (*tp), id);

      if (new_type == TREE_TYPE (*tp))
        *walk_subtrees = 0;
      else if (TREE_CODE (*tp) == INTEGER_CST)
        *tp = build_int_cst_wide (new_type, TREE_INT_CST_LOW (*tp),
                                  TREE_INT_CST_HIGH (*tp));
      else
        {
          *tp = copy_node (*tp);
          TREE_TYPE (*tp) = new_type;
        }
    }
  /* Otherwise, just copy the node.  Note that copy_tree_r already
     knows not to copy VAR_DECLs, etc., so this is safe.  */
  else
    {
      /* Here we handle trees that are not completely rewritten.
         First we detect some inlining-induced bogosities for
         discarding.  */
      if (TREE_CODE (*tp) == MODIFY_EXPR
          && TREE_OPERAND (*tp, 0) == TREE_OPERAND (*tp, 1)
          && (lang_hooks.tree_inlining.auto_var_in_fn_p
              (TREE_OPERAND (*tp, 0), fn)))
        {
          /* Some assignments VAR = VAR; don't generate any rtl code
             and thus don't count as variable modification.  Avoid
             keeping bogosities like 0 = 0.  */
          tree decl = TREE_OPERAND (*tp, 0), value;
          splay_tree_node n;

          n = splay_tree_lookup (id->decl_map, (splay_tree_key) decl);
          if (n)
            {
              value = (tree) n->value;
              STRIP_TYPE_NOPS (value);
              if (TREE_CONSTANT (value) || TREE_READONLY_DECL_P (value))
                {
                  *tp = build_empty_stmt ();
                  return copy_body_r (tp, walk_subtrees, data);
                }
            }
        }
      else if (TREE_CODE (*tp) == INDIRECT_REF
               && !id->versioning_p)
        {
          /* Get rid of *& from inline substitutions that can happen when a
             pointer argument is an ADDR_EXPR.  */
          tree decl = TREE_OPERAND (*tp, 0);
          splay_tree_node n;

          n = splay_tree_lookup (id->decl_map, (splay_tree_key) decl);
          if (n)
            {
              tree new;
              /* If we happen to get an ADDR_EXPR in n->value, strip
                 it manually here as we'll eventually get ADDR_EXPRs
                 which lie about their types pointed to.  In this case
                 build_fold_indirect_ref wouldn't strip the INDIRECT_REF,
                 but we absolutely rely on that.  As fold_indirect_ref
                 does other useful transformations, try that first, though.  */
              tree type = TREE_TYPE (TREE_TYPE ((tree) n->value));
              new = unshare_expr ((tree) n->value);
              *tp = fold_indirect_ref_1 (type, new);
              if (! *tp)
                {
                  if (TREE_CODE (new) == ADDR_EXPR)
                    *tp = TREE_OPERAND (new, 0);
                  else
                    *tp = build1 (INDIRECT_REF, type, new);
                }
              *walk_subtrees = 0;
              return NULL;
            }
        }

      /* Here is the "usual case".  Copy this tree node, and then
         tweak some special cases.  */
      copy_tree_r (tp, walk_subtrees, id->versioning_p ? data : NULL);

      /* If EXPR has a block defined, map it to the newly constructed block.
         When inlining we want EXPRs without a block to appear in the block
         of the function call.  */
      if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (TREE_CODE (*tp))))
        {
          new_block = id->block;
          if (TREE_BLOCK (*tp))
            {
              splay_tree_node n;
              n = splay_tree_lookup (id->decl_map,
                                     (splay_tree_key) TREE_BLOCK (*tp));
              gcc_assert (n);
              new_block = (tree) n->value;
            }
          TREE_BLOCK (*tp) = new_block;
        }

      if (TREE_CODE (*tp) == RESX_EXPR && id->eh_region_offset)
        TREE_OPERAND (*tp, 0) =
          build_int_cst
            (NULL_TREE,
             id->eh_region_offset + TREE_INT_CST_LOW (TREE_OPERAND (*tp, 0)));

      TREE_TYPE (*tp) = remap_type (TREE_TYPE (*tp), id);

      /* The copied TARGET_EXPR has never been expanded, even if the
         original node was expanded already.  */
      if (TREE_CODE (*tp) == TARGET_EXPR && TREE_OPERAND (*tp, 3))
        {
          TREE_OPERAND (*tp, 1) = TREE_OPERAND (*tp, 3);
          TREE_OPERAND (*tp, 3) = NULL_TREE;
        }
      /* Variable substitution need not be simple.  In particular, the
         INDIRECT_REF substitution above.  Make sure that TREE_CONSTANT
         and friends are up-to-date.  */
      else if (TREE_CODE (*tp) == ADDR_EXPR)
        {
          walk_tree (&TREE_OPERAND (*tp, 0), copy_body_r, id, NULL);
          recompute_tree_invarant_for_addr_expr (*tp);
          *walk_subtrees = 0;
        }
    }

  /* Keep iterating.  */
  return NULL_TREE;
}

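/* The INDIRECT_REF case above is what turns

       g (&x);   with   void g (int *p) { ... *p ... }

   into code that references x directly: p is mapped to ADDR_EXPR <x>
   in decl_map, so each *p in the copied body momentarily becomes *&x,
   which is folded back to plain x.  (A rough sketch; the real work is
   done by fold_indirect_ref_1 and the manual ADDR_EXPR stripping
   above.)  */
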
/* Copy basic block, scale profile accordingly.  Edges will be taken care of
   later.  */

static basic_block
copy_bb (inline_data *id, basic_block bb, int frequency_scale, int count_scale)
{
  block_stmt_iterator bsi, copy_bsi;
  basic_block copy_basic_block;

  /* create_basic_block() will append every new block to
     basic_block_info automatically.  */
  copy_basic_block = create_basic_block (NULL, (void *) 0, bb->prev_bb->aux);
  copy_basic_block->count = bb->count * count_scale / REG_BR_PROB_BASE;
  copy_basic_block->frequency = (bb->frequency
                                 * frequency_scale / REG_BR_PROB_BASE);
  copy_bsi = bsi_start (copy_basic_block);

  for (bsi = bsi_start (bb);
       !bsi_end_p (bsi); bsi_next (&bsi))
    {
      tree stmt = bsi_stmt (bsi);
      tree orig_stmt = stmt;

      walk_tree (&stmt, copy_body_r, id, NULL);

      /* RETURN_EXPR might be removed,
         this is signalled by making stmt pointer NULL.  */
      if (stmt)
        {
          tree call, decl;
          bsi_insert_after (&copy_bsi, stmt, BSI_NEW_STMT);
          call = get_call_expr_in (stmt);
          /* We're duplicating a CALL_EXPR.  Find any corresponding
             callgraph edges and update or duplicate them.  */
          if (call && (decl = get_callee_fndecl (call)))
            {
              if (id->saving_p)
                {
                  struct cgraph_node *node;
                  struct cgraph_edge *edge;

                  /* We're saving a copy of the body, so we'll update the
                     callgraph nodes in place.  Note that we avoid
                     altering the original callgraph node; we begin with
                     the first clone.  */
                  for (node = id->node->next_clone;
                       node;
                       node = node->next_clone)
                    {
                      edge = cgraph_edge (node, orig_stmt);
                      gcc_assert (edge);
                      edge->call_stmt = stmt;
                    }
                }
              else
                {
                  struct cgraph_edge *edge;

                  /* We're cloning or inlining this body; duplicate the
                     associated callgraph nodes.  */
                  if (!id->versioning_p)
                    {
                      edge = cgraph_edge (id->current_node, orig_stmt);
                      if (edge)
                        cgraph_clone_edge (edge, id->node, stmt,
                                           REG_BR_PROB_BASE, 1, true);
                    }
                }
              if (id->versioning_p)
                {
                  /* Update the call_expr on the edges from the new version
                     to its callees.  */
                  struct cgraph_edge *edge;
                  edge = cgraph_edge (id->node, orig_stmt);
                  if (edge)
                    edge->call_stmt = stmt;
                }
            }
          /* If you think we can abort here, you are wrong.
             There is no region 0 in tree land.  */
          gcc_assert (lookup_stmt_eh_region_fn (id->callee_cfun, orig_stmt)
                      != 0);

          if (tree_could_throw_p (stmt))
            {
              int region = lookup_stmt_eh_region_fn (id->callee_cfun, orig_stmt);
              /* Add an entry for the copied tree in the EH hashtable.
                 When saving or cloning or versioning, use the hashtable in
                 cfun, and just copy the EH number.  When inlining, use the
                 hashtable in the caller, and adjust the region number.  */
              if (region > 0)
                add_stmt_to_eh_region (stmt, region + id->eh_region_offset);

              /* If this tree doesn't have a region associated with it,
                 and there is a "current region,"
                 then associate this tree with the current region
                 and add edges associated with this region.  */
              if ((lookup_stmt_eh_region_fn (id->callee_cfun,
                                             orig_stmt) <= 0
                   && id->eh_region > 0)
                  && tree_could_throw_p (stmt))
                add_stmt_to_eh_region (stmt, id->eh_region);
            }
        }
    }
  return copy_basic_block;
}

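/* Worked example of the scaling above, with invented numbers: if the
   callee's entry block had profile count 1000 and the call site has
   count 250, the caller of copy_bb will have computed

       count_scale = REG_BR_PROB_BASE * 250 / 1000

   so a copied block whose original count was 600 gets

       600 * count_scale / REG_BR_PROB_BASE == 150,

   i.e. every count is scaled by 1/4, with REG_BR_PROB_BASE serving
   only as a fixed-point base.  */
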
/* Copy edges from BB into its copy constructed earlier, scale profile
   accordingly.  Edges will be taken care of later.  Assume aux
   pointers to point to the copies of each BB.  */

static void
copy_edges_for_bb (basic_block bb, int count_scale)
{
  basic_block new_bb = bb->aux;
  edge_iterator ei;
  edge old_edge;
  block_stmt_iterator bsi;
  int flags;

  /* Use the indices from the original blocks to create edges for the
     new ones.  */
  FOR_EACH_EDGE (old_edge, ei, bb->succs)
    if (!(old_edge->flags & EDGE_EH))
      {
        edge new;

        flags = old_edge->flags;

        /* Return edges do get a FALLTHRU flag when they get inlined.  */
        if (old_edge->dest->index == EXIT_BLOCK && !old_edge->flags
            && old_edge->dest->aux != EXIT_BLOCK_PTR)
          flags |= EDGE_FALLTHRU;
        new = make_edge (new_bb, old_edge->dest->aux, flags);
        new->count = old_edge->count * count_scale / REG_BR_PROB_BASE;
        new->probability = old_edge->probability;
      }

  if (bb->index == ENTRY_BLOCK || bb->index == EXIT_BLOCK)
    return;

  for (bsi = bsi_start (new_bb); !bsi_end_p (bsi);)
    {
      tree copy_stmt;

      copy_stmt = bsi_stmt (bsi);
      update_stmt (copy_stmt);
      /* Do this before the possible split_block.  */
      bsi_next (&bsi);

      /* If this tree could throw an exception, there are two
         cases where we need to add abnormal edge(s): the
         tree wasn't in a region and there is a "current
         region" in the caller; or the original tree had
         EH edges.  In both cases split the block after the tree,
         and add abnormal edge(s) as needed; we need both
         those from the callee and the caller.
         We check whether the copy can throw, because the const
         propagation can change an INDIRECT_REF which throws
         into a COMPONENT_REF which doesn't.  If the copy
         can throw, the original could also throw.  */

      if (tree_can_throw_internal (copy_stmt))
        {
          if (!bsi_end_p (bsi))
            /* Note that bb's predecessor edges aren't necessarily
               right at this point; split_block doesn't care.  */
            {
              edge e = split_block (new_bb, copy_stmt);
              new_bb = e->dest;
              bsi = bsi_start (new_bb);
            }

          make_eh_edges (copy_stmt);
        }
    }
}

/* Wrapper for remap_decl so it can be used as a callback.  */

static tree
remap_decl_1 (tree decl, void *data)
{
  return remap_decl (decl, data);
}

/* Make a copy of the body of FN so that it can be inserted inline in
   another function.  Walks FN via CFG, returns new fndecl.  */

static tree
copy_cfg_body (inline_data *id, gcov_type count, int frequency,
               basic_block entry_block_map, basic_block exit_block_map)
{
  tree callee_fndecl = id->callee;
  /* Original cfun for the callee, doesn't change.  */
  struct function *callee_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
  /* Copy, built by this function.  */
  struct function *new_cfun;
  /* Place to copy from; when a copy of the function was saved off earlier,
     use that instead of the main copy.  */
  struct function *cfun_to_copy =
    (struct function *) ggc_alloc_cleared (sizeof (struct function));
  basic_block bb;
  tree new_fndecl = NULL;
  bool saving_or_cloning;
  int count_scale, frequency_scale;

  if (ENTRY_BLOCK_PTR_FOR_FUNCTION (callee_cfun)->count)
    count_scale = (REG_BR_PROB_BASE * count
                   / ENTRY_BLOCK_PTR_FOR_FUNCTION (callee_cfun)->count);
  else
    count_scale = 1;

  if (ENTRY_BLOCK_PTR_FOR_FUNCTION (callee_cfun)->frequency)
    frequency_scale = (REG_BR_PROB_BASE * frequency
                       / ENTRY_BLOCK_PTR_FOR_FUNCTION (callee_cfun)->frequency);
  else
    frequency_scale = count_scale;

  /* Register specific tree functions.  */
  tree_register_cfg_hooks ();

  /* Must have a CFG here at this point.  */
  gcc_assert (ENTRY_BLOCK_PTR_FOR_FUNCTION
              (DECL_STRUCT_FUNCTION (callee_fndecl)));

  *cfun_to_copy = *DECL_STRUCT_FUNCTION (callee_fndecl);

  /* If there is a saved_cfg+saved_args lurking in the
     struct function, a copy of the callee body was saved there, and
     the 'struct cgraph edge' nodes have been fudged to point into the
     saved body.  Accordingly, we want to copy that saved body so the
     callgraph edges will be recognized and cloned properly.  */
  if (cfun_to_copy->saved_cfg)
    {
      cfun_to_copy->cfg = cfun_to_copy->saved_cfg;
      cfun_to_copy->eh = cfun_to_copy->saved_eh;
    }
  id->callee_cfun = cfun_to_copy;

  /* If saving or cloning a function body, create new basic_block_info
     and label_to_block_maps.  Otherwise, we're duplicating a function
     body for inlining; insert our new blocks and labels into the
     existing varrays.  */
  saving_or_cloning = (id->saving_p || id->cloning_p || id->versioning_p);
  if (saving_or_cloning)
    {
      new_cfun =
        (struct function *) ggc_alloc_cleared (sizeof (struct function));
      *new_cfun = *DECL_STRUCT_FUNCTION (callee_fndecl);
      new_cfun->cfg = NULL;
      new_cfun->decl = new_fndecl = copy_node (callee_fndecl);
      new_cfun->ib_boundaries_block = (varray_type) 0;
      DECL_STRUCT_FUNCTION (new_fndecl) = new_cfun;
      push_cfun (new_cfun);
      init_empty_tree_cfg ();

      ENTRY_BLOCK_PTR->count =
        (ENTRY_BLOCK_PTR_FOR_FUNCTION (callee_cfun)->count * count_scale /
         REG_BR_PROB_BASE);
      ENTRY_BLOCK_PTR->frequency =
        (ENTRY_BLOCK_PTR_FOR_FUNCTION (callee_cfun)->frequency *
         frequency_scale / REG_BR_PROB_BASE);
      EXIT_BLOCK_PTR->count =
        (EXIT_BLOCK_PTR_FOR_FUNCTION (callee_cfun)->count * count_scale /
         REG_BR_PROB_BASE);
      EXIT_BLOCK_PTR->frequency =
        (EXIT_BLOCK_PTR_FOR_FUNCTION (callee_cfun)->frequency *
         frequency_scale / REG_BR_PROB_BASE);

      entry_block_map = ENTRY_BLOCK_PTR;
      exit_block_map = EXIT_BLOCK_PTR;
    }

  ENTRY_BLOCK_PTR_FOR_FUNCTION (cfun_to_copy)->aux = entry_block_map;
  EXIT_BLOCK_PTR_FOR_FUNCTION (cfun_to_copy)->aux = exit_block_map;

  /* Duplicate any exception-handling regions.  */
  if (cfun->eh)
    {
      if (saving_or_cloning)
        init_eh_for_function ();
      id->eh_region_offset = duplicate_eh_regions (cfun_to_copy,
                                                   remap_decl_1,
                                                   id, id->eh_region);
      gcc_assert (inlining_p (id) || !id->eh_region_offset);
    }
  /* Use aux pointers to map the original blocks to copy.  */
  FOR_EACH_BB_FN (bb, cfun_to_copy)
    bb->aux = copy_bb (id, bb, frequency_scale, count_scale);
  /* Now that we've duplicated the blocks, duplicate their edges.  */
  FOR_ALL_BB_FN (bb, cfun_to_copy)
    copy_edges_for_bb (bb, count_scale);
  FOR_ALL_BB_FN (bb, cfun_to_copy)
    bb->aux = NULL;

  if (saving_or_cloning)
    pop_cfun ();

  return new_fndecl;
}

/* Make a copy of the body of FN so that it can be inserted inline in
   another function.  */

static tree
copy_generic_body (inline_data *id)
{
  tree body;
  tree fndecl = id->callee;

  body = DECL_SAVED_TREE (fndecl);
  walk_tree (&body, copy_body_r, id, NULL);

  return body;
}

static tree
copy_body (inline_data *id, gcov_type count, int frequency,
           basic_block entry_block_map, basic_block exit_block_map)
{
  tree fndecl = id->callee;
  tree body;

  /* If this body has a CFG, walk CFG and copy.  */
  gcc_assert (ENTRY_BLOCK_PTR_FOR_FUNCTION (DECL_STRUCT_FUNCTION (fndecl)));
  body = copy_cfg_body (id, count, frequency, entry_block_map, exit_block_map);

  return body;
}

/* Return true if VALUE is an ADDR_EXPR of an automatic variable
   defined in function FN, or of a data member thereof.  */

static bool
self_inlining_addr_expr (tree value, tree fn)
{
  tree var;

  if (TREE_CODE (value) != ADDR_EXPR)
    return false;

  var = get_base_address (TREE_OPERAND (value, 0));

  return var && lang_hooks.tree_inlining.auto_var_in_fn_p (var, fn);
}

static void
setup_one_parameter (inline_data *id, tree p, tree value, tree fn,
                     basic_block bb, tree *vars)
{
  tree init_stmt;
  tree var;
  tree var_sub;

  /* If the parameter is never assigned to, we may not need to
     create a new variable here at all.  Instead, we may be able
     to just use the argument value.  */
  if (TREE_READONLY (p)
      && !TREE_ADDRESSABLE (p)
      && value && !TREE_SIDE_EFFECTS (value))
    {
      /* We may produce non-gimple trees by adding NOPs or introduce
         invalid sharing when operand is not really constant.
         It is not a big deal to prohibit constant propagation here as
         we will constant propagate in DOM1 pass anyway.  */
      if (is_gimple_min_invariant (value)
          && lang_hooks.types_compatible_p (TREE_TYPE (value), TREE_TYPE (p))
          /* We have to be very careful about ADDR_EXPR.  Make sure
             the base variable isn't a local variable of the inlined
             function, e.g., when doing recursive inlining, direct or
             mutually-recursive or whatever, which is why we don't
             just test whether fn == current_function_decl.  */
          && ! self_inlining_addr_expr (value, fn))
        {
          insert_decl_map (id, p, value);
          return;
        }
    }

  /* Make an equivalent VAR_DECL.  Note that we must NOT remap the type
     here since the type of this decl must be visible to the calling
     function.  */
  var = copy_decl_for_dup (p, fn, id->caller, /*versioning=*/false);

  /* See if the frontend wants to pass this by invisible reference.  If
     so, our new VAR_DECL will have REFERENCE_TYPE, and we need to
     replace uses of the PARM_DECL with dereferences.  */
  if (TREE_TYPE (var) != TREE_TYPE (p)
      && POINTER_TYPE_P (TREE_TYPE (var))
      && TREE_TYPE (TREE_TYPE (var)) == TREE_TYPE (p))
    {
      insert_decl_map (id, var, var);
      var_sub = build_fold_indirect_ref (var);
    }
  else
    var_sub = var;

  /* Register the VAR_DECL as the equivalent for the PARM_DECL;
     that way, when the PARM_DECL is encountered, it will be
     automatically replaced by the VAR_DECL.  */
  insert_decl_map (id, p, var_sub);

  /* Declare this new variable.  */
  TREE_CHAIN (var) = *vars;
  *vars = var;

  /* Make gimplifier happy about this variable.  */
  DECL_SEEN_IN_BIND_EXPR_P (var) = 1;

  /* Even if P was TREE_READONLY, the new VAR should not be.
     In the original code, we would have constructed a
     temporary, and then the function body would have never
     changed the value of P.  However, now, we will be
     constructing VAR directly.  The constructor body may
     change its value multiple times as it is being
     constructed.  Therefore, it must not be TREE_READONLY;
     the back-end assumes that TREE_READONLY variable is
     assigned to only once.  */
  if (TYPE_NEEDS_CONSTRUCTING (TREE_TYPE (p)))
    TREE_READONLY (var) = 0;

  /* Initialize this VAR_DECL from the equivalent argument.  Convert
     the argument to the proper type in case it was promoted.  */
  if (value)
    {
      tree rhs = fold_convert (TREE_TYPE (var), value);
      block_stmt_iterator bsi = bsi_last (bb);

      if (rhs == error_mark_node)
        return;

      /* We want to use MODIFY_EXPR, not INIT_EXPR here so that we
         keep our trees in gimple form.  */
      init_stmt = build (MODIFY_EXPR, TREE_TYPE (var), var, rhs);

      /* If we did not create a gimple value and we did not create a gimple
         cast of a gimple value, then we will need to gimplify INIT_STMTS
         at the end.  Note that is_gimple_cast only checks the outer
         tree code, not its operand.  Thus the explicit check that its
         operand is a gimple value.  */
      if (!is_gimple_val (rhs)
          && (!is_gimple_cast (rhs)
              || !is_gimple_val (TREE_OPERAND (rhs, 0))))
        gimplify_stmt (&init_stmt);
      bsi_insert_after (&bsi, init_stmt, BSI_NEW_STMT);
    }
}

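/* Two illustrative outcomes of setup_one_parameter, using the sq
   example from the top of this file: for "sq (3)" the parameter i is
   never assigned and 3 is a gimple invariant, so i simply maps to 3 and
   no variable is created at all; for "sq (a + b)" the argument must be
   evaluated, so a fresh VAR_DECL i.0 (name invented here) is declared
   and "i.0 = a + b" is inserted at the end of BB.  */
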
/* Generate code to initialize the parameters of the function at the
   top of the stack in ID from the ARGS (presented as a TREE_LIST).  */

static void
initialize_inlined_parameters (inline_data *id, tree args, tree static_chain,
                               tree fn, basic_block bb)
{
  tree parms;
  tree a;
  tree p;
  tree vars = NULL_TREE;
  int argnum = 0;

  /* Figure out what the parameters are.  */
  parms = DECL_ARGUMENTS (fn);
  if (fn == current_function_decl)
    parms = cfun->saved_args;

  /* Loop through the parameter declarations, replacing each with an
     equivalent VAR_DECL, appropriately initialized.  */
  for (p = parms, a = args; p;
       a = a ? TREE_CHAIN (a) : a, p = TREE_CHAIN (p))
    {
      tree value;

      ++argnum;

      /* Find the initializer.  */
      value = lang_hooks.tree_inlining.convert_parm_for_inlining
              (p, a ? TREE_VALUE (a) : NULL_TREE, fn, argnum);

      setup_one_parameter (id, p, value, fn, bb, &vars);
    }

  /* Initialize the static chain.  */
  p = DECL_STRUCT_FUNCTION (fn)->static_chain_decl;
  if (fn == current_function_decl)
    p = DECL_STRUCT_FUNCTION (fn)->saved_static_chain_decl;
  if (p)
    {
      /* No static chain?  Seems like a bug in tree-nested.c.  */
      gcc_assert (static_chain);

      setup_one_parameter (id, p, static_chain, fn, bb, &vars);
    }

  declare_inline_vars (id->block, vars);
}

/* Declare a return variable to replace the RESULT_DECL for the
   function we are calling.

   RETURN_SLOT_ADDR, if non-null, was a fake parameter that
   took the address of the result.  MODIFY_DEST, if non-null, was the LHS of
   the MODIFY_EXPR to which this call is the RHS.

   The return value is a (possibly null) value that is the result of the
   function as seen by the callee.  *USE_P is a (possibly null) value that
   holds the result as seen by the caller.  */

static tree
declare_return_variable (inline_data *id, tree return_slot_addr,
                         tree modify_dest, tree *use_p)
{
  tree callee = id->callee;
  tree caller = id->caller;
  tree result = DECL_RESULT (callee);
  tree callee_type = TREE_TYPE (result);
  tree caller_type = TREE_TYPE (TREE_TYPE (callee));
  tree var, use;

  /* We don't need to do anything for functions that don't return
     anything.  */
  if (!result || VOID_TYPE_P (callee_type))
    {
      *use_p = NULL_TREE;
      return NULL_TREE;
    }

  /* If there was a return slot, then the return value is the
     dereferenced address of that object.  */
  if (return_slot_addr)
    {
      /* The front end shouldn't have used both return_slot_addr and
         a modify expression.  */
      gcc_assert (!modify_dest);
      if (DECL_BY_REFERENCE (result))
        var = return_slot_addr;
      else
        var = build_fold_indirect_ref (return_slot_addr);
      use = NULL;
      goto done;
    }

  /* All types requiring non-trivial constructors should have been handled.  */
  gcc_assert (!TREE_ADDRESSABLE (callee_type));

  /* Attempt to avoid creating a new temporary variable.  */
  if (modify_dest)
    {
      bool use_it = false;

      /* We can't use MODIFY_DEST if there's type promotion involved.  */
      if (!lang_hooks.types_compatible_p (caller_type, callee_type))
        use_it = false;

      /* ??? If we're assigning to a variable sized type, then we must
         reuse the destination variable, because we've no good way to
         create variable sized temporaries at this point.  */
      else if (TREE_CODE (TYPE_SIZE_UNIT (caller_type)) != INTEGER_CST)
        use_it = true;

      /* If the callee cannot possibly modify MODIFY_DEST, then we can
         reuse it as the result of the call directly.  Don't do this if
         it would promote MODIFY_DEST to addressable.  */
      else if (TREE_ADDRESSABLE (result))
        use_it = false;
      else
        {
          tree base_m = get_base_address (modify_dest);

          /* If the base isn't a decl, then it's a pointer, and we don't
             know where that's going to go.  */
          if (!DECL_P (base_m))
            use_it = false;
          else if (is_global_var (base_m))
            use_it = false;
          else if (!TREE_ADDRESSABLE (base_m))
            use_it = true;
        }

      if (use_it)
        {
          var = modify_dest;
          use = NULL;
          goto done;
        }
    }

  gcc_assert (TREE_CODE (TYPE_SIZE_UNIT (callee_type)) == INTEGER_CST);

  var = copy_decl_for_dup (result, callee, caller, /*versioning=*/false);

  DECL_SEEN_IN_BIND_EXPR_P (var) = 1;
  DECL_STRUCT_FUNCTION (caller)->unexpanded_var_list
    = tree_cons (NULL_TREE, var,
                 DECL_STRUCT_FUNCTION (caller)->unexpanded_var_list);

  /* Do not have the rest of GCC warn about this variable as it should
     not be visible to the user.  */
  TREE_NO_WARNING (var) = 1;

  /* Build the use expr.  If the return type of the function was
     promoted, convert it back to the expected type.  */
  use = var;
  if (!lang_hooks.types_compatible_p (TREE_TYPE (var), caller_type))
    use = fold_convert (caller_type, var);

 done:
  /* Register the VAR_DECL as the equivalent for the RESULT_DECL; that
     way, when the RESULT_DECL is encountered, it will be
     automatically replaced by the VAR_DECL.  */
  insert_decl_map (id, result, var);

  /* Remember this so we can ignore it in remap_decls.  */
  id->retvar = var;

  *use_p = use;
  return var;
}

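/* Illustration of the MODIFY_DEST reuse above: for "x = f ()" where x
   is a local, non-addressable decl of a compatible type, x itself is
   used as the return variable and the copied body stores straight into
   x.  If x were global or addressable, a temporary retval.N (name
   invented here) would be created instead, and the caller of this
   function would emit "x = retval.N" after the inlined body.  */
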
/* Returns nonzero if a function can be inlined as a tree.  */

bool
tree_inlinable_function_p (tree fn)
{
  return inlinable_function_p (fn);
}

static const char *inline_forbidden_reason;

static tree
inline_forbidden_p_1 (tree *nodep, int *walk_subtrees ATTRIBUTE_UNUSED,
                      void *fnp)
{
  tree node = *nodep;
  tree fn = (tree) fnp;
  tree t;

  switch (TREE_CODE (node))
    {
    case CALL_EXPR:
      /* Refuse to inline an alloca call unless the user explicitly forced
         it, as this may drastically change the program's memory overhead
         when the function using alloca is called in a loop.  In the GCC
         included in SPEC2000, inlining into schedule_block caused it to
         require 2GB of RAM instead of 256MB.  */
      if (alloca_call_p (node)
          && !lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn)))
        {
          inline_forbidden_reason
            = G_("function %q+F can never be inlined because it uses "
                 "alloca (override using the always_inline attribute)");
          return node;
        }
      t = get_callee_fndecl (node);
      if (! t)
        break;

      /* We cannot inline functions that call setjmp.  */
      if (setjmp_call_p (t))
        {
          inline_forbidden_reason
            = G_("function %q+F can never be inlined because it uses setjmp");
          return node;
        }

      if (DECL_BUILT_IN_CLASS (t) == BUILT_IN_NORMAL)
        switch (DECL_FUNCTION_CODE (t))
          {
            /* We cannot inline functions that take a variable number of
               arguments.  */
          case BUILT_IN_VA_START:
          case BUILT_IN_STDARG_START:
          case BUILT_IN_NEXT_ARG:
          case BUILT_IN_VA_END:
            inline_forbidden_reason
              = G_("function %q+F can never be inlined because it "
                   "uses variable argument lists");
            return node;

          case BUILT_IN_LONGJMP:
            /* We can't inline functions that call __builtin_longjmp at
               all.  The non-local goto machinery really requires the
               destination be in a different function.  If we allow the
               function calling __builtin_longjmp to be inlined into the
               function calling __builtin_setjmp, Things will Go Awry.  */
            inline_forbidden_reason
              = G_("function %q+F can never be inlined because "
                   "it uses setjmp-longjmp exception handling");
            return node;

          case BUILT_IN_NONLOCAL_GOTO:
            /* Similarly.  */
            inline_forbidden_reason
              = G_("function %q+F can never be inlined because "
                   "it uses non-local goto");
            return node;

          case BUILT_IN_RETURN:
          case BUILT_IN_APPLY_ARGS:
            /* If a __builtin_apply_args caller would be inlined,
               it would be saving arguments of the function it has
               been inlined into.  Similarly __builtin_return would
               return from the function the inline has been inlined into.  */
            inline_forbidden_reason
              = G_("function %q+F can never be inlined because "
                   "it uses __builtin_return or __builtin_apply_args");
            return node;

          default:
            break;
          }
      break;

    case GOTO_EXPR:
      t = TREE_OPERAND (node, 0);

      /* We will not inline a function which uses computed goto.  The
         addresses of its local labels, which may be tucked into
         global storage, are of course not constant across
         instantiations, which causes unexpected behavior.  */
      if (TREE_CODE (t) != LABEL_DECL)
        {
          inline_forbidden_reason
            = G_("function %q+F can never be inlined "
                 "because it contains a computed goto");
          return node;
        }
      break;

    case LABEL_EXPR:
      t = TREE_OPERAND (node, 0);
      if (DECL_NONLOCAL (t))
        {
          /* We cannot inline a function that receives a non-local goto
             because we cannot remap the destination label used in the
             function that is performing the non-local goto.  */
          inline_forbidden_reason
            = G_("function %q+F can never be inlined "
                 "because it receives a non-local goto");
          return node;
        }
      break;

    case RECORD_TYPE:
    case UNION_TYPE:
      /* We cannot inline a function of the form

           void F (int i) { struct S { int ar[i]; } s; }

         Attempting to do so produces a catch-22.
         If walk_tree examines the TYPE_FIELDS chain of RECORD_TYPE/
         UNION_TYPE nodes, then it goes into infinite recursion on a
         structure containing a pointer to its own type.  If it doesn't,
         then the type node for S doesn't get adjusted properly when
         F is inlined.

         ??? This is likely no longer true, but it's too late in the 4.0
         cycle to try to find out.  This should be checked for 4.1.  */
      for (t = TYPE_FIELDS (node); t; t = TREE_CHAIN (t))
        if (variably_modified_type_p (TREE_TYPE (t), NULL))
          {
            inline_forbidden_reason
              = G_("function %q+F can never be inlined "
                   "because it uses variable sized variables");
            return node;
          }

    default:
      break;
    }

  return NULL_TREE;
}

/* Return subexpression representing possible alloca call, if any.  */

static tree
inline_forbidden_p (tree fndecl)
{
  location_t saved_loc = input_location;
  block_stmt_iterator bsi;
  basic_block bb;
  tree ret = NULL_TREE;

  FOR_EACH_BB_FN (bb, DECL_STRUCT_FUNCTION (fndecl))
    for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
      {
        ret = walk_tree_without_duplicates (bsi_stmt_ptr (bsi),
                                            inline_forbidden_p_1, fndecl);
        if (ret)
          goto egress;
      }

egress:
  input_location = saved_loc;
  return ret;
}

/* Returns nonzero if FN is a function that does not have any
   fundamental inline blocking properties.  */

static bool
inlinable_function_p (tree fn)
{
  bool inlinable = true;

  /* If we've already decided this function shouldn't be inlined,
     there's no need to check again.  */
  if (DECL_UNINLINABLE (fn))
    return false;

  /* See if there is any language-specific reason it cannot be
     inlined.  (It is important that this hook be called early because
     in C++ it may result in template instantiation.)
     If the function is not inlinable for language-specific reasons,
     it is left up to the langhook to explain why.  */
  inlinable = !lang_hooks.tree_inlining.cannot_inline_tree_fn (&fn);

  /* If we don't have the function body available, we can't inline it.
     However, this should not be recorded since we also get here for
     forward declared inline functions.  Therefore, return at once.  */
  if (!DECL_SAVED_TREE (fn))
    return false;

  /* If we're not inlining at all, then we cannot inline this function.  */
  else if (!flag_inline_trees)
    inlinable = false;

  /* Only try to inline functions if DECL_INLINE is set.  This should be
     true for all functions declared `inline', and for all other functions
     as well with -finline-functions.

     Don't think of disregarding DECL_INLINE when flag_inline_trees == 2;
     it's the front-end that must set DECL_INLINE in this case, because
     dwarf2out loses if a function that does not have DECL_INLINE set is
     inlined anyway.  That is why we have both DECL_INLINE and
     DECL_DECLARED_INLINE_P.  */
  /* FIXME: When flag_inline_trees dies, the check for flag_unit_at_a_time
     here should be redundant.  */
  else if (!DECL_INLINE (fn) && !flag_unit_at_a_time)
    inlinable = false;

  else if (inline_forbidden_p (fn))
    {
      /* See if we should warn about uninlinable functions.  Previously,
         some of these warnings would be issued while trying to expand
         the function inline, but that would cause multiple warnings
         about functions that would for example call alloca.  But since
         this is a property of the function, just one warning is enough.
         As a bonus we can now give more details about the reason why a
         function is not inlinable.
         We only warn for functions declared `inline' by the user.  */
      bool do_warning = (warn_inline
                         && DECL_INLINE (fn)
                         && DECL_DECLARED_INLINE_P (fn)
                         && !DECL_IN_SYSTEM_HEADER (fn));

      if (lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn)))
        sorry (inline_forbidden_reason, fn);
      else if (do_warning)
        warning (OPT_Winline, inline_forbidden_reason, fn);

      inlinable = false;
    }

  /* Squirrel away the result so that we don't have to check again.  */
  DECL_UNINLINABLE (fn) = !inlinable;

  return inlinable;
}

/* Estimate the cost of a memory move.  Use machine dependent
   word size and take possible memcpy call into account.  */

static int
estimate_move_cost (tree type)
{
  HOST_WIDE_INT size;

  size = int_size_in_bytes (type);

  if (size < 0 || size > MOVE_MAX_PIECES * MOVE_RATIO)
    /* Cost of a memcpy call, 3 arguments and the call.  */
    return 4;
  else
    return ((size + MOVE_MAX_PIECES - 1) / MOVE_MAX_PIECES);
}

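/* Worked example, assuming a target where MOVE_MAX_PIECES == 8 and
   MOVE_RATIO == 3: a 20-byte struct costs (20 + 8 - 1) / 8 == 3 units,
   while a 64-byte struct exceeds 8 * 3 == 24 bytes and is therefore
   assumed to become a memcpy call with cost 4.  */
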
1607 /* Used by estimate_num_insns. Estimate number of instructions seen
1608 by given statement. */
1610 static tree
1611 estimate_num_insns_1 (tree *tp, int *walk_subtrees, void *data)
1613 int *count = data;
1614 tree x = *tp;
1616 if (IS_TYPE_OR_DECL_P (x))
1618 *walk_subtrees = 0;
1619 return NULL;
1621 /* Assume that constants and references counts nothing. These should
1622 be majorized by amount of operations among them we count later
1623 and are common target of CSE and similar optimizations. */
1624 else if (CONSTANT_CLASS_P (x) || REFERENCE_CLASS_P (x))
1625 return NULL;
1627 switch (TREE_CODE (x))
1629 /* Containers have no cost. */
1630 case TREE_LIST:
1631 case TREE_VEC:
1632 case BLOCK:
1633 case COMPONENT_REF:
1634 case BIT_FIELD_REF:
1635 case INDIRECT_REF:
1636 case ALIGN_INDIRECT_REF:
1637 case MISALIGNED_INDIRECT_REF:
1638 case ARRAY_REF:
1639 case ARRAY_RANGE_REF:
1640 case OBJ_TYPE_REF:
1641 case EXC_PTR_EXPR: /* ??? */
1642 case FILTER_EXPR: /* ??? */
1643 case COMPOUND_EXPR:
1644 case BIND_EXPR:
1645 case WITH_CLEANUP_EXPR:
1646 case NOP_EXPR:
1647 case VIEW_CONVERT_EXPR:
1648 case SAVE_EXPR:
1649 case ADDR_EXPR:
1650 case COMPLEX_EXPR:
1651 case RANGE_EXPR:
1652 case CASE_LABEL_EXPR:
1653 case SSA_NAME:
1654 case CATCH_EXPR:
1655 case EH_FILTER_EXPR:
1656 case STATEMENT_LIST:
1657 case ERROR_MARK:
1658 case NON_LVALUE_EXPR:
1659 case FDESC_EXPR:
1660 case VA_ARG_EXPR:
1661 case TRY_CATCH_EXPR:
1662 case TRY_FINALLY_EXPR:
1663 case LABEL_EXPR:
1664 case GOTO_EXPR:
1665 case RETURN_EXPR:
1666 case EXIT_EXPR:
1667 case LOOP_EXPR:
1668 case PHI_NODE:
1669 case WITH_SIZE_EXPR:
1670 break;
1672 /* We don't account constants for now. Assume that the cost is amortized
1673 by operations that do use them. We may re-consider this decision once
1674 we are able to optimize the tree before estimating its size and break
1675 out static initializers. */
1676 case IDENTIFIER_NODE:
1677 case INTEGER_CST:
1678 case REAL_CST:
1679 case COMPLEX_CST:
1680 case VECTOR_CST:
1681 case STRING_CST:
1682 *walk_subtrees = 0;
1683 return NULL;
1685 /* Try to estimate the cost of assignments. We have three cases to
1686 deal with:
1687 1) Simple assignments to registers;
1688 2) Stores to things that must live in memory. This includes
1689 "normal" stores to scalars, but also assignments of large
1690 structures, or constructors of big arrays;
1691 3) TARGET_EXPRs.
1693 Let us look at the first two cases, assuming we have "a = b + C":
1694 <modify_expr <var_decl "a"> <plus_expr <var_decl "b"> <constant C>>
1695 If "a" is a GIMPLE register, the assignment to it is free on almost
1696 any target, because "a" usually ends up in a real register. Hence
1697 the only cost of this expression comes from the PLUS_EXPR, and we
1698 can ignore the MODIFY_EXPR.
1699 If "a" is not a GIMPLE register, the assignment to "a" will most
1700 likely be a real store, so the cost of the MODIFY_EXPR is the cost
1701 of moving something into "a", which we compute using the function
1702 estimate_move_cost.
1704 The third case deals with TARGET_EXPRs, for which the semantics are
1705 that a temporary is assigned, unless the TARGET_EXPR itself is being
1706 assigned to something else. In the latter case we do not need the
1707 temporary. E.g. in <modify_expr <var_decl "a"> <target_expr>>, the
1708 MODIFY_EXPR is free. */
1709 case INIT_EXPR:
1710 case MODIFY_EXPR:
1711 /* Is the right and side a TARGET_EXPR? */
1712 if (TREE_CODE (TREE_OPERAND (x, 1)) == TARGET_EXPR)
1713 break;
1714 /* ... fall through ... */
1716 case TARGET_EXPR:
1717 x = TREE_OPERAND (x, 0);
1718 /* Is this an assignments to a register? */
1719 if (is_gimple_reg (x))
1720 break;
1721 /* Otherwise it's a store, so fall through to compute the move cost. */
1723 case CONSTRUCTOR:
1724 *count += estimate_move_cost (TREE_TYPE (x));
1725 break;
1727 /* Assign cost of 1 to usual operations.
1728 ??? We may consider mapping RTL costs to this. */
1729 case COND_EXPR:
1730 case VEC_COND_EXPR:
1732 case PLUS_EXPR:
1733 case MINUS_EXPR:
1734 case MULT_EXPR:
1736 case FIX_TRUNC_EXPR:
1737 case FIX_CEIL_EXPR:
1738 case FIX_FLOOR_EXPR:
1739 case FIX_ROUND_EXPR:
1741 case NEGATE_EXPR:
1742 case FLOAT_EXPR:
1743 case MIN_EXPR:
1744 case MAX_EXPR:
1745 case ABS_EXPR:
1747 case LSHIFT_EXPR:
1748 case RSHIFT_EXPR:
1749 case LROTATE_EXPR:
1750 case RROTATE_EXPR:
1751 case VEC_LSHIFT_EXPR:
1752 case VEC_RSHIFT_EXPR:
1754 case BIT_IOR_EXPR:
1755 case BIT_XOR_EXPR:
1756 case BIT_AND_EXPR:
1757 case BIT_NOT_EXPR:
1759 case TRUTH_ANDIF_EXPR:
1760 case TRUTH_ORIF_EXPR:
1761 case TRUTH_AND_EXPR:
1762 case TRUTH_OR_EXPR:
1763 case TRUTH_XOR_EXPR:
1764 case TRUTH_NOT_EXPR:
1766 case LT_EXPR:
1767 case LE_EXPR:
1768 case GT_EXPR:
1769 case GE_EXPR:
1770 case EQ_EXPR:
1771 case NE_EXPR:
1772 case ORDERED_EXPR:
1773 case UNORDERED_EXPR:
1775 case UNLT_EXPR:
1776 case UNLE_EXPR:
1777 case UNGT_EXPR:
1778 case UNGE_EXPR:
1779 case UNEQ_EXPR:
1780 case LTGT_EXPR:
1782 case CONVERT_EXPR:
1784 case CONJ_EXPR:
1786 case PREDECREMENT_EXPR:
1787 case PREINCREMENT_EXPR:
1788 case POSTDECREMENT_EXPR:
1789 case POSTINCREMENT_EXPR:
1791 case SWITCH_EXPR:
1793 case ASM_EXPR:
1795 case REALIGN_LOAD_EXPR:
1797 case REDUC_MAX_EXPR:
1798 case REDUC_MIN_EXPR:
1799 case REDUC_PLUS_EXPR:
1801 case RESX_EXPR:
1802 *count += 1;
1803 break;
1805 /* Few special cases of expensive operations. This is useful
1806 to avoid inlining on functions having too many of these. */
1807 case TRUNC_DIV_EXPR:
1808 case CEIL_DIV_EXPR:
1809 case FLOOR_DIV_EXPR:
1810 case ROUND_DIV_EXPR:
1811 case EXACT_DIV_EXPR:
1812 case TRUNC_MOD_EXPR:
1813 case CEIL_MOD_EXPR:
1814 case FLOOR_MOD_EXPR:
1815 case ROUND_MOD_EXPR:
1816 case RDIV_EXPR:
1817 *count += 10;
1818 break;
1819 case CALL_EXPR:
1821 tree decl = get_callee_fndecl (x);
1822 tree arg;
1824 if (decl && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
1825 switch (DECL_FUNCTION_CODE (decl))
1827 case BUILT_IN_CONSTANT_P:
1828 *walk_subtrees = 0;
1829 return NULL_TREE;
1830 case BUILT_IN_EXPECT:
1831 return NULL_TREE;
1832 default:
1833 break;
1836 /* Our cost must be kept in sync with cgraph_estimate_size_after_inlining,
1837 which uses the function declaration to figure out the arguments. */
1838 if (!decl)
1840 for (arg = TREE_OPERAND (x, 1); arg; arg = TREE_CHAIN (arg))
1841 *count += estimate_move_cost (TREE_TYPE (TREE_VALUE (arg)));
1843 else
1845 for (arg = DECL_ARGUMENTS (decl); arg; arg = TREE_CHAIN (arg))
1846 *count += estimate_move_cost (TREE_TYPE (arg));
1849 *count += PARAM_VALUE (PARAM_INLINE_CALL_COST);
1850 break;
1852 default:
1853 gcc_unreachable ();
1855 return NULL;
1858 /* Estimate number of instructions that will be created by expanding EXPR. */
1861 estimate_num_insns (tree expr)
1863 int num = 0;
1864 struct pointer_set_t *visited_nodes;
1865 basic_block bb;
1866 block_stmt_iterator bsi;
1867 struct function *my_function;
1869 /* If we're given an entire function, walk the CFG. */
1870 if (TREE_CODE (expr) == FUNCTION_DECL)
1872 my_function = DECL_STRUCT_FUNCTION (expr);
1873 gcc_assert (my_function && my_function->cfg);
1874 visited_nodes = pointer_set_create ();
1875 FOR_EACH_BB_FN (bb, my_function)
1877 for (bsi = bsi_start (bb);
1878 !bsi_end_p (bsi);
1879 bsi_next (&bsi))
1881 walk_tree (bsi_stmt_ptr (bsi), estimate_num_insns_1,
1882 &num, visited_nodes);
1885 pointer_set_destroy (visited_nodes);
1887 else
1888 walk_tree_without_duplicates (&expr, estimate_num_insns_1, &num);
1890 return num;
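/* Usage sketch (editorial; FNDECL and SOME_EXPR are hypothetical):
   both entry points of the size estimator in one place.  */
#if 0
  int body_size = estimate_num_insns (fndecl);    /* walks the whole CFG */
  int expr_size = estimate_num_insns (some_expr); /* walks a single tree */
#endif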
1893 typedef struct function *function_p;
1895 DEF_VEC_P(function_p);
1896 DEF_VEC_ALLOC_P(function_p,heap);
1898 /* Initialized with NOGC, making this poisonous to the garbage collector. */
1899 static VEC(function_p,heap) *cfun_stack;
1901 void
1902 push_cfun (struct function *new_cfun)
1904 VEC_safe_push (function_p, heap, cfun_stack, cfun);
1905 cfun = new_cfun;
1908 void
1909 pop_cfun (void)
1911 cfun = VEC_pop (function_p, cfun_stack);
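/* Usage sketch (editorial; OTHER_FNDECL is hypothetical): temporarily
   make another function's struct function the current one.  */
#if 0
  push_cfun (DECL_STRUCT_FUNCTION (other_fndecl));
  /* ... work that reads or mutates cfun ... */
  pop_cfun ();
#endif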
1914 /* Install new lexical TREE_BLOCK underneath 'current_block'. */
1915 static void
1916 add_lexical_block (tree current_block, tree new_block)
1918 tree *blk_p;
1920 /* Walk to the last sub-block. */
1921 for (blk_p = &BLOCK_SUBBLOCKS (current_block);
1922 *blk_p;
1923 blk_p = &TREE_CHAIN (*blk_p))
1925 *blk_p = new_block;
1926 BLOCK_SUPERCONTEXT (new_block) = current_block;
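/* Editorial sketch of the resulting invariant: NEW_BLOCK ends up as
   the last entry on CURRENT_BLOCK's sub-block chain and points back
   at its parent.  */
#if 0
  add_lexical_block (current_block, new_block);
  gcc_assert (BLOCK_SUPERCONTEXT (new_block) == current_block);
#endif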
1929 /* If *TP is a CALL_EXPR, replace it with its inline expansion. */
1931 static bool
1932 expand_call_inline (basic_block bb, tree stmt, tree *tp, void *data)
1934 inline_data *id;
1935 tree t;
1936 tree use_retvar;
1937 tree fn;
1938 splay_tree st;
1939 tree args;
1940 tree return_slot_addr;
1941 tree modify_dest;
1942 location_t saved_location;
1943 struct cgraph_edge *cg_edge;
1944 const char *reason;
1945 basic_block return_block;
1946 edge e;
1947 block_stmt_iterator bsi, stmt_bsi;
1948 bool successfully_inlined = FALSE;
1949 tree t_step;
1950 tree var;
1951 struct cgraph_node *old_node;
1952 tree decl;
1954 /* See what we've got. */
1955 id = (inline_data *) data;
1956 t = *tp;
1958 /* Set input_location here so we get the right instantiation context
1959 if we call instantiate_decl from inlinable_function_p. */
1960 saved_location = input_location;
1961 if (EXPR_HAS_LOCATION (t))
1962 input_location = EXPR_LOCATION (t);
1964 /* From here on, we're only interested in CALL_EXPRs. */
1965 if (TREE_CODE (t) != CALL_EXPR)
1966 goto egress;
1968 /* First, see if we can figure out what function is being called.
1969 If we cannot, then there is no hope of inlining the function. */
1970 fn = get_callee_fndecl (t);
1971 if (!fn)
1972 goto egress;
1974 /* Turn forward declarations into real ones. */
1975 fn = cgraph_node (fn)->decl;
1977 /* If fn is a declaration of a function in a nested scope that was
1978 globally declared inline, we don't set its DECL_INITIAL.
1979 However, we can't blindly follow DECL_ABSTRACT_ORIGIN because the
1980 C++ front-end uses it for cdtors to refer to their internal
1981 declarations, which are not real functions. Fortunately those
1982 don't have trees to be saved, so we can tell by checking their
1983 DECL_SAVED_TREE. */
1984 if (! DECL_INITIAL (fn)
1985 && DECL_ABSTRACT_ORIGIN (fn)
1986 && DECL_SAVED_TREE (DECL_ABSTRACT_ORIGIN (fn)))
1987 fn = DECL_ABSTRACT_ORIGIN (fn);
1989 /* Objective-C and Fortran still call tree_rest_of_compilation directly.
1990 Kill this check once this is fixed. */
1991 if (!id->current_node->analyzed)
1992 goto egress;
1994 cg_edge = cgraph_edge (id->current_node, stmt);
1996 /* Constant propagation on arguments done during previous inlining
1997 may create a new direct call. Produce an edge for it. */
1998 if (!cg_edge)
2000 struct cgraph_node *dest = cgraph_node (fn);
2002 /* We have a missing edge in the callgraph. This can happen
2003 when previous inlining turned an indirect call into a direct call by
2004 constant propagating arguments. In all other cases we hit a bug
2005 (incorrect node sharing is the most common reason for missing edges). */
2006 gcc_assert (dest->needed || !flag_unit_at_a_time);
2007 cgraph_create_edge (id->node, dest, stmt,
2008 bb->count, bb->loop_depth)->inline_failed
2009 = N_("originally indirect function call not considered for inlining");
2010 goto egress;
2013 /* Don't try to inline functions that are not well-suited to
2014 inlining. */
2015 if (!cgraph_inline_p (cg_edge, &reason))
2017 if (lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn))
2018 /* Avoid warnings during early inline pass. */
2019 && (!flag_unit_at_a_time || cgraph_global_info_ready))
2021 sorry ("inlining failed in call to %q+F: %s", fn, reason);
2022 sorry ("called from here");
2024 else if (warn_inline && DECL_DECLARED_INLINE_P (fn)
2025 && !DECL_IN_SYSTEM_HEADER (fn)
2026 && strlen (reason)
2027 && !lookup_attribute ("noinline", DECL_ATTRIBUTES (fn))
2028 /* Avoid warnings during early inline pass. */
2029 && (!flag_unit_at_a_time || cgraph_global_info_ready))
2031 warning (OPT_Winline, "inlining failed in call to %q+F: %s",
2032 fn, reason);
2033 warning (OPT_Winline, "called from here");
2035 goto egress;
2038 #ifdef ENABLE_CHECKING
2039 if (cg_edge->callee->decl != id->node->decl)
2040 verify_cgraph_node (cg_edge->callee);
2041 #endif
2043 /* We will be inlining this callee. */
2045 id->eh_region = lookup_stmt_eh_region (stmt);
2047 /* Split the block holding the CALL_EXPR. */
2049 e = split_block (bb, stmt);
2050 bb = e->src;
2051 return_block = e->dest;
2052 remove_edge (e);
2054 /* split_block splits after the statement; work around this by moving
2055 the call into the second half of the split (return_block). Not pretty,
2056 but it seems easier than doing the CFG manipulation by hand when the
2057 CALL_EXPR is the last statement in BB. */
2058 stmt_bsi = bsi_last (bb);
2059 bsi = bsi_start (return_block);
2060 if (!bsi_end_p (bsi))
2061 bsi_move_before (&stmt_bsi, &bsi);
2062 else
2064 tree stmt = bsi_stmt (stmt_bsi);
2065 bsi_remove (&stmt_bsi);
2066 bsi_insert_after (&bsi, stmt, BSI_NEW_STMT);
2068 stmt_bsi = bsi_start (return_block);
2070 /* Build a block containing code to initialize the arguments, the
2071 actual inline expansion of the body, and a label for the return
2072 statements within the function to jump to. The type of the
2073 statement expression is the return type of the function call. */
2074 id->block = make_node (BLOCK);
2075 BLOCK_ABSTRACT_ORIGIN (id->block) = fn;
2076 BLOCK_SOURCE_LOCATION (id->block) = input_location;
2077 add_lexical_block (TREE_BLOCK (stmt), id->block);
2079 /* Local declarations will be replaced by their equivalents in this
2080 map. */
2081 st = id->decl_map;
2082 id->decl_map = splay_tree_new (splay_tree_compare_pointers,
2083 NULL, NULL);
2085 /* Initialize the parameters. */
2086 args = TREE_OPERAND (t, 1);
2088 initialize_inlined_parameters (id, args, TREE_OPERAND (t, 2), fn, bb);
2090 /* Record the function we are about to inline. */
2091 id->callee = fn;
2093 if (DECL_STRUCT_FUNCTION (fn)->saved_blocks)
2094 add_lexical_block (id->block, remap_blocks (DECL_STRUCT_FUNCTION (fn)->saved_blocks, id));
2095 else if (DECL_INITIAL (fn))
2096 add_lexical_block (id->block, remap_blocks (DECL_INITIAL (fn), id));
2098 /* Return statements in the function body will be replaced by jumps
2099 to the RET_LABEL. */
2101 gcc_assert (DECL_INITIAL (fn));
2102 gcc_assert (TREE_CODE (DECL_INITIAL (fn)) == BLOCK);
2104 /* Find the lhs to which the result of this call is assigned. */
2105 return_slot_addr = NULL;
2106 if (TREE_CODE (stmt) == MODIFY_EXPR)
2108 modify_dest = TREE_OPERAND (stmt, 0);
2110 /* The function which we are inlining might not return a value,
2111 in which case we should issue a warning that the function
2112 does not return a value. In that case the optimizers will
2113 see that the variable to which the value is assigned was not
2114 initialized. We do not want to issue a warning about that
2115 uninitialized variable. */
2116 if (DECL_P (modify_dest))
2117 TREE_NO_WARNING (modify_dest) = 1;
2118 if (CALL_EXPR_RETURN_SLOT_OPT (t))
2120 return_slot_addr = build_fold_addr_expr (modify_dest);
2121 modify_dest = NULL;
2124 else
2125 modify_dest = NULL;
2127 /* Declare the return variable for the function. */
2128 decl = declare_return_variable (id, return_slot_addr,
2129 modify_dest, &use_retvar);
2130 /* Do this only if declare_return_variable created a new one. */
2131 if (decl && !return_slot_addr && decl != modify_dest)
2132 declare_inline_vars (id->block, decl);
2134 /* After we've initialized the parameters, we insert the body of the
2135 function itself. */
2136 old_node = id->current_node;
2138 /* Anoint the callee-to-be-duplicated as the "current_node." When
2139 CALL_EXPRs within callee are duplicated, the edges from callee to
2140 callee's callees (caller's grandchildren) will be cloned. */
2141 id->current_node = cg_edge->callee;
2143 /* This is it. Duplicate the callee body. Assume callee is
2144 pre-gimplified. Note that we must not alter the caller
2145 function in any way before this point, as this CALL_EXPR may be
2146 a self-referential call; if we're calling ourselves, we need to
2147 duplicate our body before altering anything. */
2148 copy_body (id, bb->count, bb->frequency, bb, return_block);
2149 id->current_node = old_node;
2151 /* Add local vars in this inlined callee to caller. */
2152 t_step = id->callee_cfun->unexpanded_var_list;
2153 if (id->callee_cfun->saved_unexpanded_var_list)
2154 t_step = id->callee_cfun->saved_unexpanded_var_list;
2155 for (; t_step; t_step = TREE_CHAIN (t_step))
2157 var = TREE_VALUE (t_step);
2158 if (TREE_STATIC (var) && !TREE_ASM_WRITTEN (var))
2159 cfun->unexpanded_var_list = tree_cons (NULL_TREE, var,
2160 cfun->unexpanded_var_list);
2161 else
2162 cfun->unexpanded_var_list = tree_cons (NULL_TREE, remap_decl (var, id),
2163 cfun->unexpanded_var_list);
2166 /* Clean up. */
2167 splay_tree_delete (id->decl_map);
2168 id->decl_map = st;
2170 /* If the inlined function returns a result that we care about,
2171 clobber the CALL_EXPR with a reference to the return variable. */
2172 if (use_retvar && (TREE_CODE (bsi_stmt (stmt_bsi)) != CALL_EXPR))
2174 *tp = use_retvar;
2175 maybe_clean_or_replace_eh_stmt (stmt, stmt);
2177 else
2178 /* We're modifying a BSI owned by gimple_expand_calls_inline ();
2179 bsi_remove () will leave the iterator in a sane state. */
2180 bsi_remove (&stmt_bsi);
2182 bsi_next (&bsi);
2183 if (bsi_end_p (bsi))
2184 tree_purge_dead_eh_edges (return_block);
2186 /* If the value of the new expression is ignored, that's OK. We
2187 don't warn about this for CALL_EXPRs, so we shouldn't warn about
2188 the equivalent inlined version either. */
2189 TREE_USED (*tp) = 1;
2191 /* Output the inlining info for this abstract function, since it has been
2192 inlined. If we don't do this now, we can lose the information about the
2193 variables in the function when the blocks get blown away as soon as we
2194 remove the cgraph node. */
2195 (*debug_hooks->outlining_inline_function) (cg_edge->callee->decl);
2197 /* Update callgraph if needed. */
2198 cgraph_remove_node (cg_edge->callee);
2200 /* Declare the 'auto' variables added with this inlined body. */
2201 record_vars (BLOCK_VARS (id->block));
2202 id->block = NULL_TREE;
2203 successfully_inlined = TRUE;
2205 egress:
2206 input_location = saved_location;
2207 return successfully_inlined;
2210 /* Expand call statements reachable from STMT_P.
2211 We can only have CALL_EXPRs as the "toplevel" tree code or nested
2212 in a MODIFY_EXPR. See tree-gimple.c:get_call_expr_in(). We can
2213 unfortunately not use that function here because we need a pointer
2214 to the CALL_EXPR, not the tree itself. */
2216 static bool
2217 gimple_expand_calls_inline (basic_block bb, inline_data *id)
2219 block_stmt_iterator bsi;
2221 /* Register specific tree functions. */
2222 tree_register_cfg_hooks ();
2223 for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
2225 tree *expr_p = bsi_stmt_ptr (bsi);
2226 tree stmt = *expr_p;
2228 if (TREE_CODE (*expr_p) == MODIFY_EXPR)
2229 expr_p = &TREE_OPERAND (*expr_p, 1);
2230 if (TREE_CODE (*expr_p) == WITH_SIZE_EXPR)
2231 expr_p = &TREE_OPERAND (*expr_p, 0);
2232 if (TREE_CODE (*expr_p) == CALL_EXPR)
2233 if (expand_call_inline (bb, stmt, expr_p, id))
2234 return true;
2236 return false;
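/* Editorial sketch of the statement shapes unwrapped above:
     foo (x);                          CALL_EXPR at the top level
     a = foo (x);                      CALL_EXPR under a MODIFY_EXPR
     a = WITH_SIZE_EXPR <foo (x), n>;  call wrapped in a WITH_SIZE_EXPR  */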
2239 /* Expand calls to inline functions in the body of FN. */
2241 void
2242 optimize_inline_calls (tree fn)
2244 inline_data id;
2245 tree prev_fn;
2246 basic_block bb;
2247 /* There is no point in performing inlining if errors have already
2248 occurred -- and we might crash if we try to inline invalid
2249 code. */
2250 if (errorcount || sorrycount)
2251 return;
2253 /* Clear out ID. */
2254 memset (&id, 0, sizeof (id));
2256 id.current_node = id.node = cgraph_node (fn);
2257 id.caller = fn;
2258 /* The caller may instead be a function that isn't finished being compiled yet. */
2259 prev_fn = NULL_TREE;
2260 if (current_function_decl)
2262 id.caller = current_function_decl;
2263 prev_fn = current_function_decl;
2265 push_gimplify_context ();
2267 /* Reach the trees by walking over the CFG, and note the
2268 enclosing basic-blocks in the call edges. */
2269 /* We walk the blocks going forward, because inlined function bodies
2270 will split the block containing the call, and the new blocks will
2271 follow it; we'll trudge through them, processing their CALL_EXPRs
2272 along the way. */
2273 FOR_EACH_BB (bb)
2274 gimple_expand_calls_inline (bb, &id);
2277 pop_gimplify_context (NULL);
2278 /* Renumber the (code) basic_blocks consecutively. */
2279 compact_blocks ();
2280 /* Renumber the lexical scoping (non-code) blocks consecutively. */
2281 number_blocks (fn);
2283 #ifdef ENABLE_CHECKING
2285 struct cgraph_edge *e;
2287 verify_cgraph_node (id.node);
2289 /* Double check that we inlined everything we are supposed to inline. */
2290 for (e = id.node->callees; e; e = e->next_callee)
2291 gcc_assert (e->inline_failed);
2293 #endif
2294 /* We need to rescale frequencies again to peak at REG_BR_PROB_BASE
2295 as inlining loops might increase the maximum. */
2296 if (ENTRY_BLOCK_PTR->count)
2297 counts_to_freqs ();
2298 fold_cond_expr_cond ();
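/* Usage sketch (editorial): the inliner is driven one function at a
   time, typically on the function currently being compiled.  */
#if 0
  optimize_inline_calls (current_function_decl);
#endif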
2301 /* FN is a function that has a complete body, and CLONE is a function whose
2302 body is to be set to a copy of FN, mapping argument declarations according
2303 to the ARG_MAP splay_tree. */
2305 void
2306 clone_body (tree clone, tree fn, void *arg_map)
2308 inline_data id;
2310 /* Clone the body, as if we were making an inline call. But, remap the
2311 parameters in the callee to the parameters of the caller. */
2312 memset (&id, 0, sizeof (id));
2313 id.caller = clone;
2314 id.callee = fn;
2315 id.callee_cfun = DECL_STRUCT_FUNCTION (fn);
2316 id.decl_map = (splay_tree)arg_map;
2318 /* Cloning is treated slightly differently from inlining. Set
2319 CLONING_P so that it's clear which operation we're performing. */
2320 id.cloning_p = true;
2322 /* We're not inside any EH region. */
2323 id.eh_region = -1;
2325 /* Actually copy the body. */
2326 append_to_statement_list_force (copy_generic_body (&id), &DECL_SAVED_TREE (clone));
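/* Usage sketch (editorial; CLONE_DECL, FN_DECL, FN_PARM and
   CLONE_PARM are hypothetical): a front end maps each original
   parameter to the clone's parameter before copying the body.  */
#if 0
  splay_tree map = splay_tree_new (splay_tree_compare_pointers, NULL, NULL);
  splay_tree_insert (map, (splay_tree_key) fn_parm,
		     (splay_tree_value) clone_parm);
  clone_body (clone_decl, fn_decl, map);
  splay_tree_delete (map);
#endif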
2332 /* Make and return a duplicate of the body of FN. Put copies of DECL_ARGUMENTS
2333 in *arg_copy and a copy of the static chain, if any, in *sc_copy. */
2335 void
2336 save_body (tree fn, tree *arg_copy, tree *sc_copy)
2338 inline_data id;
2339 tree newdecl, *parg;
2340 basic_block fn_entry_block;
2341 tree t_step;
2343 memset (&id, 0, sizeof (id));
2344 id.callee = fn;
2345 id.callee_cfun = DECL_STRUCT_FUNCTION (fn);
2346 id.caller = fn;
2347 id.node = cgraph_node (fn);
2348 id.saving_p = true;
2349 id.decl_map = splay_tree_new (splay_tree_compare_pointers, NULL, NULL);
2350 *arg_copy = DECL_ARGUMENTS (fn);
2352 for (parg = arg_copy; *parg; parg = &TREE_CHAIN (*parg))
2354 tree new = copy_node (*parg);
2356 lang_hooks.dup_lang_specific_decl (new);
2357 DECL_ABSTRACT_ORIGIN (new) = DECL_ORIGIN (*parg);
2358 insert_decl_map (&id, *parg, new);
2359 TREE_CHAIN (new) = TREE_CHAIN (*parg);
2360 *parg = new;
2363 *sc_copy = DECL_STRUCT_FUNCTION (fn)->static_chain_decl;
2364 if (*sc_copy)
2366 tree new = copy_node (*sc_copy);
2368 lang_hooks.dup_lang_specific_decl (new);
2369 DECL_ABSTRACT_ORIGIN (new) = DECL_ORIGIN (*sc_copy);
2370 insert_decl_map (&id, *sc_copy, new);
2371 TREE_CHAIN (new) = TREE_CHAIN (*sc_copy);
2372 *sc_copy = new;
2375 /* We're not inside any EH region. */
2376 id.eh_region = -1;
2378 insert_decl_map (&id, DECL_RESULT (fn), DECL_RESULT (fn));
2380 DECL_STRUCT_FUNCTION (fn)->saved_blocks
2381 = remap_blocks (DECL_INITIAL (fn), &id);
2382 for (t_step = id.callee_cfun->unexpanded_var_list;
2383 t_step;
2384 t_step = TREE_CHAIN (t_step))
2386 tree var = TREE_VALUE (t_step);
2387 if (TREE_STATIC (var) && !TREE_ASM_WRITTEN (var))
2388 cfun->saved_unexpanded_var_list
2389 = tree_cons (NULL_TREE, var, cfun->saved_unexpanded_var_list);
2390 else
2391 cfun->saved_unexpanded_var_list
2392 = tree_cons (NULL_TREE, remap_decl (var, &id),
2393 cfun->saved_unexpanded_var_list);
2396 /* Actually copy the body, including a new (struct function *) and CFG.
2397 EH info is also duplicated so its labels point into the copied
2398 CFG, not the original. */
2399 fn_entry_block = ENTRY_BLOCK_PTR_FOR_FUNCTION (DECL_STRUCT_FUNCTION (fn));
2400 newdecl = copy_body (&id, fn_entry_block->count, fn_entry_block->frequency,
2401 NULL, NULL);
2402 DECL_STRUCT_FUNCTION (fn)->saved_cfg = DECL_STRUCT_FUNCTION (newdecl)->cfg;
2403 DECL_STRUCT_FUNCTION (fn)->saved_eh = DECL_STRUCT_FUNCTION (newdecl)->eh;
2405 /* Clean up. */
2406 splay_tree_delete (id.decl_map);
2409 /* Passed to walk_tree. Copies the node pointed to, if appropriate. */
2411 tree
2412 copy_tree_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
2414 enum tree_code code = TREE_CODE (*tp);
2415 inline_data *id = (inline_data *) data;
2417 /* We make copies of most nodes. */
2418 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code))
2419 || code == TREE_LIST
2420 || code == TREE_VEC
2421 || code == TYPE_DECL)
2423 /* Because the chain gets clobbered when we make a copy, we save it
2424 here. */
2425 tree chain = TREE_CHAIN (*tp);
2426 tree new;
2428 if (id && id->versioning_p && replace_ref_tree (id, tp))
2430 *walk_subtrees = 0;
2431 return NULL_TREE;
2433 /* Copy the node. */
2434 new = copy_node (*tp);
2436 /* Propagate mudflap marked-ness. */
2437 if (flag_mudflap && mf_marked_p (*tp))
2438 mf_mark (new);
2440 *tp = new;
2442 /* Now, restore the chain, if appropriate. That will cause
2443 walk_tree to walk into the chain as well. */
2444 if (code == PARM_DECL || code == TREE_LIST)
2445 TREE_CHAIN (*tp) = chain;
2447 /* For now, we don't update BLOCKs when we make copies. So, we
2448 have to nullify all BIND_EXPRs. */
2449 if (TREE_CODE (*tp) == BIND_EXPR)
2450 BIND_EXPR_BLOCK (*tp) = NULL_TREE;
2452 else if (code == CONSTRUCTOR)
2454 /* CONSTRUCTOR nodes need special handling because
2455 we need to duplicate the vector of elements. */
2456 tree new;
2458 new = copy_node (*tp);
2460 /* Propagate mudflap marked-ness. */
2461 if (flag_mudflap && mf_marked_p (*tp))
2462 mf_mark (new);
2464 CONSTRUCTOR_ELTS (new) = VEC_copy (constructor_elt, gc,
2465 CONSTRUCTOR_ELTS (*tp));
2466 *tp = new;
2468 else if (TREE_CODE_CLASS (code) == tcc_type)
2469 *walk_subtrees = 0;
2470 else if (TREE_CODE_CLASS (code) == tcc_declaration)
2471 *walk_subtrees = 0;
2472 else if (TREE_CODE_CLASS (code) == tcc_constant)
2473 *walk_subtrees = 0;
2474 else
2475 gcc_assert (code != STATEMENT_LIST);
2476 return NULL_TREE;
2479 /* The SAVE_EXPR pointed to by TP is being copied. If ST contains
2480 information indicating to what new SAVE_EXPR this one should be mapped,
2481 use that one. Otherwise, create a new node and enter it in ST. */
2484 static void
2485 remap_save_expr (tree *tp, void *st_, int *walk_subtrees)
2487 splay_tree st = (splay_tree) st_;
2488 splay_tree_node n;
2489 tree t;
2491 /* See if we already encountered this SAVE_EXPR. */
2492 n = splay_tree_lookup (st, (splay_tree_key) *tp);
2494 /* If we didn't already remap this SAVE_EXPR, do so now. */
2495 if (!n)
2497 t = copy_node (*tp);
2499 /* Remember this SAVE_EXPR. */
2500 splay_tree_insert (st, (splay_tree_key) *tp, (splay_tree_value) t);
2501 /* Make sure we don't remap an already-remapped SAVE_EXPR. */
2502 splay_tree_insert (st, (splay_tree_key) t, (splay_tree_value) t);
2504 else
2506 /* We've already walked into this SAVE_EXPR; don't do it again. */
2507 *walk_subtrees = 0;
2508 t = (tree) n->value;
2511 /* Replace this SAVE_EXPR with the copy. */
2512 *tp = t;
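/* Editorial note: in an expression like "SAVE_EXPR<s> + SAVE_EXPR<s>"
   both occurrences must map to the same copy; entering the copy under
   its own key above is what keeps a later walk from copying it a
   second time.  */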
2515 /* Called via walk_tree. If *TP is a LABEL_EXPR for a local label,
2516 copies the declaration and enters it in the splay_tree in DATA (which is
2517 really an `inline_data *'). */
2519 static tree
2520 mark_local_for_remap_r (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED,
2521 void *data)
2523 inline_data *id = (inline_data *) data;
2525 /* Don't walk into types. */
2526 if (TYPE_P (*tp))
2527 *walk_subtrees = 0;
2529 else if (TREE_CODE (*tp) == LABEL_EXPR)
2531 tree decl = TREE_OPERAND (*tp, 0);
2533 /* Copy the decl and remember the copy. */
2534 insert_decl_map (id, decl,
2535 copy_decl_for_dup (decl, DECL_CONTEXT (decl),
2536 DECL_CONTEXT (decl), /*versioning=*/false));
2539 return NULL_TREE;
2542 /* Perform any modifications to EXPR required when it is unsaved. Does
2543 not recurse into EXPR's subtrees. */
2545 static void
2546 unsave_expr_1 (tree expr)
2548 switch (TREE_CODE (expr))
2550 case TARGET_EXPR:
2551 /* Don't mess with a TARGET_EXPR that hasn't been expanded.
2552 It's OK for this to happen if it was part of a subtree that
2553 isn't immediately expanded, such as operand 2 of another
2554 TARGET_EXPR. */
2555 if (TREE_OPERAND (expr, 1))
2556 break;
2558 TREE_OPERAND (expr, 1) = TREE_OPERAND (expr, 3);
2559 TREE_OPERAND (expr, 3) = NULL_TREE;
2560 break;
2562 default:
2563 break;
2567 /* Called via walk_tree when an expression is unsaved. Using the
2568 splay_tree in the inline_data pointed to by DATA, remaps all local
2569 declarations to appropriate replacements. */
2571 static tree
2572 unsave_r (tree *tp, int *walk_subtrees, void *data)
2574 inline_data *id = (inline_data *) data;
2575 splay_tree st = id->decl_map;
2576 splay_tree_node n;
2578 /* Only a local declaration (variable or label). */
2579 if ((TREE_CODE (*tp) == VAR_DECL && !TREE_STATIC (*tp))
2580 || TREE_CODE (*tp) == LABEL_DECL)
2582 /* Lookup the declaration. */
2583 n = splay_tree_lookup (st, (splay_tree_key) *tp);
2585 /* If it's there, remap it. */
2586 if (n)
2587 *tp = (tree) n->value;
2590 else if (TREE_CODE (*tp) == STATEMENT_LIST)
2591 copy_statement_list (tp);
2592 else if (TREE_CODE (*tp) == BIND_EXPR)
2593 copy_bind_expr (tp, walk_subtrees, id);
2594 else if (TREE_CODE (*tp) == SAVE_EXPR)
2595 remap_save_expr (tp, st, walk_subtrees);
2596 else
2598 copy_tree_r (tp, walk_subtrees, NULL);
2600 /* Do whatever unsaving is required. */
2601 unsave_expr_1 (*tp);
2604 /* Keep iterating. */
2605 return NULL_TREE;
2608 /* Copies everything in EXPR and replaces variables, labels
2609 and SAVE_EXPRs local to EXPR. */
2611 tree
2612 unsave_expr_now (tree expr)
2614 inline_data id;
2616 /* There's nothing to do for NULL_TREE. */
2617 if (expr == 0)
2618 return expr;
2620 /* Set up ID. */
2621 memset (&id, 0, sizeof (id));
2622 id.callee = current_function_decl;
2623 id.caller = current_function_decl;
2624 id.decl_map = splay_tree_new (splay_tree_compare_pointers, NULL, NULL);
2626 /* Walk the tree once to find local labels. */
2627 walk_tree_without_duplicates (&expr, mark_local_for_remap_r, &id);
2629 /* Walk the tree again, copying, remapping, and unsaving. */
2630 walk_tree (&expr, unsave_r, &id, NULL);
2632 /* Clean up. */
2633 splay_tree_delete (id.decl_map);
2635 return expr;
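/* Usage sketch (editorial; FNDECL is hypothetical): take a private,
   independently modifiable copy of a saved body.  */
#if 0
  tree body_copy = unsave_expr_now (DECL_SAVED_TREE (fndecl));
#endif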
2638 /* Allow someone to determine if SEARCH is a child of TOP from gdb. */
2640 static tree
2641 debug_find_tree_1 (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED, void *data)
2643 if (*tp == data)
2644 return (tree) data;
2645 else
2646 return NULL;
2649 bool
2650 debug_find_tree (tree top, tree search)
2652 return walk_tree_without_duplicates (&top, debug_find_tree_1, search) != 0;
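/* Editorial sketch: meant to be used interactively, e.g.
     (gdb) call debug_find_tree (top, search)  */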
2656 /* Declare the variables created by the inliner. Add all the variables in
2657 VARS to BLOCK. */
2659 static void
2660 declare_inline_vars (tree block, tree vars)
2662 tree t;
2663 for (t = vars; t; t = TREE_CHAIN (t))
2664 DECL_SEEN_IN_BIND_EXPR_P (t) = 1;
2666 if (block)
2667 BLOCK_VARS (block) = chainon (BLOCK_VARS (block), vars);
2671 /* Copy DECL (which must be a DECL node). The DECL originally was in FROM_FN,
2672 but now it will be in TO_FN. VERSIONING means that this function
2673 is used by the versioning utility (not inlining or cloning). */
2675 tree
2676 copy_decl_for_dup (tree decl, tree from_fn, tree to_fn, bool versioning)
2678 tree copy;
2680 gcc_assert (DECL_P (decl));
2681 /* Copy the declaration. */
2682 if (!versioning
2683 && (TREE_CODE (decl) == PARM_DECL
2684 || TREE_CODE (decl) == RESULT_DECL))
2686 tree type = TREE_TYPE (decl);
2688 /* For a parameter or result, we must make an equivalent VAR_DECL,
2689 not a new PARM_DECL. */
2690 copy = build_decl (VAR_DECL, DECL_NAME (decl), type);
2691 TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (decl);
2692 TREE_READONLY (copy) = TREE_READONLY (decl);
2693 TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (decl);
2694 DECL_COMPLEX_GIMPLE_REG_P (copy) = DECL_COMPLEX_GIMPLE_REG_P (decl);
2696 else
2698 copy = copy_node (decl);
2699 /* The COPY is not abstract; it will be generated in TO_FN. */
2700 DECL_ABSTRACT (copy) = 0;
2701 lang_hooks.dup_lang_specific_decl (copy);
2703 /* TREE_ADDRESSABLE isn't used to indicate that a label's
2704 address has been taken; it's for internal bookkeeping in
2705 expand_goto_internal. */
2706 if (TREE_CODE (copy) == LABEL_DECL)
2708 TREE_ADDRESSABLE (copy) = 0;
2709 LABEL_DECL_UID (copy) = -1;
2713 /* Don't generate debug information for the copy if we wouldn't have
2714 generated it for the original either. */
2715 DECL_ARTIFICIAL (copy) = DECL_ARTIFICIAL (decl);
2716 DECL_IGNORED_P (copy) = DECL_IGNORED_P (decl);
2718 /* Set the DECL_ABSTRACT_ORIGIN so the debugging routines know what
2719 declaration inspired this copy. */
2720 DECL_ABSTRACT_ORIGIN (copy) = DECL_ORIGIN (decl);
2722 /* The new variable/label has no RTL, yet. */
2723 if (CODE_CONTAINS_STRUCT (TREE_CODE (copy), TS_DECL_WRTL)
2724 && !TREE_STATIC (copy) && !DECL_EXTERNAL (copy))
2725 SET_DECL_RTL (copy, NULL_RTX);
2727 /* These args would always appear unused, if not for this. */
2728 TREE_USED (copy) = 1;
2730 /* Set the context for the new declaration. */
2731 if (!DECL_CONTEXT (decl))
2732 /* Globals stay global. */
2734 else if (DECL_CONTEXT (decl) != from_fn)
2735 /* Things that weren't in the scope of the function we're inlining
2736 from aren't in the scope we're inlining to, either. */
2738 else if (TREE_STATIC (decl))
2739 /* Function-scoped static variables should stay in the original
2740 function. */
2742 else
2743 /* Ordinary automatic local variables are now in the scope of the
2744 new function. */
2745 DECL_CONTEXT (copy) = to_fn;
2747 return copy;
2750 /* Return a copy of the function's argument tree. */
2751 static tree
2752 copy_arguments_for_versioning (tree orig_parm, inline_data * id)
2754 tree *arg_copy, *parg;
2756 arg_copy = &orig_parm;
2757 for (parg = arg_copy; *parg; parg = &TREE_CHAIN (*parg))
2759 tree new = remap_decl (*parg, id);
2760 lang_hooks.dup_lang_specific_decl (new);
2761 TREE_CHAIN (new) = TREE_CHAIN (*parg);
2762 *parg = new;
2764 return orig_parm;
2767 /* Return a copy of the function's static chain. */
2768 static tree
2769 copy_static_chain (tree static_chain, inline_data * id)
2771 tree *chain_copy, *pvar;
2773 chain_copy = &static_chain;
2774 for (pvar = chain_copy; *pvar; pvar = &TREE_CHAIN (*pvar))
2776 tree new = remap_decl (*pvar, id);
2777 lang_hooks.dup_lang_specific_decl (new);
2778 TREE_CHAIN (new) = TREE_CHAIN (*pvar);
2779 *pvar = new;
2781 return static_chain;
2784 /* Return true if the function is allowed to be versioned.
2785 This is a guard for the versioning functionality. */
2786 bool
2787 tree_versionable_function_p (tree fndecl)
2789 if (fndecl == NULL_TREE)
2790 return false;
2791 /* ??? There are cases where a function is
2792 uninlinable but can be versioned. */
2793 if (!tree_inlinable_function_p (fndecl))
2794 return false;
2796 return true;
2799 /* Create a copy of a function's tree.
2800 OLD_DECL and NEW_DECL are FUNCTION_DECL tree nodes
2801 of the original function and the new copied function
2802 respectively. In case we want to replace a DECL
2803 tree with another tree while duplicating the function's
2804 body, TREE_MAP represents the mapping between these
2805 trees. */
2806 void
2807 tree_function_versioning (tree old_decl, tree new_decl, varray_type tree_map)
2809 struct cgraph_node *old_version_node;
2810 struct cgraph_node *new_version_node;
2811 inline_data id;
2812 tree p, new_fndecl;
2813 unsigned i;
2814 struct ipa_replace_map *replace_info;
2815 basic_block old_entry_block;
2816 tree t_step;
2818 gcc_assert (TREE_CODE (old_decl) == FUNCTION_DECL
2819 && TREE_CODE (new_decl) == FUNCTION_DECL);
2820 DECL_POSSIBLY_INLINED (old_decl) = 1;
2822 old_version_node = cgraph_node (old_decl);
2823 new_version_node = cgraph_node (new_decl);
2825 allocate_struct_function (new_decl);
2826 /* cfun points to the newly allocated function struct at this point. */
2827 cfun->function_end_locus = DECL_SOURCE_LOCATION (new_decl);
2829 DECL_ARTIFICIAL (new_decl) = 1;
2830 DECL_ABSTRACT_ORIGIN (new_decl) = DECL_ORIGIN (old_decl);
2832 /* Generate a new name for the new version. */
2833 DECL_NAME (new_decl) =
2834 create_tmp_var_name (NULL);
2835 /* Create a new SYMBOL_REF rtx for the new name. */
2836 if (DECL_RTL (old_decl) != NULL)
2838 SET_DECL_RTL (new_decl, copy_rtx (DECL_RTL (old_decl)));
2839 XEXP (DECL_RTL (new_decl), 0) =
2840 gen_rtx_SYMBOL_REF (GET_MODE (XEXP (DECL_RTL (old_decl), 0)),
2841 IDENTIFIER_POINTER (DECL_NAME (new_decl)));
2844 /* Prepare the data structures for the tree copy. */
2845 memset (&id, 0, sizeof (id));
2847 /* The new version. */
2848 id.node = new_version_node;
2850 /* The old version. */
2851 id.current_node = cgraph_node (old_decl);
2853 id.versioning_p = true;
2854 id.decl_map = splay_tree_new (splay_tree_compare_pointers, NULL, NULL);
2855 id.caller = new_decl;
2856 id.callee = old_decl;
2857 id.callee_cfun = DECL_STRUCT_FUNCTION (old_decl);
2859 current_function_decl = new_decl;
2861 /* Copy the function's static chain. */
2862 p = DECL_STRUCT_FUNCTION (old_decl)->static_chain_decl;
2863 if (p)
2864 DECL_STRUCT_FUNCTION (new_decl)->static_chain_decl =
2865 copy_static_chain (DECL_STRUCT_FUNCTION (old_decl)->static_chain_decl,
2866 &id);
2867 /* Copy the function's arguments. */
2868 if (DECL_ARGUMENTS (old_decl) != NULL_TREE)
2869 DECL_ARGUMENTS (new_decl) =
2870 copy_arguments_for_versioning (DECL_ARGUMENTS (old_decl), &id);
2872 /* If there's a tree_map, prepare for substitution. */
2873 if (tree_map)
2874 for (i = 0; i < VARRAY_ACTIVE_SIZE (tree_map); i++)
2876 replace_info = VARRAY_GENERIC_PTR (tree_map, i);
2877 if (replace_info->replace_p && !replace_info->ref_p)
2878 insert_decl_map (&id, replace_info->old_tree,
2879 replace_info->new_tree);
2880 else if (replace_info->replace_p && replace_info->ref_p)
2881 id.ipa_info = tree_map;
2884 DECL_INITIAL (new_decl) = remap_blocks (DECL_INITIAL (id.callee), &id);
2886 /* Renumber the lexical scoping (non-code) blocks consecutively. */
2887 number_blocks (id.caller);
2889 if (DECL_STRUCT_FUNCTION (old_decl)->unexpanded_var_list != NULL_TREE)
2890 /* Add local vars. */
2891 for (t_step = DECL_STRUCT_FUNCTION (old_decl)->unexpanded_var_list;
2892 t_step; t_step = TREE_CHAIN (t_step))
2894 tree var = TREE_VALUE (t_step);
2895 if (TREE_STATIC (var) && !TREE_ASM_WRITTEN (var))
2896 cfun->unexpanded_var_list = tree_cons (NULL_TREE, var,
2897 cfun->unexpanded_var_list);
2898 else
2899 cfun->unexpanded_var_list =
2900 tree_cons (NULL_TREE, remap_decl (var, &id),
2901 cfun->unexpanded_var_list);
2904 /* Copy the function's body. */
2905 old_entry_block = ENTRY_BLOCK_PTR_FOR_FUNCTION
2906 (DECL_STRUCT_FUNCTION (old_decl));
2907 new_fndecl = copy_body (&id,
2908 old_entry_block->count,
2909 old_entry_block->frequency, NULL, NULL);
2911 DECL_SAVED_TREE (new_decl) = DECL_SAVED_TREE (new_fndecl);
2913 DECL_STRUCT_FUNCTION (new_decl)->cfg =
2914 DECL_STRUCT_FUNCTION (new_fndecl)->cfg;
2915 DECL_STRUCT_FUNCTION (new_decl)->eh = DECL_STRUCT_FUNCTION (new_fndecl)->eh;
2916 DECL_STRUCT_FUNCTION (new_decl)->ib_boundaries_block =
2917 DECL_STRUCT_FUNCTION (new_fndecl)->ib_boundaries_block;
2918 DECL_STRUCT_FUNCTION (new_decl)->last_label_uid =
2919 DECL_STRUCT_FUNCTION (new_fndecl)->last_label_uid;
2921 if (DECL_RESULT (old_decl) != NULL_TREE)
2923 tree *res_decl = &DECL_RESULT (old_decl);
2924 DECL_RESULT (new_decl) = remap_decl (*res_decl, &id);
2925 lang_hooks.dup_lang_specific_decl (DECL_RESULT (new_decl));
2928 current_function_decl = NULL;
2929 /* Renumber the lexical scoping (non-code) blocks consecutively. */
2930 number_blocks (new_decl);
2932 /* Clean up. */
2933 splay_tree_delete (id.decl_map);
2934 fold_cond_expr_cond ();
2935 return;
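/* Usage sketch (editorial; PARM and CST are hypothetical): a caller
   such as IPA constant propagation fills TREE_MAP with
   ipa_replace_map entries before versioning.  */
#if 0
  struct ipa_replace_map *map = xmalloc (sizeof (struct ipa_replace_map));
  map->old_tree = parm;      /* the decl to replace in the new body */
  map->new_tree = cst;       /* what to replace it with */
  map->replace_p = true;
  map->ref_p = false;        /* replace the decl itself, not *decl */
  /* ... push MAP onto a varray and pass it as TREE_MAP above ...  */
#endif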
2938 /* Replace an INDIRECT_REF of a given DECL tree with a given
2939 new tree.
2940 ID->ipa_info keeps the old tree and the new tree.
2941 TP points to the INDIRECT_REF tree. Return true if
2942 the trees were replaced. */
2943 static bool
2944 replace_ref_tree (inline_data * id, tree * tp)
2946 bool replaced = false;
2947 tree new;
2949 if (id->ipa_info && VARRAY_ACTIVE_SIZE (id->ipa_info) > 0)
2951 unsigned i;
2953 for (i = 0; i < VARRAY_ACTIVE_SIZE (id->ipa_info); i++)
2955 struct ipa_replace_map *replace_info;
2956 replace_info = VARRAY_GENERIC_PTR (id->ipa_info, i);
2958 if (replace_info->replace_p && replace_info->ref_p)
2960 tree old_tree = replace_info->old_tree;
2961 tree new_tree = replace_info->new_tree;
2963 if (TREE_CODE (*tp) == INDIRECT_REF
2964 && TREE_OPERAND (*tp, 0) == old_tree)
2966 new = copy_node (new_tree);
2967 *tp = new;
2968 replaced = true;
2973 return replaced;
2976 /* Return true if we are inlining. */
2977 static inline bool
2978 inlining_p (inline_data * id)
2980 return (!id->saving_p && !id->cloning_p && !id->versioning_p);
2983 /* Duplicate a type, fields and all. */
2985 tree
2986 build_duplicate_type (tree type)
2988 inline_data id;
2990 memset (&id, 0, sizeof (id));
2991 id.callee = current_function_decl;
2992 id.caller = current_function_decl;
2993 id.callee_cfun = cfun;
2994 id.decl_map = splay_tree_new (splay_tree_compare_pointers, NULL, NULL);
2996 type = remap_type_1 (type, &id);
2998 splay_tree_delete (id.decl_map);
3000 return type;
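/* Usage sketch (editorial; OLD_TYPE is hypothetical): obtain a
   structurally identical but distinct type node, e.g. before
   changing attributes on its fields.  */
#if 0
  tree dup = build_duplicate_type (old_type);
#endif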