/* Tree inlining.
   Copyright 2001, 2002, 2003, 2004, 2005 Free Software Foundation, Inc.
   Contributed by Alexandre Oliva <aoliva@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to
the Free Software Foundation, 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "toplev.h"
#include "tree.h"
#include "tree-inline.h"
#include "rtl.h"
#include "expr.h"
#include "flags.h"
#include "params.h"
#include "input.h"
#include "insn-config.h"
#include "varray.h"
#include "hashtab.h"
#include "splay-tree.h"
#include "langhooks.h"
#include "basic-block.h"
#include "tree-iterator.h"
#include "cgraph.h"
#include "intl.h"
#include "tree-mudflap.h"
#include "tree-flow.h"
#include "function.h"
#include "ggc.h"
#include "diagnostic.h"
#include "except.h"
#include "debug.h"
#include "pointer-set.h"
#include "integrate.h"

/* I'm not real happy about this, but we need to handle gimple and
   non-gimple trees.  */
#include "tree-gimple.h"
/* Inlining, Saving, Cloning

   Inlining: a function body is duplicated, but the PARM_DECLs are
   remapped into VAR_DECLs, and non-void RETURN_EXPRs become
   MODIFY_EXPRs that store to a dedicated returned-value variable.
   The duplicated eh_region info of the copy will later be appended
   to the info for the caller; the eh_region info in copied throwing
   statements and RESX_EXPRs is adjusted accordingly.

   Saving: make a semantically-identical copy of the function body.
   Necessary when we want to generate code for the body (a destructive
   operation), but we expect to need this body in the future (e.g. for
   inlining into another function).

   Cloning: (only in C++) We have one body for a con/de/structor, and
   multiple function decls, each with a unique parameter list.
   Duplicate the body, using the given splay tree; some parameters
   will become constants (like 0 or 1).

   All of these will simultaneously look up any callgraph edges.  If
   we're going to inline the duplicated function body, and the given
   function has some cloned callgraph nodes (one for each place this
   function will be inlined) those callgraph edges will be duplicated.
   If we're saving or cloning the body, those callgraph edges will be
   updated to point into the new body.  (Note that the original
   callgraph node and edge list will not be altered.)

   See the CALL_EXPR handling case in copy_body_r ().  */
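
/* As an illustration (a sketch, not taken from any particular test
   case), inlining "int sq (int x) { return x * x; }" at a call
   "y = sq (n);" conceptually produces, in the caller:

       int x.1;                  <-- PARM_DECL remapped to a VAR_DECL
       x.1 = n;                  <-- argument setup
       retval.2 = x.1 * x.1;     <-- RETURN_EXPR became a MODIFY_EXPR
       y = retval.2;             <-- call result read from the retvar

   The names x.1 and retval.2 are invented for the example.  */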
/* 0 if we should not perform inlining.
   1 if we should expand function calls inline at the tree level.
   2 if we should consider *all* functions to be inline
   candidates.  */

int flag_inline_trees = 0;
/* To Do:

   o In order to make inlining-on-trees work, we pessimized
     function-local static constants.  In particular, they are now
     always output, even when not addressed.  Fix this by treating
     function-local static constants just like global static
     constants; the back-end already knows not to output them if they
     are not needed.

   o Provide heuristics to clamp inlining of recursive template
     calls?  */
/* Data required for function inlining.  */

typedef struct inline_data
{
  /* FUNCTION_DECL for function being inlined.  */
  tree callee;
  /* FUNCTION_DECL for function being inlined into.  */
  tree caller;
  /* struct function for function being inlined.  Usually this is the same
     as DECL_STRUCT_FUNCTION (callee), but can be different if saved_cfg
     and saved_eh are in use.  */
  struct function *callee_cfun;
  /* The VAR_DECL for the return value.  */
  tree retvar;
  /* The map from local declarations in the inlined function to
     equivalents in the function into which it is being inlined.  */
  splay_tree decl_map;
  /* We use the same mechanism to build clones that we do to perform
     inlining.  However, there are a few places where we need to
     distinguish between those two situations.  This flag is true if
     we are cloning, rather than inlining.  */
  bool cloning_p;
  /* Similarly for saving function body.  */
  bool saving_p;
  /* Callgraph node of function we are inlining into.  */
  struct cgraph_node *node;
  /* Callgraph node of currently inlined function.  */
  struct cgraph_node *current_node;
  /* Current BLOCK.  */
  tree block;
  /* Exception region the inlined call lies in.  */
  int eh_region;
  /* Take the region number in the function being copied, add this value and
     get the eh region number of the duplicate in the function we inline
     into.  */
  int eh_region_offset;
} inline_data;
/* Prototypes.  */

static tree declare_return_variable (inline_data *, tree, tree, tree *);
static tree copy_body_r (tree *, int *, void *);
static tree copy_generic_body (inline_data *);
static bool inlinable_function_p (tree);
static tree remap_decl (tree, inline_data *);
static tree remap_type (tree, inline_data *);
static void remap_block (tree *, inline_data *);
static tree remap_decls (tree, inline_data *);
static void copy_bind_expr (tree *, int *, inline_data *);
static tree mark_local_for_remap_r (tree *, int *, void *);
static void unsave_expr_1 (tree);
static tree unsave_r (tree *, int *, void *);
static void declare_inline_vars (tree, tree);
static void remap_save_expr (tree *, void *, int *);

static inline bool inlining_p (inline_data *id);
/* Insert a tree->tree mapping for ID.  Although the name suggests
   that the trees should be variables, it is used for more than that.  */

static void
insert_decl_map (inline_data *id, tree key, tree value)
{
  splay_tree_insert (id->decl_map, (splay_tree_key) key,
                     (splay_tree_value) value);

  /* Always insert an identity map as well.  If we see this same new
     node again, we won't want to duplicate it a second time.  */
  if (key != value)
    splay_tree_insert (id->decl_map, (splay_tree_key) value,
                       (splay_tree_value) value);
}
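
/* For example (decl names invented), after insert_decl_map (id, old_x,
   new_x) the map holds both old_x -> new_x and the identity new_x ->
   new_x, so a later walk that encounters the already-remapped new_x
   leaves it alone instead of copying it a second time.  */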
/* Remap DECL during the copying of the BLOCK tree for the function.  */

static tree
remap_decl (tree decl, inline_data *id)
{
  splay_tree_node n;
  tree fn;

  /* We only remap local variables in the current function.  */
  fn = id->callee;

  /* See if we have remapped this declaration.  */

  n = splay_tree_lookup (id->decl_map, (splay_tree_key) decl);

  /* If we didn't already have an equivalent for this declaration,
     create one now.  */
  if (!n)
    {
      /* Make a copy of the variable or label.  */
      tree t;
      t = copy_decl_for_inlining (decl, fn, id->caller);

      /* Remember it, so that if we encounter this local entity again
         we can reuse this copy.  Do this early because remap_type may
         need this decl for TYPE_STUB_DECL.  */
      insert_decl_map (id, decl, t);

      /* Remap types, if necessary.  */
      TREE_TYPE (t) = remap_type (TREE_TYPE (t), id);
      if (TREE_CODE (t) == TYPE_DECL)
        DECL_ORIGINAL_TYPE (t) = remap_type (DECL_ORIGINAL_TYPE (t), id);
      else if (TREE_CODE (t) == PARM_DECL)
        DECL_ARG_TYPE_AS_WRITTEN (t)
          = remap_type (DECL_ARG_TYPE_AS_WRITTEN (t), id);

      /* Remap sizes as necessary.  */
      walk_tree (&DECL_SIZE (t), copy_body_r, id, NULL);
      walk_tree (&DECL_SIZE_UNIT (t), copy_body_r, id, NULL);

      /* If fields, do likewise for offset and qualifier.  */
      if (TREE_CODE (t) == FIELD_DECL)
        {
          walk_tree (&DECL_FIELD_OFFSET (t), copy_body_r, id, NULL);
          if (TREE_CODE (DECL_CONTEXT (t)) == QUAL_UNION_TYPE)
            walk_tree (&DECL_QUALIFIER (t), copy_body_r, id, NULL);
        }

#if 0
      /* FIXME handle anon aggrs.  */
      if (! DECL_NAME (t) && TREE_TYPE (t)
          && lang_hooks.tree_inlining.anon_aggr_type_p (TREE_TYPE (t)))
        {
          /* For a VAR_DECL of anonymous type, we must also copy the
             member VAR_DECLS here and rechain the DECL_ANON_UNION_ELEMS.  */
          tree members = NULL;
          tree src;

          for (src = DECL_ANON_UNION_ELEMS (t); src;
               src = TREE_CHAIN (src))
            {
              tree member = remap_decl (TREE_VALUE (src), id);

              gcc_assert (!TREE_PURPOSE (src));
              members = tree_cons (NULL, member, members);
            }
          DECL_ANON_UNION_ELEMS (t) = nreverse (members);
        }
#endif

      /* If we are inlining and this is a variable (not a label), declare the
         remapped variable in the caller's body.  */
      if (inlining_p (id)
          && (TREE_CODE (t) == VAR_DECL
              || TREE_CODE (t) == PARM_DECL))
        declare_inline_vars (id->block, t);

      /* Remember it, so that if we encounter this local entity
         again we can reuse this copy.  */
      insert_decl_map (id, decl, t);
      return t;
    }

  return unshare_expr ((tree) n->value);
}
static tree
remap_type (tree type, inline_data *id)
{
  splay_tree_node node;
  tree new, t;

  if (type == NULL)
    return type;

  /* See if we have remapped this type.  */
  node = splay_tree_lookup (id->decl_map, (splay_tree_key) type);
  if (node)
    return (tree) node->value;

  /* The type only needs remapping if it's variably modified.  */
  if (! variably_modified_type_p (type, id->callee))
    {
      insert_decl_map (id, type, type);
      return type;
    }

  /* We do need a copy.  Build and register it now.  If this is a pointer or
     reference type, remap the designated type and make a new pointer or
     reference type.  */
  if (TREE_CODE (type) == POINTER_TYPE)
    {
      new = build_pointer_type_for_mode (remap_type (TREE_TYPE (type), id),
                                         TYPE_MODE (type),
                                         TYPE_REF_CAN_ALIAS_ALL (type));
      insert_decl_map (id, type, new);
      return new;
    }
  else if (TREE_CODE (type) == REFERENCE_TYPE)
    {
      new = build_reference_type_for_mode (remap_type (TREE_TYPE (type), id),
                                           TYPE_MODE (type),
                                           TYPE_REF_CAN_ALIAS_ALL (type));
      insert_decl_map (id, type, new);
      return new;
    }
  else
    new = copy_node (type);

  insert_decl_map (id, type, new);

  /* This is a new type, not a copy of an old type.  Need to reassociate
     variants.  We can handle everything except the main variant lazily.  */
  t = TYPE_MAIN_VARIANT (type);
  if (type != t)
    {
      t = remap_type (t, id);
      TYPE_MAIN_VARIANT (new) = t;
      TYPE_NEXT_VARIANT (new) = TYPE_MAIN_VARIANT (t);
      TYPE_NEXT_VARIANT (t) = new;
    }
  else
    {
      TYPE_MAIN_VARIANT (new) = new;
      TYPE_NEXT_VARIANT (new) = NULL;
    }

  if (TYPE_STUB_DECL (type))
    TYPE_STUB_DECL (new) = remap_decl (TYPE_STUB_DECL (type), id);

  /* Lazily create pointer and reference types.  */
  TYPE_POINTER_TO (new) = NULL;
  TYPE_REFERENCE_TO (new) = NULL;

  switch (TREE_CODE (new))
    {
    case INTEGER_TYPE:
    case REAL_TYPE:
    case ENUMERAL_TYPE:
    case BOOLEAN_TYPE:
    case CHAR_TYPE:
      t = TYPE_MIN_VALUE (new);
      if (t && TREE_CODE (t) != INTEGER_CST)
        walk_tree (&TYPE_MIN_VALUE (new), copy_body_r, id, NULL);

      t = TYPE_MAX_VALUE (new);
      if (t && TREE_CODE (t) != INTEGER_CST)
        walk_tree (&TYPE_MAX_VALUE (new), copy_body_r, id, NULL);
      return new;

    case FUNCTION_TYPE:
      TREE_TYPE (new) = remap_type (TREE_TYPE (new), id);
      walk_tree (&TYPE_ARG_TYPES (new), copy_body_r, id, NULL);
      return new;

    case ARRAY_TYPE:
      TREE_TYPE (new) = remap_type (TREE_TYPE (new), id);
      TYPE_DOMAIN (new) = remap_type (TYPE_DOMAIN (new), id);
      break;

    case RECORD_TYPE:
    case UNION_TYPE:
    case QUAL_UNION_TYPE:
      walk_tree (&TYPE_FIELDS (new), copy_body_r, id, NULL);
      break;

    case OFFSET_TYPE:
    default:
      /* Shouldn't have been thought variable sized.  */
      gcc_unreachable ();
    }

  walk_tree (&TYPE_SIZE (new), copy_body_r, id, NULL);
  walk_tree (&TYPE_SIZE_UNIT (new), copy_body_r, id, NULL);

  return new;
}
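
/* Example (invented for illustration): when the callee contains
   "int a[n]" where n is a callee parameter, the ARRAY_TYPE of a is
   variably modified, so its TYPE_DOMAIN (0 .. n-1) must be rebuilt in
   terms of the caller's copy of n.  A type like plain "int" is not
   variably modified and is simply mapped to itself above.  */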
static tree
remap_decls (tree decls, inline_data *id)
{
  tree old_var;
  tree new_decls = NULL_TREE;

  /* Remap its variables.  */
  for (old_var = decls; old_var; old_var = TREE_CHAIN (old_var))
    {
      tree new_var;

      /* Remap the variable.  */
      new_var = remap_decl (old_var, id);

      /* If we didn't remap this variable, we can't mess with its
         TREE_CHAIN.  If we remapped this variable to the return slot, it's
         already declared somewhere else, so don't declare it here.  */
      if (!new_var || new_var == id->retvar)
        ;
      else
        {
          gcc_assert (DECL_P (new_var));
          TREE_CHAIN (new_var) = new_decls;
          new_decls = new_var;
        }
    }

  return nreverse (new_decls);
}
/* Copy the BLOCK to contain remapped versions of the variables
   therein.  And hook the new block into the block-tree.  */

static void
remap_block (tree *block, inline_data *id)
{
  tree old_block;
  tree new_block;
  tree fn;

  /* Make the new block.  */
  old_block = *block;
  new_block = make_node (BLOCK);
  TREE_USED (new_block) = TREE_USED (old_block);
  BLOCK_ABSTRACT_ORIGIN (new_block) = old_block;
  *block = new_block;

  /* Remap its variables.  */
  BLOCK_VARS (new_block) = remap_decls (BLOCK_VARS (old_block), id);

  fn = id->caller;
#if 1
  /* FIXME!  It shouldn't be so hard to manage blocks.  Rebuilding them in
     rest_of_compilation is a good start.  */
  if (id->cloning_p)
    /* We're building a clone; DECL_INITIAL is still
       error_mark_node, and current_binding_level is the parm
       binding level.  */
    lang_hooks.decls.insert_block (new_block);
  else
    {
      /* Attach this new block after the DECL_INITIAL block for the
         function into which this block is being inlined.  In
         rest_of_compilation we will straighten out the BLOCK tree.  */
      tree *first_block;
      if (DECL_INITIAL (fn))
        first_block = &BLOCK_CHAIN (DECL_INITIAL (fn));
      else
        first_block = &DECL_INITIAL (fn);
      BLOCK_CHAIN (new_block) = *first_block;
      *first_block = new_block;
    }
#endif
  /* Remember the remapped block.  */
  insert_decl_map (id, old_block, new_block);
}
static void
copy_statement_list (tree *tp)
{
  tree_stmt_iterator oi, ni;
  tree new;

  new = alloc_stmt_list ();
  ni = tsi_start (new);
  oi = tsi_start (*tp);
  *tp = new;

  for (; !tsi_end_p (oi); tsi_next (&oi))
    tsi_link_after (&ni, tsi_stmt (oi), TSI_NEW_STMT);
}
static void
copy_bind_expr (tree *tp, int *walk_subtrees, inline_data *id)
{
  tree block = BIND_EXPR_BLOCK (*tp);
  /* Copy (and replace) the statement.  */
  copy_tree_r (tp, walk_subtrees, NULL);
  if (block)
    {
      remap_block (&block, id);
      BIND_EXPR_BLOCK (*tp) = block;
    }

  if (BIND_EXPR_VARS (*tp))
    /* This will remap a lot of the same decls again, but this should be
       harmless.  */
    BIND_EXPR_VARS (*tp) = remap_decls (BIND_EXPR_VARS (*tp), id);
}
/* Called from copy_body via walk_tree.  DATA is really an
   `inline_data *'.  */

static tree
copy_body_r (tree *tp, int *walk_subtrees, void *data)
{
  inline_data *id = (inline_data *) data;
  tree fn = id->callee;

  /* Begin by recognizing trees that we'll completely rewrite for the
     inlining context.  Our output for these trees is completely
     different from our input (e.g. RETURN_EXPR is deleted, and morphs
     into an edge).  Further down, we'll handle trees that get
     duplicated and/or tweaked.  */

  /* If this is a RETURN_EXPR, rewrite it: either replace it with (a copy
     of) the MODIFY_EXPR it wraps, or delete it entirely when it returns
     no value.  */
  if (TREE_CODE (*tp) == RETURN_EXPR && inlining_p (id))
    {
      tree assignment = TREE_OPERAND (*tp, 0);

      /* If we're returning something, just turn that into an
         assignment into the equivalent of the original RESULT_DECL.
         If the "assignment" is just the result decl, the result
         decl has already been set (e.g. a recent "foo (&result_decl,
         ...)"); just toss the entire RETURN_EXPR.  */
      if (assignment && TREE_CODE (assignment) == MODIFY_EXPR)
        {
          /* Replace the RETURN_EXPR with (a copy of) the
             MODIFY_EXPR hanging underneath.  */
          *tp = copy_node (assignment);
        }
      else /* Else the RETURN_EXPR returns no value.  */
        {
          *tp = NULL;
          return (void *)1;
        }
    }
  /* Local variables and labels need to be replaced by equivalent
     variables.  We don't want to copy static variables; there's only
     one of those, no matter how many times we inline the containing
     function.  Similarly for globals from an outer function.  */
  else if (lang_hooks.tree_inlining.auto_var_in_fn_p (*tp, fn))
    {
      tree new_decl;

      /* Remap the declaration.  */
      new_decl = remap_decl (*tp, id);
      gcc_assert (new_decl);
      /* Replace this variable with the copy.  */
      STRIP_TYPE_NOPS (new_decl);
      *tp = new_decl;
      *walk_subtrees = 0;
    }
  else if (TREE_CODE (*tp) == STATEMENT_LIST)
    copy_statement_list (tp);
  else if (TREE_CODE (*tp) == SAVE_EXPR)
    remap_save_expr (tp, id->decl_map, walk_subtrees);
  else if (TREE_CODE (*tp) == LABEL_DECL)
    /* These may need to be remapped for EH handling.  */
    remap_decl (*tp, id);
  else if (TREE_CODE (*tp) == BIND_EXPR)
    copy_bind_expr (tp, walk_subtrees, id);
  /* Types may need remapping as well.  */
  else if (TYPE_P (*tp))
    *tp = remap_type (*tp, id);

  /* If this is a constant, we have to copy the node iff the type will be
     remapped.  copy_tree_r will not copy a constant.  */
  else if (CONSTANT_CLASS_P (*tp))
    {
      tree new_type = remap_type (TREE_TYPE (*tp), id);

      if (new_type == TREE_TYPE (*tp))
        *walk_subtrees = 0;

      else if (TREE_CODE (*tp) == INTEGER_CST)
        *tp = build_int_cst_wide (new_type, TREE_INT_CST_LOW (*tp),
                                  TREE_INT_CST_HIGH (*tp));
      else
        {
          *tp = copy_node (*tp);
          TREE_TYPE (*tp) = new_type;
        }
    }

  /* Otherwise, just copy the node.  Note that copy_tree_r already
     knows not to copy VAR_DECLs, etc., so this is safe.  */
  else
    {
      /* Here we handle trees that are not completely rewritten.
         First we detect some inlining-induced bogosities for
         discarding.  */
      if (TREE_CODE (*tp) == MODIFY_EXPR
          && TREE_OPERAND (*tp, 0) == TREE_OPERAND (*tp, 1)
          && (lang_hooks.tree_inlining.auto_var_in_fn_p
              (TREE_OPERAND (*tp, 0), fn)))
        {
          /* Some assignments VAR = VAR; don't generate any rtl code
             and thus don't count as variable modification.  Avoid
             keeping bogosities like 0 = 0.  */
          tree decl = TREE_OPERAND (*tp, 0), value;
          splay_tree_node n;

          n = splay_tree_lookup (id->decl_map, (splay_tree_key) decl);
          if (n)
            {
              value = (tree) n->value;
              STRIP_TYPE_NOPS (value);
              if (TREE_CONSTANT (value) || TREE_READONLY_DECL_P (value))
                {
                  *tp = build_empty_stmt ();
                  return copy_body_r (tp, walk_subtrees, data);
                }
            }
        }
      else if (TREE_CODE (*tp) == INDIRECT_REF)
        {
          /* Get rid of *& from inline substitutions that can happen when a
             pointer argument is an ADDR_EXPR.  */
          tree decl = TREE_OPERAND (*tp, 0);
          splay_tree_node n;

          n = splay_tree_lookup (id->decl_map, (splay_tree_key) decl);
          if (n)
            {
              /* If we happen to get an ADDR_EXPR in n->value, strip
                 it manually here as we'll eventually get ADDR_EXPRs
                 which lie about their types pointed to.  In this case
                 build_fold_indirect_ref wouldn't strip the INDIRECT_REF,
                 but we absolutely rely on that.  As fold_indirect_ref
                 does other useful transformations, try that first, though.  */
              tree type = TREE_TYPE (TREE_TYPE ((tree)n->value));
              *tp = fold_indirect_ref_1 (type, (tree)n->value);
              if (! *tp)
                {
                  if (TREE_CODE ((tree)n->value) == ADDR_EXPR)
                    *tp = TREE_OPERAND ((tree)n->value, 0);
                  else
                    *tp = build1 (INDIRECT_REF, type, (tree)n->value);
                }
              *walk_subtrees = 0;
              return NULL;
            }
        }

      /* Here is the "usual case".  Copy this tree node, and then
         tweak some special cases.  */
      copy_tree_r (tp, walk_subtrees, NULL);
      if (id->block
          && IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (TREE_CODE (*tp))))
        TREE_BLOCK (*tp) = id->block;

      if (TREE_CODE (*tp) == RESX_EXPR && id->eh_region_offset)
        TREE_OPERAND (*tp, 0) =
          build_int_cst
            (NULL_TREE,
             id->eh_region_offset + TREE_INT_CST_LOW (TREE_OPERAND (*tp, 0)));

      TREE_TYPE (*tp) = remap_type (TREE_TYPE (*tp), id);

      /* The copied TARGET_EXPR has never been expanded, even if the
         original node was expanded already.  */
      if (TREE_CODE (*tp) == TARGET_EXPR && TREE_OPERAND (*tp, 3))
        {
          TREE_OPERAND (*tp, 1) = TREE_OPERAND (*tp, 3);
          TREE_OPERAND (*tp, 3) = NULL_TREE;
        }

      /* Variable substitution need not be simple.  In particular, the
         INDIRECT_REF substitution above.  Make sure that TREE_CONSTANT
         and friends are up-to-date.  */
      else if (TREE_CODE (*tp) == ADDR_EXPR)
        {
          walk_tree (&TREE_OPERAND (*tp, 0), copy_body_r, id, NULL);
          recompute_tree_invarant_for_addr_expr (*tp);
          *walk_subtrees = 0;
        }
    }

  /* Keep iterating.  */
  return NULL_TREE;
}
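
/* Sketch of the *& elimination above (decl names invented): if the
   callee parameter p was bound to &obj at the call site, the decl map
   sends p to the ADDR_EXPR &obj, so a use "*p" in the copied body folds
   directly to "obj", avoiding a pointless indirection.  */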
/* Copy basic block, scale profile accordingly.  Edges will be taken care of
   later.  */

static basic_block
copy_bb (inline_data *id, basic_block bb, int frequency_scale, int count_scale)
{
  block_stmt_iterator bsi, copy_bsi;
  basic_block copy_basic_block;

  /* create_basic_block() will append every new block to
     basic_block_info automatically.  */
  copy_basic_block = create_basic_block (NULL, (void *) 0, bb->prev_bb->aux);
  copy_basic_block->count = bb->count * count_scale / REG_BR_PROB_BASE;
  copy_basic_block->frequency = (bb->frequency
                                 * frequency_scale / REG_BR_PROB_BASE);
  copy_bsi = bsi_start (copy_basic_block);

  for (bsi = bsi_start (bb);
       !bsi_end_p (bsi); bsi_next (&bsi))
    {
      tree stmt = bsi_stmt (bsi);
      tree orig_stmt = stmt;

      walk_tree (&stmt, copy_body_r, id, NULL);

      /* RETURN_EXPR might be removed,
         this is signalled by making stmt pointer NULL.  */
      if (stmt)
        {
          tree call, decl;
          bsi_insert_after (&copy_bsi, stmt, BSI_NEW_STMT);
          call = get_call_expr_in (stmt);
          /* We're duplicating a CALL_EXPR.  Find any corresponding
             callgraph edges and update or duplicate them.  */
          if (call && (decl = get_callee_fndecl (call)))
            {
              if (id->saving_p)
                {
                  struct cgraph_node *node;
                  struct cgraph_edge *edge;

                  /* We're saving a copy of the body, so we'll update the
                     callgraph nodes in place.  Note that we avoid
                     altering the original callgraph node; we begin with
                     the first clone.  */
                  for (node = id->node->next_clone;
                       node;
                       node = node->next_clone)
                    {
                      edge = cgraph_edge (node, orig_stmt);
                      gcc_assert (edge);
                      edge->call_stmt = stmt;
                    }
                }
              else
                {
                  struct cgraph_edge *edge;

                  /* We're cloning or inlining this body; duplicate the
                     associated callgraph nodes.  */
                  edge = cgraph_edge (id->current_node, orig_stmt);
                  if (edge)
                    cgraph_clone_edge (edge, id->node, stmt,
                                       REG_BR_PROB_BASE, 1);
                }
            }
          /* If you think we can abort here, you are wrong.
             There is no region 0 in tree land.  */
          gcc_assert (lookup_stmt_eh_region_fn (id->callee_cfun, orig_stmt)
                      != 0);

          if (tree_could_throw_p (stmt))
            {
              int region = lookup_stmt_eh_region_fn (id->callee_cfun, orig_stmt);
              /* Add an entry for the copied tree in the EH hashtable.
                 When saving or cloning or versioning, use the hashtable in
                 cfun, and just copy the EH number.  When inlining, use the
                 hashtable in the caller, and adjust the region number.  */
              if (region > 0)
                add_stmt_to_eh_region (stmt, region + id->eh_region_offset);

              /* If this tree doesn't have a region associated with it,
                 and there is a "current region,"
                 then associate this tree with the current region
                 and add edges associated with this region.  */
              if ((lookup_stmt_eh_region_fn (id->callee_cfun,
                                             orig_stmt) <= 0
                   && id->eh_region > 0)
                  && tree_could_throw_p (stmt))
                add_stmt_to_eh_region (stmt, id->eh_region);
            }
        }
    }
  return copy_basic_block;
}
/* Copy edges from BB into its copy constructed earlier, scale profile
   accordingly.  Edges will be taken care of later.  Assume aux
   pointers to point to the copies of each BB.  */
static void
copy_edges_for_bb (basic_block bb, int count_scale)
{
  basic_block new_bb = bb->aux;
  edge_iterator ei;
  edge old_edge;
  block_stmt_iterator bsi;
  int flags;

  /* Use the indices from the original blocks to create edges for the
     new ones.  */
  FOR_EACH_EDGE (old_edge, ei, bb->succs)
    if (!(old_edge->flags & EDGE_EH))
      {
        edge new;

        flags = old_edge->flags;

        /* Return edges do get a FALLTHRU flag when they get inlined.  */
        if (old_edge->dest->index == EXIT_BLOCK && !old_edge->flags
            && old_edge->dest->aux != EXIT_BLOCK_PTR)
          flags |= EDGE_FALLTHRU;
        new = make_edge (new_bb, old_edge->dest->aux, flags);
        new->count = old_edge->count * count_scale / REG_BR_PROB_BASE;
        new->probability = old_edge->probability;
      }

  if (bb->index == ENTRY_BLOCK || bb->index == EXIT_BLOCK)
    return;

  for (bsi = bsi_start (new_bb); !bsi_end_p (bsi);)
    {
      tree copy_stmt;

      copy_stmt = bsi_stmt (bsi);
      update_stmt (copy_stmt);
      /* Do this before the possible split_block.  */
      bsi_next (&bsi);

      /* If this tree could throw an exception, there are two
         cases where we need to add abnormal edge(s): the
         tree wasn't in a region and there is a "current
         region" in the caller; or the original tree had
         EH edges.  In both cases split the block after the tree,
         and add abnormal edge(s) as needed; we need both
         those from the callee and the caller.
         We check whether the copy can throw, because the const
         propagation can change an INDIRECT_REF which throws
         into a COMPONENT_REF which doesn't.  If the copy
         can throw, the original could also throw.  */

      if (tree_can_throw_internal (copy_stmt))
        {
          if (!bsi_end_p (bsi))
            /* Note that bb's predecessor edges aren't necessarily
               right at this point; split_block doesn't care.  */
            {
              edge e = split_block (new_bb, copy_stmt);
              new_bb = e->dest;
              bsi = bsi_start (new_bb);
            }

          make_eh_edges (copy_stmt);
        }
    }
}
/* Wrapper for remap_decl so it can be used as a callback.  */
static tree
remap_decl_1 (tree decl, void *data)
{
  return remap_decl (decl, data);
}
/* Make a copy of the body of FN so that it can be inserted inline in
   another function.  Walks FN via CFG, returns new fndecl.  */

static tree
copy_cfg_body (inline_data * id, gcov_type count, int frequency,
               basic_block entry_block_map, basic_block exit_block_map)
{
  tree callee_fndecl = id->callee;
  /* Original cfun for the callee, doesn't change.  */
  struct function *callee_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
  /* Copy, built by this function.  */
  struct function *new_cfun;
  /* Place to copy from; when a copy of the function was saved off earlier,
     use that instead of the main copy.  */
  struct function *cfun_to_copy =
    (struct function *) ggc_alloc_cleared (sizeof (struct function));
  basic_block bb;
  tree new_fndecl = NULL;
  bool saving_or_cloning;
  int count_scale, frequency_scale;

  if (ENTRY_BLOCK_PTR_FOR_FUNCTION (callee_cfun)->count)
    count_scale = (REG_BR_PROB_BASE * count
                   / ENTRY_BLOCK_PTR_FOR_FUNCTION (callee_cfun)->count);
  else
    count_scale = 1;

  if (ENTRY_BLOCK_PTR_FOR_FUNCTION (callee_cfun)->frequency)
    frequency_scale = (REG_BR_PROB_BASE * frequency
                       /
                       ENTRY_BLOCK_PTR_FOR_FUNCTION (callee_cfun)->frequency);
  else
    frequency_scale = count_scale;

  /* Register specific tree functions.  */
  tree_register_cfg_hooks ();

  /* Must have a CFG here at this point.  */
  gcc_assert (ENTRY_BLOCK_PTR_FOR_FUNCTION
              (DECL_STRUCT_FUNCTION (callee_fndecl)));

  *cfun_to_copy = *DECL_STRUCT_FUNCTION (callee_fndecl);

  /* If there is a saved_cfg+saved_args lurking in the
     struct function, a copy of the callee body was saved there, and
     the 'struct cgraph edge' nodes have been fudged to point into the
     saved body.  Accordingly, we want to copy that saved body so the
     callgraph edges will be recognized and cloned properly.  */
  if (cfun_to_copy->saved_cfg)
    {
      cfun_to_copy->cfg = cfun_to_copy->saved_cfg;
      cfun_to_copy->eh = cfun_to_copy->saved_eh;
    }
  id->callee_cfun = cfun_to_copy;

  /* If saving or cloning a function body, create new basic_block_info
     and label_to_block_maps.  Otherwise, we're duplicating a function
     body for inlining; insert our new blocks and labels into the
     existing varrays.  */
  saving_or_cloning = (id->saving_p || id->cloning_p);
  if (saving_or_cloning)
    {
      new_cfun =
        (struct function *) ggc_alloc_cleared (sizeof (struct function));
      *new_cfun = *DECL_STRUCT_FUNCTION (callee_fndecl);
      new_cfun->cfg = NULL;
      new_cfun->decl = new_fndecl = copy_node (callee_fndecl);
      new_cfun->ib_boundaries_block = (varray_type) 0;
      DECL_STRUCT_FUNCTION (new_fndecl) = new_cfun;
      push_cfun (new_cfun);
      init_empty_tree_cfg ();

      ENTRY_BLOCK_PTR->count =
        (ENTRY_BLOCK_PTR_FOR_FUNCTION (callee_cfun)->count * count_scale /
         REG_BR_PROB_BASE);
      ENTRY_BLOCK_PTR->frequency =
        (ENTRY_BLOCK_PTR_FOR_FUNCTION (callee_cfun)->frequency *
         frequency_scale / REG_BR_PROB_BASE);
      EXIT_BLOCK_PTR->count =
        (EXIT_BLOCK_PTR_FOR_FUNCTION (callee_cfun)->count * count_scale /
         REG_BR_PROB_BASE);
      EXIT_BLOCK_PTR->frequency =
        (EXIT_BLOCK_PTR_FOR_FUNCTION (callee_cfun)->frequency *
         frequency_scale / REG_BR_PROB_BASE);

      entry_block_map = ENTRY_BLOCK_PTR;
      exit_block_map = EXIT_BLOCK_PTR;
    }

  ENTRY_BLOCK_PTR_FOR_FUNCTION (cfun_to_copy)->aux = entry_block_map;
  EXIT_BLOCK_PTR_FOR_FUNCTION (cfun_to_copy)->aux = exit_block_map;

  /* Duplicate any exception-handling regions.  */
  if (cfun->eh)
    {
      if (saving_or_cloning)
        init_eh_for_function ();
      id->eh_region_offset = duplicate_eh_regions (cfun_to_copy,
                                                   remap_decl_1,
                                                   id, id->eh_region);
      gcc_assert (inlining_p (id) || !id->eh_region_offset);
    }
  /* Use aux pointers to map the original blocks to copy.  */
  FOR_EACH_BB_FN (bb, cfun_to_copy)
    bb->aux = copy_bb (id, bb, frequency_scale, count_scale);
  /* Now that we've duplicated the blocks, duplicate their edges.  */
  FOR_ALL_BB_FN (bb, cfun_to_copy)
    copy_edges_for_bb (bb, count_scale);
  FOR_ALL_BB_FN (bb, cfun_to_copy)
    bb->aux = NULL;

  if (saving_or_cloning)
    pop_cfun ();

  return new_fndecl;
}
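
/* Worked example of the scaling above (numbers invented): if the
   callee's entry block count is 1000 and the call site's count is 250,
   count_scale is REG_BR_PROB_BASE * 250 / 1000, so each copied block in
   copy_bb receives a quarter of its original execution count.  */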
/* Make a copy of the body of FN so that it can be inserted inline in
   another function.  */

static tree
copy_generic_body (inline_data *id)
{
  tree body;
  tree fndecl = id->callee;

  body = DECL_SAVED_TREE (fndecl);
  walk_tree (&body, copy_body_r, id, NULL);

  return body;
}

static tree
copy_body (inline_data *id, gcov_type count, int frequency,
           basic_block entry_block_map, basic_block exit_block_map)
{
  tree fndecl = id->callee;
  tree body;

  /* If this body has a CFG, walk CFG and copy.  */
  gcc_assert (ENTRY_BLOCK_PTR_FOR_FUNCTION (DECL_STRUCT_FUNCTION (fndecl)));
  body = copy_cfg_body (id, count, frequency, entry_block_map, exit_block_map);

  return body;
}
/* Return true if VALUE is an ADDR_EXPR of an automatic variable
   defined in function FN, or of a data member thereof.  */

static bool
self_inlining_addr_expr (tree value, tree fn)
{
  tree var;

  if (TREE_CODE (value) != ADDR_EXPR)
    return false;

  var = get_base_address (TREE_OPERAND (value, 0));

  return var && lang_hooks.tree_inlining.auto_var_in_fn_p (var, fn);
}
static void
setup_one_parameter (inline_data *id, tree p, tree value, tree fn,
                     basic_block bb, tree *vars)
{
  tree init_stmt;
  tree var;
  tree var_sub;

  /* If the parameter is never assigned to, we may not need to
     create a new variable here at all.  Instead, we may be able
     to just use the argument value.  */
  if (TREE_READONLY (p)
      && !TREE_ADDRESSABLE (p)
      && value && !TREE_SIDE_EFFECTS (value))
    {
      /* We may produce non-gimple trees by adding NOPs or introduce
         invalid sharing when the operand is not really constant.
         It is not a big deal to prohibit constant propagation here as
         we will constant propagate in the DOM1 pass anyway.  */
      if (is_gimple_min_invariant (value)
          && lang_hooks.types_compatible_p (TREE_TYPE (value), TREE_TYPE (p))
          /* We have to be very careful about ADDR_EXPR.  Make sure
             the base variable isn't a local variable of the inlined
             function, e.g., when doing recursive inlining, direct or
             mutually-recursive or whatever, which is why we don't
             just test whether fn == current_function_decl.  */
          && ! self_inlining_addr_expr (value, fn))
        {
          insert_decl_map (id, p, value);
          return;
        }
    }

  /* Make an equivalent VAR_DECL.  Note that we must NOT remap the type
     here since the type of this decl must be visible to the calling
     function.  */
  var = copy_decl_for_inlining (p, fn, id->caller);

  /* See if the frontend wants to pass this by invisible reference.  If
     so, our new VAR_DECL will have REFERENCE_TYPE, and we need to
     replace uses of the PARM_DECL with dereferences.  */
  if (TREE_TYPE (var) != TREE_TYPE (p)
      && POINTER_TYPE_P (TREE_TYPE (var))
      && TREE_TYPE (TREE_TYPE (var)) == TREE_TYPE (p))
    {
      insert_decl_map (id, var, var);
      var_sub = build_fold_indirect_ref (var);
    }
  else
    var_sub = var;

  /* Register the VAR_DECL as the equivalent for the PARM_DECL;
     that way, when the PARM_DECL is encountered, it will be
     automatically replaced by the VAR_DECL.  */
  insert_decl_map (id, p, var_sub);

  /* Declare this new variable.  */
  TREE_CHAIN (var) = *vars;
  *vars = var;

  /* Make gimplifier happy about this variable.  */
  DECL_SEEN_IN_BIND_EXPR_P (var) = 1;

  /* Even if P was TREE_READONLY, the new VAR should not be.
     In the original code, we would have constructed a
     temporary, and then the function body would have never
     changed the value of P.  However, now, we will be
     constructing VAR directly.  The constructor body may
     change its value multiple times as it is being
     constructed.  Therefore, it must not be TREE_READONLY;
     the back-end assumes that a TREE_READONLY variable is
     assigned to only once.  */
  if (TYPE_NEEDS_CONSTRUCTING (TREE_TYPE (p)))
    TREE_READONLY (var) = 0;

  /* Initialize this VAR_DECL from the equivalent argument.  Convert
     the argument to the proper type in case it was promoted.  */
  if (value)
    {
      tree rhs = fold_convert (TREE_TYPE (var), value);
      block_stmt_iterator bsi = bsi_last (bb);

      if (rhs == error_mark_node)
        return;

      /* We want to use MODIFY_EXPR, not INIT_EXPR here so that we
         keep our trees in gimple form.  */
      init_stmt = build (MODIFY_EXPR, TREE_TYPE (var), var, rhs);

      /* If we did not create a gimple value and we did not create a gimple
         cast of a gimple value, then we will need to gimplify INIT_STMT
         at the end.  Note that is_gimple_cast only checks the outer
         tree code, not its operand.  Thus the explicit check that its
         operand is a gimple value.  */
      if (!is_gimple_val (rhs)
          && (!is_gimple_cast (rhs)
              || !is_gimple_val (TREE_OPERAND (rhs, 0))))
        gimplify_stmt (&init_stmt);
      bsi_insert_after (&bsi, init_stmt, BSI_NEW_STMT);
    }
}
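
/* Illustration of the constant-propagation shortcut above (invented
   code): for a call "f (3)" where the parameter p is TREE_READONLY,
   not addressable, and the argument has no side effects, no VAR_DECL is
   created at all; p is simply mapped to the constant 3 and every use of
   p in the copied body becomes 3.  */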
/* Generate code to initialize the parameters of the function at the
   top of the stack in ID from the ARGS (presented as a TREE_LIST).  */

static void
initialize_inlined_parameters (inline_data *id, tree args, tree static_chain,
                               tree fn, basic_block bb)
{
  tree parms;
  tree a;
  tree p;
  tree vars = NULL_TREE;
  int argnum = 0;

  /* Figure out what the parameters are.  */
  parms = DECL_ARGUMENTS (fn);
  if (fn == current_function_decl)
    parms = cfun->saved_args;

  /* Loop through the parameter declarations, replacing each with an
     equivalent VAR_DECL, appropriately initialized.  */
  for (p = parms, a = args; p;
       a = a ? TREE_CHAIN (a) : a, p = TREE_CHAIN (p))
    {
      tree value;

      ++argnum;

      /* Find the initializer.  */
      value = lang_hooks.tree_inlining.convert_parm_for_inlining
              (p, a ? TREE_VALUE (a) : NULL_TREE, fn, argnum);

      setup_one_parameter (id, p, value, fn, bb, &vars);
    }

  /* Initialize the static chain.  */
  p = DECL_STRUCT_FUNCTION (fn)->static_chain_decl;
  if (fn == current_function_decl)
    p = DECL_STRUCT_FUNCTION (fn)->saved_static_chain_decl;
  if (p)
    {
      /* No static chain?  Seems like a bug in tree-nested.c.  */
      gcc_assert (static_chain);

      setup_one_parameter (id, p, static_chain, fn, bb, &vars);
    }

  declare_inline_vars (id->block, vars);
}
/* Declare a return variable to replace the RESULT_DECL for the
   function we are calling.  An appropriate DECL_STMT is returned.
   The USE_STMT is filled to contain a use of the declaration to
   indicate the return value of the function.

   RETURN_SLOT_ADDR, if non-null, was a fake parameter that
   took the address of the result.  MODIFY_DEST, if non-null, was the LHS of
   the MODIFY_EXPR to which this call is the RHS.

   The return value is a (possibly null) value that is the result of the
   function as seen by the callee.  *USE_P is a (possibly null) value that
   holds the result as seen by the caller.  */

static tree
declare_return_variable (inline_data *id, tree return_slot_addr,
                         tree modify_dest, tree *use_p)
{
  tree callee = id->callee;
  tree caller = id->caller;
  tree result = DECL_RESULT (callee);
  tree callee_type = TREE_TYPE (result);
  tree caller_type = TREE_TYPE (TREE_TYPE (callee));
  tree var, use;

  /* We don't need to do anything for functions that don't return
     anything.  */
  if (!result || VOID_TYPE_P (callee_type))
    {
      *use_p = NULL_TREE;
      return NULL_TREE;
    }

  /* If there was a return slot, then the return value is the
     dereferenced address of that object.  */
  if (return_slot_addr)
    {
      /* The front end shouldn't have used both return_slot_addr and
         a modify expression.  */
      gcc_assert (!modify_dest);
      if (DECL_BY_REFERENCE (result))
        var = return_slot_addr;
      else
        var = build_fold_indirect_ref (return_slot_addr);
      use = NULL;
      goto done;
    }

  /* All types requiring non-trivial constructors should have been handled.  */
  gcc_assert (!TREE_ADDRESSABLE (callee_type));

  /* Attempt to avoid creating a new temporary variable.  */
  if (modify_dest)
    {
      bool use_it = false;

      /* We can't use MODIFY_DEST if there's type promotion involved.  */
      if (!lang_hooks.types_compatible_p (caller_type, callee_type))
        use_it = false;

      /* ??? If we're assigning to a variable sized type, then we must
         reuse the destination variable, because we've no good way to
         create variable sized temporaries at this point.  */
      else if (TREE_CODE (TYPE_SIZE_UNIT (caller_type)) != INTEGER_CST)
        use_it = true;

      /* If the callee cannot possibly modify MODIFY_DEST, then we can
         reuse it as the result of the call directly.  Don't do this if
         it would promote MODIFY_DEST to addressable.  */
      else if (!TREE_STATIC (modify_dest)
               && !TREE_ADDRESSABLE (modify_dest)
               && !TREE_ADDRESSABLE (result))
        use_it = true;

      if (use_it)
        {
          var = modify_dest;
          use = NULL;
          goto done;
        }
    }

  gcc_assert (TREE_CODE (TYPE_SIZE_UNIT (callee_type)) == INTEGER_CST);

  var = copy_decl_for_inlining (result, callee, caller);

  DECL_SEEN_IN_BIND_EXPR_P (var) = 1;
  DECL_STRUCT_FUNCTION (caller)->unexpanded_var_list
    = tree_cons (NULL_TREE, var,
                 DECL_STRUCT_FUNCTION (caller)->unexpanded_var_list);

  /* Do not have the rest of GCC warn about this variable as it should
     not be visible to the user.  */
  TREE_NO_WARNING (var) = 1;

  /* Build the use expr.  If the return type of the function was
     promoted, convert it back to the expected type.  */
  use = var;
  if (!lang_hooks.types_compatible_p (TREE_TYPE (var), caller_type))
    use = fold_convert (caller_type, var);

 done:
  /* Register the VAR_DECL as the equivalent for the RESULT_DECL; that
     way, when the RESULT_DECL is encountered, it will be
     automatically replaced by the VAR_DECL.  */
  insert_decl_map (id, result, var);

  /* Remember this so we can ignore it in remap_decls.  */
  id->retvar = var;

  *use_p = use;
  return var;
}
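
/* Example of the MODIFY_DEST reuse above (invented code): for
   "dest = f ();" where dest is a non-static, non-addressable local of a
   compatible type and the callee's result is not addressable, the
   RESULT_DECL is mapped straight to dest, so the inlined body stores
   into dest with no intermediate retvar copy.  */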
/* Returns nonzero if a function can be inlined as a tree.  */

bool
tree_inlinable_function_p (tree fn)
{
  return inlinable_function_p (fn);
}

static const char *inline_forbidden_reason;
static tree
inline_forbidden_p_1 (tree *nodep, int *walk_subtrees ATTRIBUTE_UNUSED,
                      void *fnp)
{
  tree node = *nodep;
  tree fn = (tree) fnp;
  tree t;

  switch (TREE_CODE (node))
    {
    case CALL_EXPR:
      /* Refuse to inline an alloca call unless the user explicitly forced
         it, as this may change the program's memory overhead drastically
         when the function using alloca is called in a loop.  In the GCC
         present in SPEC2000, inlining into schedule_block caused it to
         require 2GB of RAM instead of 256MB.  */
      if (alloca_call_p (node)
          && !lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn)))
        {
          inline_forbidden_reason
            = N_("%Jfunction %qF can never be inlined because it uses "
                 "alloca (override using the always_inline attribute)");
          return node;
        }
      t = get_callee_fndecl (node);
      if (! t)
        break;

      /* We cannot inline functions that call setjmp.  */
      if (setjmp_call_p (t))
        {
          inline_forbidden_reason
            = N_("%Jfunction %qF can never be inlined because it uses setjmp");
          return node;
        }

      if (DECL_BUILT_IN_CLASS (t) == BUILT_IN_NORMAL)
        switch (DECL_FUNCTION_CODE (t))
          {
            /* We cannot inline functions that take a variable number of
               arguments.  */
          case BUILT_IN_VA_START:
          case BUILT_IN_STDARG_START:
          case BUILT_IN_NEXT_ARG:
          case BUILT_IN_VA_END:
            inline_forbidden_reason
              = N_("%Jfunction %qF can never be inlined because it "
                   "uses variable argument lists");
            return node;

          case BUILT_IN_LONGJMP:
            /* We can't inline functions that call __builtin_longjmp at
               all.  The non-local goto machinery really requires the
               destination be in a different function.  If we allow the
               function calling __builtin_longjmp to be inlined into the
               function calling __builtin_setjmp, Things will Go Awry.  */
            inline_forbidden_reason
              = N_("%Jfunction %qF can never be inlined because "
                   "it uses setjmp-longjmp exception handling");
            return node;

          case BUILT_IN_NONLOCAL_GOTO:
            /* Similarly.  */
            inline_forbidden_reason
              = N_("%Jfunction %qF can never be inlined because "
                   "it uses non-local goto");
            return node;

          case BUILT_IN_RETURN:
          case BUILT_IN_APPLY_ARGS:
            /* If a __builtin_apply_args caller would be inlined,
               it would be saving arguments of the function it has
               been inlined into.  Similarly __builtin_return would
               return from the function the inline has been inlined into.  */
            inline_forbidden_reason
              = N_("%Jfunction %qF can never be inlined because "
                   "it uses __builtin_return or __builtin_apply_args");
            return node;

          default:
            break;
          }
      break;

    case GOTO_EXPR:
      t = TREE_OPERAND (node, 0);

      /* We will not inline a function which uses computed goto.  The
         addresses of its local labels, which may be tucked into
         global storage, are of course not constant across
         instantiations, which causes unexpected behavior.  */
      if (TREE_CODE (t) != LABEL_DECL)
        {
          inline_forbidden_reason
            = N_("%Jfunction %qF can never be inlined "
                 "because it contains a computed goto");
          return node;
        }
      break;

    case LABEL_EXPR:
      t = TREE_OPERAND (node, 0);
      if (DECL_NONLOCAL (t))
        {
          /* We cannot inline a function that receives a non-local goto
             because we cannot remap the destination label used in the
             function that is performing the non-local goto.  */
          inline_forbidden_reason
            = N_("%Jfunction %qF can never be inlined "
                 "because it receives a non-local goto");
          return node;
        }
      break;

    case RECORD_TYPE:
    case UNION_TYPE:
      /* We cannot inline a function of the form

           void F (int i) { struct S { int ar[i]; } s; }

         Attempting to do so produces a catch-22.
         If walk_tree examines the TYPE_FIELDS chain of RECORD_TYPE/
         UNION_TYPE nodes, then it goes into infinite recursion on a
         structure containing a pointer to its own type.  If it doesn't,
         then the type node for S doesn't get adjusted properly when
         F is inlined.

         ??? This is likely no longer true, but it's too late in the 4.0
         cycle to try to find out.  This should be checked for 4.1.  */
      for (t = TYPE_FIELDS (node); t; t = TREE_CHAIN (t))
        if (variably_modified_type_p (TREE_TYPE (t), NULL))
          {
            inline_forbidden_reason
              = N_("%Jfunction %qF can never be inlined "
                   "because it uses variable sized variables");
            return node;
          }

    default:
      break;
    }

  return NULL_TREE;
}
/* Return subexpression representing possible alloca call, if any.  */
static tree
inline_forbidden_p (tree fndecl)
{
  location_t saved_loc = input_location;
  block_stmt_iterator bsi;
  basic_block bb;
  tree ret = NULL_TREE;

  FOR_EACH_BB_FN (bb, DECL_STRUCT_FUNCTION (fndecl))
    for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
      {
        ret = walk_tree_without_duplicates (bsi_stmt_ptr (bsi),
                                            inline_forbidden_p_1, fndecl);
        if (ret)
          goto egress;
      }

egress:
  input_location = saved_loc;
  return ret;
}
/* Returns nonzero if FN is a function that does not have any
   fundamental inline blocking properties.  */

static bool
inlinable_function_p (tree fn)
{
  bool inlinable = true;

  /* If we've already decided this function shouldn't be inlined,
     there's no need to check again.  */
  if (DECL_UNINLINABLE (fn))
    return false;

  /* See if there is any language-specific reason it cannot be
     inlined.  (It is important that this hook be called early because
     in C++ it may result in template instantiation.)
     If the function is not inlinable for language-specific reasons,
     it is left up to the langhook to explain why.  */
  inlinable = !lang_hooks.tree_inlining.cannot_inline_tree_fn (&fn);

  /* If we don't have the function body available, we can't inline it.
     However, this should not be recorded since we also get here for
     forward declared inline functions.  Therefore, return at once.  */
  if (!DECL_SAVED_TREE (fn))
    return false;

  /* If we're not inlining at all, then we cannot inline this function.  */
  else if (!flag_inline_trees)
    inlinable = false;

  /* Only try to inline functions if DECL_INLINE is set.  This should be
     true for all functions declared `inline', and for all other functions
     as well with -finline-functions.

     Don't think of disregarding DECL_INLINE when flag_inline_trees == 2;
     it's the front-end that must set DECL_INLINE in this case, because
     dwarf2out loses if a function that does not have DECL_INLINE set is
     inlined anyway.  That is why we have both DECL_INLINE and
     DECL_DECLARED_INLINE_P.  */
  /* FIXME: When flag_inline_trees dies, the check for flag_unit_at_a_time
     here should be redundant.  */
  else if (!DECL_INLINE (fn) && !flag_unit_at_a_time)
    inlinable = false;

  else if (inline_forbidden_p (fn))
    {
      /* See if we should warn about uninlinable functions.  Previously,
         some of these warnings would be issued while trying to expand
         the function inline, but that would cause multiple warnings
         about functions that would for example call alloca.  But since
         this is a property of the function, just one warning is enough.
         As a bonus we can now give more details about the reason why a
         function is not inlinable.
         We only warn for functions declared `inline' by the user.  */
      bool do_warning = (warn_inline
                         && DECL_INLINE (fn)
                         && DECL_DECLARED_INLINE_P (fn)
                         && !DECL_IN_SYSTEM_HEADER (fn));

      if (lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn)))
        sorry (inline_forbidden_reason, fn, fn);
      else if (do_warning)
        warning (0, inline_forbidden_reason, fn, fn);

      inlinable = false;
    }

  /* Squirrel away the result so that we don't have to check again.  */
  DECL_UNINLINABLE (fn) = !inlinable;

  return inlinable;
}
/* Estimate the cost of a memory move.  Use machine dependent
   word size and take possible memcpy call into account.  */

static int
estimate_move_cost (tree type)
{
  HOST_WIDE_INT size;

  size = int_size_in_bytes (type);

  if (size < 0 || size > MOVE_MAX_PIECES * MOVE_RATIO)
    /* Cost of a memcpy call, 3 arguments and the call.  */
    return 4;
  else
    return ((size + MOVE_MAX_PIECES - 1) / MOVE_MAX_PIECES);
}
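
/* Worked example (the macro values are target-dependent; these are
   invented): with MOVE_MAX_PIECES == 8 and MOVE_RATIO == 3, a 16-byte
   struct costs (16 + 8 - 1) / 8 == 2, while a 32-byte struct exceeds
   the 8 * 3 == 24 byte limit and is charged the flat memcpy cost 4.  */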
/* Used by estimate_num_insns.  Estimate number of instructions seen
   by given statement.  */

static tree
estimate_num_insns_1 (tree *tp, int *walk_subtrees, void *data)
{
  int *count = data;
  tree x = *tp;

  if (IS_TYPE_OR_DECL_P (x))
    {
      *walk_subtrees = 0;
      return NULL;
    }
  /* Assume that constants and references count as nothing.  These should
     be dominated by the amount of operations among them we count later
     and are common targets of CSE and similar optimizations.  */
  else if (CONSTANT_CLASS_P (x) || REFERENCE_CLASS_P (x))
    return NULL;

  switch (TREE_CODE (x))
    {
    /* Containers have no cost.  */
    case TREE_LIST:
    case TREE_VEC:
    case BLOCK:
    case COMPONENT_REF:
    case BIT_FIELD_REF:
    case INDIRECT_REF:
    case ALIGN_INDIRECT_REF:
    case MISALIGNED_INDIRECT_REF:
    case ARRAY_REF:
    case ARRAY_RANGE_REF:
    case OBJ_TYPE_REF:
    case EXC_PTR_EXPR: /* ??? */
    case FILTER_EXPR: /* ??? */
    case COMPOUND_EXPR:
    case BIND_EXPR:
    case WITH_CLEANUP_EXPR:
    case NOP_EXPR:
    case VIEW_CONVERT_EXPR:
    case SAVE_EXPR:
    case ADDR_EXPR:
    case COMPLEX_EXPR:
    case RANGE_EXPR:
    case CASE_LABEL_EXPR:
    case SSA_NAME:
    case CATCH_EXPR:
    case EH_FILTER_EXPR:
    case STATEMENT_LIST:
    case ERROR_MARK:
    case NON_LVALUE_EXPR:
    case FDESC_EXPR:
    case VA_ARG_EXPR:
    case TRY_CATCH_EXPR:
    case TRY_FINALLY_EXPR:
    case LABEL_EXPR:
    case GOTO_EXPR:
    case RETURN_EXPR:
    case EXIT_EXPR:
    case LOOP_EXPR:
    case PHI_NODE:
    case WITH_SIZE_EXPR:
      break;

    /* We don't account constants for now.  Assume that the cost is amortized
       by operations that do use them.  We may re-consider this decision once
       we are able to optimize the tree before estimating its size and break
       out static initializers.  */
    case IDENTIFIER_NODE:
    case INTEGER_CST:
    case REAL_CST:
    case COMPLEX_CST:
    case VECTOR_CST:
    case STRING_CST:
      *walk_subtrees = 0;
      return NULL;

    /* Try to estimate the cost of assignments.  We have three cases to
       deal with:
       1) Simple assignments to registers;
       2) Stores to things that must live in memory.  This includes
          "normal" stores to scalars, but also assignments of large
          structures, or constructors of big arrays;
       3) TARGET_EXPRs.

       Let us look at the first two cases, assuming we have "a = b + C":
       <modify_expr <var_decl "a"> <plus_expr <var_decl "b"> <constant C>>
       If "a" is a GIMPLE register, the assignment to it is free on almost
       any target, because "a" usually ends up in a real register.  Hence
       the only cost of this expression comes from the PLUS_EXPR, and we
       can ignore the MODIFY_EXPR.
       If "a" is not a GIMPLE register, the assignment to "a" will most
       likely be a real store, so the cost of the MODIFY_EXPR is the cost
       of moving something into "a", which we compute using the function
       estimate_move_cost.

       The third case deals with TARGET_EXPRs, for which the semantics are
       that a temporary is assigned, unless the TARGET_EXPR itself is being
       assigned to something else.  In the latter case we do not need the
       temporary.  E.g. in <modify_expr <var_decl "a"> <target_expr>>, the
       MODIFY_EXPR is free.  */
    case INIT_EXPR:
    case MODIFY_EXPR:
      /* Is the right-hand side a TARGET_EXPR?  */
      if (TREE_CODE (TREE_OPERAND (x, 1)) == TARGET_EXPR)
        break;
      /* ... fall through ...  */

    case TARGET_EXPR:
      x = TREE_OPERAND (x, 0);
      /* Is this an assignment to a register?  */
      if (is_gimple_reg (x))
        break;
      /* Otherwise it's a store, so fall through to compute the move cost.  */

    case CONSTRUCTOR:
      *count += estimate_move_cost (TREE_TYPE (x));
      break;

    /* Assign cost of 1 to usual operations.
       ??? We may consider mapping RTL costs to this.  */
    case COND_EXPR:
    case VEC_COND_EXPR:

    case PLUS_EXPR:
    case MINUS_EXPR:
    case MULT_EXPR:

    case FIX_TRUNC_EXPR:
    case FIX_CEIL_EXPR:
    case FIX_FLOOR_EXPR:
    case FIX_ROUND_EXPR:

    case NEGATE_EXPR:
    case FLOAT_EXPR:
    case MIN_EXPR:
    case MAX_EXPR:
    case ABS_EXPR:

    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:

    case BIT_IOR_EXPR:
    case BIT_XOR_EXPR:
    case BIT_AND_EXPR:
    case BIT_NOT_EXPR:

    case TRUTH_ANDIF_EXPR:
    case TRUTH_ORIF_EXPR:
    case TRUTH_AND_EXPR:
    case TRUTH_OR_EXPR:
    case TRUTH_XOR_EXPR:
    case TRUTH_NOT_EXPR:

    case LT_EXPR:
    case LE_EXPR:
    case GT_EXPR:
    case GE_EXPR:
    case EQ_EXPR:
    case NE_EXPR:
    case ORDERED_EXPR:
    case UNORDERED_EXPR:

    case UNLT_EXPR:
    case UNLE_EXPR:
    case UNGT_EXPR:
    case UNGE_EXPR:
    case UNEQ_EXPR:
    case LTGT_EXPR:

    case CONVERT_EXPR:

    case CONJ_EXPR:

    case PREDECREMENT_EXPR:
    case PREINCREMENT_EXPR:
    case POSTDECREMENT_EXPR:
    case POSTINCREMENT_EXPR:

    case SWITCH_EXPR:

    case ASM_EXPR:

    case REALIGN_LOAD_EXPR:

    case RESX_EXPR:
      *count += 1;
      break;

    /* A few special cases of expensive operations.  This is useful
       to avoid inlining on functions having too many of these.  */
    case TRUNC_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case EXACT_DIV_EXPR:
    case TRUNC_MOD_EXPR:
    case CEIL_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case ROUND_MOD_EXPR:
    case RDIV_EXPR:
      *count += 10;
      break;
    case CALL_EXPR:
      {
        tree decl = get_callee_fndecl (x);
        tree arg;

        if (decl && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
          switch (DECL_FUNCTION_CODE (decl))
            {
            case BUILT_IN_CONSTANT_P:
              *walk_subtrees = 0;
              return NULL_TREE;
            case BUILT_IN_EXPECT:
              return NULL_TREE;
            default:
              break;
            }

        /* Our cost must be kept in sync with cgraph_estimate_size_after_inlining
           that uses the function declaration to figure out the arguments.  */
        if (!decl)
          {
            for (arg = TREE_OPERAND (x, 1); arg; arg = TREE_CHAIN (arg))
              *count += estimate_move_cost (TREE_TYPE (TREE_VALUE (arg)));
          }
        else
          {
            for (arg = DECL_ARGUMENTS (decl); arg; arg = TREE_CHAIN (arg))
              *count += estimate_move_cost (TREE_TYPE (arg));
          }

        *count += PARAM_VALUE (PARAM_INLINE_CALL_COST);
        break;
      }
    default:
      gcc_unreachable ();
    }
  return NULL;
}
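
/* Example tally (illustrative): for "a = b / c" where a is not a GIMPLE
   register, this walk charges 10 for the TRUNC_DIV_EXPR plus
   estimate_move_cost (TREE_TYPE (a)) for the store, while the reads of
   b and c add nothing.  */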
/* Estimate number of instructions that will be created by expanding EXPR.  */

int
estimate_num_insns (tree expr)
{
  int num = 0;
  struct pointer_set_t *visited_nodes;
  basic_block bb;
  block_stmt_iterator bsi;
  struct function *my_function;

  /* If we're given an entire function, walk the CFG.  */
  if (TREE_CODE (expr) == FUNCTION_DECL)
    {
      my_function = DECL_STRUCT_FUNCTION (expr);
      gcc_assert (my_function && my_function->cfg);
      visited_nodes = pointer_set_create ();
      FOR_EACH_BB_FN (bb, my_function)
        {
          for (bsi = bsi_start (bb);
               !bsi_end_p (bsi);
               bsi_next (&bsi))
            {
              walk_tree (bsi_stmt_ptr (bsi), estimate_num_insns_1,
                         &num, visited_nodes);
            }
        }
      pointer_set_destroy (visited_nodes);
    }
  else
    walk_tree_without_duplicates (&expr, estimate_num_insns_1, &num);

  return num;
}
typedef struct function *function_p;

DEF_VEC_P(function_p);
DEF_VEC_ALLOC_P(function_p,heap);

/* Initialized with NOGC, making this poisonous to the garbage collector.  */
static VEC(function_p,heap) *cfun_stack;

/* Push the current cfun onto the stack and make NEW_CFUN current.  */

void
push_cfun (struct function *new_cfun)
{
  VEC_safe_push (function_p, heap, cfun_stack, cfun);
  cfun = new_cfun;
}

/* Restore the cfun that was current before the matching push_cfun.  */

void
pop_cfun (void)
{
  cfun = VEC_pop (function_p, cfun_stack);
}
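
/* Typical pairing (illustration only; OTHER_FNDECL is hypothetical):

     push_cfun (DECL_STRUCT_FUNCTION (other_fndecl));
     ... operate on the now-current cfun ...
     pop_cfun ();

   Pushes and pops must balance, since the stack is shared.  */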
/* Install new lexical TREE_BLOCK underneath 'current_block'.  */
static void
add_lexical_block (tree current_block, tree new_block)
{
  tree *blk_p;

  /* Walk to the last sub-block.  */
  for (blk_p = &BLOCK_SUBBLOCKS (current_block);
       *blk_p;
       blk_p = &TREE_CHAIN (*blk_p))
    ;
  *blk_p = new_block;
  BLOCK_SUPERCONTEXT (new_block) = current_block;
  BLOCK_SUBBLOCKS (new_block) = NULL_TREE;
}
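
/* For instance, if CURRENT_BLOCK already has sub-blocks B1 and B2
   (chained through TREE_CHAIN), the result is the chain
   B1 -> B2 -> NEW_BLOCK, with BLOCK_SUPERCONTEXT (NEW_BLOCK) equal to
   CURRENT_BLOCK.  */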
/* If *TP is a CALL_EXPR, replace it with its inline expansion.  */

static bool
expand_call_inline (basic_block bb, tree stmt, tree *tp, void *data)
{
  inline_data *id;
  tree t;
  tree use_retvar;
  tree fn;
  splay_tree st;
  tree args;
  tree return_slot_addr;
  tree modify_dest;
  location_t saved_location;
  struct cgraph_edge *cg_edge;
  const char *reason;
  basic_block return_block;
  edge e;
  block_stmt_iterator bsi, stmt_bsi;
  bool successfully_inlined = FALSE;
  tree t_step;
  tree var;
  struct cgraph_node *old_node;
  tree decl;

  /* See what we've got.  */
  id = (inline_data *) data;
  t = *tp;

  /* Set input_location here so we get the right instantiation context
     if we call instantiate_decl from inlinable_function_p.  */
  saved_location = input_location;
  if (EXPR_HAS_LOCATION (t))
    input_location = EXPR_LOCATION (t);

  /* From here on, we're only interested in CALL_EXPRs.  */
  if (TREE_CODE (t) != CALL_EXPR)
    goto egress;

  /* First, see if we can figure out what function is being called.
     If we cannot, then there is no hope of inlining the function.  */
  fn = get_callee_fndecl (t);
  if (!fn)
    goto egress;

  /* Turn forward declarations into real ones.  */
  fn = cgraph_node (fn)->decl;

  /* If fn is a declaration of a function in a nested scope that was
     globally declared inline, we don't set its DECL_INITIAL.
     However, we can't blindly follow DECL_ABSTRACT_ORIGIN because the
     C++ front end uses it for cdtors to refer to their internal
     declarations, which are not real functions.  Fortunately those
     don't have trees to be saved, so we can tell by checking their
     DECL_SAVED_TREE.  */
  if (! DECL_INITIAL (fn)
      && DECL_ABSTRACT_ORIGIN (fn)
      && DECL_SAVED_TREE (DECL_ABSTRACT_ORIGIN (fn)))
    fn = DECL_ABSTRACT_ORIGIN (fn);

  /* Objective-C and Fortran still call tree_rest_of_compilation directly.
     Kill this check once that is fixed.  */
  if (!id->current_node->analyzed)
    goto egress;

  cg_edge = cgraph_edge (id->current_node, stmt);

  /* Constant propagation on arguments done during previous inlining
     may create new direct calls.  Produce an edge for such a call.  */
  if (!cg_edge)
    {
      struct cgraph_node *dest = cgraph_node (fn);

      /* We have a missing edge in the callgraph.  This can legitimately
         happen in one case: previous inlining turned an indirect call
         into a direct call by constant propagating arguments.  In all
         other cases we hit a bug (incorrect node sharing is the most
         common reason for missing edges).  */
      gcc_assert (dest->needed || !flag_unit_at_a_time);
      cgraph_create_edge (id->node, dest, stmt,
                          bb->count, bb->loop_depth)->inline_failed
        = N_("originally indirect function call not considered for inlining");
      goto egress;
    }

  /* Don't try to inline functions that are not well-suited to
     inlining.  */
  if (!cgraph_inline_p (cg_edge, &reason))
    {
      if (lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn)))
        {
          sorry ("%Jinlining failed in call to %qF: %s", fn, fn, reason);
          sorry ("called from here");
        }
      else if (warn_inline && DECL_DECLARED_INLINE_P (fn)
               && !DECL_IN_SYSTEM_HEADER (fn)
               && strlen (reason)
               && !lookup_attribute ("noinline", DECL_ATTRIBUTES (fn)))
        {
          warning (0, "%Jinlining failed in call to %qF: %s", fn, fn, reason);
          warning (0, "called from here");
        }
      goto egress;
    }

#ifdef ENABLE_CHECKING
  if (cg_edge->callee->decl != id->node->decl)
    verify_cgraph_node (cg_edge->callee);
#endif

  /* We will be inlining this callee.  */

  id->eh_region = lookup_stmt_eh_region (stmt);

  /* Split the block holding the CALL_EXPR.  */

  e = split_block (bb, stmt);
  bb = e->src;
  return_block = e->dest;
  remove_edge (e);

  /* split_block splits before the statement; work around this by moving
     the call into the first half_bb.  Not pretty, but seems easier than
     doing the CFG manipulation by hand when the CALL_EXPR is in the last
     statement of BB.  */
  stmt_bsi = bsi_last (bb);
  bsi = bsi_start (return_block);
  if (!bsi_end_p (bsi))
    bsi_move_before (&stmt_bsi, &bsi);
  else
    {
      tree stmt = bsi_stmt (stmt_bsi);
      bsi_remove (&stmt_bsi);
      bsi_insert_after (&bsi, stmt, BSI_NEW_STMT);
    }
  stmt_bsi = bsi_start (return_block);
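
  /* At this point the CFG looks like

         BB (now ending with the CALL_EXPR)      RETURN_BLOCK

     with no edge between the two halves; copy_body below will graft the
     copied body of the callee in between them.  */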
  /* Build a block containing code to initialize the arguments, the
     actual inline expansion of the body, and a label for the return
     statements within the function to jump to.  The type of the
     statement expression is the return type of the function call.  */
  id->block = make_node (BLOCK);
  BLOCK_ABSTRACT_ORIGIN (id->block) = fn;
  add_lexical_block (TREE_BLOCK (stmt), id->block);

  /* Local declarations will be replaced by their equivalents in this
     map.  */
  st = id->decl_map;
  id->decl_map = splay_tree_new (splay_tree_compare_pointers,
                                 NULL, NULL);

  /* Initialize the parameters.  */
  args = TREE_OPERAND (t, 1);
  if (CALL_EXPR_HAS_RETURN_SLOT_ADDR (t))
    {
      return_slot_addr = TREE_VALUE (args);
      args = TREE_CHAIN (args);
    }
  else
    return_slot_addr = NULL_TREE;

  initialize_inlined_parameters (id, args, TREE_OPERAND (t, 2), fn, bb);

  /* Record the function we are about to inline.  */
  id->callee = fn;

  /* Return statements in the copied body will end up transferring
     control to RETURN_BLOCK.  */

  gcc_assert (DECL_INITIAL (fn));
  gcc_assert (TREE_CODE (DECL_INITIAL (fn)) == BLOCK);

  /* Find the lhs to which the result of this call is assigned.  */
  modify_dest = stmt;
  if (TREE_CODE (modify_dest) == MODIFY_EXPR)
    {
      modify_dest = TREE_OPERAND (modify_dest, 0);

      /* The function which we are inlining might not return a value,
         in which case we should issue a warning that the function
         does not return a value.  In that case the optimizers will
         see that the variable to which the value is assigned was not
         initialized.  We do not want to issue a warning about that
         uninitialized variable.  */
      if (DECL_P (modify_dest))
        TREE_NO_WARNING (modify_dest) = 1;
    }
  else
    modify_dest = NULL;

  /* Declare the return variable for the function.  */
  decl = declare_return_variable (id, return_slot_addr,
                                  modify_dest, &use_retvar);
  /* Do this only if declare_return_variable created a new one.  */
  if (decl && !return_slot_addr && decl != modify_dest)
    declare_inline_vars (id->block, decl);

  /* After we've initialized the parameters, we insert the body of the
     function itself.  */
  old_node = id->current_node;

  /* Anoint the callee-to-be-duplicated as the "current_node."  When
     CALL_EXPRs within the callee are duplicated, the edges from the callee
     to the callee's callees (the caller's grandchildren) will be cloned.  */
  id->current_node = cg_edge->callee;

  /* This is it.  Duplicate the callee body.  Assume the callee is
     pre-gimplified.  Note that we must not alter the caller
     function in any way before this point, as this CALL_EXPR may be
     a self-referential call; if we're calling ourselves, we need to
     duplicate our body before altering anything.  */
  copy_body (id, bb->count, bb->frequency, bb, return_block);
  id->current_node = old_node;

  /* Clean up.  */
  splay_tree_delete (id->decl_map);
  id->decl_map = st;

  /* If the inlined function returns a result that we care about,
     clobber the CALL_EXPR with a reference to the return variable.  */
  if (use_retvar && (TREE_CODE (bsi_stmt (stmt_bsi)) != CALL_EXPR))
    {
      *tp = use_retvar;
      maybe_clean_or_replace_eh_stmt (stmt, stmt);
    }
  else
    /* We're modifying a BSI owned by gimple_expand_calls_inline ();
       bsi_remove () will leave the iterator in a sane state.  */
    bsi_remove (&stmt_bsi);

  bsi_next (&bsi);
  if (bsi_end_p (bsi))
    tree_purge_dead_eh_edges (return_block);

  /* If the value of the new expression is ignored, that's OK.  We
     don't warn about this for CALL_EXPRs, so we shouldn't warn about
     the equivalent inlined version either.  */
  TREE_USED (*tp) = 1;

  /* Output the inlining info for this abstract function, since it has been
     inlined.  If we don't do this now, we can lose the information about the
     variables in the function when the blocks get blown away as soon as we
     remove the cgraph node.  */
  (*debug_hooks->outlining_inline_function) (cg_edge->callee->decl);

  /* Update the callgraph if needed.  */
  cgraph_remove_node (cg_edge->callee);

  /* Declare the 'auto' variables added with this inlined body.  */
  record_vars (BLOCK_VARS (id->block));
  id->block = NULL_TREE;

  /* Add local static vars in this inlined callee to the caller.  */
  for (t_step = id->callee_cfun->unexpanded_var_list;
       t_step;
       t_step = TREE_CHAIN (t_step))
    {
      var = TREE_VALUE (t_step);
      if (TREE_STATIC (var) && !TREE_ASM_WRITTEN (var))
        record_vars (var);
    }
  successfully_inlined = TRUE;

 egress:
  input_location = saved_location;
  return successfully_inlined;
}
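
/* In source terms, the transformation performed above looks roughly
   like this (illustration only).  Given

     int sq (int x) { return x * x; }
     ...
     y = sq (i);

   the call statement is replaced by a remapped copy of the body:

     x.0 = i;                   initialize_inlined_parameters
     retvar.1 = x.0 * x.0;      copied body; the RETURN_EXPR became an
                                assignment to the return variable
     y = retvar.1;              use_retvar replaces the CALL_EXPR

   where x.0 and retvar.1 stand in for the remapped parameter and the
   variable created by declare_return_variable.  */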
/* Expand call statements found in basic block BB.
   We can only have CALL_EXPRs as the "toplevel" tree code or nested
   in a MODIFY_EXPR.  See tree-gimple.c:get_call_expr_in().  We can
   unfortunately not use that function here because we need a pointer
   to the CALL_EXPR, not the tree itself.  */

static bool
gimple_expand_calls_inline (basic_block bb, inline_data *id)
{
  block_stmt_iterator bsi;

  /* Register specific tree functions.  */
  tree_register_cfg_hooks ();
  for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
    {
      tree *expr_p = bsi_stmt_ptr (bsi);
      tree stmt = *expr_p;

      if (TREE_CODE (*expr_p) == MODIFY_EXPR)
        expr_p = &TREE_OPERAND (*expr_p, 1);
      if (TREE_CODE (*expr_p) == WITH_SIZE_EXPR)
        expr_p = &TREE_OPERAND (*expr_p, 0);
      if (TREE_CODE (*expr_p) == CALL_EXPR)
        if (expand_call_inline (bb, stmt, expr_p, id))
          return true;
    }
  return false;
}
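
/* The statement shapes accepted above, for illustration:

     foo (a);                                   plain CALL_EXPR
     lhs = foo (a);                             nested in a MODIFY_EXPR
     lhs = WITH_SIZE_EXPR <foo (a), size>;      variable-sized result

   In every case expr_p ends up pointing at the CALL_EXPR itself, as
   expand_call_inline requires.  */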
/* Expand calls to inline functions in the body of FN.  */

void
optimize_inline_calls (tree fn)
{
  inline_data id;
  tree prev_fn;
  basic_block bb;

  /* There is no point in performing inlining if errors have already
     occurred -- and we might crash if we try to inline invalid
     code.  */
  if (errorcount || sorrycount)
    return;

  /* Clear out ID.  */
  memset (&id, 0, sizeof (id));

  id.current_node = id.node = cgraph_node (fn);
  id.caller = fn;
  /* Or any functions that aren't finished yet.  */
  prev_fn = NULL_TREE;
  if (current_function_decl)
    {
      id.caller = current_function_decl;
      prev_fn = current_function_decl;
    }

  push_gimplify_context ();

  /* Reach the trees by walking over the CFG, and note the
     enclosing basic-blocks in the call edges.  */
  /* We walk the blocks going forward, because inlined function bodies
     will split id->current_basic_block, and the new blocks will
     follow it; we'll trudge through them, processing their CALL_EXPRs
     along the way.  */
  FOR_EACH_BB (bb)
    gimple_expand_calls_inline (bb, &id);

  pop_gimplify_context (NULL);

  /* Renumber the (code) basic_blocks consecutively.  */
  compact_blocks ();
  /* Renumber the lexical scoping (non-code) blocks consecutively.  */
  number_blocks (fn);

#ifdef ENABLE_CHECKING
    {
      struct cgraph_edge *e;

      verify_cgraph_node (id.node);

      /* Double check that we inlined everything we are supposed to inline.  */
      for (e = id.node->callees; e; e = e->next_callee)
        gcc_assert (e->inline_failed);
    }
#endif

  /* We need to rescale frequencies again to peak at REG_BR_PROB_BASE
     as inlining loops might increase the maximum.  */
  if (ENTRY_BLOCK_PTR->count)
    counts_to_freqs ();
  fold_cond_expr_cond ();
}
/* FN is a function that has a complete body, and CLONE is a function whose
   body is to be set to a copy of FN, mapping argument declarations according
   to the ARG_MAP splay_tree.  */

void
clone_body (tree clone, tree fn, void *arg_map)
{
  inline_data id;

  /* Clone the body, as if we were making an inline call.  But, remap the
     parameters in the callee to the parameters of the caller.  */
  memset (&id, 0, sizeof (id));
  id.caller = clone;
  id.callee = fn;
  id.callee_cfun = DECL_STRUCT_FUNCTION (fn);
  id.decl_map = (splay_tree) arg_map;

  /* Cloning is treated slightly differently from inlining.  Set
     CLONING_P so that it's clear which operation we're performing.  */
  id.cloning_p = true;

  /* We're not inside any EH region.  */
  id.eh_region = -1;

  /* Actually copy the body.  */
  append_to_statement_list_force (copy_generic_body (&id),
                                  &DECL_SAVED_TREE (clone));
}
/* Make and return a duplicate of the body of FN.  Put copies of
   DECL_ARGUMENTS in *ARG_COPY and of the static chain, if any, in
   *SC_COPY.  */

void
save_body (tree fn, tree *arg_copy, tree *sc_copy)
{
  inline_data id;
  tree newdecl, *parg;
  basic_block fn_entry_block;

  memset (&id, 0, sizeof (id));
  id.callee = fn;
  id.callee_cfun = DECL_STRUCT_FUNCTION (fn);
  id.caller = fn;
  id.node = cgraph_node (fn);
  id.saving_p = true;
  id.decl_map = splay_tree_new (splay_tree_compare_pointers, NULL, NULL);
  *arg_copy = DECL_ARGUMENTS (fn);

  for (parg = arg_copy; *parg; parg = &TREE_CHAIN (*parg))
    {
      tree new = copy_node (*parg);

      lang_hooks.dup_lang_specific_decl (new);
      DECL_ABSTRACT_ORIGIN (new) = DECL_ORIGIN (*parg);
      insert_decl_map (&id, *parg, new);
      TREE_CHAIN (new) = TREE_CHAIN (*parg);
      *parg = new;
    }

  *sc_copy = DECL_STRUCT_FUNCTION (fn)->static_chain_decl;
  if (*sc_copy)
    {
      tree new = copy_node (*sc_copy);

      lang_hooks.dup_lang_specific_decl (new);
      DECL_ABSTRACT_ORIGIN (new) = DECL_ORIGIN (*sc_copy);
      insert_decl_map (&id, *sc_copy, new);
      TREE_CHAIN (new) = TREE_CHAIN (*sc_copy);
      *sc_copy = new;
    }

  /* We're not inside any EH region.  */
  id.eh_region = -1;

  insert_decl_map (&id, DECL_RESULT (fn), DECL_RESULT (fn));

  /* Actually copy the body, including a new (struct function *) and CFG.
     EH info is also duplicated so its labels point into the copied
     CFG, not the original.  */
  fn_entry_block = ENTRY_BLOCK_PTR_FOR_FUNCTION (DECL_STRUCT_FUNCTION (fn));
  newdecl = copy_body (&id, fn_entry_block->count, fn_entry_block->frequency,
                       NULL, NULL);
  DECL_STRUCT_FUNCTION (fn)->saved_cfg = DECL_STRUCT_FUNCTION (newdecl)->cfg;
  DECL_STRUCT_FUNCTION (fn)->saved_eh = DECL_STRUCT_FUNCTION (newdecl)->eh;

  /* Clean up.  */
  splay_tree_delete (id.decl_map);
}
/* Passed to walk_tree.  Copies the node pointed to, if appropriate.  */

tree
copy_tree_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
{
  enum tree_code code = TREE_CODE (*tp);

  /* We make copies of most nodes.  */
  if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code))
      || code == TREE_LIST
      || code == TREE_VEC
      || code == TYPE_DECL)
    {
      /* Because the chain gets clobbered when we make a copy, we save it
         here.  */
      tree chain = TREE_CHAIN (*tp);
      tree new;

      /* Copy the node.  */
      new = copy_node (*tp);

      /* Propagate mudflap marked-ness.  */
      if (flag_mudflap && mf_marked_p (*tp))
        mf_mark (new);

      *tp = new;

      /* Now, restore the chain, if appropriate.  That will cause
         walk_tree to walk into the chain as well.  */
      if (code == PARM_DECL || code == TREE_LIST)
        TREE_CHAIN (*tp) = chain;

      /* For now, we don't update BLOCKs when we make copies.  So, we
         have to nullify all BIND_EXPRs.  */
      if (TREE_CODE (*tp) == BIND_EXPR)
        BIND_EXPR_BLOCK (*tp) = NULL_TREE;
    }
  else if (TREE_CODE_CLASS (code) == tcc_type)
    *walk_subtrees = 0;
  else if (TREE_CODE_CLASS (code) == tcc_declaration)
    *walk_subtrees = 0;
  else if (TREE_CODE_CLASS (code) == tcc_constant)
    *walk_subtrees = 0;
  else
    gcc_assert (code != STATEMENT_LIST);

  return NULL_TREE;
}
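
/* Example of the chain handling above (illustration only): copying the
   TREE_LIST node A from a chain A -> B -> C duplicates only A, but
   restoring TREE_CHAIN on the copy lets walk_tree continue into B and
   C, so the whole list ends up copied one node at a time.  */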
/* The SAVE_EXPR pointed to by TP is being copied.  If ST contains
   information indicating to what new SAVE_EXPR this one should be mapped,
   use that one.  Otherwise, create a new node and enter it in ST.  */

static void
remap_save_expr (tree *tp, void *st_, int *walk_subtrees)
{
  splay_tree st = (splay_tree) st_;
  splay_tree_node n;
  tree t;

  /* See if we already encountered this SAVE_EXPR.  */
  n = splay_tree_lookup (st, (splay_tree_key) *tp);

  /* If we didn't already remap this SAVE_EXPR, do so now.  */
  if (!n)
    {
      t = copy_node (*tp);

      /* Remember this SAVE_EXPR.  */
      splay_tree_insert (st, (splay_tree_key) *tp, (splay_tree_value) t);
      /* Make sure we don't remap an already-remapped SAVE_EXPR.  */
      splay_tree_insert (st, (splay_tree_key) t, (splay_tree_value) t);
    }
  else
    {
      /* We've already walked into this SAVE_EXPR; don't do it again.  */
      *walk_subtrees = 0;
      t = (tree) n->value;
    }

  /* Replace this SAVE_EXPR with the copy.  */
  *tp = t;
}
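
/* After the first visit of a SAVE_EXPR, ST holds both mappings

     old SAVE_EXPR -> new SAVE_EXPR
     new SAVE_EXPR -> new SAVE_EXPR

   so a later walk that reaches the copy maps it to itself rather than
   duplicating it a second time.  */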
/* Called via walk_tree.  If *TP is a LABEL_EXPR for a local label,
   copies the label's declaration and enters it in the splay_tree in DATA
   (which is really an `inline_data *').  */

static tree
mark_local_for_remap_r (tree *tp, int *walk_subtrees, void *data)
{
  inline_data *id = (inline_data *) data;

  /* Don't walk into types.  */
  if (TYPE_P (*tp))
    *walk_subtrees = 0;
  else if (TREE_CODE (*tp) == LABEL_EXPR)
    {
      tree decl = TREE_OPERAND (*tp, 0);

      /* Copy the decl and remember the copy.  */
      insert_decl_map (id, decl,
                       copy_decl_for_inlining (decl, DECL_CONTEXT (decl),
                                               DECL_CONTEXT (decl)));
    }

  return NULL_TREE;
}
/* Perform any modifications to EXPR required when it is unsaved.  Does
   not recurse into EXPR's subtrees.  */

static void
unsave_expr_1 (tree expr)
{
  switch (TREE_CODE (expr))
    {
    case TARGET_EXPR:
      /* Don't mess with a TARGET_EXPR that hasn't been expanded.
         It's OK for this to happen if it was part of a subtree that
         isn't immediately expanded, such as operand 2 of another
         TARGET_EXPR.  */
      if (TREE_OPERAND (expr, 1))
        break;

      TREE_OPERAND (expr, 1) = TREE_OPERAND (expr, 3);
      TREE_OPERAND (expr, 3) = NULL_TREE;
      break;

    default:
      break;
    }
}
/* Called via walk_tree when an expression is unsaved.  Using the
   decl_map splay tree of the `inline_data' in DATA, remaps all local
   declarations to appropriate replacements.  */

static tree
unsave_r (tree *tp, int *walk_subtrees, void *data)
{
  inline_data *id = (inline_data *) data;
  splay_tree st = id->decl_map;
  splay_tree_node n;

  /* Only a local declaration (variable or label).  */
  if ((TREE_CODE (*tp) == VAR_DECL && !TREE_STATIC (*tp))
      || TREE_CODE (*tp) == LABEL_DECL)
    {
      /* Lookup the declaration.  */
      n = splay_tree_lookup (st, (splay_tree_key) *tp);

      /* If it's there, remap it.  */
      if (n)
        *tp = (tree) n->value;
    }
  else if (TREE_CODE (*tp) == STATEMENT_LIST)
    copy_statement_list (tp);
  else if (TREE_CODE (*tp) == BIND_EXPR)
    copy_bind_expr (tp, walk_subtrees, id);
  else if (TREE_CODE (*tp) == SAVE_EXPR)
    remap_save_expr (tp, st, walk_subtrees);
  else
    {
      copy_tree_r (tp, walk_subtrees, NULL);

      /* Do whatever unsaving is required.  */
      unsave_expr_1 (*tp);
    }

  /* Keep iterating.  */
  return NULL_TREE;
}
/* Copies everything in EXPR and replaces variables, labels
   and SAVE_EXPRs local to EXPR.  */

tree
unsave_expr_now (tree expr)
{
  inline_data id;

  /* There's nothing to do for NULL_TREE.  */
  if (expr == 0)
    return expr;

  /* Set up ID.  */
  memset (&id, 0, sizeof (id));
  id.callee = current_function_decl;
  id.caller = current_function_decl;
  id.decl_map = splay_tree_new (splay_tree_compare_pointers, NULL, NULL);

  /* Walk the tree once to find local labels.  */
  walk_tree_without_duplicates (&expr, mark_local_for_remap_r, &id);

  /* Walk the tree again, copying, remapping, and unsaving.  */
  walk_tree (&expr, unsave_r, &id, NULL);

  /* Clean up.  */
  splay_tree_delete (id.decl_map);

  return expr;
}
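
/* A minimal usage sketch (illustration only); EXPR is any GENERIC
   expression owned by the current function:

     tree copy = unsave_expr_now (expr);

   COPY shares no local variables, labels, or SAVE_EXPRs with the
   original tree, so either copy can then be consumed destructively.  */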
/* Allow someone to determine if SEARCH is a child of TOP from gdb.  */

static tree
debug_find_tree_1 (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED, void *data)
{
  if (*tp == data)
    return (tree) data;
  else
    return NULL;
}

bool
debug_find_tree (tree top, tree search)
{
  return walk_tree_without_duplicates (&top, debug_find_tree_1, search) != 0;
}
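
/* Intended to be invoked from the debugger, e.g. (illustration only;
   FNDECL_BODY and SUSPECT are hypothetical tree variables):

     (gdb) call debug_find_tree (fndecl_body, suspect)

   which returns true iff SUSPECT appears somewhere beneath
   FNDECL_BODY.  */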
/* Declare the variables created by the inliner.  Add all the variables
   in VARS to BLOCK_VARS of BLOCK.  */

static void
declare_inline_vars (tree block, tree vars)
{
  tree t;

  for (t = vars; t; t = TREE_CHAIN (t))
    DECL_SEEN_IN_BIND_EXPR_P (t) = 1;

  if (block)
    BLOCK_VARS (block) = chainon (BLOCK_VARS (block), vars);
}
/* Returns true if we're inlining.  */
static inline bool
inlining_p (inline_data *id)
{
  return (!id->saving_p && !id->cloning_p);
}