gcc/cp/optimize.c
1 /* Perform optimizations on tree structure.
2 Copyright (C) 1998, 1999, 2000, 2001 Free Software Foundation, Inc.
3 Written by Mark Mitchell (mark@codesourcery.com).
5 This file is part of GNU CC.
7 GNU CC is free software; you can redistribute it and/or modify it
8 under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 2, or (at your option)
10 any later version.
12 GNU CC is distributed in the hope that it will be useful, but
13 WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
15 General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GNU CC; see the file COPYING. If not, write to the Free
19 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
20 02111-1307, USA. */
22 #include "config.h"
23 #include "system.h"
24 #include "tree.h"
25 #include "cp-tree.h"
26 #include "rtl.h"
27 #include "insn-config.h"
28 #include "input.h"
29 #include "integrate.h"
30 #include "toplev.h"
31 #include "varray.h"
32 #include "ggc.h"
33 #include "params.h"
34 #include "hashtab.h"
36 /* To Do:
38 o In order to make inlining-on-trees work, we pessimized
39 function-local static constants. In particular, they are now
40 always output, even when not addressed. Fix this by treating
41 function-local static constants just like global static
42 constants; the back-end already knows not to output them if they
43 are not needed.
45 o Provide heuristics to clamp inlining of recursive template
46 calls? */
48 /* Data required for function inlining. */
50 typedef struct inline_data
52 /* A stack of the functions we are inlining. For example, if we are
53 compiling `f', which calls `g', which calls `h', and we are
54 inlining the body of `h', the stack will contain, `h', followed
55 by `g', followed by `f'. The first few elements of the stack may
56 contain other functions that we know we should not recurse into,
57 even though they are not directly being inlined. */
58 varray_type fns;
59 /* The index of the first element of FNS that really represents an
60 inlined function. */
61 unsigned first_inlined_fn;
62 /* The label to jump to when a return statement is encountered. If
63 this value is NULL, then return statements will simply be
64 remapped as return statements, rather than as jumps. */
65 tree ret_label;
66 /* The map from local declarations in the inlined function to
67 equivalents in the function into which it is being inlined. */
68 splay_tree decl_map;
69 /* Nonzero if we are currently within the cleanup for a
70 TARGET_EXPR. */
71 int in_target_cleanup_p;
72 /* A stack of the TARGET_EXPRs that we are currently processing. */
73 varray_type target_exprs;
74 /* A list of the functions the current function has inlined. */
75 varray_type inlined_fns;
76 /* The approximate number of statements we have inlined in the
77 current call stack. */
78 int inlined_stmts;
79 /* We use the same mechanism to build clones that we do to perform
80 inlining. However, there are a few places where we need to
81 distinguish between those two situations. This flag is true if
82 we are cloning, rather than inlining. */
83 bool cloning_p;
84 /* Hash table used to prevent walk_tree from visiting the same node
85 umpteen million times. */
86 htab_t tree_pruner;
87 } inline_data;
89 /* Prototypes. */
91 static tree initialize_inlined_parameters PARAMS ((inline_data *, tree, tree));
92 static tree declare_return_variable PARAMS ((inline_data *, tree *));
93 static tree copy_body_r PARAMS ((tree *, int *, void *));
94 static tree copy_body PARAMS ((inline_data *));
95 static tree expand_call_inline PARAMS ((tree *, int *, void *));
96 static void expand_calls_inline PARAMS ((tree *, inline_data *));
97 static int inlinable_function_p PARAMS ((tree, inline_data *));
98 static tree remap_decl PARAMS ((tree, inline_data *));
99 static void remap_block PARAMS ((tree, tree, inline_data *));
100 static void copy_scope_stmt PARAMS ((tree *, int *, inline_data *));
101 static tree calls_setjmp_r PARAMS ((tree *, int *, void *));
102 static void update_cloned_parm PARAMS ((tree, tree));
103 static void dump_function PARAMS ((enum tree_dump_index, tree));
105 /* The approximate number of instructions per statement. This number
106 need not be particularly accurate; it is used only to make
107 decisions about when a function is too big to inline. */
108 #define INSNS_PER_STMT (10)
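/* Editorial illustration, not part of the original source: a rough worked
   example of how this constant feeds the size checks in
   inlinable_function_p below, assuming a MAX_INLINE_INSNS budget of 10000
   (the real value comes from the params machinery and may differ):

     single-function check:
       DECL_NUM_STMTS (fn) * INSNS_PER_STMT > MAX_INLINE_INSNS / 2
       600 * 10 = 6000 > 5000                     => too big to inline

     running-total check:
       (DECL_NUM_STMTS (fn) + id->inlined_stmts) * INSNS_PER_STMT
           > MAX_INLINE_INSNS
         && DECL_NUM_STMTS (fn) * INSNS_PER_STMT > MAX_INLINE_INSNS / 4
       (300 + 800) * 10 = 11000 > 10000, and 300 * 10 = 3000 > 2500
                                                  => budget exhausted  */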
110 /* Remap DECL during the copying of the BLOCK tree for the function. */
112 static tree
113 remap_decl (decl, id)
114 tree decl;
115 inline_data *id;
117 splay_tree_node n;
118 tree fn;
120 /* We only remap local variables in the current function. */
121 fn = VARRAY_TOP_TREE (id->fns);
122 if (!nonstatic_local_decl_p (decl) || DECL_CONTEXT (decl) != fn)
123 return NULL_TREE;
125 /* See if we have remapped this declaration. */
126 n = splay_tree_lookup (id->decl_map, (splay_tree_key) decl);
127 /* If we didn't already have an equivalent for this declaration,
128 create one now. */
129 if (!n)
131 tree t;
133 /* Make a copy of the variable or label. */
134 t = copy_decl_for_inlining (decl, fn,
135 VARRAY_TREE (id->fns, 0));
137 /* The decl T could be a dynamic array or other variable size type,
138 in which case some fields need to be remapped because they may
139 contain SAVE_EXPRs. */
140 walk_tree (&DECL_SIZE (t), copy_body_r, id, NULL);
141 walk_tree (&DECL_SIZE_UNIT (t), copy_body_r, id, NULL);
142 if (TREE_TYPE (t) && TREE_CODE (TREE_TYPE (t)) == ARRAY_TYPE
143 && TYPE_DOMAIN (TREE_TYPE (t)))
145 TREE_TYPE (t) = copy_node (TREE_TYPE (t));
146 TYPE_DOMAIN (TREE_TYPE (t))
147 = copy_node (TYPE_DOMAIN (TREE_TYPE (t)));
148 walk_tree (&TYPE_MAX_VALUE (TYPE_DOMAIN (TREE_TYPE (t))),
149 copy_body_r, id, NULL);
152 if (!DECL_NAME (t) && TREE_TYPE (t)
153 && ANON_AGGR_TYPE_P (TREE_TYPE ((t))))
155 /* For a VAR_DECL of anonymous type, we must also copy the
156 member VAR_DECLS here and rechain the
157 DECL_ANON_UNION_ELEMS. */
158 tree members = NULL;
159 tree src;
161 for (src = DECL_ANON_UNION_ELEMS (t); src;
162 src = TREE_CHAIN (src))
164 tree member = remap_decl (TREE_VALUE (src), id);
166 my_friendly_assert (!TREE_PURPOSE (src), 20010529);
167 members = tree_cons (NULL, member, members);
169 DECL_ANON_UNION_ELEMS (t) = nreverse (members);
172 /* Remember it, so that if we encounter this local entity
173 again we can reuse this copy. */
174 n = splay_tree_insert (id->decl_map,
175 (splay_tree_key) decl,
176 (splay_tree_value) t);
179 return (tree) n->value;
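/* Editorial illustration, not part of the original source: for a callee
   containing a GNU variable-length array, e.g.

     void f (int n) { char buf[n]; ... }

   the copied BUF needs more than a plain copy_node: its DECL_SIZE and the
   TYPE_MAX_VALUE of its array domain contain SAVE_EXPRs mentioning N, so
   remap_decl walks them with copy_body_r above to rewrite those
   references to the inlined copy of N.  */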
182 /* Copy the SCOPE_STMT_BLOCK associated with SCOPE_STMT to contain
183 remapped versions of the variables therein. And hook the new block
184 into the block-tree. If non-NULL, DECLS are the declarations to
185 use in place of the BLOCK_VARS in the old block. */
187 static void
188 remap_block (scope_stmt, decls, id)
189 tree scope_stmt;
190 tree decls;
191 inline_data *id;
193 /* We cannot do this in the cleanup for a TARGET_EXPR since we do
194 not know whether or not expand_expr will actually write out the
195 code we put there. If it does not, then we'll have more BLOCKs
196 than block-notes, and things will go awry. At some point, we
197 should make the back-end handle BLOCK notes in a tidier way,
198 without requiring a strict correspondence to the block-tree; then
199 this check can go. */
200 if (id->in_target_cleanup_p)
202 SCOPE_STMT_BLOCK (scope_stmt) = NULL_TREE;
203 return;
206 /* If this is the beginning of a scope, remap the associated BLOCK. */
207 if (SCOPE_BEGIN_P (scope_stmt) && SCOPE_STMT_BLOCK (scope_stmt))
209 tree old_block;
210 tree new_block;
211 tree old_var;
212 tree fn;
214 /* Make the new block. */
215 old_block = SCOPE_STMT_BLOCK (scope_stmt);
216 new_block = make_node (BLOCK);
217 TREE_USED (new_block) = TREE_USED (old_block);
218 BLOCK_ABSTRACT_ORIGIN (new_block) = old_block;
219 SCOPE_STMT_BLOCK (scope_stmt) = new_block;
221 /* Remap its variables. */
222 for (old_var = decls ? decls : BLOCK_VARS (old_block);
223 old_var;
224 old_var = TREE_CHAIN (old_var))
226 tree new_var;
228 /* Remap the variable. */
229 new_var = remap_decl (old_var, id);
230 /* If we didn't remap this variable, we can't mess with
231 its TREE_CHAIN. If we remapped this variable to
232 something other than a declaration (say, if we mapped it
233 to a constant), then we must similarly omit any mention
234 of it here. */
235 if (!new_var || !DECL_P (new_var))
237 else
239 TREE_CHAIN (new_var) = BLOCK_VARS (new_block);
240 BLOCK_VARS (new_block) = new_var;
243 /* We put the BLOCK_VARS in reverse order; fix that now. */
244 BLOCK_VARS (new_block) = nreverse (BLOCK_VARS (new_block));
245 fn = VARRAY_TREE (id->fns, 0);
246 if (id->cloning_p)
247 /* We're building a clone; DECL_INITIAL is still
248 error_mark_node, and current_binding_level is the parm
249 binding level. */
250 insert_block (new_block);
251 else
253 /* Attach this new block after the DECL_INITIAL block for the
254 function into which this block is being inlined. In
255 rest_of_compilation we will straighten out the BLOCK tree. */
256 tree *first_block;
257 if (DECL_INITIAL (fn))
258 first_block = &BLOCK_CHAIN (DECL_INITIAL (fn));
259 else
260 first_block = &DECL_INITIAL (fn);
261 BLOCK_CHAIN (new_block) = *first_block;
262 *first_block = new_block;
264 /* Remember the remapped block. */
265 splay_tree_insert (id->decl_map,
266 (splay_tree_key) old_block,
267 (splay_tree_value) new_block);
269 /* If this is the end of a scope, set the SCOPE_STMT_BLOCK to be the
270 remapped block. */
271 else if (SCOPE_END_P (scope_stmt) && SCOPE_STMT_BLOCK (scope_stmt))
273 splay_tree_node n;
275 /* Find this block in the table of remapped things. */
276 n = splay_tree_lookup (id->decl_map,
277 (splay_tree_key) SCOPE_STMT_BLOCK (scope_stmt));
278 my_friendly_assert (n != NULL, 19991203);
279 SCOPE_STMT_BLOCK (scope_stmt) = (tree) n->value;
283 /* Copy the SCOPE_STMT pointed to by TP. */
285 static void
286 copy_scope_stmt (tp, walk_subtrees, id)
287 tree *tp;
288 int *walk_subtrees;
289 inline_data *id;
291 tree block;
293 /* Remember whether or not this statement was nullified. When
294 making a copy, copy_tree_r always sets SCOPE_NULLIFIED_P (and
295 doesn't copy the SCOPE_STMT_BLOCK) to free callers from having to
296 deal with copying BLOCKs if they do not wish to do so. */
297 block = SCOPE_STMT_BLOCK (*tp);
298 /* Copy (and replace) the statement. */
299 copy_tree_r (tp, walk_subtrees, NULL);
300 /* Restore the SCOPE_STMT_BLOCK. */
301 SCOPE_STMT_BLOCK (*tp) = block;
303 /* Remap the associated block. */
304 remap_block (*tp, NULL_TREE, id);
307 /* Called from copy_body via walk_tree. DATA is really an
308 `inline_data *'. */
310 static tree
311 copy_body_r (tp, walk_subtrees, data)
312 tree *tp;
313 int *walk_subtrees;
314 void *data;
316 inline_data* id;
317 tree fn;
319 /* Set up. */
320 id = (inline_data *) data;
321 fn = VARRAY_TOP_TREE (id->fns);
323 /* All automatic variables should have a DECL_CONTEXT indicating
324 what function they come from. */
325 if ((TREE_CODE (*tp) == VAR_DECL || TREE_CODE (*tp) == LABEL_DECL)
326 && DECL_NAMESPACE_SCOPE_P (*tp))
327 my_friendly_assert (DECL_EXTERNAL (*tp) || TREE_STATIC (*tp),
328 19991113);
330 /* If this is a RETURN_STMT, change it into an EXPR_STMT and a
331 GOTO_STMT with the RET_LABEL as its target. */
332 if (TREE_CODE (*tp) == RETURN_STMT && id->ret_label)
334 tree return_stmt = *tp;
335 tree goto_stmt;
337 /* Build the GOTO_STMT. */
338 goto_stmt = build_stmt (GOTO_STMT, id->ret_label);
339 TREE_CHAIN (goto_stmt) = TREE_CHAIN (return_stmt);
341 /* If we're returning something, just turn that into an
342 assignment into the equivalent of the original
343 RESULT_DECL. */
344 if (RETURN_EXPR (return_stmt))
346 *tp = build_stmt (EXPR_STMT,
347 RETURN_EXPR (return_stmt));
348 STMT_IS_FULL_EXPR_P (*tp) = 1;
349 /* And then jump to the end of the function. */
350 TREE_CHAIN (*tp) = goto_stmt;
352 /* If we're not returning anything just do the jump. */
353 else
354 *tp = goto_stmt;
356 /* Local variables and labels need to be replaced by equivalent
357 variables. We don't want to copy static variables; there's only
358 one of those, no matter how many times we inline the containing
359 function. */
360 else if (nonstatic_local_decl_p (*tp) && DECL_CONTEXT (*tp) == fn)
362 tree new_decl;
364 /* Remap the declaration. */
365 new_decl = remap_decl (*tp, id);
366 my_friendly_assert (new_decl != NULL_TREE, 19991203);
367 /* Replace this variable with the copy. */
368 STRIP_TYPE_NOPS (new_decl);
369 *tp = new_decl;
371 else if (nonstatic_local_decl_p (*tp)
372 && DECL_CONTEXT (*tp) != VARRAY_TREE (id->fns, 0))
373 my_friendly_abort (0);
374 else if (TREE_CODE (*tp) == SAVE_EXPR)
375 remap_save_expr (tp, id->decl_map, VARRAY_TREE (id->fns, 0),
376 walk_subtrees);
377 else if (TREE_CODE (*tp) == UNSAVE_EXPR)
378 /* UNSAVE_EXPRs should not be generated until expansion time. */
379 my_friendly_abort (19991113);
380 /* For a SCOPE_STMT, we must copy the associated block so that we
381 can write out debugging information for the inlined variables. */
382 else if (TREE_CODE (*tp) == SCOPE_STMT && !id->in_target_cleanup_p)
383 copy_scope_stmt (tp, walk_subtrees, id);
384 /* Otherwise, just copy the node. Note that copy_tree_r already
385 knows not to copy VAR_DECLs, etc., so this is safe. */
386 else
388 copy_tree_r (tp, walk_subtrees, NULL);
390 /* The copied TARGET_EXPR has never been expanded, even if the
391 original node was expanded already. */
392 if (TREE_CODE (*tp) == TARGET_EXPR && TREE_OPERAND (*tp, 3))
394 TREE_OPERAND (*tp, 1) = TREE_OPERAND (*tp, 3);
395 TREE_OPERAND (*tp, 3) = NULL_TREE;
397 else if (TREE_CODE (*tp) == MODIFY_EXPR
398 && TREE_OPERAND (*tp, 0) == TREE_OPERAND (*tp, 1)
399 && nonstatic_local_decl_p (TREE_OPERAND (*tp, 0))
400 && DECL_CONTEXT (TREE_OPERAND (*tp, 0)) == fn)
402 /* Some assignments VAR = VAR; don't generate any rtl code
403 and thus don't count as variable modification. Avoid
404 keeping bogosities like 0 = 0. */
405 tree decl = TREE_OPERAND (*tp, 0), value;
406 splay_tree_node n;
408 n = splay_tree_lookup (id->decl_map, (splay_tree_key) decl);
409 if (n)
411 value = (tree) n->value;
412 STRIP_TYPE_NOPS (value);
413 if (TREE_CONSTANT (value) || TREE_READONLY_DECL_P (value))
414 *tp = value;
419 /* Keep iterating. */
420 return NULL_TREE;
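/* Editorial illustration, not part of the original source: the RETURN_STMT
   rewrite above turns a callee body such as

     if (x) return x + 1;
     return 0;

   into, roughly,

     if (x) { <retvar> = x + 1; goto <ret_label>; }
     <retvar> = 0; goto <ret_label>;

   where <retvar> stands for the variable set up by
   declare_return_variable (the remapped RESULT_DECL) and <ret_label> for
   id->ret_label, which expand_call_inline emits at the end of the inlined
   body.  */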
423 /* Make a copy of the body of FN so that it can be inserted inline in
424 another function. */
426 static tree
427 copy_body (id)
428 inline_data *id;
430 tree body;
432 body = DECL_SAVED_TREE (VARRAY_TOP_TREE (id->fns));
433 walk_tree (&body, copy_body_r, id, NULL);
435 return body;
438 /* Generate code to initialize the parameters of the function at the
439 top of the stack in ID from the ARGS (presented as a TREE_LIST). */
441 static tree
442 initialize_inlined_parameters (id, args, fn)
443 inline_data *id;
444 tree args;
445 tree fn;
447 tree init_stmts;
448 tree parms;
449 tree a;
450 tree p;
452 /* Figure out what the parameters are. */
453 parms = DECL_ARGUMENTS (fn);
455 /* Start with no initializations whatsoever. */
456 init_stmts = NULL_TREE;
458 /* Loop through the parameter declarations, replacing each with an
459 equivalent VAR_DECL, appropriately initialized. */
460 for (p = parms, a = args; p; a = TREE_CHAIN (a), p = TREE_CHAIN (p))
462 tree init_stmt;
463 tree var;
464 tree value;
466 /* Find the initializer. */
467 value = TREE_VALUE (a);
468 /* If the parameter is never assigned to, we may not need to
469 create a new variable here at all. Instead, we may be able
470 to just use the argument value. */
471 if (TREE_READONLY (p)
472 && !TREE_ADDRESSABLE (p)
473 && !TREE_SIDE_EFFECTS (value))
475 /* Simplify the value, if possible. */
476 value = fold (decl_constant_value (value));
478 /* We can't risk substituting complex expressions. They
479 might contain variables that will be assigned to later.
480 Theoretically, we could check the expression to see if
481 all of the variables that determine its value are
482 read-only, but we don't bother. */
483 if (TREE_CONSTANT (value) || TREE_READONLY_DECL_P (value))
485 /* If this is a declaration, wrap it in a NOP_EXPR so that
486 we don't try to put the VALUE on the list of
487 BLOCK_VARS. */
488 if (DECL_P (value))
489 value = build1 (NOP_EXPR, TREE_TYPE (value), value);
491 splay_tree_insert (id->decl_map,
492 (splay_tree_key) p,
493 (splay_tree_value) value);
494 continue;
498 /* Make an equivalent VAR_DECL. */
499 var = copy_decl_for_inlining (p, fn, VARRAY_TREE (id->fns, 0));
500 /* Register the VAR_DECL as the equivalent for the PARM_DECL;
501 that way, when the PARM_DECL is encountered, it will be
502 automatically replaced by the VAR_DECL. */
503 splay_tree_insert (id->decl_map,
504 (splay_tree_key) p,
505 (splay_tree_value) var);
507 /* Declare this new variable. */
508 init_stmt = build_stmt (DECL_STMT, var);
509 TREE_CHAIN (init_stmt) = init_stmts;
510 init_stmts = init_stmt;
512 /* Initialize this VAR_DECL from the equivalent argument. If
513 the argument is an object, created via a constructor or copy,
514 this will not result in an extra copy: the TARGET_EXPR
515 representing the argument will be bound to VAR, and the
516 object will be constructed in VAR. */
517 if (! TYPE_NEEDS_CONSTRUCTING (TREE_TYPE (p)))
518 DECL_INITIAL (var) = value;
519 else
521 /* Even if P was TREE_READONLY, the new VAR should not be.
522 In the original code, we would have constructed a
523 temporary, and then the function body would have never
524 changed the value of P. However, now, we will be
525 constructing VAR directly. The constructor body may
526 change its value multiple times as it is being
527 constructed. Therefore, it must not be TREE_READONLY;
528 the back-end assumes that TREE_READONLY variable is
529 assigned to only once. */
530 TREE_READONLY (var) = 0;
532 /* Build a run-time initialization. */
533 init_stmt = build_stmt (EXPR_STMT,
534 build (INIT_EXPR, TREE_TYPE (p),
535 var, value));
536 /* Add this initialization to the list. Note that we want the
537 declaration *after* the initialization because we are going
538 to reverse all the initialization statements below. */
539 TREE_CHAIN (init_stmt) = init_stmts;
540 init_stmts = init_stmt;
544 /* The initialization statements have been built up in reverse
545 order. Straighten them out now. */
546 return nreverse (init_stmts);
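/* Editorial illustration, not part of the original source: for a call such
   as

     inline int add (const int a, int b) { return a + b; }
     ... add (3, n) ...

   the loop above substitutes the constant 3 directly for A (it is
   read-only, not addressable, and the argument has no side effects), while
   B gets a fresh VAR_DECL declared by a DECL_STMT with the argument as its
   initializer -- roughly `int b' = n;'.  Both mappings are recorded in
   id->decl_map so that copy_body_r replaces later uses of the
   PARM_DECLs.  */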
549 /* Declare a return variable to replace the RESULT_DECL for the
550 function we are calling. An appropriate DECL_STMT is returned.
551 The USE_STMT is filled in to contain a use of the declaration to
552 indicate the return value of the function. */
554 static tree
555 declare_return_variable (id, use_stmt)
556 struct inline_data *id;
557 tree *use_stmt;
559 tree fn = VARRAY_TOP_TREE (id->fns);
560 tree result = DECL_RESULT (fn);
561 tree var;
562 int aggregate_return_p;
564 /* We don't need to do anything for functions that don't return
565 anything. */
566 if (!result || VOID_TYPE_P (TREE_TYPE (result)))
568 *use_stmt = NULL_TREE;
569 return NULL_TREE;
572 /* Figure out whether or not FN returns an aggregate. */
573 aggregate_return_p = IS_AGGR_TYPE (TREE_TYPE (result));
575 /* If FN returns an aggregate then the caller will always create the
576 temporary (using a TARGET_EXPR) and the call will be the
577 initializing expression for the TARGET_EXPR. If we were just to
578 create a new VAR_DECL here, then the result of this function
579 would be copied (bitwise) into the variable initialized by the
580 TARGET_EXPR. That's incorrect, so we must transform any
581 references to the RESULT into references to the target. */
582 if (aggregate_return_p)
584 my_friendly_assert (VARRAY_ACTIVE_SIZE (id->target_exprs) != 0,
585 20000430);
586 var = TREE_OPERAND (VARRAY_TOP_TREE (id->target_exprs), 0);
587 my_friendly_assert
588 (same_type_ignoring_top_level_qualifiers_p (TREE_TYPE (var),
589 TREE_TYPE (result)),
590 20000430);
592 /* Otherwise, make an appropriate copy. */
593 else
594 var = copy_decl_for_inlining (result, fn, VARRAY_TREE (id->fns, 0));
596 /* Register the VAR_DECL as the equivalent for the RESULT_DECL; that
597 way, when the RESULT_DECL is encountered, it will be
598 automatically replaced by the VAR_DECL. */
599 splay_tree_insert (id->decl_map,
600 (splay_tree_key) result,
601 (splay_tree_value) var);
603 /* Build the USE_STMT. */
604 *use_stmt = build_stmt (EXPR_STMT, var);
606 /* Build the declaration statement if FN does not return an
607 aggregate. */
608 if (!aggregate_return_p)
609 return build_stmt (DECL_STMT, var);
610 /* If FN does return an aggregate, there's no need to declare the
611 return variable; we're using a variable in our caller's frame. */
612 else
613 return NULL_TREE;
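/* Editorial illustration, not part of the original source: when the callee
   returns a class type, as in

     struct S { S (); int i; };
     S make ();
     S x = make ();

   the caller already owns a TARGET_EXPR temporary for X, so the code above
   reuses that temporary as the return variable instead of declaring a new
   one; only for non-aggregate return types does the DECL_STMT built here
   introduce a fresh VAR_DECL.  */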
616 /* Returns non-zero if FN is a function that can be inlined. */
618 static int
619 inlinable_function_p (fn, id)
620 tree fn;
621 inline_data *id;
623 int inlinable;
625 /* If we've already decided this function shouldn't be inlined,
626 there's no need to check again. */
627 if (DECL_UNINLINABLE (fn))
628 return 0;
630 /* Assume it is not inlinable. */
631 inlinable = 0;
633 /* If we're not inlining things, then nothing is inlinable. */
634 if (!flag_inline_trees)
636 /* If the function was not declared `inline', then we don't inline
637 it. */
638 else if (!DECL_INLINE (fn))
640 /* We can't inline varargs functions. */
641 else if (varargs_function_p (fn))
643 /* We can't inline functions that are too big. Only allow a single
644 function to eat up half of our budget. */
645 else if (DECL_NUM_STMTS (fn) * INSNS_PER_STMT > MAX_INLINE_INSNS / 2)
647 /* All is well. We can inline this function. Traditionally, GCC
648 has refused to inline functions using alloca, or functions whose
649 values are returned in a PARALLEL, and a few other such obscure
650 conditions. We are not equally constrained at the tree level. */
651 else
652 inlinable = 1;
654 /* Squirrel away the result so that we don't have to check again. */
655 DECL_UNINLINABLE (fn) = !inlinable;
657 /* Even if this function is not itself too big to inline, it might
658 be that we've already done so much inlining that we don't want to
659 risk much more; in that case, we halve the acceptable size. */
660 if ((DECL_NUM_STMTS (fn) + id->inlined_stmts) * INSNS_PER_STMT
661 > MAX_INLINE_INSNS
662 && DECL_NUM_STMTS (fn) * INSNS_PER_STMT > MAX_INLINE_INSNS / 4)
663 inlinable = 0;
665 /* We can inline a template instantiation only if it's fully
666 instantiated. */
667 if (inlinable
668 && DECL_TEMPLATE_INFO (fn)
669 && TI_PENDING_TEMPLATE_FLAG (DECL_TEMPLATE_INFO (fn)))
671 fn = instantiate_decl (fn, /*defer_ok=*/0);
672 inlinable = !TI_PENDING_TEMPLATE_FLAG (DECL_TEMPLATE_INFO (fn));
675 /* If we don't have the function body available, we can't inline
676 it. */
677 if (!DECL_SAVED_TREE (fn))
678 inlinable = 0;
680 /* Don't do recursive inlining, either. We don't record this in
681 DECL_UNINLINABLE; we may be able to inline this function later. */
682 if (inlinable)
684 size_t i;
686 for (i = 0; i < VARRAY_ACTIVE_SIZE (id->fns); ++i)
687 if (VARRAY_TREE (id->fns, i) == fn)
688 return 0;
690 if (inlinable && DECL_LANG_SPECIFIC (fn) && DECL_INLINED_FNS (fn))
692 int j;
693 tree inlined_fns = DECL_INLINED_FNS (fn);
695 for (j = 0; j < TREE_VEC_LENGTH (inlined_fns); ++j)
696 if (TREE_VEC_ELT (inlined_fns, j) == VARRAY_TREE (id->fns, 0))
697 return 0;
701 /* Return the result. */
702 return inlinable;
705 /* If *TP is a CALL_EXPR, replace it with its inline expansion. */
707 static tree
708 expand_call_inline (tp, walk_subtrees, data)
709 tree *tp;
710 int *walk_subtrees;
711 void *data;
713 inline_data *id;
714 tree t;
715 tree expr;
716 tree chain;
717 tree fn;
718 tree scope_stmt;
719 tree use_stmt;
720 tree arg_inits;
721 tree *inlined_body;
722 splay_tree st;
724 /* See what we've got. */
725 id = (inline_data *) data;
726 t = *tp;
728 /* Recurse, but letting recursive invocations know that we are
729 inside the body of a TARGET_EXPR. */
730 if (TREE_CODE (*tp) == TARGET_EXPR)
732 int i, len = first_rtl_op (TARGET_EXPR);
734 /* We're walking our own subtrees. */
735 *walk_subtrees = 0;
737 /* Push *TP on the stack of pending TARGET_EXPRs. */
738 VARRAY_PUSH_TREE (id->target_exprs, *tp);
740 /* Actually walk over them. This loop is the body of
741 walk_trees, omitting the case where the TARGET_EXPR
742 itself is handled. */
743 for (i = 0; i < len; ++i)
745 if (i == 2)
746 ++id->in_target_cleanup_p;
747 walk_tree (&TREE_OPERAND (*tp, i), expand_call_inline, data,
748 id->tree_pruner);
749 if (i == 2)
750 --id->in_target_cleanup_p;
753 /* We're done with this TARGET_EXPR now. */
754 VARRAY_POP (id->target_exprs);
756 return NULL_TREE;
759 if (TREE_CODE_CLASS (TREE_CODE (t)) == 't')
760 /* Because types were not copied in copy_body, CALL_EXPRs beneath
761 them should not be expanded. This can happen if the type is a
762 dynamic array type, for example. */
763 *walk_subtrees = 0;
765 /* From here on, we're only interested in CALL_EXPRs. */
766 if (TREE_CODE (t) != CALL_EXPR)
767 return NULL_TREE;
769 /* First, see if we can figure out what function is being called.
770 If we cannot, then there is no hope of inlining the function. */
771 fn = get_callee_fndecl (t);
772 if (!fn)
773 return NULL_TREE;
775 /* Don't try to inline functions that are not well-suited to
776 inlining. */
777 if (!inlinable_function_p (fn, id))
778 return NULL_TREE;
780 /* Set the current filename and line number to the function we are
781 inlining so that when we create new _STMT nodes here they get
782 line numbers corresponding to the function we are calling. We
783 wrap the whole inlined body in an EXPR_WITH_FILE_LOCATION as well
784 because individual statements don't record the filename. */
785 push_srcloc (fn->decl.filename, fn->decl.linenum);
787 /* Build a statement-expression containing code to initialize the
788 arguments, the actual inline expansion of the body, and a label
789 for the return statements within the function to jump to. The
790 type of the statement expression is the return type of the
791 function call. */
792 expr = build_min (STMT_EXPR, TREE_TYPE (TREE_TYPE (fn)), NULL_TREE);
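/* Editorial sketch, not part of the original source: by the time the
   CALL_EXPR is replaced below, EXPR has roughly the shape

     ({ <scope begin for the parameters>
        <parameter initializations>
        <DECL_STMT for the return variable, if any>
        <copy of the callee body, with returns rewritten as gotos>
        <scope end>
        <ret_label>:;
        <return variable>; })

   so the value of the statement-expression is the callee's return
   value.  */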
794 /* Local declarations will be replaced by their equivalents in this
795 map. */
796 st = id->decl_map;
797 id->decl_map = splay_tree_new (splay_tree_compare_pointers,
798 NULL, NULL);
800 /* Initialize the parameters. */
801 arg_inits = initialize_inlined_parameters (id, TREE_OPERAND (t, 1), fn);
802 /* Expand any inlined calls in the initializers. Do this before we
803 push FN on the stack of functions we are inlining; we want to
804 inline calls to FN that appear in the initializers for the
805 parameters. */
806 expand_calls_inline (&arg_inits, id);
807 /* And add them to the tree. */
808 STMT_EXPR_STMT (expr) = chainon (STMT_EXPR_STMT (expr), arg_inits);
810 /* Record the function we are about to inline so that we can avoid
811 recursing into it. */
812 VARRAY_PUSH_TREE (id->fns, fn);
814 /* Record the function we are about to inline if optimize_function
815 has not been called on it yet and we don't have it in the list. */
816 if (DECL_LANG_SPECIFIC (fn) && !DECL_INLINED_FNS (fn))
818 int i;
820 for (i = VARRAY_ACTIVE_SIZE (id->inlined_fns) - 1; i >= 0; i--)
821 if (VARRAY_TREE (id->inlined_fns, i) == fn)
822 break;
823 if (i < 0)
824 VARRAY_PUSH_TREE (id->inlined_fns, fn);
827 /* Return statements in the function body will be replaced by jumps
828 to the RET_LABEL. */
829 id->ret_label = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);
830 DECL_CONTEXT (id->ret_label) = VARRAY_TREE (id->fns, 0);
832 /* Create a block to put the parameters in. We have to do this
833 after the parameters have been remapped because remapping
834 parameters is different from remapping ordinary variables. */
835 scope_stmt = build_stmt (SCOPE_STMT, DECL_INITIAL (fn));
836 SCOPE_BEGIN_P (scope_stmt) = 1;
837 SCOPE_NO_CLEANUPS_P (scope_stmt) = 1;
838 remap_block (scope_stmt, DECL_ARGUMENTS (fn), id);
839 TREE_CHAIN (scope_stmt) = STMT_EXPR_STMT (expr);
840 STMT_EXPR_STMT (expr) = scope_stmt;
842 /* Tell the debugging backends that this block represents the
843 outermost scope of the inlined function. */
844 if (SCOPE_STMT_BLOCK (scope_stmt))
845 BLOCK_ABSTRACT_ORIGIN (SCOPE_STMT_BLOCK (scope_stmt)) = DECL_ORIGIN (fn);
847 /* Declare the return variable for the function. */
848 STMT_EXPR_STMT (expr)
849 = chainon (STMT_EXPR_STMT (expr),
850 declare_return_variable (id, &use_stmt));
852 /* After we've initialized the parameters, we insert the body of the
853 function itself. */
854 inlined_body = &STMT_EXPR_STMT (expr);
855 while (*inlined_body)
856 inlined_body = &TREE_CHAIN (*inlined_body);
857 *inlined_body = copy_body (id);
859 /* Close the block for the parameters. */
860 scope_stmt = build_stmt (SCOPE_STMT, DECL_INITIAL (fn));
861 SCOPE_NO_CLEANUPS_P (scope_stmt) = 1;
862 my_friendly_assert (DECL_INITIAL (fn)
863 && TREE_CODE (DECL_INITIAL (fn)) == BLOCK,
864 19991203);
865 remap_block (scope_stmt, NULL_TREE, id);
866 STMT_EXPR_STMT (expr)
867 = chainon (STMT_EXPR_STMT (expr), scope_stmt);
869 /* After the body of the function comes the RET_LABEL. This must come
870 before we evaluate the returned value below, because that evaluation
871 may cause RTL to be generated. */
872 STMT_EXPR_STMT (expr)
873 = chainon (STMT_EXPR_STMT (expr),
874 build_stmt (LABEL_STMT, id->ret_label));
876 /* Finally, mention the returned value so that the value of the
877 statement-expression is the returned value of the function. */
878 STMT_EXPR_STMT (expr) = chainon (STMT_EXPR_STMT (expr), use_stmt);
880 /* Clean up. */
881 splay_tree_delete (id->decl_map);
882 id->decl_map = st;
884 /* The new expression has side-effects if the old one did. */
885 TREE_SIDE_EFFECTS (expr) = TREE_SIDE_EFFECTS (t);
887 /* Replace the call by the inlined body. Wrap it in an
888 EXPR_WITH_FILE_LOCATION so that we'll get debugging line notes
889 pointing to the right place. */
890 chain = TREE_CHAIN (*tp);
891 *tp = build_expr_wfl (expr, DECL_SOURCE_FILE (fn), DECL_SOURCE_LINE (fn),
892 /*col=*/0);
893 EXPR_WFL_EMIT_LINE_NOTE (*tp) = 1;
894 TREE_CHAIN (*tp) = chain;
895 pop_srcloc ();
897 /* If the value of the new expression is ignored, that's OK. We
898 don't warn about this for CALL_EXPRs, so we shouldn't warn about
899 the equivalent inlined version either. */
900 TREE_USED (*tp) = 1;
902 /* Our function now has more statements than it did before. */
903 DECL_NUM_STMTS (VARRAY_TREE (id->fns, 0)) += DECL_NUM_STMTS (fn);
904 id->inlined_stmts += DECL_NUM_STMTS (fn);
906 /* Recurse into the body of the just inlined function. */
907 expand_calls_inline (inlined_body, id);
908 VARRAY_POP (id->fns);
910 /* If we've returned to the top level, clear out the record of how
911 much inlining has been done. */
912 if (VARRAY_ACTIVE_SIZE (id->fns) == id->first_inlined_fn)
913 id->inlined_stmts = 0;
915 /* Don't walk into subtrees. We've already handled them above. */
916 *walk_subtrees = 0;
918 /* Keep iterating. */
919 return NULL_TREE;
922 /* Walk over the entire tree *TP, replacing CALL_EXPRs with inline
923 expansions as appropriate. */
925 static void
926 expand_calls_inline (tp, id)
927 tree *tp;
928 inline_data *id;
930 /* Search through *TP, replacing all calls to inline functions by
931 appropriate equivalents. Use walk_tree in no-duplicates mode
932 to avoid exponential time complexity. (We can't just use
933 walk_tree_without_duplicates, because of the special TARGET_EXPR
934 handling in expand_call_inline.) The hash table is set up in
935 optimize_function. */
936 walk_tree (tp, expand_call_inline, id, id->tree_pruner);
939 /* Optimize the body of FN. */
941 void
942 optimize_function (fn)
943 tree fn;
945 dump_function (TDI_original, fn);
947 /* While in this function, we may choose to go off and compile
948 another function. For example, we might instantiate a function
949 in the hopes of inlining it. Normally, that wouldn't trigger any
950 actual RTL code-generation -- but it will if the template is
951 actually needed. (For example, if its address is taken, or if
952 some other function already refers to the template.) If
953 code-generation occurs, then garbage collection will occur, so we
954 must protect ourselves, just as we do while building up the body
955 of the function. */
956 ++function_depth;
958 /* Expand calls to inline functions. */
959 if (flag_inline_trees)
961 inline_data id;
962 tree prev_fn;
963 struct saved_scope *s;
965 /* Clear out ID. */
966 memset (&id, 0, sizeof (id));
968 /* Don't allow recursion into FN. */
969 VARRAY_TREE_INIT (id.fns, 32, "fns");
970 VARRAY_PUSH_TREE (id.fns, fn);
971 /* Or any functions that aren't finished yet. */
972 prev_fn = NULL_TREE;
973 if (current_function_decl)
975 VARRAY_PUSH_TREE (id.fns, current_function_decl);
976 prev_fn = current_function_decl;
978 for (s = scope_chain; s; s = s->prev)
979 if (s->function_decl && s->function_decl != prev_fn)
981 VARRAY_PUSH_TREE (id.fns, s->function_decl);
982 prev_fn = s->function_decl;
985 /* Create the stack of TARGET_EXPRs. */
986 VARRAY_TREE_INIT (id.target_exprs, 32, "target_exprs");
988 /* Create the list of functions this call will inline. */
989 VARRAY_TREE_INIT (id.inlined_fns, 32, "inlined_fns");
991 /* Keep track of the low-water mark, i.e., the point where
992 the first real inlining is represented in ID.FNS. */
993 id.first_inlined_fn = VARRAY_ACTIVE_SIZE (id.fns);
995 /* Replace all calls to inline functions with the bodies of those
996 functions. */
997 id.tree_pruner = htab_create (37, htab_hash_pointer,
998 htab_eq_pointer, NULL);
999 expand_calls_inline (&DECL_SAVED_TREE (fn), &id);
1001 /* Clean up. */
1002 htab_delete (id.tree_pruner);
1003 VARRAY_FREE (id.fns);
1004 VARRAY_FREE (id.target_exprs);
1005 if (DECL_LANG_SPECIFIC (fn))
1007 tree ifn = make_tree_vec (VARRAY_ACTIVE_SIZE (id.inlined_fns));
1009 memcpy (&TREE_VEC_ELT (ifn, 0), &VARRAY_TREE (id.inlined_fns, 0),
1010 VARRAY_ACTIVE_SIZE (id.inlined_fns) * sizeof (tree));
1011 DECL_INLINED_FNS (fn) = ifn;
1013 VARRAY_FREE (id.inlined_fns);
1016 /* Undo the increment of function_depth above. */
1017 --function_depth;
1019 dump_function (TDI_optimized, fn);
1022 /* Called from calls_setjmp_p via walk_tree. */
1024 static tree
1025 calls_setjmp_r (tp, walk_subtrees, data)
1026 tree *tp;
1027 int *walk_subtrees ATTRIBUTE_UNUSED;
1028 void *data ATTRIBUTE_UNUSED;
1030 /* We're only interested in FUNCTION_DECLS. */
1031 if (TREE_CODE (*tp) != FUNCTION_DECL)
1032 return NULL_TREE;
1034 return setjmp_call_p (*tp) ? *tp : NULL_TREE;
1037 /* Returns non-zero if FN calls `setjmp' or some other function that
1038 can return more than once. This function is conservative; it may
1039 occasionally return a non-zero value even when FN does not actually
1040 call `setjmp'. */
1043 calls_setjmp_p (fn)
1044 tree fn;
1046 return walk_tree_without_duplicates (&DECL_SAVED_TREE (fn),
1047 calls_setjmp_r,
1048 NULL) != NULL_TREE;
1051 /* CLONED_PARM is a copy of CLONE, generated for a cloned constructor
1052 or destructor. Update it to ensure that the source-position for
1053 the cloned parameter matches that for the original, and that the
1054 debugging generation code will be able to find the original PARM. */
1056 static void
1057 update_cloned_parm (parm, cloned_parm)
1058 tree parm;
1059 tree cloned_parm;
1061 DECL_ABSTRACT_ORIGIN (cloned_parm) = parm;
1063 /* We may have taken its address. */
1064 TREE_ADDRESSABLE (cloned_parm) = TREE_ADDRESSABLE (parm);
1066 /* The definition might have different constness. */
1067 TREE_READONLY (cloned_parm) = TREE_READONLY (parm);
1069 TREE_USED (cloned_parm) = TREE_USED (parm);
1071 /* The name may have changed from the declaration. */
1072 DECL_NAME (cloned_parm) = DECL_NAME (parm);
1073 DECL_SOURCE_FILE (cloned_parm) = DECL_SOURCE_FILE (parm);
1074 DECL_SOURCE_LINE (cloned_parm) = DECL_SOURCE_LINE (parm);
1078 /* FN is a function that has a complete body. Clone the body as
1079 necessary. Returns non-zero if there's no longer any need to
1080 process the main body. */
1083 maybe_clone_body (fn)
1084 tree fn;
1086 inline_data id;
1087 tree clone;
1088 int first = 1;
1090 /* We only clone constructors and destructors. */
1091 if (!DECL_MAYBE_IN_CHARGE_CONSTRUCTOR_P (fn)
1092 && !DECL_MAYBE_IN_CHARGE_DESTRUCTOR_P (fn))
1093 return 0;
1095 /* Emit the DWARF1 abstract instance. */
1096 note_deferral_of_defined_inline_function (fn);
1098 /* We know that any clones immediately follow FN in the TYPE_METHODS
1099 list. */
1100 for (clone = TREE_CHAIN (fn);
1101 clone && DECL_CLONED_FUNCTION_P (clone);
1102 clone = TREE_CHAIN (clone), first = 0)
1104 tree parm;
1105 tree clone_parm;
1106 int parmno;
1108 /* Update CLONE's source position information to match FN's. */
1109 DECL_SOURCE_FILE (clone) = DECL_SOURCE_FILE (fn);
1110 DECL_SOURCE_LINE (clone) = DECL_SOURCE_LINE (fn);
1111 DECL_INLINE (clone) = DECL_INLINE (fn);
1112 DECL_DECLARED_INLINE_P (clone) = DECL_DECLARED_INLINE_P (fn);
1113 DECL_COMDAT (clone) = DECL_COMDAT (fn);
1114 DECL_WEAK (clone) = DECL_WEAK (fn);
1115 DECL_ONE_ONLY (clone) = DECL_ONE_ONLY (fn);
1116 DECL_SECTION_NAME (clone) = DECL_SECTION_NAME (fn);
1117 DECL_USE_TEMPLATE (clone) = DECL_USE_TEMPLATE (fn);
1118 DECL_EXTERNAL (clone) = DECL_EXTERNAL (fn);
1119 DECL_INTERFACE_KNOWN (clone) = DECL_INTERFACE_KNOWN (fn);
1120 DECL_NOT_REALLY_EXTERN (clone) = DECL_NOT_REALLY_EXTERN (fn);
1121 TREE_PUBLIC (clone) = TREE_PUBLIC (fn);
1123 /* Adjust the parameter names and locations. */
1124 parm = DECL_ARGUMENTS (fn);
1125 clone_parm = DECL_ARGUMENTS (clone);
1126 /* Update the `this' parameter, which is always first.
1127 Sometimes, we end up updating the `this' parameter twice because
1128 we process it again in the loop below. That is harmless. */
1129 update_cloned_parm (parm, clone_parm);
1130 if (DECL_HAS_IN_CHARGE_PARM_P (fn))
1131 parm = TREE_CHAIN (parm);
1132 if (DECL_HAS_VTT_PARM_P (fn))
1133 parm = TREE_CHAIN (parm);
1134 if (DECL_HAS_VTT_PARM_P (clone))
1135 clone_parm = TREE_CHAIN (clone_parm);
1136 for (; parm;
1137 parm = TREE_CHAIN (parm), clone_parm = TREE_CHAIN (clone_parm))
1139 /* Update this parameter. */
1140 update_cloned_parm (parm, clone_parm);
1141 /* We should only report unused parameters for one clone. */
1142 if (!first)
1143 TREE_USED (clone_parm) = 1;
1146 /* Start processing the function. */
1147 push_to_top_level ();
1148 start_function (NULL_TREE, clone, NULL_TREE, SF_PRE_PARSED);
1150 /* Just clone the body, as if we were making an inline call.
1151 But, remap the parameters in the callee to the parameters of
1152 the caller. If there's an in-charge parameter, map it to an
1153 appropriate constant. */
1154 memset (&id, 0, sizeof (id));
1155 VARRAY_TREE_INIT (id.fns, 2, "fns");
1156 VARRAY_PUSH_TREE (id.fns, clone);
1157 VARRAY_PUSH_TREE (id.fns, fn);
1159 /* Cloning is treated slightly differently from inlining. Set
1160 CLONING_P so that it's clear which operation we're performing. */
1161 id.cloning_p = true;
1163 /* Remap the parameters. */
1164 id.decl_map = splay_tree_new (splay_tree_compare_pointers,
1165 NULL, NULL);
1166 for (parmno = 0,
1167 parm = DECL_ARGUMENTS (fn),
1168 clone_parm = DECL_ARGUMENTS (clone);
1169 parm;
1170 ++parmno,
1171 parm = TREE_CHAIN (parm))
1173 /* Map the in-charge parameter to an appropriate constant. */
1174 if (DECL_HAS_IN_CHARGE_PARM_P (fn) && parmno == 1)
1176 tree in_charge;
1177 in_charge = in_charge_arg_for_name (DECL_NAME (clone));
1178 splay_tree_insert (id.decl_map,
1179 (splay_tree_key) parm,
1180 (splay_tree_value) in_charge);
1182 else if (DECL_ARTIFICIAL (parm)
1183 && DECL_NAME (parm) == vtt_parm_identifier)
1185 /* For a subobject constructor or destructor, the next
1186 argument is the VTT parameter. Remap the VTT_PARM
1187 from the CLONE to this parameter. */
1188 if (DECL_HAS_VTT_PARM_P (clone))
1190 DECL_ABSTRACT_ORIGIN (clone_parm) = parm;
1191 splay_tree_insert (id.decl_map,
1192 (splay_tree_key) parm,
1193 (splay_tree_value) clone_parm);
1194 clone_parm = TREE_CHAIN (clone_parm);
1196 /* Otherwise, map the VTT parameter to `NULL'. */
1197 else
1199 splay_tree_insert (id.decl_map,
1200 (splay_tree_key) parm,
1201 (splay_tree_value) null_pointer_node);
1204 /* Map other parameters to their equivalents in the cloned
1205 function. */
1206 else
1208 splay_tree_insert (id.decl_map,
1209 (splay_tree_key) parm,
1210 (splay_tree_value) clone_parm);
1211 clone_parm = TREE_CHAIN (clone_parm);
1215 /* Actually copy the body. */
1216 TREE_CHAIN (DECL_SAVED_TREE (clone)) = copy_body (&id);
1218 /* There are as many statements in the clone as in the
1219 original. */
1220 DECL_NUM_STMTS (clone) = DECL_NUM_STMTS (fn);
1222 /* Clean up. */
1223 splay_tree_delete (id.decl_map);
1224 VARRAY_FREE (id.fns);
1226 /* Now, expand this function into RTL, if appropriate. */
1227 function_name_declared_p = 1;
1228 finish_function (0);
1229 BLOCK_ABSTRACT_ORIGIN (DECL_INITIAL (clone)) = DECL_INITIAL (fn);
1230 expand_body (clone);
1231 pop_from_top_level ();
1234 /* We don't need to process the original function any further. */
1235 return 1;
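/* Editorial illustration, not part of the original source: for

     struct B { B (); };
     struct D : virtual B { D (); };

   D::D() is the "maybe-in-charge" constructor FN above.  Its
   complete-object and base-object clones are produced by copying FN's
   body with the __in_chrg parameter replaced by the constant that
   in_charge_arg_for_name yields for each clone's name, and with the VTT
   parameter either remapped to the clone's own VTT parameter or replaced
   by a null pointer, as the code above arranges.  */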
1238 /* Dump FUNCTION_DECL FN as tree dump PHASE. */
1240 static void
1241 dump_function (phase, fn)
1242 enum tree_dump_index phase;
1243 tree fn;
1245 FILE *stream;
1246 int flags;
1248 stream = dump_begin (phase, &flags);
1249 if (stream)
1251 fprintf (stream, "\n;; Function %s",
1252 decl_as_string (fn, TFF_DECL_SPECIFIERS));
1253 fprintf (stream, " (%s)", decl_as_string (DECL_ASSEMBLER_NAME (fn), 0));
1254 fprintf (stream, "\n\n");
1256 dump_node (fn, TDF_SLIM | flags, stream);
1257 dump_end (phase, stream);