/* Perform optimizations on tree structure.
   Copyright (C) 1998, 1999, 2000, 2001 Free Software Foundation, Inc.
   Written by Mark Mitchell (mark@codesourcery.com).

   This file is part of GNU CC.

   GNU CC is free software; you can redistribute it and/or modify it
   under the terms of the GNU General Public License as published by
   the Free Software Foundation; either version 2, or (at your option)
   any later version.

   GNU CC is distributed in the hope that it will be useful, but
   WITHOUT ANY WARRANTY; without even the implied warranty of
   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
   General Public License for more details.

   You should have received a copy of the GNU General Public License
   along with GNU CC; see the file COPYING.  If not, write to the Free
   Software Foundation, 59 Temple Place - Suite 330, Boston, MA
   02111-1307, USA.  */
#include "insn-config.h"
#include "integrate.h"
/* To Do:

   o In order to make inlining-on-trees work, we pessimized
     function-local static constants.  In particular, they are now
     always output, even when not addressed.  Fix this by treating
     function-local static constants just like global static
     constants; the back-end already knows not to output them if they
     are not needed.

   o Provide heuristics to clamp inlining of recursive template
     calls?  */
/* Data required for function inlining.  */

typedef struct inline_data
{
  /* A stack of the functions we are inlining.  For example, if we are
     compiling `f', which calls `g', which calls `h', and we are
     inlining the body of `h', the stack will contain `h', followed
     by `g', followed by `f'.  The first few elements of the stack may
     contain other functions that we know we should not recurse into,
     even though they are not directly being inlined.  */
  varray_type fns;
  /* The index of the first element of FNS that really represents an
     inlined function.  */
  unsigned first_inlined_fn;
  /* The label to jump to when a return statement is encountered.  If
     this value is NULL, then return statements will simply be
     remapped as return statements, rather than as jumps.  */
  tree ret_label;
  /* The map from local declarations in the inlined function to
     equivalents in the function into which it is being inlined.  */
  splay_tree decl_map;
  /* Nonzero if we are currently within the cleanup for a
     TARGET_EXPR.  */
  int in_target_cleanup_p;
  /* A stack of the TARGET_EXPRs that we are currently processing.  */
  varray_type target_exprs;
  /* A list of the functions the current function has inlined.  */
  varray_type inlined_fns;
  /* The approximate number of statements we have inlined in the
     current call stack.  */
  int inlined_stmts;
  /* We use the same mechanism to build clones that we do to perform
     inlining.  However, there are a few places where we need to
     distinguish between those two situations.  This flag is true if
     we are cloning, rather than inlining.  */
  bool cloning_p;
  /* Hash table used to prevent walk_tree from visiting the same node
     umpteen million times.  */
  htab_t tree_pruner;
} inline_data;
static tree initialize_inlined_parameters PARAMS ((inline_data *, tree, tree));
static tree declare_return_variable PARAMS ((inline_data *, tree *));
static tree copy_body_r PARAMS ((tree *, int *, void *));
static tree copy_body PARAMS ((inline_data *));
static tree expand_call_inline PARAMS ((tree *, int *, void *));
static void expand_calls_inline PARAMS ((tree *, inline_data *));
static int inlinable_function_p PARAMS ((tree, inline_data *));
static tree remap_decl PARAMS ((tree, inline_data *));
static void remap_block PARAMS ((tree, tree, inline_data *));
static void copy_scope_stmt PARAMS ((tree *, int *, inline_data *));
static tree calls_setjmp_r PARAMS ((tree *, int *, void *));
static void update_cloned_parm PARAMS ((tree, tree));
/* The approximate number of instructions per statement.  This number
   need not be particularly accurate; it is used only to make
   decisions about when a function is too big to inline.  */
#define INSNS_PER_STMT (10)
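
/* A worked example of how this estimate is used (illustrative only;
   the precise limit is MAX_INLINE_INSNS, whose value is defined
   elsewhere): a function recorded as having 50 statements is costed
   at 50 * INSNS_PER_STMT = 500 insns, and inlinable_function_p below
   declines to inline it once that estimate exceeds MAX_INLINE_INSNS.  */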
/* Remap DECL during the copying of the BLOCK tree for the function.
   DATA is really an `inline_data *'.  */

static tree
remap_decl (decl, id)

  /* We only remap local variables in the current function.  */
  fn = VARRAY_TOP_TREE (id->fns);
  if (!nonstatic_local_decl_p (decl) || DECL_CONTEXT (decl) != fn)
    return NULL_TREE;

  /* See if we have remapped this declaration.  */
  n = splay_tree_lookup (id->decl_map, (splay_tree_key) decl);
  /* If we didn't already have an equivalent for this declaration,
     create one now.  */
  if (!n)
    {
      /* Make a copy of the variable or label.  */
      t = copy_decl_for_inlining (decl, fn,
                                  VARRAY_TREE (id->fns, 0));

      /* The decl T could be a dynamic array or other variable size type,
         in which case some fields need to be remapped because they may
         contain SAVE_EXPRs.  */
      walk_tree (&DECL_SIZE (t), copy_body_r, id, NULL);
      walk_tree (&DECL_SIZE_UNIT (t), copy_body_r, id, NULL);
      if (TREE_TYPE (t) && TREE_CODE (TREE_TYPE (t)) == ARRAY_TYPE
          && TYPE_DOMAIN (TREE_TYPE (t)))
        {
          TREE_TYPE (t) = copy_node (TREE_TYPE (t));
          TYPE_DOMAIN (TREE_TYPE (t))
            = copy_node (TYPE_DOMAIN (TREE_TYPE (t)));
          walk_tree (&TYPE_MAX_VALUE (TYPE_DOMAIN (TREE_TYPE (t))),
                     copy_body_r, id, NULL);
        }

      /* Remember it, so that if we encounter this local entity
         again we can reuse this copy.  */
      n = splay_tree_insert (id->decl_map,
                             (splay_tree_key) decl,
                             (splay_tree_value) t);
    }

  return (tree) n->value;
/* Copy the SCOPE_STMT_BLOCK associated with SCOPE_STMT to contain
   remapped versions of the variables therein.  And hook the new block
   into the block-tree.  If non-NULL, the DECLS are declarations to
   use instead of the BLOCK_VARS in the old block.  */

static void
remap_block (scope_stmt, decls, id)

  /* We cannot do this in the cleanup for a TARGET_EXPR since we do
     not know whether or not expand_expr will actually write out the
     code we put there.  If it does not, then we'll have more BLOCKs
     than block-notes, and things will go awry.  At some point, we
     should make the back-end handle BLOCK notes in a tidier way,
     without requiring a strict correspondence to the block-tree; then
     this check can go.  */
  if (id->in_target_cleanup_p)
    {
      SCOPE_STMT_BLOCK (scope_stmt) = NULL_TREE;
      return;
    }

  /* If this is the beginning of a scope, remap the associated BLOCK.  */
  if (SCOPE_BEGIN_P (scope_stmt) && SCOPE_STMT_BLOCK (scope_stmt))
    {
      /* Make the new block.  */
      old_block = SCOPE_STMT_BLOCK (scope_stmt);
      new_block = make_node (BLOCK);
      TREE_USED (new_block) = TREE_USED (old_block);
      BLOCK_ABSTRACT_ORIGIN (new_block) = old_block;
      SCOPE_STMT_BLOCK (scope_stmt) = new_block;

      /* Remap its variables.  */
      for (old_var = decls ? decls : BLOCK_VARS (old_block);
           old_var;
           old_var = TREE_CHAIN (old_var))
        {
          /* Remap the variable.  */
          new_var = remap_decl (old_var, id);
          /* If we didn't remap this variable, we can't mess with its
             TREE_CHAIN.  If we remapped this variable to something
             other than a declaration (say, if we mapped it to a
             constant), then we must similarly omit any mention of it
             here.  */
          if (!new_var || !DECL_P (new_var))
            ;
          else
            {
              TREE_CHAIN (new_var) = BLOCK_VARS (new_block);
              BLOCK_VARS (new_block) = new_var;
            }
        }
      /* We put the BLOCK_VARS in reverse order; fix that now.  */
      BLOCK_VARS (new_block) = nreverse (BLOCK_VARS (new_block));
      fn = VARRAY_TREE (id->fns, 0);
      if (id->cloning_p)
        /* We're building a clone; DECL_INITIAL is still
           error_mark_node, and current_binding_level is the parm
           binding level.  */
        insert_block (new_block);
      else
        {
          /* Attach this new block after the DECL_INITIAL block for the
             function into which this block is being inlined.  In
             rest_of_compilation we will straighten out the BLOCK tree.  */
          if (DECL_INITIAL (fn))
            first_block = &BLOCK_CHAIN (DECL_INITIAL (fn));
          else
            first_block = &DECL_INITIAL (fn);
          BLOCK_CHAIN (new_block) = *first_block;
          *first_block = new_block;
        }

      /* Remember the remapped block.  */
      splay_tree_insert (id->decl_map,
                         (splay_tree_key) old_block,
                         (splay_tree_value) new_block);
    }
  /* If this is the end of a scope, set the SCOPE_STMT_BLOCK to be the
     remapped block.  */
  else if (SCOPE_END_P (scope_stmt) && SCOPE_STMT_BLOCK (scope_stmt))
    {
      /* Find this block in the table of remapped things.  */
      n = splay_tree_lookup (id->decl_map,
                             (splay_tree_key) SCOPE_STMT_BLOCK (scope_stmt));
      my_friendly_assert (n != NULL, 19991203);
      SCOPE_STMT_BLOCK (scope_stmt) = (tree) n->value;
    }
/* Copy the SCOPE_STMT pointed to by TP.  */

static void
copy_scope_stmt (tp, walk_subtrees, id)

  /* Remember whether or not this statement was nullified.  When
     making a copy, copy_tree_r always sets SCOPE_NULLIFIED_P (and
     doesn't copy the SCOPE_STMT_BLOCK) to free callers from having to
     deal with copying BLOCKs if they do not wish to do so.  */
  block = SCOPE_STMT_BLOCK (*tp);
  /* Copy (and replace) the statement.  */
  copy_tree_r (tp, walk_subtrees, NULL);
  /* Restore the SCOPE_STMT_BLOCK.  */
  SCOPE_STMT_BLOCK (*tp) = block;

  /* Remap the associated block.  */
  remap_block (*tp, NULL_TREE, id);

/* Called from copy_body via walk_tree.  DATA is really an
   `inline_data *'.  */

static tree
copy_body_r (tp, walk_subtrees, data)

  id = (inline_data *) data;
  fn = VARRAY_TOP_TREE (id->fns);

  /* All automatic variables should have a DECL_CONTEXT indicating
     what function they come from.  */
  if ((TREE_CODE (*tp) == VAR_DECL || TREE_CODE (*tp) == LABEL_DECL)
      && DECL_NAMESPACE_SCOPE_P (*tp))
    my_friendly_assert (DECL_EXTERNAL (*tp) || TREE_STATIC (*tp),
  /* If this is a RETURN_STMT, change it into an EXPR_STMT and a
     GOTO_STMT with the RET_LABEL as its target.  */
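  /* An illustrative sketch (not code from this file): a statement
     `return x + y;' in the function being inlined becomes, roughly,

         <return-var> = x + y;    (the EXPR_STMT)
         goto <ret_label>;        (the GOTO_STMT)

     where <return-var> stands for the variable set up by
     declare_return_variable and <ret_label> for id->ret_label.  */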
  if (TREE_CODE (*tp) == RETURN_STMT && id->ret_label)
    {
      tree return_stmt = *tp;

      /* Build the GOTO_STMT.  */
      goto_stmt = build_stmt (GOTO_STMT, id->ret_label);
      TREE_CHAIN (goto_stmt) = TREE_CHAIN (return_stmt);

      /* If we're returning something, just turn that into an
         assignment into the equivalent of the original
         RESULT_DECL.  */
      if (RETURN_EXPR (return_stmt))
        {
          *tp = build_stmt (EXPR_STMT,
                            RETURN_EXPR (return_stmt));
          STMT_IS_FULL_EXPR_P (*tp) = 1;
          /* And then jump to the end of the function.  */
          TREE_CHAIN (*tp) = goto_stmt;
        }
      /* If we're not returning anything just do the jump.  */
      else
        *tp = goto_stmt;
    }
  /* Local variables and labels need to be replaced by equivalent
     variables.  We don't want to copy static variables; there's only
     one of those, no matter how many times we inline the containing
     function.  */
  else if (nonstatic_local_decl_p (*tp) && DECL_CONTEXT (*tp) == fn)
    {
      /* Remap the declaration.  */
      new_decl = remap_decl (*tp, id);
      my_friendly_assert (new_decl != NULL_TREE, 19991203);
      /* Replace this variable with the copy.  */
      STRIP_TYPE_NOPS (new_decl);
      *tp = new_decl;
    }
  else if (nonstatic_local_decl_p (*tp)
           && DECL_CONTEXT (*tp) != VARRAY_TREE (id->fns, 0))
    my_friendly_abort (0);
  else if (TREE_CODE (*tp) == SAVE_EXPR)
    remap_save_expr (tp, id->decl_map, VARRAY_TREE (id->fns, 0),
                     walk_subtrees);
  else if (TREE_CODE (*tp) == UNSAVE_EXPR)
    /* UNSAVE_EXPRs should not be generated until expansion time.  */
    my_friendly_abort (19991113);
  /* For a SCOPE_STMT, we must copy the associated block so that we
     can write out debugging information for the inlined variables.  */
  else if (TREE_CODE (*tp) == SCOPE_STMT && !id->in_target_cleanup_p)
    copy_scope_stmt (tp, walk_subtrees, id);
  /* Otherwise, just copy the node.  Note that copy_tree_r already
     knows not to copy VAR_DECLs, etc., so this is safe.  */
  else
    {
      copy_tree_r (tp, walk_subtrees, NULL);
      /* The copied TARGET_EXPR has never been expanded, even if the
         original node was expanded already.  */
      if (TREE_CODE (*tp) == TARGET_EXPR && TREE_OPERAND (*tp, 3))
        {
          TREE_OPERAND (*tp, 1) = TREE_OPERAND (*tp, 3);
          TREE_OPERAND (*tp, 3) = NULL_TREE;
        }
      else if (TREE_CODE (*tp) == MODIFY_EXPR
               && TREE_OPERAND (*tp, 0) == TREE_OPERAND (*tp, 1)
               && nonstatic_local_decl_p (TREE_OPERAND (*tp, 0))
               && DECL_CONTEXT (TREE_OPERAND (*tp, 0)) == fn)
        {
          /* Some assignments VAR = VAR; don't generate any rtl code
             and thus don't count as variable modification.  Avoid
             keeping bogosities like 0 = 0.  */
          tree decl = TREE_OPERAND (*tp, 0), value;

          n = splay_tree_lookup (id->decl_map, (splay_tree_key) decl);
          if (n)
            {
              value = (tree) n->value;
              STRIP_TYPE_NOPS (value);
              if (TREE_CONSTANT (value) || TREE_READONLY_DECL_P (value))
                *tp = value;
            }
        }
    }

  /* Keep iterating.  */
  return NULL_TREE;
/* Make a copy of the body of FN so that it can be inserted inline in
   another function.  */

static tree
copy_body (id)

  body = DECL_SAVED_TREE (VARRAY_TOP_TREE (id->fns));
  walk_tree (&body, copy_body_r, id, NULL);

  return body;
/* Generate code to initialize the parameters of the function at the
   top of the stack in ID from the ARGS (presented as a TREE_LIST).  */
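
/* An illustrative sketch (names invented for the example): inlining a
   call `f (3, q)' to `int f (int a, struct S b)' produces roughly

     int a.1 = 3;          (DECL_STMT plus initialization for `a')
     struct S b.2 = q;     (likewise for `b')

   where `a.1' and `b.2' stand for the VAR_DECLs created by
   copy_decl_for_inlining; a read-only parameter whose argument is a
   constant may instead be mapped directly to that value through
   ID->decl_map, with no new variable at all.  */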
static tree
initialize_inlined_parameters (id, args, fn)
  /* Figure out what the parameters are.  */
  parms = DECL_ARGUMENTS (fn);

  /* Start with no initializations whatsoever.  */
  init_stmts = NULL_TREE;

  /* Loop through the parameter declarations, replacing each with an
     equivalent VAR_DECL, appropriately initialized.  */
  for (p = parms, a = args; p; a = TREE_CHAIN (a), p = TREE_CHAIN (p))
      /* Find the initializer.  */
      value = TREE_VALUE (a);

      /* If the parameter is never assigned to, we may not need to
         create a new variable here at all.  Instead, we may be able
         to just use the argument value.  */
      if (TREE_READONLY (p)
          && !TREE_ADDRESSABLE (p)
          && !TREE_SIDE_EFFECTS (value))
        {
          /* Simplify the value, if possible.  */
          value = fold (decl_constant_value (value));

          /* We can't risk substituting complex expressions.  They
             might contain variables that will be assigned to later.
             Theoretically, we could check the expression to see if
             all of the variables that determine its value are
             read-only, but we don't bother.  */
          if (TREE_CONSTANT (value) || TREE_READONLY_DECL_P (value))
            {
              /* If this is a declaration, wrap it in a NOP_EXPR so that
                 we don't try to put the VALUE on the list of
                 BLOCK_VARS.  */
              if (DECL_P (value))
                value = build1 (NOP_EXPR, TREE_TYPE (value), value);

              splay_tree_insert (id->decl_map,
                                 (splay_tree_key) p,
                                 (splay_tree_value) value);
              continue;
            }
        }

      /* Make an equivalent VAR_DECL.  */
      var = copy_decl_for_inlining (p, fn, VARRAY_TREE (id->fns, 0));
      /* Register the VAR_DECL as the equivalent for the PARM_DECL;
         that way, when the PARM_DECL is encountered, it will be
         automatically replaced by the VAR_DECL.  */
      splay_tree_insert (id->decl_map,
                         (splay_tree_key) p,
                         (splay_tree_value) var);

      /* Declare this new variable.  */
      init_stmt = build_stmt (DECL_STMT, var);
      TREE_CHAIN (init_stmt) = init_stmts;
      init_stmts = init_stmt;

      /* Initialize this VAR_DECL from the equivalent argument.  If
         the argument is an object, created via a constructor or copy,
         this will not result in an extra copy: the TARGET_EXPR
         representing the argument will be bound to VAR, and the
         object will be constructed in VAR.  */
      if (! TYPE_NEEDS_CONSTRUCTING (TREE_TYPE (p)))
        DECL_INITIAL (var) = value;

      /* Even if P was TREE_READONLY, the new VAR should not be.
         In the original code, we would have constructed a
         temporary, and then the function body would have never
         changed the value of P.  However, now, we will be
         constructing VAR directly.  The constructor body may
         change its value multiple times as it is being
         constructed.  Therefore, it must not be TREE_READONLY;
         the back-end assumes that a TREE_READONLY variable is
         assigned to only once.  */
      TREE_READONLY (var) = 0;

      /* Build a run-time initialization.  */
      init_stmt = build_stmt (EXPR_STMT,
                              build (INIT_EXPR, TREE_TYPE (p),
                                     var, value));
      /* Add this initialization to the list.  Note that we want the
         declaration *after* the initialization because we are going
         to reverse all the initialization statements below.  */
      TREE_CHAIN (init_stmt) = init_stmts;
      init_stmts = init_stmt;

  /* The initialization statements have been built up in reverse
     order.  Straighten them out now.  */
  return nreverse (init_stmts);

/* Declare a return variable to replace the RESULT_DECL for the
   function we are calling.  An appropriate DECL_STMT is returned.
   The USE_STMT is filled in to contain a use of the declaration to
   indicate the return value of the function.  */
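
/* A sketch of the two cases handled below (illustrative only): for
   `int f ()' a fresh VAR_DECL replaces the RESULT_DECL and a DECL_STMT
   for it is returned; for `struct S f ()' (an aggregate return) no new
   variable is declared at all; references to the RESULT_DECL are
   instead redirected to the slot of the TARGET_EXPR already on
   ID->target_exprs, so the object is constructed directly in the
   caller's temporary.  */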
static tree
declare_return_variable (id, use_stmt)
     struct inline_data *id;
     tree *use_stmt;
{
  tree fn = VARRAY_TOP_TREE (id->fns);
  tree result = DECL_RESULT (fn);
  int aggregate_return_p;

  /* We don't need to do anything for functions that don't return
     anything.  */
  if (!result || VOID_TYPE_P (TREE_TYPE (result)))
    {
      *use_stmt = NULL_TREE;
      return NULL_TREE;
    }

  /* Figure out whether or not FN returns an aggregate.  */
  aggregate_return_p = IS_AGGR_TYPE (TREE_TYPE (result));

  /* If FN returns an aggregate then the caller will always create the
     temporary (using a TARGET_EXPR) and the call will be the
     initializing expression for the TARGET_EXPR.  If we were just to
     create a new VAR_DECL here, then the result of this function
     would be copied (bitwise) into the variable initialized by the
     TARGET_EXPR.  That's incorrect, so we must transform any
     references to the RESULT into references to the target.  */
  if (aggregate_return_p)
    {
      my_friendly_assert (VARRAY_ACTIVE_SIZE (id->target_exprs) != 0,
      var = TREE_OPERAND (VARRAY_TOP_TREE (id->target_exprs), 0);
        (same_type_ignoring_top_level_qualifiers_p (TREE_TYPE (var),
    }
  /* Otherwise, make an appropriate copy.  */
  else
    var = copy_decl_for_inlining (result, fn, VARRAY_TREE (id->fns, 0));

  /* Register the VAR_DECL as the equivalent for the RESULT_DECL; that
     way, when the RESULT_DECL is encountered, it will be
     automatically replaced by the VAR_DECL.  */
  splay_tree_insert (id->decl_map,
                     (splay_tree_key) result,
                     (splay_tree_value) var);

  /* Build the USE_STMT.  */
  *use_stmt = build_stmt (EXPR_STMT, var);

  /* Build the declaration statement if FN does not return an
     aggregate.  */
  if (!aggregate_return_p)
    return build_stmt (DECL_STMT, var);
  /* If FN does return an aggregate, there's no need to declare the
     return variable; we're using a variable in our caller's frame.  */
  else
    return NULL_TREE;
}

/* Returns non-zero if FN is a function that can be inlined.  */

static int
inlinable_function_p (fn, id)
  /* If we've already decided this function shouldn't be inlined,
     there's no need to check again.  */
  if (DECL_UNINLINABLE (fn))
    return 0;

  /* Assume it is not inlinable.  */
  inlinable = 0;

  /* If we're not inlining things, then nothing is inlinable.  */
  if (!flag_inline_trees)
    ;
  /* If the function was not declared `inline', then we don't inline
     it.  */
  else if (!DECL_INLINE (fn))
    ;
  /* We can't inline varargs functions.  */
  else if (varargs_function_p (fn))
    ;
  /* We can't inline functions that are too big.  */
  else if (DECL_NUM_STMTS (fn) * INSNS_PER_STMT > MAX_INLINE_INSNS)
    ;
  /* All is well.  We can inline this function.  Traditionally, GCC
     has refused to inline functions using alloca, or functions whose
     values are returned in a PARALLEL, and a few other such obscure
     conditions.  We are not equally constrained at the tree level.  */
  else
    inlinable = 1;

  /* Squirrel away the result so that we don't have to check again.  */
  DECL_UNINLINABLE (fn) = !inlinable;

  /* Even if this function is not itself too big to inline, it might
     be that we've done so much inlining already that we don't want to
     risk inlining any more.  */
  if ((DECL_NUM_STMTS (fn) + id->inlined_stmts) * INSNS_PER_STMT
      > MAX_INLINE_INSNS)
    inlinable = 0;

  /* We can inline a template instantiation only if it's fully
     instantiated.  */
  if (inlinable
      && DECL_TEMPLATE_INFO (fn)
      && TI_PENDING_TEMPLATE_FLAG (DECL_TEMPLATE_INFO (fn)))
    {
      fn = instantiate_decl (fn, /*defer_ok=*/0);
      inlinable = !TI_PENDING_TEMPLATE_FLAG (DECL_TEMPLATE_INFO (fn));
    }

  /* If we don't have the function body available, we can't inline
     it.  */
  if (!DECL_SAVED_TREE (fn))
    inlinable = 0;
  /* Don't do recursive inlining, either.  We don't record this in
     DECL_UNINLINABLE; we may be able to inline this function later.  */

  for (i = 0; i < VARRAY_ACTIVE_SIZE (id->fns); ++i)
    if (VARRAY_TREE (id->fns, i) == fn)
      inlinable = 0;

  if (inlinable && DECL_LANG_SPECIFIC (fn) && DECL_INLINED_FNS (fn))
    {
      tree inlined_fns = DECL_INLINED_FNS (fn);

      for (j = 0; j < TREE_VEC_LENGTH (inlined_fns); ++j)
        if (TREE_VEC_ELT (inlined_fns, j) == VARRAY_TREE (id->fns, 0))
          inlinable = 0;
    }

  /* Return the result.  */
  return inlinable;
/* If *TP is a CALL_EXPR, replace it with its inline expansion.  */

static tree
expand_call_inline (tp, walk_subtrees, data)
  /* See what we've got.  */
  id = (inline_data *) data;
  /* Recurse, but letting recursive invocations know that we are
     inside the body of a TARGET_EXPR.  */
  if (TREE_CODE (*tp) == TARGET_EXPR)
    {
      int i, len = first_rtl_op (TARGET_EXPR);

      /* We're walking our own subtrees.  */
      *walk_subtrees = 0;

      /* Push *TP on the stack of pending TARGET_EXPRs.  */
      VARRAY_PUSH_TREE (id->target_exprs, *tp);

      /* Actually walk over them.  This loop is the body of
         walk_tree, omitting the case where the TARGET_EXPR
         itself is handled.  */
      for (i = 0; i < len; ++i)
        {
          if (i == 2)
            ++id->in_target_cleanup_p;
          walk_tree (&TREE_OPERAND (*tp, i), expand_call_inline, data,
                     id->tree_pruner);
          if (i == 2)
            --id->in_target_cleanup_p;
        }

      /* We're done with this TARGET_EXPR now.  */
      VARRAY_POP (id->target_exprs);

      return NULL_TREE;
    }
  /* Because types were not copied in copy_body, CALL_EXPRs beneath
     them should not be expanded.  This can happen if the type is a
     dynamic array type, for example.  */

  /* From here on, we're only interested in CALL_EXPRs.  */
  if (TREE_CODE (t) != CALL_EXPR)
    return NULL_TREE;

  /* First, see if we can figure out what function is being called.
     If we cannot, then there is no hope of inlining the function.  */
  fn = get_callee_fndecl (t);
  if (!fn)
    return NULL_TREE;

  /* Don't try to inline functions that are not well-suited to
     inlining.  */
  if (!inlinable_function_p (fn, id))
    return NULL_TREE;
  /* Set the current filename and line number to the function we are
     inlining so that when we create new _STMT nodes here they get
     line numbers corresponding to the function we are calling.  We
     wrap the whole inlined body in an EXPR_WITH_FILE_AND_LINE as well
     because individual statements don't record the filename.  */
  push_srcloc (fn->decl.filename, fn->decl.linenum);
  /* Build a statement-expression containing code to initialize the
     arguments, the actual inline expansion of the body, and a label
     for the return statements within the function to jump to.  The
     type of the statement expression is the return type of the
     function call.  */
  expr = build_min (STMT_EXPR, TREE_TYPE (TREE_TYPE (fn)), NULL_TREE);
  /* Local declarations will be replaced by their equivalents in this
     map.  */
  id->decl_map = splay_tree_new (splay_tree_compare_pointers,
                                 NULL, NULL);

  /* Initialize the parameters.  */
  arg_inits = initialize_inlined_parameters (id, TREE_OPERAND (t, 1), fn);
  /* Expand any inlined calls in the initializers.  Do this before we
     push FN on the stack of functions we are inlining; we want to
     inline calls to FN that appear in the initializers for the
     parameters.  */
  expand_calls_inline (&arg_inits, id);
  /* And add them to the tree.  */
  STMT_EXPR_STMT (expr) = chainon (STMT_EXPR_STMT (expr), arg_inits);
  /* Record the function we are about to inline so that we can avoid
     recursing into it.  */
  VARRAY_PUSH_TREE (id->fns, fn);

  /* Record the function we are about to inline if optimize_function
     has not been called on it yet and we don't have it in the list.  */
  if (DECL_LANG_SPECIFIC (fn) && !DECL_INLINED_FNS (fn))
    {
      for (i = VARRAY_ACTIVE_SIZE (id->inlined_fns) - 1; i >= 0; i--)
        if (VARRAY_TREE (id->inlined_fns, i) == fn)
          break;
      if (i < 0)
        VARRAY_PUSH_TREE (id->inlined_fns, fn);
    }

  /* Return statements in the function body will be replaced by jumps
     to the RET_LABEL.  */
  id->ret_label = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);
  DECL_CONTEXT (id->ret_label) = VARRAY_TREE (id->fns, 0);
  /* Create a block to put the parameters in.  We have to do this
     after the parameters have been remapped because remapping
     parameters is different from remapping ordinary variables.  */
  scope_stmt = build_stmt (SCOPE_STMT, DECL_INITIAL (fn));
  SCOPE_BEGIN_P (scope_stmt) = 1;
  SCOPE_NO_CLEANUPS_P (scope_stmt) = 1;
  remap_block (scope_stmt, DECL_ARGUMENTS (fn), id);
  TREE_CHAIN (scope_stmt) = STMT_EXPR_STMT (expr);
  STMT_EXPR_STMT (expr) = scope_stmt;

  /* Tell the debugging backends that this block represents the
     outermost scope of the inlined function.  */
  if (SCOPE_STMT_BLOCK (scope_stmt))
    BLOCK_ABSTRACT_ORIGIN (SCOPE_STMT_BLOCK (scope_stmt)) = DECL_ORIGIN (fn);

  /* Declare the return variable for the function.  */
  STMT_EXPR_STMT (expr)
    = chainon (STMT_EXPR_STMT (expr),
               declare_return_variable (id, &use_stmt));
  /* After we've initialized the parameters, we insert the body of the
     function itself.  */
  inlined_body = &STMT_EXPR_STMT (expr);
  while (*inlined_body)
    inlined_body = &TREE_CHAIN (*inlined_body);
  *inlined_body = copy_body (id);
  /* Close the block for the parameters.  */
  scope_stmt = build_stmt (SCOPE_STMT, DECL_INITIAL (fn));
  SCOPE_NO_CLEANUPS_P (scope_stmt) = 1;
  my_friendly_assert (DECL_INITIAL (fn)
                      && TREE_CODE (DECL_INITIAL (fn)) == BLOCK,
  remap_block (scope_stmt, NULL_TREE, id);
  STMT_EXPR_STMT (expr)
    = chainon (STMT_EXPR_STMT (expr), scope_stmt);
  /* After the body of the function comes the RET_LABEL.  This must come
     before we evaluate the returned value below, because that evaluation
     may cause RTL to be generated.  */
  STMT_EXPR_STMT (expr)
    = chainon (STMT_EXPR_STMT (expr),
               build_stmt (LABEL_STMT, id->ret_label));

  /* Finally, mention the returned value so that the value of the
     statement-expression is the returned value of the function.  */
  STMT_EXPR_STMT (expr) = chainon (STMT_EXPR_STMT (expr), use_stmt);

  splay_tree_delete (id->decl_map);
  /* The new expression has side-effects if the old one did.  */
  TREE_SIDE_EFFECTS (expr) = TREE_SIDE_EFFECTS (t);

  /* Replace the call by the inlined body.  Wrap it in an
     EXPR_WITH_FILE_LOCATION so that we'll get debugging line notes
     pointing to the right place.  */
  chain = TREE_CHAIN (*tp);
  *tp = build_expr_wfl (expr, DECL_SOURCE_FILE (fn), DECL_SOURCE_LINE (fn),
                        /*col=*/0);
  EXPR_WFL_EMIT_LINE_NOTE (*tp) = 1;
  TREE_CHAIN (*tp) = chain;
  /* If the value of the new expression is ignored, that's OK.  We
     don't warn about this for CALL_EXPRs, so we shouldn't warn about
     the equivalent inlined version either.  */
  TREE_USED (*tp) = 1;
  /* Our function now has more statements than it did before.  */
  DECL_NUM_STMTS (VARRAY_TREE (id->fns, 0)) += DECL_NUM_STMTS (fn);
  id->inlined_stmts += DECL_NUM_STMTS (fn);

  /* Recurse into the body of the just inlined function.  */
  expand_calls_inline (inlined_body, id);
  VARRAY_POP (id->fns);
  /* If we've returned to the top level, clear out the record of how
     much inlining has been done.  */
  if (VARRAY_ACTIVE_SIZE (id->fns) == id->first_inlined_fn)
    id->inlined_stmts = 0;

  /* Don't walk into subtrees.  We've already handled them above.  */
  *walk_subtrees = 0;

  /* Keep iterating.  */
  return NULL_TREE;
/* Walk over the entire tree *TP, replacing CALL_EXPRs with inline
   expansions as appropriate.  */

static void
expand_calls_inline (tp, id)

  /* Search through *TP, replacing all calls to inline functions by
     appropriate equivalents.  Use walk_tree in no-duplicates mode
     to avoid exponential time complexity.  (We can't just use
     walk_tree_without_duplicates, because of the special TARGET_EXPR
     handling in expand_call_inline.)  The hash table is set up in
     optimize_function.  */
  walk_tree (tp, expand_call_inline, id, id->tree_pruner);
/* Optimize the body of FN.  */

void
optimize_function (fn)

  /* While in this function, we may choose to go off and compile
     another function.  For example, we might instantiate a function
     in the hopes of inlining it.  Normally, that wouldn't trigger any
     actual RTL code-generation -- but it will if the template is
     actually needed.  (For example, if its address is taken, or if
     some other function already refers to the template.)  If
     code-generation occurs, then garbage collection will occur, so we
     must protect ourselves, just as we do while building up the body
     of the function.  */
  ggc_push_context ();
  /* Expand calls to inline functions.  */
  if (flag_inline_trees)
    {
      struct saved_scope *s;

      memset (&id, 0, sizeof (id));
      /* Don't allow recursion into FN.  */
      VARRAY_TREE_INIT (id.fns, 32, "fns");
      VARRAY_PUSH_TREE (id.fns, fn);
      /* Or any functions that aren't finished yet.  */
      if (current_function_decl)
        {
          VARRAY_PUSH_TREE (id.fns, current_function_decl);
          prev_fn = current_function_decl;
        }
      for (s = scope_chain; s; s = s->prev)
        if (s->function_decl && s->function_decl != prev_fn)
          {
            VARRAY_PUSH_TREE (id.fns, s->function_decl);
            prev_fn = s->function_decl;
          }
      /* Create the stack of TARGET_EXPRs.  */
      VARRAY_TREE_INIT (id.target_exprs, 32, "target_exprs");

      /* Create the list of functions this call will inline.  */
      VARRAY_TREE_INIT (id.inlined_fns, 32, "inlined_fns");

      /* Keep track of the low-water mark, i.e., the point where
         the first real inlining is represented in ID.FNS.  */
      id.first_inlined_fn = VARRAY_ACTIVE_SIZE (id.fns);
      /* Replace all calls to inline functions with the bodies of those
         functions.  */
      id.tree_pruner = htab_create (37, htab_hash_pointer,
                                    htab_eq_pointer, NULL);
      expand_calls_inline (&DECL_SAVED_TREE (fn), &id);

      htab_delete (id.tree_pruner);
      VARRAY_FREE (id.fns);
      VARRAY_FREE (id.target_exprs);
      if (DECL_LANG_SPECIFIC (fn))
        {
          tree ifn = make_tree_vec (VARRAY_ACTIVE_SIZE (id.inlined_fns));

          memcpy (&TREE_VEC_ELT (ifn, 0), &VARRAY_TREE (id.inlined_fns, 0),
                  VARRAY_ACTIVE_SIZE (id.inlined_fns) * sizeof (tree));
          DECL_INLINED_FNS (fn) = ifn;
        }
      VARRAY_FREE (id.inlined_fns);
    }

  /* Undo the call to ggc_push_context above.  */
  ggc_pop_context ();
/* Called from calls_setjmp_p via walk_tree.  */

static tree
calls_setjmp_r (tp, walk_subtrees, data)
     tree *tp;
     int *walk_subtrees ATTRIBUTE_UNUSED;
     void *data ATTRIBUTE_UNUSED;
{
  /* We're only interested in FUNCTION_DECLs.  */
  if (TREE_CODE (*tp) != FUNCTION_DECL)
    return NULL_TREE;

  return setjmp_call_p (*tp) ? *tp : NULL_TREE;
}
/* Returns non-zero if FN calls `setjmp' or some other function that
   can return more than once.  This function is conservative; it may
   occasionally return a non-zero value even when FN does not actually
   call `setjmp'.  */

int
calls_setjmp_p (fn)
     tree fn;
{
  return walk_tree_without_duplicates (&DECL_SAVED_TREE (fn),
                                       calls_setjmp_r, NULL) != NULL_TREE;
}
/* CLONED_PARM is a copy of PARM, generated for a cloned constructor
   or destructor.  Update it to ensure that the source-position for
   the cloned parameter matches that for the original, and that the
   debugging generation code will be able to find the original PARM.  */

static void
update_cloned_parm (parm, cloned_parm)
  DECL_ABSTRACT_ORIGIN (cloned_parm) = parm;

  /* We may have taken its address.  */
  TREE_ADDRESSABLE (cloned_parm) = TREE_ADDRESSABLE (parm);

  /* The definition might have different constness.  */
  TREE_READONLY (cloned_parm) = TREE_READONLY (parm);

  TREE_USED (cloned_parm) = TREE_USED (parm);

  /* The name may have changed from the declaration.  */
  DECL_NAME (cloned_parm) = DECL_NAME (parm);
  DECL_SOURCE_FILE (cloned_parm) = DECL_SOURCE_FILE (parm);
  DECL_SOURCE_LINE (cloned_parm) = DECL_SOURCE_LINE (parm);
/* FN is a function that has a complete body.  Clone the body as
   necessary.  Returns non-zero if there's no longer any need to
   process the main body.  */
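
/* For instance (illustrative only): a constructor `S::S (int)' may
   have complete-object and base-object clones.  Each clone that
   follows FN on the TYPE_METHODS list receives a copy of FN's body in
   which the in-charge parameter, if there is one, has been replaced
   by a constant appropriate to that clone; the loop below sets up
   exactly that mapping before copying the body.  */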
int
maybe_clone_body (fn)

  /* We only clone constructors and destructors.  */
  if (!DECL_MAYBE_IN_CHARGE_CONSTRUCTOR_P (fn)
      && !DECL_MAYBE_IN_CHARGE_DESTRUCTOR_P (fn))
    return 0;

  /* Emit the DWARF1 abstract instance.  */
  note_deferral_of_defined_inline_function (fn);

  /* We know that any clones immediately follow FN in the TYPE_METHODS
     list.  */
  for (clone = TREE_CHAIN (fn);
       clone && DECL_CLONED_FUNCTION_P (clone);
       clone = TREE_CHAIN (clone), first = 0)
      /* Update CLONE's source position information to match FN's.  */
      DECL_SOURCE_FILE (clone) = DECL_SOURCE_FILE (fn);
      DECL_SOURCE_LINE (clone) = DECL_SOURCE_LINE (fn);
      DECL_INLINE (clone) = DECL_INLINE (fn);
      DECL_DECLARED_INLINE_P (clone) = DECL_DECLARED_INLINE_P (fn);
      DECL_COMDAT (clone) = DECL_COMDAT (fn);
      DECL_WEAK (clone) = DECL_WEAK (fn);
      DECL_ONE_ONLY (clone) = DECL_ONE_ONLY (fn);
      DECL_SECTION_NAME (clone) = DECL_SECTION_NAME (fn);
      DECL_USE_TEMPLATE (clone) = DECL_USE_TEMPLATE (fn);
      DECL_EXTERNAL (clone) = DECL_EXTERNAL (fn);
      DECL_INTERFACE_KNOWN (clone) = DECL_INTERFACE_KNOWN (fn);
      DECL_NOT_REALLY_EXTERN (clone) = DECL_NOT_REALLY_EXTERN (fn);
      TREE_PUBLIC (clone) = TREE_PUBLIC (fn);
      /* Adjust the parameter names and locations.  */
      parm = DECL_ARGUMENTS (fn);
      clone_parm = DECL_ARGUMENTS (clone);
      /* Update the `this' parameter, which is always first.
         Sometimes, we end up updating the `this' parameter twice
         because we process it again in the loop below.  That is
         harmless.  */
      update_cloned_parm (parm, clone_parm);
      if (DECL_HAS_IN_CHARGE_PARM_P (fn))
        parm = TREE_CHAIN (parm);
      if (DECL_HAS_VTT_PARM_P (fn))
        parm = TREE_CHAIN (parm);
      if (DECL_HAS_VTT_PARM_P (clone))
        clone_parm = TREE_CHAIN (clone_parm);
      for (; parm && clone_parm;
           parm = TREE_CHAIN (parm), clone_parm = TREE_CHAIN (clone_parm))
        {
          /* Update this parameter.  */
          update_cloned_parm (parm, clone_parm);
          /* We should only give unused information for one clone.  */
          if (!first)
            TREE_USED (clone_parm) = 1;
        }
      /* Start processing the function.  */
      push_to_top_level ();
      start_function (NULL_TREE, clone, NULL_TREE, SF_PRE_PARSED);

      /* Just clone the body, as if we were making an inline call.
         But, remap the parameters in the callee to the parameters of
         the caller.  If there's an in-charge parameter, map it to an
         appropriate constant.  */
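
      /* A sketch of the decl_map built here (illustrative only): for a
         destructor `S::~S ()' with an in-charge parameter, the clone
         for the complete-object destructor gets mappings along the
         lines of

           this       -> the clone's own `this' parameter
           __in_chrg  -> a constant from in_charge_arg_for_name
           VTT parm   -> the clone's VTT parameter, or a null pointer

         after which copy_body duplicates FN's body with those
         substitutions applied.  */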
      memset (&id, 0, sizeof (id));
      VARRAY_TREE_INIT (id.fns, 2, "fns");
      VARRAY_PUSH_TREE (id.fns, clone);
      VARRAY_PUSH_TREE (id.fns, fn);
      /* Cloning is treated slightly differently from inlining.  Set
         CLONING_P so that it's clear which operation we're performing.  */
      id.cloning_p = true;
      /* Remap the parameters.  */
      id.decl_map = splay_tree_new (splay_tree_compare_pointers,
                                    NULL, NULL);
      for (parmno = 0,
             parm = DECL_ARGUMENTS (fn),
             clone_parm = DECL_ARGUMENTS (clone);
           parm;
           ++parmno,
             parm = TREE_CHAIN (parm))
          /* Map the in-charge parameter to an appropriate constant.  */
          if (DECL_HAS_IN_CHARGE_PARM_P (fn) && parmno == 1)
            {
              tree in_charge;
              in_charge = in_charge_arg_for_name (DECL_NAME (clone));
              splay_tree_insert (id.decl_map,
                                 (splay_tree_key) parm,
                                 (splay_tree_value) in_charge);
            }
          else if (DECL_ARTIFICIAL (parm)
                   && DECL_NAME (parm) == vtt_parm_identifier)
            {
              /* For a subobject constructor or destructor, the next
                 argument is the VTT parameter.  Remap the VTT_PARM
                 from the CLONE to this parameter.  */
              if (DECL_HAS_VTT_PARM_P (clone))
                {
                  DECL_ABSTRACT_ORIGIN (clone_parm) = parm;
                  splay_tree_insert (id.decl_map,
                                     (splay_tree_key) parm,
                                     (splay_tree_value) clone_parm);
                  clone_parm = TREE_CHAIN (clone_parm);
                }
              /* Otherwise, map the VTT parameter to `NULL'.  */
              else
                splay_tree_insert (id.decl_map,
                                   (splay_tree_key) parm,
                                   (splay_tree_value) null_pointer_node);
            }
          /* Map other parameters to their equivalents in the cloned
             function.  */
          else
            {
              splay_tree_insert (id.decl_map,
                                 (splay_tree_key) parm,
                                 (splay_tree_value) clone_parm);
              clone_parm = TREE_CHAIN (clone_parm);
            }
        }

      /* Actually copy the body.  */
      TREE_CHAIN (DECL_SAVED_TREE (clone)) = copy_body (&id);

      /* There are as many statements in the clone as in the
         original function.  */
      DECL_NUM_STMTS (clone) = DECL_NUM_STMTS (fn);

      splay_tree_delete (id.decl_map);
      VARRAY_FREE (id.fns);
      /* Now, expand this function into RTL, if appropriate.  */
      finish_function (0);
      BLOCK_ABSTRACT_ORIGIN (DECL_INITIAL (clone)) = DECL_INITIAL (fn);
      expand_body (clone);
      pop_from_top_level ();
  /* We don't need to process the original function any further.  */
  return 1;
}