/* Control and data flow functions for trees.
   Copyright 2001, 2002, 2003 Free Software Foundation, Inc.
   Contributed by Alexandre Oliva <aoliva@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to
the Free Software Foundation, 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "toplev.h"
#include "tree.h"
#include "tree-inline.h"
#include "flags.h"
#include "input.h"
#include "insn-config.h"
#include "integrate.h"
#include "varray.h"
#include "hashtab.h"
#include "splay-tree.h"
#include "langhooks.h"
#include "cgraph.h"

/* This should eventually be generalized to other languages, but this
   would require a shared function-as-trees infrastructure.  */
#ifndef INLINER_FOR_JAVA
#else /* INLINER_FOR_JAVA */
#include "java-tree.h"
#endif /* INLINER_FOR_JAVA */

/* 0 if we should not perform inlining.
   1 if we should expand function calls inline at the tree level.
   2 if we should consider *all* functions to be inline
   candidates.  */

int flag_inline_trees = 0;

/* To do:

   o In order to make inlining-on-trees work, we pessimized
     function-local static constants.  In particular, they are now
     always output, even when not addressed.  Fix this by treating
     function-local static constants just like global static
     constants; the back-end already knows not to output them if they
     are not needed.

   o Provide heuristics to clamp inlining of recursive template
     calls?  */

/* Data required for function inlining.  */

typedef struct inline_data
{
  /* A stack of the functions we are inlining.  For example, if we are
     compiling `f', which calls `g', which calls `h', and we are
     inlining the body of `h', the stack will contain `h', followed
     by `g', followed by `f'.  The first few elements of the stack may
     contain other functions that we know we should not recurse into,
     even though they are not directly being inlined.  */
  varray_type fns;
  /* The index of the first element of FNS that really represents an
     inlined function.  */
  unsigned first_inlined_fn;
  /* The label to jump to when a return statement is encountered.  If
     this value is NULL, then return statements will simply be
     remapped as return statements, rather than as jumps.  */
  tree ret_label;
  /* The map from local declarations in the inlined function to
     equivalents in the function into which it is being inlined.  */
  splay_tree decl_map;
  /* Nonzero if we are currently within the cleanup for a
     TARGET_EXPR.  */
  int in_target_cleanup_p;
  /* A list of the functions the current function has inlined.  */
  varray_type inlined_fns;
  /* The approximate number of instructions we have inlined in the
     current call stack.  */
  int inlined_insns;
  /* We use the same mechanism to build clones that we do to perform
     inlining.  However, there are a few places where we need to
     distinguish between those two situations.  This flag is true if
     we are cloning, rather than inlining.  */
  bool cloning_p;
  /* Hash table used to prevent walk_tree from visiting the same node
     umpteen million times.  */
  htab_t tree_pruner;
  /* Decl of the function we are inlining into.  */
  tree decl;
  /* Decl of the function whose body is currently being scanned; it
     changes as we recurse into inlined bodies.  */
  tree current_decl;
} inline_data;

static tree declare_return_variable (inline_data *, tree, tree *);
static tree copy_body_r (tree *, int *, void *);
static tree copy_body (inline_data *);
static tree expand_call_inline (tree *, int *, void *);
static void expand_calls_inline (tree *, inline_data *);
static bool inlinable_function_p (tree);
static tree remap_decl (tree, inline_data *);
static tree remap_type (tree, inline_data *);
#ifndef INLINER_FOR_JAVA
static tree initialize_inlined_parameters (inline_data *, tree, tree);
static void remap_block (tree, tree, inline_data *);
static void copy_scope_stmt (tree *, int *, inline_data *);
#else /* INLINER_FOR_JAVA */
static tree initialize_inlined_parameters (inline_data *, tree, tree, tree);
static void remap_block (tree *, tree, inline_data *);
static tree add_stmt_to_compound (tree, tree, tree);
#endif /* INLINER_FOR_JAVA */
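
/* Overview: optimize_inline_calls is the entry point applied to a
   function body in tree form.  It walks the body via
   expand_calls_inline and expand_call_inline, replacing each
   inlinable CALL_EXPR with a copy of the callee's saved tree produced
   by copy_body.  The remap_* routines translate the callee's local
   declarations, types and blocks into equivalents in the caller,
   using the decl_map splay tree carried in the inline_data structure;
   clone_body reuses the same machinery to build clones.  */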

/* Remap DECL during the copying of the BLOCK tree for the function.  */

static tree
remap_decl (tree decl, inline_data *id)
{
  splay_tree_node n;
  tree fn;

  /* We only remap local variables in the current function.  */
  fn = VARRAY_TOP_TREE (id->fns);
  if (! (*lang_hooks.tree_inlining.auto_var_in_fn_p) (decl, fn))
    return NULL_TREE;

  /* See if we have remapped this declaration.  */
  n = splay_tree_lookup (id->decl_map, (splay_tree_key) decl);

  /* If we didn't already have an equivalent for this declaration,
     create one now.  */
  if (!n)
    {
      tree t;

      /* Make a copy of the variable or label.  */
      t = copy_decl_for_inlining (decl, fn, VARRAY_TREE (id->fns, 0));

      /* Remap types, if necessary.  */
      TREE_TYPE (t) = remap_type (TREE_TYPE (t), id);
      if (TREE_CODE (t) == TYPE_DECL)
        DECL_ORIGINAL_TYPE (t) = remap_type (DECL_ORIGINAL_TYPE (t), id);
      else if (TREE_CODE (t) == PARM_DECL)
        DECL_ARG_TYPE_AS_WRITTEN (t)
          = remap_type (DECL_ARG_TYPE_AS_WRITTEN (t), id);

      /* Remap sizes as necessary.  */
      walk_tree (&DECL_SIZE (t), copy_body_r, id, NULL);
      walk_tree (&DECL_SIZE_UNIT (t), copy_body_r, id, NULL);

#ifndef INLINER_FOR_JAVA
      if (! DECL_NAME (t) && TREE_TYPE (t)
          && (*lang_hooks.tree_inlining.anon_aggr_type_p) (TREE_TYPE (t)))
        {
          /* For a VAR_DECL of anonymous type, we must also copy the
             member VAR_DECLS here and rechain the
             DECL_ANON_UNION_ELEMS.  */
          tree members = NULL;
          tree src;

          for (src = DECL_ANON_UNION_ELEMS (t); src;
               src = TREE_CHAIN (src))
            {
              tree member = remap_decl (TREE_VALUE (src), id);

              if (TREE_PURPOSE (src))
                abort ();
              members = tree_cons (NULL, member, members);
            }
          DECL_ANON_UNION_ELEMS (t) = nreverse (members);
        }
#endif /* not INLINER_FOR_JAVA */

      /* Remember it, so that if we encounter this local entity again
         we can reuse this copy.  */
      n = splay_tree_insert (id->decl_map,
                             (splay_tree_key) decl,
                             (splay_tree_value) t);
    }

  return (tree) n->value;
}
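
/* Remap TYPE in the context of the inlining described by ID.  Types
   that are not variably modified are returned unchanged (after being
   recorded as their own mapping); variably modified types are copied,
   and their component types and sizes are remapped as well.  */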
static tree
remap_type (tree type, inline_data *id)
{
  splay_tree_node node;
  tree new, t;

  if (type == NULL)
    return type;

  /* See if we have remapped this type.  */
  node = splay_tree_lookup (id->decl_map, (splay_tree_key) type);
  if (node)
    return (tree) node->value;

  /* The type only needs remapping if it's variably modified.  */
  if (! variably_modified_type_p (type))
    {
      splay_tree_insert (id->decl_map, (splay_tree_key) type,
                         (splay_tree_value) type);
      return type;
    }

  /* We do need a copy.  Build and register it now.  */
  new = copy_node (type);
  splay_tree_insert (id->decl_map, (splay_tree_key) type,
                     (splay_tree_value) new);

  /* This is a new type, not a copy of an old type.  Need to reassociate
     variants.  We can handle everything except the main variant lazily.  */
  t = TYPE_MAIN_VARIANT (type);
  if (type != t)
    {
      t = remap_type (t, id);
      TYPE_MAIN_VARIANT (new) = t;
      TYPE_NEXT_VARIANT (new) = TYPE_MAIN_VARIANT (t);
      TYPE_NEXT_VARIANT (t) = new;
    }
  else
    {
      TYPE_MAIN_VARIANT (new) = new;
      TYPE_NEXT_VARIANT (new) = NULL;
    }

  /* Lazily create pointer and reference types.  */
  TYPE_POINTER_TO (new) = NULL;
  TYPE_REFERENCE_TO (new) = NULL;

  switch (TREE_CODE (new))
    {
    case INTEGER_TYPE:
    case REAL_TYPE:
    case ENUMERAL_TYPE:
    case BOOLEAN_TYPE:
    case CHAR_TYPE:
      t = TYPE_MIN_VALUE (new);
      if (t && TREE_CODE (t) != INTEGER_CST)
        walk_tree (&TYPE_MIN_VALUE (new), copy_body_r, id, NULL);
      t = TYPE_MAX_VALUE (new);
      if (t && TREE_CODE (t) != INTEGER_CST)
        walk_tree (&TYPE_MAX_VALUE (new), copy_body_r, id, NULL);
      return new;

    case POINTER_TYPE:
      TREE_TYPE (new) = t = remap_type (TREE_TYPE (new), id);
      if (TYPE_MODE (new) == ptr_mode)
        TYPE_POINTER_TO (t) = new;
      return new;

    case REFERENCE_TYPE:
      TREE_TYPE (new) = t = remap_type (TREE_TYPE (new), id);
      if (TYPE_MODE (new) == ptr_mode)
        TYPE_REFERENCE_TO (t) = new;
      return new;

    case METHOD_TYPE:
    case FUNCTION_TYPE:
      TREE_TYPE (new) = remap_type (TREE_TYPE (new), id);
      walk_tree (&TYPE_ARG_TYPES (new), copy_body_r, id, NULL);
      return new;

    case ARRAY_TYPE:
      TREE_TYPE (new) = remap_type (TREE_TYPE (new), id);
      TYPE_DOMAIN (new) = remap_type (TYPE_DOMAIN (new), id);
      break;

    case RECORD_TYPE:
    case UNION_TYPE:
    case QUAL_UNION_TYPE:
      walk_tree (&TYPE_FIELDS (new), copy_body_r, id, NULL);
      break;

    default:
      /* Shouldn't have been thought variable sized.  */
      abort ();
    }

  walk_tree (&TYPE_SIZE (new), copy_body_r, id, NULL);
  walk_tree (&TYPE_SIZE_UNIT (new), copy_body_r, id, NULL);

  return new;
}

#ifndef INLINER_FOR_JAVA
/* Copy the SCOPE_STMT_BLOCK associated with SCOPE_STMT to contain
   remapped versions of the variables therein.  And hook the new block
   into the block-tree.  If non-NULL, DECLS are the declarations to
   use instead of the BLOCK_VARS in the old block.  */
#else /* INLINER_FOR_JAVA */
/* Copy the BLOCK to contain remapped versions of the variables
   therein.  And hook the new block into the block-tree.  */
#endif /* INLINER_FOR_JAVA */

static void
#ifndef INLINER_FOR_JAVA
remap_block (tree scope_stmt, tree decls, inline_data *id)
#else /* INLINER_FOR_JAVA */
remap_block (tree *block, tree decls, inline_data *id)
#endif /* INLINER_FOR_JAVA */
{
#ifndef INLINER_FOR_JAVA
  /* We cannot do this in the cleanup for a TARGET_EXPR since we do
     not know whether or not expand_expr will actually write out the
     code we put there.  If it does not, then we'll have more BLOCKs
     than block-notes, and things will go awry.  At some point, we
     should make the back-end handle BLOCK notes in a tidier way,
     without requiring a strict correspondence to the block-tree; then
     this check can go.  */
  if (id->in_target_cleanup_p)
    {
      SCOPE_STMT_BLOCK (scope_stmt) = NULL_TREE;
      return;
    }

  /* If this is the beginning of a scope, remap the associated BLOCK.  */
  if (SCOPE_BEGIN_P (scope_stmt) && SCOPE_STMT_BLOCK (scope_stmt))
    {
      tree old_block;
      tree new_block;
      tree old_var;
      tree fn;

      /* Make the new block.  */
      old_block = SCOPE_STMT_BLOCK (scope_stmt);
      new_block = make_node (BLOCK);
      TREE_USED (new_block) = TREE_USED (old_block);
      BLOCK_ABSTRACT_ORIGIN (new_block) = old_block;
      SCOPE_STMT_BLOCK (scope_stmt) = new_block;

      /* Remap its variables.  */
      for (old_var = decls ? decls : BLOCK_VARS (old_block);
           old_var;
           old_var = TREE_CHAIN (old_var))
        {
          tree new_var;

          /* Remap the variable.  */
          new_var = remap_decl (old_var, id);
          /* If we didn't remap this variable, then we can't mess with
             its TREE_CHAIN.  If we remapped this variable to
             something other than a declaration (say, if we mapped it
             to a constant), then we must similarly omit any mention
             of it here.  */
          if (!new_var || !DECL_P (new_var))
            ;
          else
            {
              TREE_CHAIN (new_var) = BLOCK_VARS (new_block);
              BLOCK_VARS (new_block) = new_var;
            }
        }
      /* We put the BLOCK_VARS in reverse order; fix that now.  */
      BLOCK_VARS (new_block) = nreverse (BLOCK_VARS (new_block));
      fn = VARRAY_TREE (id->fns, 0);
      if (id->cloning_p)
        /* We're building a clone; DECL_INITIAL is still
           error_mark_node, and current_binding_level is the parm
           binding level.  */
        (*lang_hooks.decls.insert_block) (new_block);
      else
        {
          /* Attach this new block after the DECL_INITIAL block for the
             function into which this block is being inlined.  In
             rest_of_compilation we will straighten out the BLOCK tree.  */
          tree *first_block;
          if (DECL_INITIAL (fn))
            first_block = &BLOCK_CHAIN (DECL_INITIAL (fn));
          else
            first_block = &DECL_INITIAL (fn);
          BLOCK_CHAIN (new_block) = *first_block;
          *first_block = new_block;
        }
      /* Remember the remapped block.  */
      splay_tree_insert (id->decl_map,
                         (splay_tree_key) old_block,
                         (splay_tree_value) new_block);
    }
  /* If this is the end of a scope, set the SCOPE_STMT_BLOCK to be the
     remapped block.  */
  else if (SCOPE_END_P (scope_stmt) && SCOPE_STMT_BLOCK (scope_stmt))
    {
      splay_tree_node n;

      /* Find this block in the table of remapped things.  */
      n = splay_tree_lookup (id->decl_map,
                             (splay_tree_key) SCOPE_STMT_BLOCK (scope_stmt));
      if (! n)
        abort ();
      SCOPE_STMT_BLOCK (scope_stmt) = (tree) n->value;
    }
#else /* INLINER_FOR_JAVA */
  tree old_block;
  tree new_block;
  tree old_var;
  tree fn;

  /* Make the new block.  */
  old_block = *block;
  new_block = make_node (BLOCK);
  TREE_USED (new_block) = TREE_USED (old_block);
  BLOCK_ABSTRACT_ORIGIN (new_block) = old_block;
  BLOCK_SUBBLOCKS (new_block) = BLOCK_SUBBLOCKS (old_block);
  TREE_SIDE_EFFECTS (new_block) = TREE_SIDE_EFFECTS (old_block);
  TREE_TYPE (new_block) = TREE_TYPE (old_block);
  *block = new_block;

  /* Remap its variables.  */
  for (old_var = decls ? decls : BLOCK_VARS (old_block);
       old_var;
       old_var = TREE_CHAIN (old_var))
    {
      tree new_var;

      /* All local class initialization flags go in the outermost
         scope.  */
      if (LOCAL_CLASS_INITIALIZATION_FLAG_P (old_var))
        {
          /* We may already have one.  */
          if (! splay_tree_lookup (id->decl_map, (splay_tree_key) old_var))
            {
              tree outermost_block;
              new_var = remap_decl (old_var, id);
              DECL_ABSTRACT_ORIGIN (new_var) = NULL;
              outermost_block = DECL_SAVED_TREE (current_function_decl);
              TREE_CHAIN (new_var) = BLOCK_VARS (outermost_block);
              BLOCK_VARS (outermost_block) = new_var;
            }
          continue;
        }

      /* Remap the variable.  */
      new_var = remap_decl (old_var, id);
      /* If we didn't remap this variable, then we can't mess with
         its TREE_CHAIN.  If we remapped this variable to
         something other than a declaration (say, if we mapped it
         to a constant), then we must similarly omit any mention
         of it here.  */
      if (!new_var || !DECL_P (new_var))
        ;
      else
        {
          TREE_CHAIN (new_var) = BLOCK_VARS (new_block);
          BLOCK_VARS (new_block) = new_var;
        }
    }
  /* We put the BLOCK_VARS in reverse order; fix that now.  */
  BLOCK_VARS (new_block) = nreverse (BLOCK_VARS (new_block));
  fn = VARRAY_TREE (id->fns, 0);
  /* Remember the remapped block.  */
  splay_tree_insert (id->decl_map,
                     (splay_tree_key) old_block,
                     (splay_tree_value) new_block);
#endif /* INLINER_FOR_JAVA */
}

#ifndef INLINER_FOR_JAVA
/* Copy the SCOPE_STMT pointed to by TP.  */

static void
copy_scope_stmt (tree *tp, int *walk_subtrees, inline_data *id)
{
  tree block;

  /* Remember whether or not this statement was nullified.  When
     making a copy, copy_tree_r always sets SCOPE_NULLIFIED_P (and
     doesn't copy the SCOPE_STMT_BLOCK) to free callers from having to
     deal with copying BLOCKs if they do not wish to do so.  */
  block = SCOPE_STMT_BLOCK (*tp);
  /* Copy (and replace) the statement.  */
  copy_tree_r (tp, walk_subtrees, NULL);
  /* Restore the SCOPE_STMT_BLOCK.  */
  SCOPE_STMT_BLOCK (*tp) = block;

  /* Remap the associated block.  */
  remap_block (*tp, NULL_TREE, id);
}
#endif /* not INLINER_FOR_JAVA */

/* Called from copy_body via walk_tree.  DATA is really an
   `inline_data *'.  */

static tree
copy_body_r (tree *tp, int *walk_subtrees, void *data)
{
  inline_data *id;
  tree fn;

  /* Set up.  */
  id = (inline_data *) data;
  fn = VARRAY_TOP_TREE (id->fns);

#if 0
  /* All automatic variables should have a DECL_CONTEXT indicating
     what function they come from.  */
  if ((TREE_CODE (*tp) == VAR_DECL || TREE_CODE (*tp) == LABEL_DECL)
      && DECL_NAMESPACE_SCOPE_P (*tp))
    if (! DECL_EXTERNAL (*tp) && ! TREE_STATIC (*tp))
      abort ();
#endif

#ifdef INLINER_FOR_JAVA
  if (TREE_CODE (*tp) == BLOCK)
    remap_block (tp, NULL_TREE, id);
#endif

  /* If this is a RETURN_STMT, change it into an EXPR_STMT and a
     GOTO_STMT with the RET_LABEL as its target.  */
#ifndef INLINER_FOR_JAVA
  if (TREE_CODE (*tp) == RETURN_STMT && id->ret_label)
#else /* INLINER_FOR_JAVA */
  if (TREE_CODE (*tp) == RETURN_EXPR && id->ret_label)
#endif /* INLINER_FOR_JAVA */
    {
      tree return_stmt = *tp;
      tree goto_stmt;

      /* Build the GOTO_STMT.  */
#ifndef INLINER_FOR_JAVA
      goto_stmt = build_stmt (GOTO_STMT, id->ret_label);
      TREE_CHAIN (goto_stmt) = TREE_CHAIN (return_stmt);
      GOTO_FAKE_P (goto_stmt) = 1;
#else /* INLINER_FOR_JAVA */
      tree assignment = TREE_OPERAND (return_stmt, 0);
      goto_stmt = build1 (GOTO_EXPR, void_type_node, id->ret_label);
      TREE_SIDE_EFFECTS (goto_stmt) = 1;
#endif /* INLINER_FOR_JAVA */

      /* If we're returning something, just turn that into an
         assignment into the equivalent of the original
         RESULT_DECL.  */
#ifndef INLINER_FOR_JAVA
      if (RETURN_STMT_EXPR (return_stmt))
        {
          *tp = build_stmt (EXPR_STMT,
                            RETURN_STMT_EXPR (return_stmt));
          STMT_IS_FULL_EXPR_P (*tp) = 1;
          /* And then jump to the end of the function.  */
          TREE_CHAIN (*tp) = goto_stmt;
        }
#else /* INLINER_FOR_JAVA */
      if (assignment)
        {
          copy_body_r (&assignment, walk_subtrees, data);
          *tp = build (COMPOUND_EXPR, void_type_node, assignment, goto_stmt);
          TREE_SIDE_EFFECTS (*tp) = 1;
        }
#endif /* INLINER_FOR_JAVA */
      /* If we're not returning anything just do the jump.  */
      else
        *tp = goto_stmt;
    }
  /* Local variables and labels need to be replaced by equivalent
     variables.  We don't want to copy static variables; there's only
     one of those, no matter how many times we inline the containing
     function.  */
  else if ((*lang_hooks.tree_inlining.auto_var_in_fn_p) (*tp, fn))
    {
      tree new_decl;

      /* Remap the declaration.  */
      new_decl = remap_decl (*tp, id);
      if (! new_decl)
        abort ();
      /* Replace this variable with the copy.  */
      STRIP_TYPE_NOPS (new_decl);
      *tp = new_decl;
    }
#if 0
  else if (nonstatic_local_decl_p (*tp)
           && DECL_CONTEXT (*tp) != VARRAY_TREE (id->fns, 0))
    abort ();
#endif
  else if (TREE_CODE (*tp) == SAVE_EXPR)
    remap_save_expr (tp, id->decl_map, VARRAY_TREE (id->fns, 0),
                     walk_subtrees);
  else if (TREE_CODE (*tp) == UNSAVE_EXPR)
    /* UNSAVE_EXPRs should not be generated until expansion time.  */
    abort ();
#ifndef INLINER_FOR_JAVA
  /* For a SCOPE_STMT, we must copy the associated block so that we
     can write out debugging information for the inlined variables.  */
  else if (TREE_CODE (*tp) == SCOPE_STMT && !id->in_target_cleanup_p)
    copy_scope_stmt (tp, walk_subtrees, id);
#else /* INLINER_FOR_JAVA */
  else if (TREE_CODE (*tp) == LABELED_BLOCK_EXPR)
    {
      /* We need a new copy of this labeled block; the EXIT_BLOCK_EXPR
         will refer to it, so save a copy ready for remapping.  We
         save it in the decl_map, although it isn't a decl.  */
      tree new_block = copy_node (*tp);
      splay_tree_insert (id->decl_map,
                         (splay_tree_key) *tp,
                         (splay_tree_value) new_block);
      *tp = new_block;
    }
  else if (TREE_CODE (*tp) == EXIT_BLOCK_EXPR)
    {
      splay_tree_node n
        = splay_tree_lookup (id->decl_map,
                             (splay_tree_key) TREE_OPERAND (*tp, 0));
      /* We _must_ have seen the enclosing LABELED_BLOCK_EXPR.  */
      if (! n)
        abort ();

      *tp = copy_node (*tp);
      TREE_OPERAND (*tp, 0) = (tree) n->value;
    }
#endif /* INLINER_FOR_JAVA */
  /* Types may need remapping as well.  */
  else if (TYPE_P (*tp))
    *tp = remap_type (*tp, id);

  /* Otherwise, just copy the node.  Note that copy_tree_r already
     knows not to copy VAR_DECLs, etc., so this is safe.  */
  else
    {
      if (TREE_CODE (*tp) == MODIFY_EXPR
          && TREE_OPERAND (*tp, 0) == TREE_OPERAND (*tp, 1)
          && ((*lang_hooks.tree_inlining.auto_var_in_fn_p)
              (TREE_OPERAND (*tp, 0), fn)))
        {
          /* Some assignments VAR = VAR; don't generate any rtl code
             and thus don't count as variable modification.  Avoid
             keeping bogosities like 0 = 0.  */
          tree decl = TREE_OPERAND (*tp, 0), value;
          splay_tree_node n;

          n = splay_tree_lookup (id->decl_map, (splay_tree_key) decl);
          if (n)
            {
              value = (tree) n->value;
              STRIP_TYPE_NOPS (value);
              if (TREE_CONSTANT (value) || TREE_READONLY_DECL_P (value))
                {
                  *tp = value;
                  return copy_body_r (tp, walk_subtrees, data);
                }
            }
        }
      else if (TREE_CODE (*tp) == ADDR_EXPR
               && ((*lang_hooks.tree_inlining.auto_var_in_fn_p)
                   (TREE_OPERAND (*tp, 0), fn)))
        {
          /* Get rid of &* from inline substitutions.  It can occur when
             someone takes the address of a parm or return slot passed by
             invisible reference.  */
          tree decl = TREE_OPERAND (*tp, 0), value;
          splay_tree_node n;

          n = splay_tree_lookup (id->decl_map, (splay_tree_key) decl);
          if (n)
            {
              value = (tree) n->value;
              if (TREE_CODE (value) == INDIRECT_REF)
                {
                  *tp = convert (TREE_TYPE (*tp), TREE_OPERAND (value, 0));
                  return copy_body_r (tp, walk_subtrees, data);
                }
            }
        }

      copy_tree_r (tp, walk_subtrees, NULL);

      TREE_TYPE (*tp) = remap_type (TREE_TYPE (*tp), id);

      /* The copied TARGET_EXPR has never been expanded, even if the
         original node was expanded already.  */
      if (TREE_CODE (*tp) == TARGET_EXPR && TREE_OPERAND (*tp, 3))
        {
          TREE_OPERAND (*tp, 1) = TREE_OPERAND (*tp, 3);
          TREE_OPERAND (*tp, 3) = NULL_TREE;
        }
    }

  /* Keep iterating.  */
  return NULL_TREE;
}

/* Make a copy of the body of FN so that it can be inserted inline in
   another function.  */

static tree
copy_body (inline_data *id)
{
  tree body;

  body = DECL_SAVED_TREE (VARRAY_TOP_TREE (id->fns));
  walk_tree (&body, copy_body_r, id, NULL);

  return body;
}
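
/* copy_body does no remapping of its own beyond what copy_body_r
   performs; it relies on ID->decl_map already containing the
   parameter and RESULT_DECL mappings installed by
   initialize_inlined_parameters and declare_return_variable below.  */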

/* Generate code to initialize the parameters of the function at the
   top of the stack in ID from the ARGS (presented as a TREE_LIST).  */

static tree
#ifndef INLINER_FOR_JAVA
initialize_inlined_parameters (inline_data *id, tree args, tree fn)
#else /* INLINER_FOR_JAVA */
initialize_inlined_parameters (inline_data *id, tree args, tree fn, tree block)
#endif /* INLINER_FOR_JAVA */
{
  tree init_stmts;
  tree parms;
  tree a;
  tree p;
#ifdef INLINER_FOR_JAVA
  tree vars = NULL_TREE;
#endif /* INLINER_FOR_JAVA */
  int argnum = 0;

  /* Figure out what the parameters are.  */
  parms = DECL_ARGUMENTS (fn);

  /* Start with no initializations whatsoever.  */
  init_stmts = NULL_TREE;

  /* Loop through the parameter declarations, replacing each with an
     equivalent VAR_DECL, appropriately initialized.  */
  for (p = parms, a = args; p;
       a = a ? TREE_CHAIN (a) : a, p = TREE_CHAIN (p))
    {
#ifndef INLINER_FOR_JAVA
      tree init_stmt;
      tree cleanup;
#endif /* not INLINER_FOR_JAVA */
      tree var;
      tree var_sub;
      tree value;

      ++argnum;

      /* Find the initializer.  */
      value = (*lang_hooks.tree_inlining.convert_parm_for_inlining)
              (p, a ? TREE_VALUE (a) : NULL_TREE, fn, argnum);

      /* If the parameter is never assigned to, we may not need to
         create a new variable here at all.  Instead, we may be able
         to just use the argument value.  */
      if (TREE_READONLY (p)
          && !TREE_ADDRESSABLE (p)
          && value && !TREE_SIDE_EFFECTS (value))
        {
          /* Simplify the value, if possible.  */
          value = fold (DECL_P (value) ? decl_constant_value (value) : value);

          /* We can't risk substituting complex expressions.  They
             might contain variables that will be assigned to later.
             Theoretically, we could check the expression to see if
             all of the variables that determine its value are
             read-only, but we don't bother.  */
          if (TREE_CONSTANT (value) || TREE_READONLY_DECL_P (value))
            {
              /* If this is a declaration, wrap it in a NOP_EXPR so that
                 we don't try to put the VALUE on the list of
                 BLOCK_VARS.  */
              if (DECL_P (value))
                value = build1 (NOP_EXPR, TREE_TYPE (value), value);

              /* If this is a constant, make sure it has the right type.  */
              else if (TREE_TYPE (value) != TREE_TYPE (p))
                value = fold (build1 (NOP_EXPR, TREE_TYPE (p), value));

              splay_tree_insert (id->decl_map,
                                 (splay_tree_key) p,
                                 (splay_tree_value) value);
              continue;
            }
        }
      /* Make an equivalent VAR_DECL.  */
      var = copy_decl_for_inlining (p, fn, VARRAY_TREE (id->fns, 0));

      /* See if the frontend wants to pass this by invisible reference.  If
         so, our new VAR_DECL will have REFERENCE_TYPE, and we need to
         replace uses of the PARM_DECL with dereferences.  */
      if (TREE_TYPE (var) != TREE_TYPE (p)
          && POINTER_TYPE_P (TREE_TYPE (var))
          && TREE_TYPE (TREE_TYPE (var)) == TREE_TYPE (p))
        var_sub = build1 (INDIRECT_REF, TREE_TYPE (p), var);
      else
        var_sub = var;

      /* Register the VAR_DECL as the equivalent for the PARM_DECL;
         that way, when the PARM_DECL is encountered, it will be
         automatically replaced by the VAR_DECL.  */
      splay_tree_insert (id->decl_map,
                         (splay_tree_key) p,
                         (splay_tree_value) var_sub);

      /* Declare this new variable.  */
#ifndef INLINER_FOR_JAVA
      init_stmt = build_stmt (DECL_STMT, var);
      TREE_CHAIN (init_stmt) = init_stmts;
      init_stmts = init_stmt;
#else /* INLINER_FOR_JAVA */
      TREE_CHAIN (var) = vars;
      vars = var;
#endif /* INLINER_FOR_JAVA */

      /* Initialize this VAR_DECL from the equivalent argument.  If
         the argument is an object, created via a constructor or copy,
         this will not result in an extra copy: the TARGET_EXPR
         representing the argument will be bound to VAR, and the
         object will be constructed in VAR.  */
      if (! TYPE_NEEDS_CONSTRUCTING (TREE_TYPE (p)))
#ifndef INLINER_FOR_JAVA
        DECL_INITIAL (var) = value;
      else
        {
          /* Even if P was TREE_READONLY, the new VAR should not be.
             In the original code, we would have constructed a
             temporary, and then the function body would have never
             changed the value of P.  However, now, we will be
             constructing VAR directly.  The constructor body may
             change its value multiple times as it is being
             constructed.  Therefore, it must not be TREE_READONLY;
             the back-end assumes that a TREE_READONLY variable is
             assigned to only once.  */
          TREE_READONLY (var) = 0;

          /* Build a run-time initialization.  */
          init_stmt = build_stmt (EXPR_STMT,
                                  build (INIT_EXPR, TREE_TYPE (p),
                                         var, value));
          /* Add this initialization to the list.  Note that we want the
             declaration *after* the initialization because we are going
             to reverse all the initialization statements below.  */
          TREE_CHAIN (init_stmt) = init_stmts;
          init_stmts = init_stmt;
        }

      /* See if we need to clean up the declaration.  */
      cleanup = (*lang_hooks.maybe_build_cleanup) (var);
      if (cleanup)
        {
          tree cleanup_stmt;
          /* Build the cleanup statement.  */
          cleanup_stmt = build_stmt (CLEANUP_STMT, var, cleanup);
          /* Add it to the *front* of the list; the list will be
             reversed below.  */
          TREE_CHAIN (cleanup_stmt) = init_stmts;
          init_stmts = cleanup_stmt;
        }
#else /* INLINER_FOR_JAVA */
        {
          tree assignment = build (MODIFY_EXPR, TREE_TYPE (p), var, value);
          init_stmts = add_stmt_to_compound (init_stmts, TREE_TYPE (p),
                                             assignment);
        }
      else
        {
          /* Java objects don't ever need constructing when being
             passed as arguments because only call by reference is
             supported.  */
          abort ();
        }
#endif /* INLINER_FOR_JAVA */
    }
#ifndef INLINER_FOR_JAVA
  /* Evaluate trailing arguments.  */
  for (; a; a = TREE_CHAIN (a))
    {
      tree init_stmt;
      tree value = TREE_VALUE (a);

      if (! value || ! TREE_SIDE_EFFECTS (value))
        continue;

      init_stmt = build_stmt (EXPR_STMT, value);
      TREE_CHAIN (init_stmt) = init_stmts;
      init_stmts = init_stmt;
    }

  /* The initialization statements have been built up in reverse
     order.  Straighten them out now.  */
  return nreverse (init_stmts);
#else /* INLINER_FOR_JAVA */
  BLOCK_VARS (block) = nreverse (vars);

  return init_stmts;
#endif /* INLINER_FOR_JAVA */
}

/* Declare a return variable to replace the RESULT_DECL for the
   function we are calling.  An appropriate DECL_STMT is returned.
   The USE_STMT is filled in to contain a use of the declaration to
   indicate the return value of the function.  */

#ifndef INLINER_FOR_JAVA
static tree
declare_return_variable (struct inline_data *id, tree return_slot_addr,
                         tree *use_stmt)
#else /* INLINER_FOR_JAVA */
static tree
declare_return_variable (struct inline_data *id, tree return_slot_addr,
                         tree *var)
#endif /* INLINER_FOR_JAVA */
{
  tree fn = VARRAY_TOP_TREE (id->fns);
  tree result = DECL_RESULT (fn);
#ifndef INLINER_FOR_JAVA
  tree var;
#endif /* not INLINER_FOR_JAVA */
  int need_return_decl = 1;

  /* We don't need to do anything for functions that don't return
     anything.  */
  if (!result || VOID_TYPE_P (TREE_TYPE (result)))
    {
#ifndef INLINER_FOR_JAVA
      *use_stmt = NULL_TREE;
#else /* INLINER_FOR_JAVA */
      *var = NULL_TREE;
#endif /* INLINER_FOR_JAVA */
      return NULL_TREE;
    }

#ifndef INLINER_FOR_JAVA
  var = ((*lang_hooks.tree_inlining.copy_res_decl_for_inlining)
         (result, fn, VARRAY_TREE (id->fns, 0), id->decl_map,
          &need_return_decl, return_slot_addr));

  /* Register the VAR_DECL as the equivalent for the RESULT_DECL; that
     way, when the RESULT_DECL is encountered, it will be
     automatically replaced by the VAR_DECL.  */
  splay_tree_insert (id->decl_map,
                     (splay_tree_key) result,
                     (splay_tree_value) var);

  /* Build the USE_STMT.  If the return type of the function was
     promoted, convert it back to the expected type.  */
  if (TREE_TYPE (var) == TREE_TYPE (TREE_TYPE (fn)))
    *use_stmt = build_stmt (EXPR_STMT, var);
  else
    *use_stmt = build_stmt (EXPR_STMT,
                            build1 (NOP_EXPR, TREE_TYPE (TREE_TYPE (fn)),
                                    var));
  TREE_ADDRESSABLE (*use_stmt) = 1;

  /* Build the declaration statement if FN does not return an
     aggregate.  */
  if (need_return_decl)
    return build_stmt (DECL_STMT, var);
#else /* INLINER_FOR_JAVA */
  *var = ((*lang_hooks.tree_inlining.copy_res_decl_for_inlining)
          (result, fn, VARRAY_TREE (id->fns, 0), id->decl_map,
           &need_return_decl, return_slot_addr));

  splay_tree_insert (id->decl_map,
                     (splay_tree_key) result,
                     (splay_tree_value) *var);
  DECL_IGNORED_P (*var) = 1;
  if (need_return_decl)
    return *var;
#endif /* INLINER_FOR_JAVA */
  /* If FN does return an aggregate, there's no need to declare the
     return variable; we're using a variable in our caller's frame.  */
  else
    return NULL_TREE;
}

/* Returns nonzero if a function can be inlined as a tree.  */

bool
tree_inlinable_function_p (tree fn)
{
  return inlinable_function_p (fn);
}

static const char *inline_forbidden_reason;
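
/* Walk-tree callback used by inline_forbidden_p below: if *NODEP is a
   construct that prevents the function FNP from being inlined (an
   alloca call, setjmp, use of variable argument lists, a nested
   function, a computed or nonlocal goto, or a variably sized local
   type), record an explanation in inline_forbidden_reason and return
   the offending node; otherwise keep walking.  */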
static tree
inline_forbidden_p_1 (tree *nodep, int *walk_subtrees ATTRIBUTE_UNUSED,
                      void *fnp)
{
  tree node = *nodep;
  tree fn = (tree) fnp;
  tree t;

  switch (TREE_CODE (node))
    {
    case CALL_EXPR:
      /* Refuse to inline an alloca call unless the user has explicitly
         forced it, since this may change the program's memory overhead
         drastically when the function using alloca is called in a loop.
         In GCC present in SPEC2000, inlining into schedule_block caused
         it to require 2GB of RAM instead of 256MB.  */
      if (alloca_call_p (node)
          && !lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn)))
        {
          inline_forbidden_reason
            = N_("%Jfunction '%F' can never be inlined because it uses "
                 "alloca (override using the always_inline attribute)");
          return node;
        }
      t = get_callee_fndecl (node);
      if (! t)
        break;

      /* We cannot inline functions that call setjmp.  */
      if (setjmp_call_p (t))
        {
          inline_forbidden_reason
            = N_("%Jfunction '%F' can never be inlined because it uses setjmp");
          return node;
        }

      if (DECL_BUILT_IN (t))
        switch (DECL_FUNCTION_CODE (t))
          {
            /* We cannot inline functions that take a variable number of
               arguments.  */
          case BUILT_IN_VA_START:
          case BUILT_IN_STDARG_START:
          case BUILT_IN_NEXT_ARG:
          case BUILT_IN_VA_END:
            {
              inline_forbidden_reason
                = N_("%Jfunction '%F' can never be inlined because it "
                     "uses variable argument lists");
              return node;
            }
          case BUILT_IN_LONGJMP:
            {
              /* We can't inline functions that call __builtin_longjmp at
                 all.  The non-local goto machinery really requires the
                 destination be in a different function.  If we allow the
                 function calling __builtin_longjmp to be inlined into the
                 function calling __builtin_setjmp, Things will Go Awry.  */
              /* ??? Need front end help to identify "regular" non-local
                 goto.  */
              if (DECL_BUILT_IN_CLASS (t) == BUILT_IN_NORMAL)
                {
                  inline_forbidden_reason
                    = N_("%Jfunction '%F' can never be inlined because "
                         "it uses setjmp-longjmp exception handling");
                  return node;
                }
            }

          default:
            break;
          }
      break;

#ifndef INLINER_FOR_JAVA
    case DECL_STMT:
      /* We cannot inline functions that contain other functions.  */
      if (TREE_CODE (TREE_OPERAND (node, 0)) == FUNCTION_DECL
          && DECL_INITIAL (TREE_OPERAND (node, 0)))
        {
          inline_forbidden_reason
            = N_("%Jfunction '%F' can never be inlined "
                 "because it contains a nested function");
          return node;
        }
      break;

    case GOTO_STMT:
    case GOTO_EXPR:
      t = TREE_OPERAND (node, 0);

      /* We will not inline a function which uses computed goto.  The
         addresses of its local labels, which may be tucked into
         global storage, are of course not constant across
         instantiations, which causes unexpected behavior.  */
      if (TREE_CODE (t) != LABEL_DECL)
        {
          inline_forbidden_reason
            = N_("%Jfunction '%F' can never be inlined "
                 "because it contains a computed goto");
          return node;
        }

      /* We cannot inline a nested function that jumps to a nonlocal
         label.  */
      if (TREE_CODE (t) == LABEL_DECL && DECL_CONTEXT (t) != fn)
        {
          inline_forbidden_reason
            = N_("%Jfunction '%F' can never be inlined "
                 "because it contains a nonlocal goto");
          return node;
        }
      break;

    case RECORD_TYPE:
    case UNION_TYPE:
      /* We cannot inline a function of the form

           void F (int i) { struct S { int ar[i]; } s; }

         Attempting to do so produces a catch-22.
         If walk_tree examines the TYPE_FIELDS chain of RECORD_TYPE/
         UNION_TYPE nodes, then it goes into infinite recursion on a
         structure containing a pointer to its own type.  If it doesn't,
         then the type node for S doesn't get adjusted properly when
         F is inlined, and we abort in find_function_data.  */
      for (t = TYPE_FIELDS (node); t; t = TREE_CHAIN (t))
        if (variably_modified_type_p (TREE_TYPE (t)))
          {
            inline_forbidden_reason
              = N_("%Jfunction '%F' can never be inlined "
                   "because it uses variable sized variables");
            return node;
          }
#endif /* not INLINER_FOR_JAVA */

    default:
      break;
    }

  return NULL_TREE;
}

/* Return the subexpression of FNDECL that forbids inlining it, if any
   (for example, a call to alloca); return NULL_TREE otherwise.  */

static tree
inline_forbidden_p (tree fndecl)
{
  location_t saved_loc = input_location;
  tree ret = walk_tree_without_duplicates
               (&DECL_SAVED_TREE (fndecl), inline_forbidden_p_1, fndecl);
  input_location = saved_loc;
  return ret;
}

/* Returns nonzero if FN is a function that does not have any
   fundamental inline blocking properties.  */

static bool
inlinable_function_p (tree fn)
{
  bool inlinable = true;

  /* If we've already decided this function shouldn't be inlined,
     there's no need to check again.  */
  if (DECL_UNINLINABLE (fn))
    return false;

  /* See if there is any language-specific reason it cannot be
     inlined.  (It is important that this hook be called early because
     in C++ it may result in template instantiation.)
     If the function is not inlinable for language-specific reasons,
     it is left up to the langhook to explain why.  */
  inlinable = !(*lang_hooks.tree_inlining.cannot_inline_tree_fn) (&fn);

  /* If we don't have the function body available, we can't inline it.
     However, this should not be recorded since we also get here for
     forward declared inline functions.  Therefore, return at once.  */
  if (!DECL_SAVED_TREE (fn))
    return false;

  /* If we're not inlining at all, then we cannot inline this function.  */
  else if (!flag_inline_trees)
    inlinable = false;

  /* Only try to inline functions if DECL_INLINE is set.  This should be
     true for all functions declared `inline', and for all other functions
     as well with -finline-functions.

     Don't think of disregarding DECL_INLINE when flag_inline_trees == 2;
     it's the front-end that must set DECL_INLINE in this case, because
     dwarf2out loses if a function that does not have DECL_INLINE set is
     inlined anyway.  That is why we have both DECL_INLINE and
     DECL_DECLARED_INLINE_P.  */
  /* FIXME: When flag_inline_trees dies, the check for flag_unit_at_a_time
     here should be redundant.  */
  else if (!DECL_INLINE (fn) && !flag_unit_at_a_time)
    inlinable = false;

#ifdef INLINER_FOR_JAVA
  /* Synchronized methods can't be inlined.  This is a bug.  */
  else if (METHOD_SYNCHRONIZED (fn))
    inlinable = false;
#endif /* INLINER_FOR_JAVA */

  else if (inline_forbidden_p (fn))
    {
      /* See if we should warn about uninlinable functions.  Previously,
         some of these warnings would be issued while trying to expand
         the function inline, but that would cause multiple warnings
         about functions that would for example call alloca.  But since
         this is a property of the function, just one warning is enough.
         As a bonus we can now give more details about the reason why a
         function is not inlinable.
         We only warn for functions declared `inline' by the user.  */
      bool do_warning = (warn_inline
                         && DECL_INLINE (fn)
                         && DECL_DECLARED_INLINE_P (fn)
                         && !DECL_IN_SYSTEM_HEADER (fn));

      if (do_warning)
        warning (inline_forbidden_reason, fn, fn);

      inlinable = false;
    }

  /* Squirrel away the result so that we don't have to check again.  */
  DECL_UNINLINABLE (fn) = !inlinable;

  return inlinable;
}

/* If *TP is a CALL_EXPR, replace it with its inline expansion.  */

static tree
expand_call_inline (tree *tp, int *walk_subtrees, void *data)
{
  inline_data *id;
  tree t;
  tree expr;
  tree stmt;
#ifndef INLINER_FOR_JAVA
  tree chain;
  tree scope_stmt;
  tree use_stmt;
#else /* INLINER_FOR_JAVA */
  tree retvar;
#endif /* INLINER_FOR_JAVA */
  tree fn;
  tree arg_inits;
  tree *inlined_body;
  splay_tree st;
  tree args;
  tree return_slot_addr;
  const char *reason;

  /* See what we've got.  */
  id = (inline_data *) data;
  t = *tp;

  /* Recurse, but letting recursive invocations know that we are
     inside the body of a TARGET_EXPR.  */
  if (TREE_CODE (*tp) == TARGET_EXPR)
    {
#ifndef INLINER_FOR_JAVA
      int i, len = first_rtl_op (TARGET_EXPR);

      /* We're walking our own subtrees.  */
      *walk_subtrees = 0;

      /* Actually walk over them.  This loop is the body of
         walk_trees, omitting the case where the TARGET_EXPR
         itself is handled.  */
      for (i = 0; i < len; ++i)
        {
          if (i == 2)
            ++id->in_target_cleanup_p;
          walk_tree (&TREE_OPERAND (*tp, i), expand_call_inline, data,
                     id->tree_pruner);
          if (i == 2)
            --id->in_target_cleanup_p;
        }

      return NULL_TREE;
#else /* INLINER_FOR_JAVA */
      abort ();
#endif /* INLINER_FOR_JAVA */
    }
  else if (TREE_CODE (t) == EXPR_WITH_FILE_LOCATION)
    {
      /* We're walking the subtree directly.  */
      *walk_subtrees = 0;
      /* Update the source position.  */
      push_srcloc (EXPR_WFL_FILENAME (t), EXPR_WFL_LINENO (t));
      walk_tree (&EXPR_WFL_NODE (t), expand_call_inline, data,
                 id->tree_pruner);
      /* Restore the original source position.  */
      pop_srcloc ();

      return NULL_TREE;
    }

  if (TYPE_P (t))
    /* Because types were not copied in copy_body, CALL_EXPRs beneath
       them should not be expanded.  This can happen if the type is a
       dynamic array type, for example.  */
    *walk_subtrees = 0;

  /* From here on, we're only interested in CALL_EXPRs.  */
  if (TREE_CODE (t) != CALL_EXPR)
    return NULL_TREE;

  /* First, see if we can figure out what function is being called.
     If we cannot, then there is no hope of inlining the function.  */
  fn = get_callee_fndecl (t);
  if (!fn)
    return NULL_TREE;
  /* Turn forward declarations into real ones.  */
  fn = cgraph_node (fn)->decl;

  /* If fn is a declaration of a function in a nested scope that was
     globally declared inline, we don't set its DECL_INITIAL.
     However, we can't blindly follow DECL_ABSTRACT_ORIGIN because the
     C++ front-end uses it for cdtors to refer to their internal
     declarations, that are not real functions.  Fortunately those
     don't have trees to be saved, so we can tell by checking their
     DECL_SAVED_TREE.  */
  if (! DECL_INITIAL (fn)
      && DECL_ABSTRACT_ORIGIN (fn)
      && DECL_SAVED_TREE (DECL_ABSTRACT_ORIGIN (fn)))
    fn = DECL_ABSTRACT_ORIGIN (fn);

  /* Don't try to inline functions that are not well-suited to
     inlining.  */
  if (!cgraph_inline_p (id->current_decl, fn, &reason))
    {
      if (warn_inline && DECL_DECLARED_INLINE_P (fn)
          && !DECL_IN_SYSTEM_HEADER (fn)
          && strlen (reason))
        {
          warning ("%Jinlining failed in call to '%F': %s", fn, fn, reason);
          warning ("called from here");
        }
      return NULL_TREE;
    }

  if (! (*lang_hooks.tree_inlining.start_inlining) (fn))
    return NULL_TREE;

  /* Set the current filename and line number to the function we are
     inlining so that when we create new _STMT nodes here they get
     line numbers corresponding to the function we are calling.  We
     wrap the whole inlined body in an EXPR_WITH_FILE_AND_LINE as well
     because individual statements don't record the filename.  */
  push_srcloc (DECL_SOURCE_FILE (fn), DECL_SOURCE_LINE (fn));
#ifndef INLINER_FOR_JAVA
  /* Build a statement-expression containing code to initialize the
     arguments, the actual inline expansion of the body, and a label
     for the return statements within the function to jump to.  The
     type of the statement expression is the return type of the
     function call.  */
  expr = build1 (STMT_EXPR, TREE_TYPE (TREE_TYPE (fn)), make_node (COMPOUND_STMT));
  /* There is no scope associated with the statement-expression.  */
  STMT_EXPR_NO_SCOPE (expr) = 1;
  if (lookup_attribute ("warn_unused_result",
                        TYPE_ATTRIBUTES (TREE_TYPE (fn))))
    STMT_EXPR_WARN_UNUSED_RESULT (expr) = 1;
  stmt = STMT_EXPR_STMT (expr);
#else /* INLINER_FOR_JAVA */
  /* Build a block containing code to initialize the arguments, the
     actual inline expansion of the body, and a label for the return
     statements within the function to jump to.  The type of the
     statement expression is the return type of the function call.  */
  stmt = NULL;
  expr = build (BLOCK, TREE_TYPE (TREE_TYPE (fn)), stmt);
#endif /* INLINER_FOR_JAVA */

  /* Local declarations will be replaced by their equivalents in this
     map.  */
  st = id->decl_map;
  id->decl_map = splay_tree_new (splay_tree_compare_pointers,
                                 NULL, NULL);

  /* Initialize the parameters.  */
  args = TREE_OPERAND (t, 1);
  return_slot_addr = NULL_TREE;
  if (CALL_EXPR_HAS_RETURN_SLOT_ADDR (t))
    {
      return_slot_addr = TREE_VALUE (args);
      args = TREE_CHAIN (args);
    }

#ifndef INLINER_FOR_JAVA
  arg_inits = initialize_inlined_parameters (id, args, fn);
  /* Expand any inlined calls in the initializers.  Do this before we
     push FN on the stack of functions we are inlining; we want to
     inline calls to FN that appear in the initializers for the
     parameters.  */
  expand_calls_inline (&arg_inits, id);
  /* And add them to the tree.  */
  COMPOUND_BODY (stmt) = chainon (COMPOUND_BODY (stmt), arg_inits);
#else /* INLINER_FOR_JAVA */
  arg_inits = initialize_inlined_parameters (id, args, fn, expr);
  if (arg_inits)
    {
      /* Expand any inlined calls in the initializers.  Do this before we
         push FN on the stack of functions we are inlining; we want to
         inline calls to FN that appear in the initializers for the
         parameters.  */
      expand_calls_inline (&arg_inits, id);

      /* And add them to the tree.  */
      BLOCK_EXPR_BODY (expr) = add_stmt_to_compound (BLOCK_EXPR_BODY (expr),
                                                     TREE_TYPE (arg_inits),
                                                     arg_inits);
    }
#endif /* INLINER_FOR_JAVA */
  /* Record the function we are about to inline so that we can avoid
     recursing into it.  */
  VARRAY_PUSH_TREE (id->fns, fn);

  /* Record the function we are about to inline if optimize_function
     has not been called on it yet and we don't have it in the list.  */
  if (! DECL_INLINED_FNS (fn))
    {
      int i;

      for (i = VARRAY_ACTIVE_SIZE (id->inlined_fns) - 1; i >= 0; i--)
        if (VARRAY_TREE (id->inlined_fns, i) == fn)
          break;
      if (i < 0)
        VARRAY_PUSH_TREE (id->inlined_fns, fn);
    }

  /* Return statements in the function body will be replaced by jumps
     to the RET_LABEL.  */
  id->ret_label = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);
  DECL_CONTEXT (id->ret_label) = VARRAY_TREE (id->fns, 0);

  if (! DECL_INITIAL (fn)
      || TREE_CODE (DECL_INITIAL (fn)) != BLOCK)
    abort ();
#ifndef INLINER_FOR_JAVA
  /* Create a block to put the parameters in.  We have to do this
     after the parameters have been remapped because remapping
     parameters is different from remapping ordinary variables.  */
  scope_stmt = build_stmt (SCOPE_STMT, DECL_INITIAL (fn));
  SCOPE_BEGIN_P (scope_stmt) = 1;
  SCOPE_NO_CLEANUPS_P (scope_stmt) = 1;
  remap_block (scope_stmt, DECL_ARGUMENTS (fn), id);
  TREE_CHAIN (scope_stmt) = COMPOUND_BODY (stmt);
  COMPOUND_BODY (stmt) = scope_stmt;

  /* Tell the debugging backends that this block represents the
     outermost scope of the inlined function.  */
  if (SCOPE_STMT_BLOCK (scope_stmt))
    BLOCK_ABSTRACT_ORIGIN (SCOPE_STMT_BLOCK (scope_stmt)) = DECL_ORIGIN (fn);

  /* Declare the return variable for the function.  */
  COMPOUND_BODY (stmt)
    = chainon (COMPOUND_BODY (stmt),
               declare_return_variable (id, return_slot_addr, &use_stmt));
#else /* INLINER_FOR_JAVA */
  {
    /* Declare the return variable for the function.  */
    tree decl = declare_return_variable (id, return_slot_addr, &retvar);
    if (retvar)
      {
        tree *next = &BLOCK_VARS (expr);
        while (*next)
          next = &TREE_CHAIN (*next);
        *next = decl;
      }
  }
#endif /* INLINER_FOR_JAVA */
  /* After we've initialized the parameters, we insert the body of the
     function itself.  */
#ifndef INLINER_FOR_JAVA
  inlined_body = &COMPOUND_BODY (stmt);
  while (*inlined_body)
    inlined_body = &TREE_CHAIN (*inlined_body);
  *inlined_body = copy_body (id);
#else /* INLINER_FOR_JAVA */
  {
    tree new_body;
    java_inlining_map_static_initializers (fn, id->decl_map);
    new_body = copy_body (id);
    TREE_TYPE (new_body) = TREE_TYPE (TREE_TYPE (fn));
    BLOCK_EXPR_BODY (expr)
      = add_stmt_to_compound (BLOCK_EXPR_BODY (expr),
                              TREE_TYPE (new_body), new_body);
    inlined_body = &BLOCK_EXPR_BODY (expr);
  }
#endif /* INLINER_FOR_JAVA */
  /* After the body of the function comes the RET_LABEL.  This must come
     before we evaluate the returned value below, because that evaluation
     may cause RTL to be generated.  */
#ifndef INLINER_FOR_JAVA
  COMPOUND_BODY (stmt)
    = chainon (COMPOUND_BODY (stmt),
               build_stmt (LABEL_STMT, id->ret_label));
#else /* INLINER_FOR_JAVA */
  {
    tree label = build1 (LABEL_EXPR, void_type_node, id->ret_label);
    BLOCK_EXPR_BODY (expr)
      = add_stmt_to_compound (BLOCK_EXPR_BODY (expr), void_type_node, label);
    TREE_SIDE_EFFECTS (label) = TREE_SIDE_EFFECTS (t);
  }
#endif /* INLINER_FOR_JAVA */

  /* Finally, mention the returned value so that the value of the
     statement-expression is the returned value of the function.  */
#ifndef INLINER_FOR_JAVA
  COMPOUND_BODY (stmt) = chainon (COMPOUND_BODY (stmt), use_stmt);

  /* Close the block for the parameters.  */
  scope_stmt = build_stmt (SCOPE_STMT, DECL_INITIAL (fn));
  SCOPE_NO_CLEANUPS_P (scope_stmt) = 1;
  remap_block (scope_stmt, NULL_TREE, id);
  COMPOUND_BODY (stmt)
    = chainon (COMPOUND_BODY (stmt), scope_stmt);
#else /* INLINER_FOR_JAVA */
  if (retvar)
    {
      /* Mention the retvar.  If the return type of the function was
         promoted, convert it back to the expected type.  */
      if (TREE_TYPE (TREE_TYPE (fn)) != TREE_TYPE (retvar))
        retvar = build1 (NOP_EXPR, TREE_TYPE (TREE_TYPE (fn)), retvar);
      BLOCK_EXPR_BODY (expr)
        = add_stmt_to_compound (BLOCK_EXPR_BODY (expr),
                                TREE_TYPE (retvar), retvar);
    }

  java_inlining_merge_static_initializers (fn, id->decl_map);
#endif /* INLINER_FOR_JAVA */
  /* Clean up.  */
  splay_tree_delete (id->decl_map);
  id->decl_map = st;

  /* The new expression has side-effects if the old one did.  */
  TREE_SIDE_EFFECTS (expr) = TREE_SIDE_EFFECTS (t);

  /* Replace the call by the inlined body.  Wrap it in an
     EXPR_WITH_FILE_LOCATION so that we'll get debugging line notes
     pointing to the right place.  */
#ifndef INLINER_FOR_JAVA
  chain = TREE_CHAIN (*tp);
#endif /* INLINER_FOR_JAVA */
  *tp = build_expr_wfl (expr, DECL_SOURCE_FILE (fn), DECL_SOURCE_LINE (fn),
                        /*col=*/0);
  EXPR_WFL_EMIT_LINE_NOTE (*tp) = 1;
#ifndef INLINER_FOR_JAVA
  TREE_CHAIN (*tp) = chain;
#endif /* not INLINER_FOR_JAVA */
  pop_srcloc ();

  /* If the value of the new expression is ignored, that's OK.  We
     don't warn about this for CALL_EXPRs, so we shouldn't warn about
     the equivalent inlined version either.  */
  TREE_USED (*tp) = 1;

  /* Our function now has more statements than it did before.  */
  DECL_ESTIMATED_INSNS (VARRAY_TREE (id->fns, 0)) += DECL_ESTIMATED_INSNS (fn);
  /* For accounting, subtract one for the saved call/ret.  */
  id->inlined_insns += DECL_ESTIMATED_INSNS (fn) - 1;

  /* Update callgraph if needed.  */
  if (id->decl)
    {
      cgraph_remove_call (id->decl, fn);
      cgraph_create_edges (id->decl, *inlined_body);
    }

  /* Recurse into the body of the just inlined function.  */
  {
    tree old_decl = id->current_decl;
    id->current_decl = fn;
    expand_calls_inline (inlined_body, id);
    id->current_decl = old_decl;
  }
  VARRAY_POP (id->fns);

  /* If we've returned to the top level, clear out the record of how
     much inlining has been done.  */
  if (VARRAY_ACTIVE_SIZE (id->fns) == id->first_inlined_fn)
    id->inlined_insns = 0;

  /* Don't walk into subtrees.  We've already handled them above.  */
  *walk_subtrees = 0;

  (*lang_hooks.tree_inlining.end_inlining) (fn);

  /* Keep iterating.  */
  return NULL_TREE;
}

/* Walk over the entire tree *TP, replacing CALL_EXPRs with inline
   expansions as appropriate.  */

static void
expand_calls_inline (tree *tp, inline_data *id)
{
  /* Search through *TP, replacing all calls to inline functions by
     appropriate equivalents.  Use walk_tree in no-duplicates mode
     to avoid exponential time complexity.  (We can't just use
     walk_tree_without_duplicates, because of the special TARGET_EXPR
     handling in expand_calls.)  The hash table is set up in
     optimize_function.  */
  walk_tree (tp, expand_call_inline, id, id->tree_pruner);
}

/* Expand calls to inline functions in the body of FN.  */

void
optimize_inline_calls (tree fn)
{
  inline_data id;
  tree prev_fn;

  /* Clear out ID.  */
  memset (&id, 0, sizeof (id));

  id.decl = fn;
  id.current_decl = fn;
  /* Don't allow recursion into FN.  */
  VARRAY_TREE_INIT (id.fns, 32, "fns");
  VARRAY_PUSH_TREE (id.fns, fn);
  if (!DECL_ESTIMATED_INSNS (fn))
    DECL_ESTIMATED_INSNS (fn)
      = (*lang_hooks.tree_inlining.estimate_num_insns) (fn);
  /* Or any functions that aren't finished yet.  */
  prev_fn = NULL_TREE;
  if (current_function_decl)
    {
      VARRAY_PUSH_TREE (id.fns, current_function_decl);
      prev_fn = current_function_decl;
    }

  prev_fn = ((*lang_hooks.tree_inlining.add_pending_fn_decls)
             (&id.fns, prev_fn));

  /* Create the list of functions this call will inline.  */
  VARRAY_TREE_INIT (id.inlined_fns, 32, "inlined_fns");

  /* Keep track of the low-water mark, i.e., the point where the first
     real inlining is represented in ID.FNS.  */
  id.first_inlined_fn = VARRAY_ACTIVE_SIZE (id.fns);

  /* Replace all calls to inline functions with the bodies of those
     functions.  */
  id.tree_pruner = htab_create (37, htab_hash_pointer,
                                htab_eq_pointer, NULL);
  expand_calls_inline (&DECL_SAVED_TREE (fn), &id);

  /* Clean up.  */
  htab_delete (id.tree_pruner);
  if (DECL_LANG_SPECIFIC (fn))
    {
      tree ifn = make_tree_vec (VARRAY_ACTIVE_SIZE (id.inlined_fns));

      if (VARRAY_ACTIVE_SIZE (id.inlined_fns))
        memcpy (&TREE_VEC_ELT (ifn, 0), &VARRAY_TREE (id.inlined_fns, 0),
                VARRAY_ACTIVE_SIZE (id.inlined_fns) * sizeof (tree));
      DECL_INLINED_FNS (fn) = ifn;
    }
}

/* FN is a function that has a complete body, and CLONE is a function
   whose body is to be set to a copy of FN, mapping argument
   declarations according to the ARG_MAP splay_tree.  */

void
clone_body (tree clone, tree fn, void *arg_map)
{
  inline_data id;

  /* Clone the body, as if we were making an inline call.  But, remap
     the parameters in the callee to the parameters of caller.  If
     there's an in-charge parameter, map it to an appropriate
     constant.  */
  memset (&id, 0, sizeof (id));
  VARRAY_TREE_INIT (id.fns, 2, "fns");
  VARRAY_PUSH_TREE (id.fns, clone);
  VARRAY_PUSH_TREE (id.fns, fn);
  id.decl_map = (splay_tree) arg_map;

  /* Cloning is treated slightly differently from inlining.  Set
     CLONING_P so that it's clear which operation we're performing.  */
  id.cloning_p = true;

  /* Actually copy the body.  */
  TREE_CHAIN (DECL_SAVED_TREE (clone)) = copy_body (&id);
}

/* Apply FUNC to all the sub-trees of TP in a pre-order traversal.
   FUNC is called with the DATA and the address of each sub-tree.  If
   FUNC returns a non-NULL value, the traversal is aborted, and the
   value returned by FUNC is returned.  If HTAB is non-NULL it is used
   to record the nodes visited, and to avoid visiting a node more than
   once.  */

tree
walk_tree (tree *tp, walk_tree_fn func, void *data, void *htab_)
{
  htab_t htab = (htab_t) htab_;
  enum tree_code code;
  int walk_subtrees;
  tree result;

#define WALK_SUBTREE(NODE)                              \
  do                                                    \
    {                                                   \
      result = walk_tree (&(NODE), func, data, htab);   \
      if (result)                                       \
        return result;                                  \
    }                                                   \
  while (0)

#define WALK_SUBTREE_TAIL(NODE)                         \
  do                                                    \
    {                                                   \
      tp = &(NODE);                                     \
      goto tail_recurse;                                \
    }                                                   \
  while (0)

 tail_recurse:
  /* Skip empty subtrees.  */
  if (!*tp)
    return NULL_TREE;

  if (htab)
    {
      void **slot;

      /* Don't walk the same tree twice, if the user has requested
         that we avoid doing so.  */
      slot = htab_find_slot (htab, *tp, INSERT);
      if (*slot)
        return NULL_TREE;
      *slot = *tp;
    }

  /* Call the function.  */
  walk_subtrees = 1;
  result = (*func) (tp, &walk_subtrees, data);

  /* If we found something, return it.  */
  if (result)
    return result;

  code = TREE_CODE (*tp);

#ifndef INLINER_FOR_JAVA
  /* Even if we didn't, FUNC may have decided that there was nothing
     interesting below this point in the tree.  */
  if (!walk_subtrees)
    {
      if (STATEMENT_CODE_P (code) || code == TREE_LIST
          || (*lang_hooks.tree_inlining.tree_chain_matters_p) (*tp))
        /* But we still need to check our siblings.  */
        WALK_SUBTREE_TAIL (TREE_CHAIN (*tp));
      else
        return NULL_TREE;
    }
1769 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code
)))
1770 #else /* INLINER_FOR_JAVA */
1771 if (code
!= EXIT_BLOCK_EXPR
1772 && code
!= SAVE_EXPR
1773 && IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code
)))
1774 #endif /* INLINER_FOR_JAVA */
1778 #ifndef INLINER_FOR_JAVA
1779 /* Set lineno here so we get the right instantiation context
1780 if we call instantiate_decl from inlinable_function_p. */
1781 if (STATEMENT_CODE_P (code
) && !STMT_LINENO_FOR_FN_P (*tp
))
1782 input_line
= STMT_LINENO (*tp
);
1783 #endif /* not INLINER_FOR_JAVA */
1785 /* Walk over all the sub-trees of this operand. */
1786 len
= first_rtl_op (code
);
1787 /* TARGET_EXPRs are peculiar: operands 1 and 3 can be the same.
1788 But, we only want to walk once. */
1789 if (code
== TARGET_EXPR
1790 && TREE_OPERAND (*tp
, 3) == TREE_OPERAND (*tp
, 1))
1792 /* Go through the subtrees. We need to do this in forward order so
1793 that the scope of a FOR_EXPR is handled properly. */
1794 for (i
= 0; i
< len
; ++i
)
1795 WALK_SUBTREE (TREE_OPERAND (*tp
, i
));
1797 #ifndef INLINER_FOR_JAVA
1798 /* For statements, we also walk the chain so that we cover the
1799 entire statement tree. */
1800 if (STATEMENT_CODE_P (code
))
1802 if (code
== DECL_STMT
1803 && DECL_STMT_DECL (*tp
)
1804 && DECL_P (DECL_STMT_DECL (*tp
)))
1806 /* Walk the DECL_INITIAL and DECL_SIZE. We don't want to walk
1807 into declarations that are just mentioned, rather than
1808 declared; they don't really belong to this part of the tree.
1809 And, we can see cycles: the initializer for a declaration can
1810 refer to the declaration itself. */
1811 WALK_SUBTREE (DECL_INITIAL (DECL_STMT_DECL (*tp
)));
1812 WALK_SUBTREE (DECL_SIZE (DECL_STMT_DECL (*tp
)));
1813 WALK_SUBTREE (DECL_SIZE_UNIT (DECL_STMT_DECL (*tp
)));
1814 WALK_SUBTREE (TREE_TYPE (*tp
));
1817 /* This can be tail-recursion optimized if we write it this way. */
1818 WALK_SUBTREE_TAIL (TREE_CHAIN (*tp
));
1821 #endif /* not INLINER_FOR_JAVA */
1822 /* We didn't find what we were looking for. */
  else if (TREE_CODE_CLASS (code) == 'd')
    {
      WALK_SUBTREE_TAIL (TREE_TYPE (*tp));
    }
  else if (TREE_CODE_CLASS (code) == 't')
    {
      WALK_SUBTREE (TYPE_SIZE (*tp));
      WALK_SUBTREE (TYPE_SIZE_UNIT (*tp));
      /* Also examine various special fields, below.  */
    }

  result = (*lang_hooks.tree_inlining.walk_subtrees) (tp, &walk_subtrees, func,
                                                      data, htab);
  if (result || ! walk_subtrees)
    return result;

  /* Not one of the easy cases.  We must explicitly go through the
     children.  */
  switch (code)
    {
    case IDENTIFIER_NODE:
    case PLACEHOLDER_EXPR:
      /* None of these have subtrees other than those already walked
         above.  */
      break;

    case REFERENCE_TYPE:
      WALK_SUBTREE_TAIL (TREE_TYPE (*tp));

    case TREE_LIST:
      WALK_SUBTREE (TREE_VALUE (*tp));
      WALK_SUBTREE_TAIL (TREE_CHAIN (*tp));

    case TREE_VEC:
      {
        int len = TREE_VEC_LENGTH (*tp);

        if (len == 0)
          break;

        /* Walk all elements but the first.  */
        while (--len)
          WALK_SUBTREE (TREE_VEC_ELT (*tp, len));

        /* Now walk the first one as a tail call.  */
        WALK_SUBTREE_TAIL (TREE_VEC_ELT (*tp, 0));
      }

    case COMPLEX_CST:
      WALK_SUBTREE (TREE_REALPART (*tp));
      WALK_SUBTREE_TAIL (TREE_IMAGPART (*tp));

    case CONSTRUCTOR:
      WALK_SUBTREE_TAIL (CONSTRUCTOR_ELTS (*tp));

    case METHOD_TYPE:
      WALK_SUBTREE (TYPE_METHOD_BASETYPE (*tp));
      /* Fall through.  */

    case FUNCTION_TYPE:
      WALK_SUBTREE (TREE_TYPE (*tp));
      {
        tree arg = TYPE_ARG_TYPES (*tp);

        /* We never want to walk into default arguments.  */
        for (; arg; arg = TREE_CHAIN (arg))
          WALK_SUBTREE (TREE_VALUE (arg));
      }
      break;

    case ARRAY_TYPE:
      WALK_SUBTREE (TREE_TYPE (*tp));
      WALK_SUBTREE_TAIL (TYPE_DOMAIN (*tp));

    case INTEGER_TYPE:
      WALK_SUBTREE (TYPE_MIN_VALUE (*tp));
      WALK_SUBTREE_TAIL (TYPE_MAX_VALUE (*tp));

    case OFFSET_TYPE:
      WALK_SUBTREE (TREE_TYPE (*tp));
      WALK_SUBTREE_TAIL (TYPE_OFFSET_BASETYPE (*tp));

#ifdef INLINER_FOR_JAVA
    case EXIT_BLOCK_EXPR:
      WALK_SUBTREE_TAIL (TREE_OPERAND (*tp, 1));

    case SAVE_EXPR:
      WALK_SUBTREE_TAIL (TREE_OPERAND (*tp, 0));
#endif /* INLINER_FOR_JAVA */

    default:
      break;
    }

  /* We didn't find what we were looking for.  */
  return NULL_TREE;

#undef WALK_SUBTREE
#undef WALK_SUBTREE_TAIL
}
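
/* As an illustration of the interface, a hypothetical walker that
   counts CALL_EXPR nodes might look like:

       static tree
       count_calls_r (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED,
                      void *data)
       {
         if (TREE_CODE (*tp) == CALL_EXPR)
           ++*(int *) data;
         return NULL_TREE;
       }

       int count = 0;
       walk_tree_without_duplicates (&DECL_SAVED_TREE (fn),
                                     count_calls_r, &count);

   Returning NULL_TREE from the callback keeps the traversal going;
   returning any non-NULL tree stops it and propagates that value out
   of walk_tree.  */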

/* Like walk_tree, but does not walk duplicate nodes more than
   once.  */

tree
walk_tree_without_duplicates (tree *tp, walk_tree_fn func, void *data)
{
  tree result;
  htab_t htab;

  htab = htab_create (37, htab_hash_pointer, htab_eq_pointer, NULL);
  result = walk_tree (tp, func, data, htab);
  htab_delete (htab);
  return result;
}

/* Passed to walk_tree.  Copies the node pointed to, if appropriate.  */

tree
copy_tree_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
{
  enum tree_code code = TREE_CODE (*tp);

  /* We make copies of most nodes.  */
  if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code))
      || TREE_CODE_CLASS (code) == 'c'
      || code == TREE_LIST
      || code == TREE_VEC
      || (*lang_hooks.tree_inlining.tree_chain_matters_p) (*tp))
    {
      /* Because the chain gets clobbered when we make a copy, we save it
         here.  */
      tree chain = TREE_CHAIN (*tp);

      /* Copy the node.  */
      *tp = copy_node (*tp);

      /* Now, restore the chain, if appropriate.  That will cause
         walk_tree to walk into the chain as well.  */
      if (code == PARM_DECL || code == TREE_LIST
#ifndef INLINER_FOR_JAVA
          || (*lang_hooks.tree_inlining.tree_chain_matters_p) (*tp)
          || STATEMENT_CODE_P (code))
        TREE_CHAIN (*tp) = chain;

      /* For now, we don't update BLOCKs when we make copies.  So, we
         have to nullify all scope-statements.  */
      if (TREE_CODE (*tp) == SCOPE_STMT)
        SCOPE_STMT_BLOCK (*tp) = NULL_TREE;
#else /* INLINER_FOR_JAVA */
          || (*lang_hooks.tree_inlining.tree_chain_matters_p) (*tp))
        TREE_CHAIN (*tp) = chain;
#endif /* INLINER_FOR_JAVA */
    }
  else if (TREE_CODE_CLASS (code) == 't')
    /* There's no need to copy types; they are shared, not duplicated,
       and walk_tree handles their special fields itself.  */
    *walk_subtrees = 0;

  return NULL_TREE;
}

/* The SAVE_EXPR pointed to by TP is being copied.  If ST contains
   information indicating to what new SAVE_EXPR this one should be
   mapped, use that one.  Otherwise, create a new node and enter it in
   ST.  FN is the function into which the copy will be placed.  */

void
remap_save_expr (tree *tp, void *st_, tree fn, int *walk_subtrees)
{
  splay_tree st = (splay_tree) st_;
  splay_tree_node n;

  /* See if we already encountered this SAVE_EXPR.  */
  n = splay_tree_lookup (st, (splay_tree_key) *tp);

  /* If we didn't already remap this SAVE_EXPR, do so now.  */
  if (!n)
    {
      tree t = copy_node (*tp);

      /* The SAVE_EXPR is now part of the function into which we
         are inlining this body.  */
      SAVE_EXPR_CONTEXT (t) = fn;
      /* And we haven't evaluated it yet.  */
      SAVE_EXPR_RTL (t) = NULL_RTX;
      /* Remember this SAVE_EXPR.  */
      n = splay_tree_insert (st,
                             (splay_tree_key) *tp,
                             (splay_tree_value) t);
      /* Make sure we don't remap an already-remapped SAVE_EXPR.  */
      splay_tree_insert (st, (splay_tree_key) t, (splay_tree_value) t);
    }
  else
    /* We've already walked into this SAVE_EXPR, so we needn't do it
       again.  */
    *walk_subtrees = 0;

  /* Replace this SAVE_EXPR with the copy.  */
  *tp = (tree) n->value;
}

#ifdef INLINER_FOR_JAVA
/* Add STMT to EXISTING if possible, otherwise create a new
   COMPOUND_EXPR and add STMT to it.  */

static tree
add_stmt_to_compound (tree existing, tree type, tree stmt)
{
  if (!stmt)
    return existing;
  else if (existing)
    return build (COMPOUND_EXPR, type, existing, stmt);
  else
    return stmt;
}
#endif /* INLINER_FOR_JAVA */