PR c++/11503
[official-gcc.git] / gcc / tree-inline.c
1 /* Control and data flow functions for trees.
2 Copyright 2001, 2002, 2003 Free Software Foundation, Inc.
3 Contributed by Alexandre Oliva <aoliva@redhat.com>
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 2, or (at your option)
10 any later version.
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to
19 the Free Software Foundation, 59 Temple Place - Suite 330,
20 Boston, MA 02111-1307, USA. */
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tm.h"
26 #include "toplev.h"
27 #include "tree.h"
28 #include "tree-inline.h"
29 #include "rtl.h"
30 #include "expr.h"
31 #include "flags.h"
32 #include "params.h"
33 #include "input.h"
34 #include "insn-config.h"
35 #include "integrate.h"
36 #include "varray.h"
37 #include "hashtab.h"
38 #include "splay-tree.h"
39 #include "langhooks.h"
40 #include "cgraph.h"
42 /* This should eventually be generalized to other languages, but
43 this would require a shared function-as-trees infrastructure. */
44 #ifndef INLINER_FOR_JAVA
45 #include "c-common.h"
46 #else /* INLINER_FOR_JAVA */
47 #include "parse.h"
48 #include "java-tree.h"
49 #endif /* INLINER_FOR_JAVA */
51 /* 0 if we should not perform inlining.
52 1 if we should expand function calls inline at the tree level.
53 2 if we should consider *all* functions to be inline
54 candidates. */
56 int flag_inline_trees = 0;
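/* Illustrative sketch only, not part of this file: a front end typically
   sets this flag from its option handling, roughly along the lines of

       flag_inline_trees = 1;
       if (flag_inline_functions)
         flag_inline_trees = 2;

   Exactly where this happens is front-end specific.  */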
58 /* To Do:
60 o In order to make inlining-on-trees work, we pessimized
61 function-local static constants. In particular, they are now
62 always output, even when not addressed. Fix this by treating
63 function-local static constants just like global static
64 constants; the back-end already knows not to output them if they
65 are not needed.
67 o Provide heuristics to clamp inlining of recursive template
68 calls? */
70 /* Data required for function inlining. */
72 typedef struct inline_data
74 /* A stack of the functions we are inlining. For example, if we are
75 compiling `f', which calls `g', which calls `h', and we are
76 inlining the body of `h', the stack will contain, `h', followed
77 by `g', followed by `f'. The first few elements of the stack may
78 contain other functions that we know we should not recurse into,
79 even though they are not directly being inlined. */
80 varray_type fns;
81 /* The index of the first element of FNS that really represents an
82 inlined function. */
83 unsigned first_inlined_fn;
84 /* The label to jump to when a return statement is encountered. If
85 this value is NULL, then return statements will simply be
86 remapped as return statements, rather than as jumps. */
87 tree ret_label;
88 /* The map from local declarations in the inlined function to
89 equivalents in the function into which it is being inlined. */
90 splay_tree decl_map;
91 /* Nonzero if we are currently within the cleanup for a
92 TARGET_EXPR. */
93 int in_target_cleanup_p;
94 /* A list of the functions the current function has inlined. */
95 varray_type inlined_fns;
96 /* The approximate number of instructions we have inlined in the
97 current call stack. */
98 int inlined_insns;
99 /* We use the same mechanism to build clones that we do to perform
100 inlining. However, there are a few places where we need to
101 distinguish between those two situations. This flag is true if
102 we are cloning, rather than inlining. */
103 bool cloning_p;
104 /* Hash table used to prevent walk_tree from visiting the same node
105 umpteen million times. */
106 htab_t tree_pruner;
107 /* Decl of function we are inlining into. */
108 tree decl;
109 tree current_decl;
110 } inline_data;
112 /* Prototypes. */
114 static tree declare_return_variable (inline_data *, tree, tree *);
115 static tree copy_body_r (tree *, int *, void *);
116 static tree copy_body (inline_data *);
117 static tree expand_call_inline (tree *, int *, void *);
118 static void expand_calls_inline (tree *, inline_data *);
119 static int inlinable_function_p (tree, inline_data *, int);
120 static tree remap_decl (tree, inline_data *);
121 #ifndef INLINER_FOR_JAVA
122 static tree initialize_inlined_parameters (inline_data *, tree, tree);
123 static void remap_block (tree, tree, inline_data *);
124 static void copy_scope_stmt (tree *, int *, inline_data *);
125 #else /* INLINER_FOR_JAVA */
126 static tree initialize_inlined_parameters (inline_data *, tree, tree, tree);
127 static void remap_block (tree *, tree, inline_data *);
128 static tree add_stmt_to_compound (tree, tree, tree);
129 #endif /* INLINER_FOR_JAVA */
130 static tree find_alloca_call_1 (tree *, int *, void *);
131 static tree find_alloca_call (tree);
132 static tree find_builtin_longjmp_call_1 (tree *, int *, void *);
133 static tree find_builtin_longjmp_call (tree);
135 /* Remap DECL during the copying of the BLOCK tree for the function. */
137 static tree
138 remap_decl (tree decl, inline_data *id)
140 splay_tree_node n;
141 tree fn;
143 /* We only remap local variables in the current function. */
144 fn = VARRAY_TOP_TREE (id->fns);
145 if (! (*lang_hooks.tree_inlining.auto_var_in_fn_p) (decl, fn))
146 return NULL_TREE;
148 /* See if we have remapped this declaration. */
149 n = splay_tree_lookup (id->decl_map, (splay_tree_key) decl);
150 /* If we didn't already have an equivalent for this declaration,
151 create one now. */
152 if (!n)
154 tree t;
156 /* Make a copy of the variable or label. */
157 t = copy_decl_for_inlining (decl, fn,
158 VARRAY_TREE (id->fns, 0));
160 /* The decl T could be a dynamic array or other variable-sized type,
161 in which case some fields need to be remapped because they may
162 contain SAVE_EXPRs. */
163 if (TREE_TYPE (t) && TREE_CODE (TREE_TYPE (t)) == ARRAY_TYPE
164 && TYPE_DOMAIN (TREE_TYPE (t)))
166 TREE_TYPE (t) = copy_node (TREE_TYPE (t));
167 TYPE_DOMAIN (TREE_TYPE (t))
168 = copy_node (TYPE_DOMAIN (TREE_TYPE (t)));
169 walk_tree (&TYPE_MAX_VALUE (TYPE_DOMAIN (TREE_TYPE (t))),
170 copy_body_r, id, NULL);
173 #ifndef INLINER_FOR_JAVA
174 if (! DECL_NAME (t) && TREE_TYPE (t)
175 && (*lang_hooks.tree_inlining.anon_aggr_type_p) (TREE_TYPE (t)))
177 /* For a VAR_DECL of anonymous type, we must also copy the
178 member VAR_DECLS here and rechain the
179 DECL_ANON_UNION_ELEMS. */
180 tree members = NULL;
181 tree src;
183 for (src = DECL_ANON_UNION_ELEMS (t); src;
184 src = TREE_CHAIN (src))
186 tree member = remap_decl (TREE_VALUE (src), id);
188 if (TREE_PURPOSE (src))
189 abort ();
190 members = tree_cons (NULL, member, members);
192 DECL_ANON_UNION_ELEMS (t) = nreverse (members);
194 #endif /* not INLINER_FOR_JAVA */
196 /* Remember it, so that if we encounter this local entity
197 again we can reuse this copy. */
198 n = splay_tree_insert (id->decl_map,
199 (splay_tree_key) decl,
200 (splay_tree_value) t);
203 return (tree) n->value;
206 #ifndef INLINER_FOR_JAVA
207 /* Copy the SCOPE_STMT_BLOCK associated with SCOPE_STMT to contain
208 remapped versions of the variables therein. And hook the new block
209 into the block-tree. If non-NULL, the DECLS are declarations to
210 use instead of the BLOCK_VARS in the old block. */
211 #else /* INLINER_FOR_JAVA */
212 /* Copy the BLOCK to contain remapped versions of the variables
213 therein. And hook the new block into the block-tree. */
214 #endif /* INLINER_FOR_JAVA */
216 static void
217 #ifndef INLINER_FOR_JAVA
218 remap_block (tree scope_stmt, tree decls, inline_data *id)
219 #else /* INLINER_FOR_JAVA */
220 remap_block (tree *block, tree decls, inline_data *id)
221 #endif /* INLINER_FOR_JAVA */
223 #ifndef INLINER_FOR_JAVA
224 /* We cannot do this in the cleanup for a TARGET_EXPR since we do
225 not know whether or not expand_expr will actually write out the
226 code we put there. If it does not, then we'll have more BLOCKs
227 than block-notes, and things will go awry. At some point, we
228 should make the back-end handle BLOCK notes in a tidier way,
229 without requiring a strict correspondence to the block-tree; then
230 this check can go. */
231 if (id->in_target_cleanup_p)
233 SCOPE_STMT_BLOCK (scope_stmt) = NULL_TREE;
234 return;
237 /* If this is the beginning of a scope, remap the associated BLOCK. */
238 if (SCOPE_BEGIN_P (scope_stmt) && SCOPE_STMT_BLOCK (scope_stmt))
240 tree old_block;
241 tree new_block;
242 tree old_var;
243 tree fn;
245 /* Make the new block. */
246 old_block = SCOPE_STMT_BLOCK (scope_stmt);
247 new_block = make_node (BLOCK);
248 TREE_USED (new_block) = TREE_USED (old_block);
249 BLOCK_ABSTRACT_ORIGIN (new_block) = old_block;
250 SCOPE_STMT_BLOCK (scope_stmt) = new_block;
252 /* Remap its variables. */
253 for (old_var = decls ? decls : BLOCK_VARS (old_block);
254 old_var;
255 old_var = TREE_CHAIN (old_var))
257 tree new_var;
259 /* Remap the variable. */
260 new_var = remap_decl (old_var, id);
261 /* If we didn't remap this variable, we can't mess with
262 its TREE_CHAIN. If we remapped this variable to
263 something other than a declaration (say, if we mapped it
264 to a constant), then we must similarly omit any mention
265 of it here. */
266 if (!new_var || !DECL_P (new_var))
268 else
270 TREE_CHAIN (new_var) = BLOCK_VARS (new_block);
271 BLOCK_VARS (new_block) = new_var;
274 /* We put the BLOCK_VARS in reverse order; fix that now. */
275 BLOCK_VARS (new_block) = nreverse (BLOCK_VARS (new_block));
276 fn = VARRAY_TREE (id->fns, 0);
277 if (id->cloning_p)
278 /* We're building a clone; DECL_INITIAL is still
279 error_mark_node, and current_binding_level is the parm
280 binding level. */
281 (*lang_hooks.decls.insert_block) (new_block);
282 else
284 /* Attach this new block after the DECL_INITIAL block for the
285 function into which this block is being inlined. In
286 rest_of_compilation we will straighten out the BLOCK tree. */
287 tree *first_block;
288 if (DECL_INITIAL (fn))
289 first_block = &BLOCK_CHAIN (DECL_INITIAL (fn));
290 else
291 first_block = &DECL_INITIAL (fn);
292 BLOCK_CHAIN (new_block) = *first_block;
293 *first_block = new_block;
295 /* Remember the remapped block. */
296 splay_tree_insert (id->decl_map,
297 (splay_tree_key) old_block,
298 (splay_tree_value) new_block);
300 /* If this is the end of a scope, set the SCOPE_STMT_BLOCK to be the
301 remapped block. */
302 else if (SCOPE_END_P (scope_stmt) && SCOPE_STMT_BLOCK (scope_stmt))
304 splay_tree_node n;
306 /* Find this block in the table of remapped things. */
307 n = splay_tree_lookup (id->decl_map,
308 (splay_tree_key) SCOPE_STMT_BLOCK (scope_stmt));
309 if (! n)
310 abort ();
311 SCOPE_STMT_BLOCK (scope_stmt) = (tree) n->value;
313 #else /* INLINER_FOR_JAVA */
314 tree old_block;
315 tree new_block;
316 tree old_var;
317 tree fn;
319 /* Make the new block. */
320 old_block = *block;
321 new_block = make_node (BLOCK);
322 TREE_USED (new_block) = TREE_USED (old_block);
323 BLOCK_ABSTRACT_ORIGIN (new_block) = old_block;
324 BLOCK_SUBBLOCKS (new_block) = BLOCK_SUBBLOCKS (old_block);
325 TREE_SIDE_EFFECTS (new_block) = TREE_SIDE_EFFECTS (old_block);
326 TREE_TYPE (new_block) = TREE_TYPE (old_block);
327 *block = new_block;
329 /* Remap its variables. */
330 for (old_var = decls ? decls : BLOCK_VARS (old_block);
331 old_var;
332 old_var = TREE_CHAIN (old_var))
334 tree new_var;
336 /* All local class initialization flags go in the outermost
337 scope. */
338 if (LOCAL_CLASS_INITIALIZATION_FLAG_P (old_var))
340 /* We may already have one. */
341 if (! splay_tree_lookup (id->decl_map, (splay_tree_key) old_var))
343 tree outermost_block;
344 new_var = remap_decl (old_var, id);
345 DECL_ABSTRACT_ORIGIN (new_var) = NULL;
346 outermost_block = DECL_SAVED_TREE (current_function_decl);
347 TREE_CHAIN (new_var) = BLOCK_VARS (outermost_block);
348 BLOCK_VARS (outermost_block) = new_var;
350 continue;
353 /* Remap the variable. */
354 new_var = remap_decl (old_var, id);
355 /* If we didn't remap this variable, we can't mess with
356 its TREE_CHAIN. If we remapped this variable to
357 something other than a declaration (say, if we mapped it
358 to a constant), then we must similarly omit any mention
359 of it here. */
360 if (!new_var || !DECL_P (new_var))
362 else
364 TREE_CHAIN (new_var) = BLOCK_VARS (new_block);
365 BLOCK_VARS (new_block) = new_var;
368 /* We put the BLOCK_VARS in reverse order; fix that now. */
369 BLOCK_VARS (new_block) = nreverse (BLOCK_VARS (new_block));
370 fn = VARRAY_TREE (id->fns, 0);
371 /* Remember the remapped block. */
372 splay_tree_insert (id->decl_map,
373 (splay_tree_key) old_block,
374 (splay_tree_value) new_block);
375 #endif /* INLINER_FOR_JAVA */
378 #ifndef INLINER_FOR_JAVA
379 /* Copy the SCOPE_STMT pointed to by TP. */
381 static void
382 copy_scope_stmt (tree *tp, int *walk_subtrees, inline_data *id)
384 tree block;
386 /* Remember whether or not this statement was nullified. When
387 making a copy, copy_tree_r always sets SCOPE_NULLIFIED_P (and
388 doesn't copy the SCOPE_STMT_BLOCK) to free callers from having to
389 deal with copying BLOCKs if they do not wish to do so. */
390 block = SCOPE_STMT_BLOCK (*tp);
391 /* Copy (and replace) the statement. */
392 copy_tree_r (tp, walk_subtrees, NULL);
393 /* Restore the SCOPE_STMT_BLOCK. */
394 SCOPE_STMT_BLOCK (*tp) = block;
396 /* Remap the associated block. */
397 remap_block (*tp, NULL_TREE, id);
399 #endif /* not INLINER_FOR_JAVA */
401 /* Called from copy_body via walk_tree. DATA is really an
402 `inline_data *'. */
403 static tree
404 copy_body_r (tree *tp, int *walk_subtrees, void *data)
406 inline_data* id;
407 tree fn;
409 /* Set up. */
410 id = (inline_data *) data;
411 fn = VARRAY_TOP_TREE (id->fns);
413 #if 0
414 /* All automatic variables should have a DECL_CONTEXT indicating
415 what function they come from. */
416 if ((TREE_CODE (*tp) == VAR_DECL || TREE_CODE (*tp) == LABEL_DECL)
417 && DECL_NAMESPACE_SCOPE_P (*tp))
418 if (! DECL_EXTERNAL (*tp) && ! TREE_STATIC (*tp))
419 abort ();
420 #endif
422 #ifdef INLINER_FOR_JAVA
423 if (TREE_CODE (*tp) == BLOCK)
424 remap_block (tp, NULL_TREE, id);
425 #endif
427 /* If this is a RETURN_STMT, change it into an EXPR_STMT and a
428 GOTO_STMT with the RET_LABEL as its target. */
429 #ifndef INLINER_FOR_JAVA
430 if (TREE_CODE (*tp) == RETURN_STMT && id->ret_label)
431 #else /* INLINER_FOR_JAVA */
432 if (TREE_CODE (*tp) == RETURN_EXPR && id->ret_label)
433 #endif /* INLINER_FOR_JAVA */
435 tree return_stmt = *tp;
436 tree goto_stmt;
438 /* Build the GOTO_STMT. */
439 #ifndef INLINER_FOR_JAVA
440 goto_stmt = build_stmt (GOTO_STMT, id->ret_label);
441 TREE_CHAIN (goto_stmt) = TREE_CHAIN (return_stmt);
442 GOTO_FAKE_P (goto_stmt) = 1;
443 #else /* INLINER_FOR_JAVA */
444 tree assignment = TREE_OPERAND (return_stmt, 0);
445 goto_stmt = build1 (GOTO_EXPR, void_type_node, id->ret_label);
446 TREE_SIDE_EFFECTS (goto_stmt) = 1;
447 #endif /* INLINER_FOR_JAVA */
449 /* If we're returning something, just turn that into an
450 assignment into the equivalent of the original
451 RESULT_DECL. */
452 #ifndef INLINER_FOR_JAVA
453 if (RETURN_STMT_EXPR (return_stmt))
455 *tp = build_stmt (EXPR_STMT,
456 RETURN_STMT_EXPR (return_stmt));
457 STMT_IS_FULL_EXPR_P (*tp) = 1;
458 /* And then jump to the end of the function. */
459 TREE_CHAIN (*tp) = goto_stmt;
461 #else /* INLINER_FOR_JAVA */
462 if (assignment)
464 copy_body_r (&assignment, walk_subtrees, data);
465 *tp = build (COMPOUND_EXPR, void_type_node, assignment, goto_stmt);
466 TREE_SIDE_EFFECTS (*tp) = 1;
468 #endif /* INLINER_FOR_JAVA */
469 /* If we're not returning anything just do the jump. */
470 else
471 *tp = goto_stmt;
473 /* Local variables and labels need to be replaced by equivalent
474 variables. We don't want to copy static variables; there's only
475 one of those, no matter how many times we inline the containing
476 function. */
477 else if ((*lang_hooks.tree_inlining.auto_var_in_fn_p) (*tp, fn))
479 tree new_decl;
481 /* Remap the declaration. */
482 new_decl = remap_decl (*tp, id);
483 if (! new_decl)
484 abort ();
485 /* Replace this variable with the copy. */
486 STRIP_TYPE_NOPS (new_decl);
487 *tp = new_decl;
489 #if 0
490 else if (nonstatic_local_decl_p (*tp)
491 && DECL_CONTEXT (*tp) != VARRAY_TREE (id->fns, 0))
492 abort ();
493 #endif
494 else if (TREE_CODE (*tp) == SAVE_EXPR)
495 remap_save_expr (tp, id->decl_map, VARRAY_TREE (id->fns, 0),
496 walk_subtrees);
497 else if (TREE_CODE (*tp) == UNSAVE_EXPR)
498 /* UNSAVE_EXPRs should not be generated until expansion time. */
499 abort ();
500 #ifndef INLINER_FOR_JAVA
501 /* For a SCOPE_STMT, we must copy the associated block so that we
502 can write out debugging information for the inlined variables. */
503 else if (TREE_CODE (*tp) == SCOPE_STMT && !id->in_target_cleanup_p)
504 copy_scope_stmt (tp, walk_subtrees, id);
505 #else /* INLINER_FOR_JAVA */
506 else if (TREE_CODE (*tp) == LABELED_BLOCK_EXPR)
508 /* We need a new copy of this labeled block; the EXIT_BLOCK_EXPR
509 will refer to it, so save a copy ready for remapping. We
510 save it in the decl_map, although it isn't a decl. */
511 tree new_block = copy_node (*tp);
512 splay_tree_insert (id->decl_map,
513 (splay_tree_key) *tp,
514 (splay_tree_value) new_block);
515 *tp = new_block;
517 else if (TREE_CODE (*tp) == EXIT_BLOCK_EXPR)
519 splay_tree_node n
520 = splay_tree_lookup (id->decl_map,
521 (splay_tree_key) TREE_OPERAND (*tp, 0));
522 /* We _must_ have seen the enclosing LABELED_BLOCK_EXPR. */
523 if (! n)
524 abort ();
525 *tp = copy_node (*tp);
526 TREE_OPERAND (*tp, 0) = (tree) n->value;
528 #endif /* INLINER_FOR_JAVA */
529 /* Otherwise, just copy the node. Note that copy_tree_r already
530 knows not to copy VAR_DECLs, etc., so this is safe. */
531 else
533 if (TREE_CODE (*tp) == MODIFY_EXPR
534 && TREE_OPERAND (*tp, 0) == TREE_OPERAND (*tp, 1)
535 && ((*lang_hooks.tree_inlining.auto_var_in_fn_p)
536 (TREE_OPERAND (*tp, 0), fn)))
538 /* Some assignments VAR = VAR; don't generate any rtl code
539 and thus don't count as variable modification. Avoid
540 keeping bogosities like 0 = 0. */
541 tree decl = TREE_OPERAND (*tp, 0), value;
542 splay_tree_node n;
544 n = splay_tree_lookup (id->decl_map, (splay_tree_key) decl);
545 if (n)
547 value = (tree) n->value;
548 STRIP_TYPE_NOPS (value);
549 if (TREE_CONSTANT (value) || TREE_READONLY_DECL_P (value))
551 *tp = value;
552 return copy_body_r (tp, walk_subtrees, data);
556 else if (TREE_CODE (*tp) == ADDR_EXPR
557 && ((*lang_hooks.tree_inlining.auto_var_in_fn_p)
558 (TREE_OPERAND (*tp, 0), fn)))
560 /* Get rid of &* from inline substitutions. It can occur when
561 someone takes the address of a parm or return slot passed by
562 invisible reference. */
563 tree decl = TREE_OPERAND (*tp, 0), value;
564 splay_tree_node n;
566 n = splay_tree_lookup (id->decl_map, (splay_tree_key) decl);
567 if (n)
569 value = (tree) n->value;
570 if (TREE_CODE (value) == INDIRECT_REF)
572 *tp = convert (TREE_TYPE (*tp), TREE_OPERAND (value, 0));
573 return copy_body_r (tp, walk_subtrees, data);
578 copy_tree_r (tp, walk_subtrees, NULL);
580 /* The copied TARGET_EXPR has never been expanded, even if the
581 original node was expanded already. */
582 if (TREE_CODE (*tp) == TARGET_EXPR && TREE_OPERAND (*tp, 3))
584 TREE_OPERAND (*tp, 1) = TREE_OPERAND (*tp, 3);
585 TREE_OPERAND (*tp, 3) = NULL_TREE;
589 /* Keep iterating. */
590 return NULL_TREE;
593 /* Make a copy of the body of FN so that it can be inserted inline in
594 another function. */
596 static tree
597 copy_body (inline_data *id)
599 tree body;
601 body = DECL_SAVED_TREE (VARRAY_TOP_TREE (id->fns));
602 walk_tree (&body, copy_body_r, id, NULL);
604 return body;
607 /* Generate code to initialize the parameters of the function at the
608 top of the stack in ID from the ARGS (presented as a TREE_LIST). */
610 static tree
611 #ifndef INLINER_FOR_JAVA
612 initialize_inlined_parameters (inline_data *id, tree args, tree fn)
613 #else /* INLINER_FOR_JAVA */
614 initialize_inlined_parameters (inline_data *id, tree args, tree fn, tree block)
615 #endif /* INLINER_FOR_JAVA */
617 tree init_stmts;
618 tree parms;
619 tree a;
620 tree p;
621 #ifdef INLINER_FOR_JAVA
622 tree vars = NULL_TREE;
623 #endif /* INLINER_FOR_JAVA */
625 /* Figure out what the parameters are. */
626 parms = DECL_ARGUMENTS (fn);
628 /* Start with no initializations whatsoever. */
629 init_stmts = NULL_TREE;
631 /* Loop through the parameter declarations, replacing each with an
632 equivalent VAR_DECL, appropriately initialized. */
633 for (p = parms, a = args; p;
634 a = a ? TREE_CHAIN (a) : a, p = TREE_CHAIN (p))
636 #ifndef INLINER_FOR_JAVA
637 tree init_stmt;
638 tree cleanup;
639 #endif /* not INLINER_FOR_JAVA */
640 tree var;
641 tree value;
642 tree var_sub;
644 /* Find the initializer. */
645 value = (*lang_hooks.tree_inlining.convert_parm_for_inlining)
646 (p, a ? TREE_VALUE (a) : NULL_TREE, fn);
648 /* If the parameter is never assigned to, we may not need to
649 create a new variable here at all. Instead, we may be able
650 to just use the argument value. */
651 if (TREE_READONLY (p)
652 && !TREE_ADDRESSABLE (p)
653 && value && !TREE_SIDE_EFFECTS (value))
655 /* Simplify the value, if possible. */
656 value = fold (DECL_P (value) ? decl_constant_value (value) : value);
658 /* We can't risk substituting complex expressions. They
659 might contain variables that will be assigned to later.
660 Theoretically, we could check the expression to see if
661 all of the variables that determine its value are
662 read-only, but we don't bother. */
663 if (TREE_CONSTANT (value) || TREE_READONLY_DECL_P (value))
665 /* If this is a declaration, wrap it in a NOP_EXPR so that
666 we don't try to put the VALUE on the list of
667 BLOCK_VARS. */
668 if (DECL_P (value))
669 value = build1 (NOP_EXPR, TREE_TYPE (value), value);
671 /* If this is a constant, make sure it has the right type. */
672 else if (TREE_TYPE (value) != TREE_TYPE (p))
673 value = fold (build1 (NOP_EXPR, TREE_TYPE (p), value));
675 splay_tree_insert (id->decl_map,
676 (splay_tree_key) p,
677 (splay_tree_value) value);
678 continue;
682 /* Make an equivalent VAR_DECL. */
683 var = copy_decl_for_inlining (p, fn, VARRAY_TREE (id->fns, 0));
685 /* See if the frontend wants to pass this by invisible reference. If
686 so, our new VAR_DECL will have REFERENCE_TYPE, and we need to
687 replace uses of the PARM_DECL with dereferences. */
688 if (TREE_TYPE (var) != TREE_TYPE (p)
689 && POINTER_TYPE_P (TREE_TYPE (var))
690 && TREE_TYPE (TREE_TYPE (var)) == TREE_TYPE (p))
691 var_sub = build1 (INDIRECT_REF, TREE_TYPE (p), var);
692 else
693 var_sub = var;
695 /* Register the VAR_DECL as the equivalent for the PARM_DECL;
696 that way, when the PARM_DECL is encountered, it will be
697 automatically replaced by the VAR_DECL. */
698 splay_tree_insert (id->decl_map,
699 (splay_tree_key) p,
700 (splay_tree_value) var_sub);
702 /* Declare this new variable. */
703 #ifndef INLINER_FOR_JAVA
704 init_stmt = build_stmt (DECL_STMT, var);
705 TREE_CHAIN (init_stmt) = init_stmts;
706 init_stmts = init_stmt;
707 #else /* INLINER_FOR_JAVA */
708 TREE_CHAIN (var) = vars;
709 vars = var;
710 #endif /* INLINER_FOR_JAVA */
712 /* Initialize this VAR_DECL from the equivalent argument. If
713 the argument is an object, created via a constructor or copy,
714 this will not result in an extra copy: the TARGET_EXPR
715 representing the argument will be bound to VAR, and the
716 object will be constructed in VAR. */
717 if (! TYPE_NEEDS_CONSTRUCTING (TREE_TYPE (p)))
718 #ifndef INLINER_FOR_JAVA
719 DECL_INITIAL (var) = value;
720 else
722 /* Even if P was TREE_READONLY, the new VAR should not be.
723 In the original code, we would have constructed a
724 temporary, and then the function body would have never
725 changed the value of P. However, now, we will be
726 constructing VAR directly. The constructor body may
727 change its value multiple times as it is being
728 constructed. Therefore, it must not be TREE_READONLY;
729 the back-end assumes that a TREE_READONLY variable is
730 assigned to only once. */
731 TREE_READONLY (var) = 0;
733 /* Build a run-time initialization. */
734 init_stmt = build_stmt (EXPR_STMT,
735 build (INIT_EXPR, TREE_TYPE (p),
736 var, value));
737 /* Add this initialization to the list. Note that we want the
738 declaration *after* the initialization because we are going
739 to reverse all the initialization statements below. */
740 TREE_CHAIN (init_stmt) = init_stmts;
741 init_stmts = init_stmt;
744 /* See if we need to clean up the declaration. */
745 cleanup = (*lang_hooks.maybe_build_cleanup) (var);
746 if (cleanup)
748 tree cleanup_stmt;
749 /* Build the cleanup statement. */
750 cleanup_stmt = build_stmt (CLEANUP_STMT, var, cleanup);
751 /* Add it to the *front* of the list; the list will be
752 reversed below. */
753 TREE_CHAIN (cleanup_stmt) = init_stmts;
754 init_stmts = cleanup_stmt;
756 #else /* INLINER_FOR_JAVA */
758 tree assignment = build (MODIFY_EXPR, TREE_TYPE (p), var, value);
759 init_stmts = add_stmt_to_compound (init_stmts, TREE_TYPE (p),
760 assignment);
762 else
764 /* Java objects don't ever need constructing when being
765 passed as arguments because only call by reference is
766 supported. */
767 abort ();
769 #endif /* INLINER_FOR_JAVA */
772 #ifndef INLINER_FOR_JAVA
773 /* Evaluate trailing arguments. */
774 for (; a; a = TREE_CHAIN (a))
776 tree init_stmt;
777 tree value = TREE_VALUE (a);
779 if (! value || ! TREE_SIDE_EFFECTS (value))
780 continue;
782 init_stmt = build_stmt (EXPR_STMT, value);
783 TREE_CHAIN (init_stmt) = init_stmts;
784 init_stmts = init_stmt;
787 /* The initialization statements have been built up in reverse
788 order. Straighten them out now. */
789 return nreverse (init_stmts);
790 #else /* INLINER_FOR_JAVA */
791 BLOCK_VARS (block) = nreverse (vars);
792 return init_stmts;
793 #endif /* INLINER_FOR_JAVA */
796 /* Declare a return variable to replace the RESULT_DECL for the
797 function we are calling. An appropriate DECL_STMT is returned.
798 The USE_STMT is filled in to contain a use of the declaration to
799 indicate the return value of the function. */
801 #ifndef INLINER_FOR_JAVA
802 static tree
803 declare_return_variable (struct inline_data *id, tree return_slot_addr,
804 tree *use_stmt)
805 #else /* INLINER_FOR_JAVA */
806 static tree
807 declare_return_variable (struct inline_data *id, tree return_slot_addr,
808 tree *var)
809 #endif /* INLINER_FOR_JAVA */
811 tree fn = VARRAY_TOP_TREE (id->fns);
812 tree result = DECL_RESULT (fn);
813 #ifndef INLINER_FOR_JAVA
814 tree var;
815 #endif /* not INLINER_FOR_JAVA */
816 int need_return_decl = 1;
818 /* We don't need to do anything for functions that don't return
819 anything. */
820 if (!result || VOID_TYPE_P (TREE_TYPE (result)))
822 #ifndef INLINER_FOR_JAVA
823 *use_stmt = NULL_TREE;
824 #else /* INLINER_FOR_JAVA */
825 *var = NULL_TREE;
826 #endif /* INLINER_FOR_JAVA */
827 return NULL_TREE;
830 #ifndef INLINER_FOR_JAVA
831 var = ((*lang_hooks.tree_inlining.copy_res_decl_for_inlining)
832 (result, fn, VARRAY_TREE (id->fns, 0), id->decl_map,
833 &need_return_decl, return_slot_addr));
835 /* Register the VAR_DECL as the equivalent for the RESULT_DECL; that
836 way, when the RESULT_DECL is encountered, it will be
837 automatically replaced by the VAR_DECL. */
838 splay_tree_insert (id->decl_map,
839 (splay_tree_key) result,
840 (splay_tree_value) var);
842 /* Build the USE_STMT. If the return type of the function was
843 promoted, convert it back to the expected type. */
844 if (TREE_TYPE (var) == TREE_TYPE (TREE_TYPE (fn)))
845 *use_stmt = build_stmt (EXPR_STMT, var);
846 else
847 *use_stmt = build_stmt (EXPR_STMT,
848 build1 (NOP_EXPR, TREE_TYPE (TREE_TYPE (fn)),
849 var));
850 TREE_ADDRESSABLE (*use_stmt) = 1;
852 /* Build the declaration statement if FN does not return an
853 aggregate. */
854 if (need_return_decl)
855 return build_stmt (DECL_STMT, var);
856 #else /* INLINER_FOR_JAVA */
857 *var = ((*lang_hooks.tree_inlining.copy_res_decl_for_inlining)
858 (result, fn, VARRAY_TREE (id->fns, 0), id->decl_map,
859 &need_return_decl, return_slot_addr));
861 splay_tree_insert (id->decl_map,
862 (splay_tree_key) result,
863 (splay_tree_value) *var);
864 DECL_IGNORED_P (*var) = 1;
865 if (need_return_decl)
866 return *var;
867 #endif /* INLINER_FOR_JAVA */
868 /* If FN does return an aggregate, there's no need to declare the
869 return variable; we're using a variable in our caller's frame. */
870 else
871 return NULL_TREE;
874 /* Returns nonzero if a function can be inlined as a tree. */
877 tree_inlinable_function_p (tree fn, int nolimit)
879 return inlinable_function_p (fn, NULL, nolimit);
882 /* If *TP is possibly a call to alloca, return nonzero. */
883 static tree
884 find_alloca_call_1 (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED,
885 void *data ATTRIBUTE_UNUSED)
887 if (alloca_call_p (*tp))
888 return *tp;
889 return NULL;
892 /* Return a subexpression representing a possible alloca call, if any. */
893 static tree
894 find_alloca_call (tree exp)
896 location_t saved_loc = input_location;
897 tree ret = walk_tree_without_duplicates
898 (&exp, find_alloca_call_1, NULL);
899 input_location = saved_loc;
900 return ret;
903 static tree
904 find_builtin_longjmp_call_1 (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED,
905 void *data ATTRIBUTE_UNUSED)
907 tree exp = *tp, decl;
909 if (TREE_CODE (exp) == CALL_EXPR
910 && TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
911 && (decl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
912 TREE_CODE (decl) == FUNCTION_DECL)
913 && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
914 && DECL_FUNCTION_CODE (decl) == BUILT_IN_LONGJMP)
915 return decl;
917 return NULL;
920 static tree
921 find_builtin_longjmp_call (tree exp)
923 location_t saved_loc = input_location;
924 tree ret = walk_tree_without_duplicates
925 (&exp, find_builtin_longjmp_call_1, NULL);
926 input_location = saved_loc;
927 return ret;
930 /* Returns nonzero if FN is a function that can be inlined into the
931 inlining context ID. If ID is NULL, check whether the function
932 can be inlined at all. */
934 static int
935 inlinable_function_p (tree fn, inline_data *id, int nolimit)
937 int inlinable;
938 int currfn_insns = 0;
939 int max_inline_insns_single = MAX_INLINE_INSNS_SINGLE;
941 /* If we've already decided this function shouldn't be inlined,
942 there's no need to check again. */
943 if (DECL_UNINLINABLE (fn))
944 return 0;
946 /* See if there is any language-specific reason it cannot be
947 inlined. (It is important that this hook be called early because
948 in C++ it may result in template instantiation.) */
949 inlinable = !(*lang_hooks.tree_inlining.cannot_inline_tree_fn) (&fn);
951 /* We may be here either because fn is declared inline or because
952 we use -finline-functions. For the second case, we are more
953 restrictive. */
954 if (DID_INLINE_FUNC (fn))
955 max_inline_insns_single = MAX_INLINE_INSNS_AUTO;
957 /* The estimated number of instructions in the current function. */
958 if (!nolimit && !DECL_ESTIMATED_INSNS (fn))
959 DECL_ESTIMATED_INSNS (fn)
960 = (*lang_hooks.tree_inlining.estimate_num_insns) (fn);
961 currfn_insns = DECL_ESTIMATED_INSNS (fn);
963 /* If we're not inlining things, then nothing is inlinable. */
964 if (! flag_inline_trees)
965 inlinable = 0;
966 /* If we're not inlining all functions and the function was not
967 declared `inline', we don't inline it. Don't think of
968 disregarding DECL_INLINE when flag_inline_trees == 2; it's the
969 front-end that must set DECL_INLINE in this case, because
970 dwarf2out loses if a function is inlined that doesn't have
971 DECL_INLINE set. */
972 else if (! DECL_INLINE (fn) && !nolimit)
973 inlinable = 0;
974 #ifdef INLINER_FOR_JAVA
975 /* Synchronized methods can't be inlined. This is a bug. */
976 else if (METHOD_SYNCHRONIZED (fn))
977 inlinable = 0;
978 #endif /* INLINER_FOR_JAVA */
979 /* We can't inline functions that are too big. Only allow a single
980 function to be of MAX_INLINE_INSNS_SINGLE size. Make special
981 allowance for extern inline functions, though. */
982 else if (!nolimit
983 && ! (*lang_hooks.tree_inlining.disregard_inline_limits) (fn)
984 && currfn_insns > max_inline_insns_single)
985 inlinable = 0;
986 /* We can't inline functions that call __builtin_longjmp at all.
987 The non-local goto machinery really requires the destination
988 be in a different function. If we allow the function calling
989 __builtin_longjmp to be inlined into the function calling
990 __builtin_setjmp, Things will Go Awry. */
991 /* ??? Need front end help to identify "regular" non-local goto. */
992 else if (find_builtin_longjmp_call (DECL_SAVED_TREE (fn)))
993 inlinable = 0;
994 /* Refuse to inline an alloca call unless the user explicitly forced it,
995 as this may drastically increase the program's memory overhead when the
996 function using alloca is called in a loop. In the GCC included in SPEC2000,
997 inlining into schedule_block caused it to require 2GB of RAM instead of 256MB. */
998 else if (lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn)) == NULL
999 && find_alloca_call (DECL_SAVED_TREE (fn)))
1000 inlinable = 0;
1002 /* Squirrel away the result so that we don't have to check again. */
1003 DECL_UNINLINABLE (fn) = ! inlinable;
1005 /* If we are not disregarding the inlining limits and this function
1006 is basically inlinable, investigate further. */
1007 if (! (*lang_hooks.tree_inlining.disregard_inline_limits) (fn)
1008 && inlinable && !nolimit)
1010 int sum_insns = (id ? id->inlined_insns : 0) + currfn_insns;
1011 /* In the extreme case that we have exceeded the recursive inlining
1012 limit by a huge factor (128), we just say no. Should not happen
1013 in real life. */
1014 if (sum_insns > MAX_INLINE_INSNS * 128)
1015 inlinable = 0;
1016 /* If we did not hit the extreme limit, we use a linear function
1017 with slope -1/MAX_INLINE_SLOPE to progressively decrease the
1018 allowable size. We always allow a size of MIN_INLINE_INSNS
1019 though. */
1020 else if ((sum_insns > MAX_INLINE_INSNS)
1021 && (currfn_insns > MIN_INLINE_INSNS))
1023 int max_curr = MAX_INLINE_INSNS_SINGLE
1024 - (sum_insns - MAX_INLINE_INSNS) / MAX_INLINE_SLOPE;
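/* Worked example with illustrative parameter values (the real defaults
   live in params.def): if MAX_INLINE_INSNS_SINGLE is 300, MAX_INLINE_INSNS
   is 600 and MAX_INLINE_SLOPE is 32, then a call stack that has already
   accumulated sum_insns == 920 gives max_curr = 300 - (920 - 600) / 32
   = 290, so a callee estimated at more than 290 insns is rejected at
   this depth.  */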
1025 if (currfn_insns > max_curr)
1026 inlinable = 0;
1030 /* If we don't have the function body available, we can't inline
1031 it. */
1032 if (! DECL_SAVED_TREE (fn))
1033 inlinable = 0;
1035 /* Check again, language hooks may have modified it. */
1036 if (! inlinable || DECL_UNINLINABLE (fn))
1037 return 0;
1039 /* Don't do recursive inlining, either. We don't record this in
1040 DECL_UNINLINABLE; we may be able to inline this function later. */
1041 if (id)
1043 size_t i;
1045 for (i = 0; i < VARRAY_ACTIVE_SIZE (id->fns); ++i)
1046 if (VARRAY_TREE (id->fns, i) == fn)
1047 return 0;
1049 if (DECL_INLINED_FNS (fn))
1051 int j;
1052 tree inlined_fns = DECL_INLINED_FNS (fn);
1054 for (j = 0; j < TREE_VEC_LENGTH (inlined_fns); ++j)
1055 if (TREE_VEC_ELT (inlined_fns, j) == VARRAY_TREE (id->fns, 0))
1056 return 0;
1060 /* Return the result. */
1061 return inlinable;
1064 /* If *TP is a CALL_EXPR, replace it with its inline expansion. */
1066 static tree
1067 expand_call_inline (tree *tp, int *walk_subtrees, void *data)
1069 inline_data *id;
1070 tree t;
1071 tree expr;
1072 tree stmt;
1073 #ifndef INLINER_FOR_JAVA
1074 tree chain;
1075 tree scope_stmt;
1076 tree use_stmt;
1077 #else /* INLINER_FOR_JAVA */
1078 tree retvar;
1079 #endif /* INLINER_FOR_JAVA */
1080 tree fn;
1081 tree arg_inits;
1082 tree *inlined_body;
1083 splay_tree st;
1084 tree args;
1085 tree return_slot_addr;
1087 /* See what we've got. */
1088 id = (inline_data *) data;
1089 t = *tp;
1091 /* Recurse, but letting recursive invocations know that we are
1092 inside the body of a TARGET_EXPR. */
1093 if (TREE_CODE (*tp) == TARGET_EXPR)
1095 #ifndef INLINER_FOR_JAVA
1096 int i, len = first_rtl_op (TARGET_EXPR);
1098 /* We're walking our own subtrees. */
1099 *walk_subtrees = 0;
1101 /* Actually walk over them. This loop is the body of
1102 walk_tree, omitting the case where the TARGET_EXPR
1103 itself is handled. */
1104 for (i = 0; i < len; ++i)
1106 if (i == 2)
1107 ++id->in_target_cleanup_p;
1108 walk_tree (&TREE_OPERAND (*tp, i), expand_call_inline, data,
1109 id->tree_pruner);
1110 if (i == 2)
1111 --id->in_target_cleanup_p;
1114 return NULL_TREE;
1115 #else /* INLINER_FOR_JAVA */
1116 abort ();
1117 #endif /* INLINER_FOR_JAVA */
1119 else if (TREE_CODE (t) == EXPR_WITH_FILE_LOCATION)
1121 /* We're walking the subtree directly. */
1122 *walk_subtrees = 0;
1123 /* Update the source position. */
1124 push_srcloc (EXPR_WFL_FILENAME (t), EXPR_WFL_LINENO (t));
1125 walk_tree (&EXPR_WFL_NODE (t), expand_call_inline, data,
1126 id->tree_pruner);
1127 /* Restore the original source position. */
1128 pop_srcloc ();
1130 return NULL_TREE;
1133 if (TYPE_P (t))
1134 /* Because types were not copied in copy_body, CALL_EXPRs beneath
1135 them should not be expanded. This can happen if the type is a
1136 dynamic array type, for example. */
1137 *walk_subtrees = 0;
1139 /* From here on, we're only interested in CALL_EXPRs. */
1140 if (TREE_CODE (t) != CALL_EXPR)
1141 return NULL_TREE;
1143 /* First, see if we can figure out what function is being called.
1144 If we cannot, then there is no hope of inlining the function. */
1145 fn = get_callee_fndecl (t);
1146 if (!fn)
1147 return NULL_TREE;
1149 /* Turn forward declarations into real ones. */
1150 if (flag_unit_at_a_time)
1151 fn = cgraph_node (fn)->decl;
1153 /* If fn is a declaration of a function in a nested scope that was
1154 globally declared inline, we don't set its DECL_INITIAL.
1155 However, we can't blindly follow DECL_ABSTRACT_ORIGIN because the
1156 C++ front-end uses it for cdtors to refer to their internal
1157 declarations, which are not real functions. Fortunately those
1158 don't have trees to be saved, so we can tell by checking their
1159 DECL_SAVED_TREE. */
1160 if (! DECL_INITIAL (fn)
1161 && DECL_ABSTRACT_ORIGIN (fn)
1162 && DECL_SAVED_TREE (DECL_ABSTRACT_ORIGIN (fn)))
1163 fn = DECL_ABSTRACT_ORIGIN (fn);
1165 /* Don't try to inline functions that are not well-suited to
1166 inlining. */
1167 if (!DECL_SAVED_TREE (fn)
1168 || (flag_unit_at_a_time && !cgraph_inline_p (id->current_decl, fn))
1169 || (!flag_unit_at_a_time && !inlinable_function_p (fn, id, 0)))
1171 if (warn_inline && DECL_INLINE (fn) && !DID_INLINE_FUNC (fn)
1172 && !DECL_IN_SYSTEM_HEADER (fn))
1174 warning_with_decl (fn, "inlining failed in call to `%s'");
1175 warning ("called from here");
1177 return NULL_TREE;
1180 if (! (*lang_hooks.tree_inlining.start_inlining) (fn))
1181 return NULL_TREE;
1183 /* Set the current filename and line number to the function we are
1184 inlining so that when we create new _STMT nodes here they get
1185 line numbers corresponding to the function we are calling. We
1186 wrap the whole inlined body in an EXPR_WITH_FILE_AND_LINE as well
1187 because individual statements don't record the filename. */
1188 push_srcloc (DECL_SOURCE_FILE (fn), DECL_SOURCE_LINE (fn));
1190 #ifndef INLINER_FOR_JAVA
1191 /* Build a statement-expression containing code to initialize the
1192 arguments, the actual inline expansion of the body, and a label
1193 for the return statements within the function to jump to. The
1194 type of the statement expression is the return type of the
1195 function call. */
1196 expr = build1 (STMT_EXPR, TREE_TYPE (TREE_TYPE (fn)), make_node (COMPOUND_STMT));
1197 /* There is no scope associated with the statement-expression. */
1198 STMT_EXPR_NO_SCOPE (expr) = 1;
1199 stmt = STMT_EXPR_STMT (expr);
1200 #else /* INLINER_FOR_JAVA */
1201 /* Build a block containing code to initialize the arguments, the
1202 actual inline expansion of the body, and a label for the return
1203 statements within the function to jump to. The type of the
1204 statement expression is the return type of the function call. */
1205 stmt = NULL;
1206 expr = build (BLOCK, TREE_TYPE (TREE_TYPE (fn)), stmt);
1207 #endif /* INLINER_FOR_JAVA */
1209 /* Local declarations will be replaced by their equivalents in this
1210 map. */
1211 st = id->decl_map;
1212 id->decl_map = splay_tree_new (splay_tree_compare_pointers,
1213 NULL, NULL);
1215 /* Initialize the parameters. */
1216 args = TREE_OPERAND (t, 1);
1217 return_slot_addr = NULL_TREE;
1218 if (CALL_EXPR_HAS_RETURN_SLOT_ADDR (t))
1220 return_slot_addr = TREE_VALUE (args);
1221 args = TREE_CHAIN (args);
1224 #ifndef INLINER_FOR_JAVA
1225 arg_inits = initialize_inlined_parameters (id, args, fn);
1226 /* Expand any inlined calls in the initializers. Do this before we
1227 push FN on the stack of functions we are inlining; we want to
1228 inline calls to FN that appear in the initializers for the
1229 parameters. */
1230 expand_calls_inline (&arg_inits, id);
1231 /* And add them to the tree. */
1232 COMPOUND_BODY (stmt) = chainon (COMPOUND_BODY (stmt), arg_inits);
1233 #else /* INLINER_FOR_JAVA */
1234 arg_inits = initialize_inlined_parameters (id, args, fn, expr);
1235 if (arg_inits)
1237 /* Expand any inlined calls in the initializers. Do this before we
1238 push FN on the stack of functions we are inlining; we want to
1239 inline calls to FN that appear in the initializers for the
1240 parameters. */
1241 expand_calls_inline (&arg_inits, id);
1243 /* And add them to the tree. */
1244 BLOCK_EXPR_BODY (expr) = add_stmt_to_compound (BLOCK_EXPR_BODY (expr),
1245 TREE_TYPE (arg_inits),
1246 arg_inits);
1248 #endif /* INLINER_FOR_JAVA */
1250 /* Record the function we are about to inline so that we can avoid
1251 recursing into it. */
1252 VARRAY_PUSH_TREE (id->fns, fn);
1254 /* Record the function we are about to inline if optimize_inline_calls
1255 has not been called on it yet and we don't have it in the list. */
1256 if (! DECL_INLINED_FNS (fn))
1258 int i;
1260 for (i = VARRAY_ACTIVE_SIZE (id->inlined_fns) - 1; i >= 0; i--)
1261 if (VARRAY_TREE (id->inlined_fns, i) == fn)
1262 break;
1263 if (i < 0)
1264 VARRAY_PUSH_TREE (id->inlined_fns, fn);
1267 /* Return statements in the function body will be replaced by jumps
1268 to the RET_LABEL. */
1269 id->ret_label = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);
1270 DECL_CONTEXT (id->ret_label) = VARRAY_TREE (id->fns, 0);
1272 if (! DECL_INITIAL (fn)
1273 || TREE_CODE (DECL_INITIAL (fn)) != BLOCK)
1274 abort ();
1276 #ifndef INLINER_FOR_JAVA
1277 /* Create a block to put the parameters in. We have to do this
1278 after the parameters have been remapped because remapping
1279 parameters is different from remapping ordinary variables. */
1280 scope_stmt = build_stmt (SCOPE_STMT, DECL_INITIAL (fn));
1281 SCOPE_BEGIN_P (scope_stmt) = 1;
1282 SCOPE_NO_CLEANUPS_P (scope_stmt) = 1;
1283 remap_block (scope_stmt, DECL_ARGUMENTS (fn), id);
1284 TREE_CHAIN (scope_stmt) = COMPOUND_BODY (stmt);
1285 COMPOUND_BODY (stmt) = scope_stmt;
1287 /* Tell the debugging backends that this block represents the
1288 outermost scope of the inlined function. */
1289 if (SCOPE_STMT_BLOCK (scope_stmt))
1290 BLOCK_ABSTRACT_ORIGIN (SCOPE_STMT_BLOCK (scope_stmt)) = DECL_ORIGIN (fn);
1292 /* Declare the return variable for the function. */
1293 COMPOUND_BODY (stmt)
1294 = chainon (COMPOUND_BODY (stmt),
1295 declare_return_variable (id, return_slot_addr, &use_stmt));
1296 #else /* INLINER_FOR_JAVA */
1298 /* Declare the return variable for the function. */
1299 tree decl = declare_return_variable (id, return_slot_addr, &retvar);
1300 if (retvar)
1302 tree *next = &BLOCK_VARS (expr);
1303 while (*next)
1304 next = &TREE_CHAIN (*next);
1305 *next = decl;
1308 #endif /* INLINER_FOR_JAVA */
1310 /* After we've initialized the parameters, we insert the body of the
1311 function itself. */
1312 #ifndef INLINER_FOR_JAVA
1313 inlined_body = &COMPOUND_BODY (stmt);
1314 while (*inlined_body)
1315 inlined_body = &TREE_CHAIN (*inlined_body);
1316 *inlined_body = copy_body (id);
1317 #else /* INLINER_FOR_JAVA */
1319 tree new_body;
1320 java_inlining_map_static_initializers (fn, id->decl_map);
1321 new_body = copy_body (id);
1322 TREE_TYPE (new_body) = TREE_TYPE (TREE_TYPE (fn));
1323 BLOCK_EXPR_BODY (expr)
1324 = add_stmt_to_compound (BLOCK_EXPR_BODY (expr),
1325 TREE_TYPE (new_body), new_body);
1326 inlined_body = &BLOCK_EXPR_BODY (expr);
1328 #endif /* INLINER_FOR_JAVA */
1330 /* After the body of the function comes the RET_LABEL. This must come
1331 before we evaluate the returned value below, because that evaluation
1332 may cause RTL to be generated. */
1333 #ifndef INLINER_FOR_JAVA
1334 COMPOUND_BODY (stmt)
1335 = chainon (COMPOUND_BODY (stmt),
1336 build_stmt (LABEL_STMT, id->ret_label));
1337 #else /* INLINER_FOR_JAVA */
1339 tree label = build1 (LABEL_EXPR, void_type_node, id->ret_label);
1340 BLOCK_EXPR_BODY (expr)
1341 = add_stmt_to_compound (BLOCK_EXPR_BODY (expr), void_type_node, label);
1342 TREE_SIDE_EFFECTS (label) = TREE_SIDE_EFFECTS (t);
1344 #endif /* INLINER_FOR_JAVA */
1346 /* Finally, mention the returned value so that the value of the
1347 statement-expression is the returned value of the function. */
1348 #ifndef INLINER_FOR_JAVA
1349 COMPOUND_BODY (stmt) = chainon (COMPOUND_BODY (stmt), use_stmt);
1351 /* Close the block for the parameters. */
1352 scope_stmt = build_stmt (SCOPE_STMT, DECL_INITIAL (fn));
1353 SCOPE_NO_CLEANUPS_P (scope_stmt) = 1;
1354 remap_block (scope_stmt, NULL_TREE, id);
1355 COMPOUND_BODY (stmt)
1356 = chainon (COMPOUND_BODY (stmt), scope_stmt);
1357 #else /* INLINER_FOR_JAVA */
1358 if (retvar)
1360 /* Mention the retvar. If the return type of the function was
1361 promoted, convert it back to the expected type. */
1362 if (TREE_TYPE (TREE_TYPE (fn)) != TREE_TYPE (retvar))
1363 retvar = build1 (NOP_EXPR, TREE_TYPE (TREE_TYPE (fn)), retvar);
1364 BLOCK_EXPR_BODY (expr)
1365 = add_stmt_to_compound (BLOCK_EXPR_BODY (expr),
1366 TREE_TYPE (retvar), retvar);
1369 java_inlining_merge_static_initializers (fn, id->decl_map);
1370 #endif /* INLINER_FOR_JAVA */
1372 /* Clean up. */
1373 splay_tree_delete (id->decl_map);
1374 id->decl_map = st;
1376 /* The new expression has side-effects if the old one did. */
1377 TREE_SIDE_EFFECTS (expr) = TREE_SIDE_EFFECTS (t);
1379 /* Replace the call by the inlined body. Wrap it in an
1380 EXPR_WITH_FILE_LOCATION so that we'll get debugging line notes
1381 pointing to the right place. */
1382 #ifndef INLINER_FOR_JAVA
1383 chain = TREE_CHAIN (*tp);
1384 #endif /* INLINER_FOR_JAVA */
1385 *tp = build_expr_wfl (expr, DECL_SOURCE_FILE (fn), DECL_SOURCE_LINE (fn),
1386 /*col=*/0);
1387 EXPR_WFL_EMIT_LINE_NOTE (*tp) = 1;
1388 #ifndef INLINER_FOR_JAVA
1389 TREE_CHAIN (*tp) = chain;
1390 #endif /* not INLINER_FOR_JAVA */
1391 pop_srcloc ();
1393 /* If the value of the new expression is ignored, that's OK. We
1394 don't warn about this for CALL_EXPRs, so we shouldn't warn about
1395 the equivalent inlined version either. */
1396 TREE_USED (*tp) = 1;
1398 /* Our function now has more statements than it did before. */
1399 DECL_ESTIMATED_INSNS (VARRAY_TREE (id->fns, 0)) += DECL_ESTIMATED_INSNS (fn);
1400 /* For accounting, subtract one for the saved call/ret. */
1401 id->inlined_insns += DECL_ESTIMATED_INSNS (fn) - 1;
1403 /* Update callgraph if needed. */
1404 if (id->decl && flag_unit_at_a_time)
1406 cgraph_remove_call (id->decl, fn);
1407 cgraph_create_edges (id->decl, *inlined_body);
1410 /* Recurse into the body of the just inlined function. */
1412 tree old_decl = id->current_decl;
1413 id->current_decl = fn;
1414 expand_calls_inline (inlined_body, id);
1415 id->current_decl = old_decl;
1417 VARRAY_POP (id->fns);
1419 /* If we've returned to the top level, clear out the record of how
1420 much inlining has been done. */
1421 if (VARRAY_ACTIVE_SIZE (id->fns) == id->first_inlined_fn)
1422 id->inlined_insns = 0;
1424 /* Don't walk into subtrees. We've already handled them above. */
1425 *walk_subtrees = 0;
1427 (*lang_hooks.tree_inlining.end_inlining) (fn);
1429 /* Keep iterating. */
1430 return NULL_TREE;
1432 /* Walk over the entire tree *TP, replacing CALL_EXPRs with inline
1433 expansions as appropriate. */
1435 static void
1436 expand_calls_inline (tree *tp, inline_data *id)
1438 /* Search through *TP, replacing all calls to inline functions by
1439 appropriate equivalents. Use walk_tree in no-duplicates mode
1440 to avoid exponential time complexity. (We can't just use
1441 walk_tree_without_duplicates, because of the special TARGET_EXPR
1442 handling in expand_call_inline.) The hash table is set up in
1443 optimize_inline_calls. */
1444 walk_tree (tp, expand_call_inline, id, id->tree_pruner);
1447 /* Expand calls to inline functions in the body of FN. */
1449 void
1450 optimize_inline_calls (tree fn)
1452 inline_data id;
1453 tree prev_fn;
1455 /* Clear out ID. */
1456 memset (&id, 0, sizeof (id));
1458 id.decl = fn;
1459 id.current_decl = fn;
1460 /* Don't allow recursion into FN. */
1461 VARRAY_TREE_INIT (id.fns, 32, "fns");
1462 VARRAY_PUSH_TREE (id.fns, fn);
1463 if (!DECL_ESTIMATED_INSNS (fn))
1464 DECL_ESTIMATED_INSNS (fn)
1465 = (*lang_hooks.tree_inlining.estimate_num_insns) (fn);
1466 /* Or any functions that aren't finished yet. */
1467 prev_fn = NULL_TREE;
1468 if (current_function_decl)
1470 VARRAY_PUSH_TREE (id.fns, current_function_decl);
1471 prev_fn = current_function_decl;
1474 prev_fn = ((*lang_hooks.tree_inlining.add_pending_fn_decls)
1475 (&id.fns, prev_fn));
1477 /* Create the list of functions this call will inline. */
1478 VARRAY_TREE_INIT (id.inlined_fns, 32, "inlined_fns");
1480 /* Keep track of the low-water mark, i.e., the point where the first
1481 real inlining is represented in ID.FNS. */
1482 id.first_inlined_fn = VARRAY_ACTIVE_SIZE (id.fns);
1484 /* Replace all calls to inline functions with the bodies of those
1485 functions. */
1486 id.tree_pruner = htab_create (37, htab_hash_pointer,
1487 htab_eq_pointer, NULL);
1488 expand_calls_inline (&DECL_SAVED_TREE (fn), &id);
1490 /* Clean up. */
1491 htab_delete (id.tree_pruner);
1492 if (DECL_LANG_SPECIFIC (fn))
1494 tree ifn = make_tree_vec (VARRAY_ACTIVE_SIZE (id.inlined_fns));
1496 if (VARRAY_ACTIVE_SIZE (id.inlined_fns))
1497 memcpy (&TREE_VEC_ELT (ifn, 0), &VARRAY_TREE (id.inlined_fns, 0),
1498 VARRAY_ACTIVE_SIZE (id.inlined_fns) * sizeof (tree));
1499 DECL_INLINED_FNS (fn) = ifn;
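/* Illustrative sketch only; the real call sites live in the front ends.
   A front end runs this pass over each function body before expanding it
   to RTL, roughly as

       if (flag_inline_trees)
         optimize_inline_calls (fndecl);

   where FNDECL is the FUNCTION_DECL whose DECL_SAVED_TREE is about to be
   expanded; fndecl is a hypothetical local variable of that caller.  */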
1503 /* FN is a function that has a complete body, and CLONE is a function
1504 whose body is to be set to a copy of FN, mapping argument
1505 declarations according to the ARG_MAP splay_tree. */
1507 void
1508 clone_body (tree clone, tree fn, void *arg_map)
1510 inline_data id;
1512 /* Clone the body, as if we were making an inline call. But, remap
1513 the parameters in the callee to the parameters of the caller. If
1514 there's an in-charge parameter, map it to an appropriate
1515 constant. */
1516 memset (&id, 0, sizeof (id));
1517 VARRAY_TREE_INIT (id.fns, 2, "fns");
1518 VARRAY_PUSH_TREE (id.fns, clone);
1519 VARRAY_PUSH_TREE (id.fns, fn);
1520 id.decl_map = (splay_tree)arg_map;
1522 /* Cloning is treated slightly differently from inlining. Set
1523 CLONING_P so that it's clear which operation we're performing. */
1524 id.cloning_p = true;
1526 /* Actually copy the body. */
1527 TREE_CHAIN (DECL_SAVED_TREE (clone)) = copy_body (&id);
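/* Illustrative sketch only: a front end that emits clones (for example the
   C++ front end when building constructor/destructor variants) would call
   this roughly as

       splay_tree decl_map
         = splay_tree_new (splay_tree_compare_pointers, NULL, NULL);
       ... insert an (old parm -> new parm) mapping for each parameter ...
       clone_body (clone, fn, decl_map);

   CLONE, FN and DECL_MAP above are hypothetical caller-side names.  */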
1530 /* Apply FUNC to all the sub-trees of TP in a pre-order traversal.
1531 FUNC is called with the DATA and the address of each sub-tree. If
1532 FUNC returns a non-NULL value, the traversal is aborted, and the
1533 value returned by FUNC is returned. If HTAB is non-NULL it is used
1534 to record the nodes visited, and to avoid visiting a node more than
1535 once. */
1537 tree
1538 walk_tree (tree *tp, walk_tree_fn func, void *data, void *htab_)
1540 htab_t htab = (htab_t) htab_;
1541 enum tree_code code;
1542 int walk_subtrees;
1543 tree result;
1545 #define WALK_SUBTREE(NODE) \
1546 do \
1548 result = walk_tree (&(NODE), func, data, htab); \
1549 if (result) \
1550 return result; \
1552 while (0)
1554 #define WALK_SUBTREE_TAIL(NODE) \
1555 do \
1557 tp = & (NODE); \
1558 goto tail_recurse; \
1560 while (0)
1562 tail_recurse:
1563 /* Skip empty subtrees. */
1564 if (!*tp)
1565 return NULL_TREE;
1567 if (htab)
1569 void **slot;
1571 /* Don't walk the same tree twice, if the user has requested
1572 that we avoid doing so. */
1573 slot = htab_find_slot (htab, *tp, INSERT);
1574 if (*slot)
1575 return NULL_TREE;
1576 *slot = *tp;
1579 /* Call the function. */
1580 walk_subtrees = 1;
1581 result = (*func) (tp, &walk_subtrees, data);
1583 /* If we found something, return it. */
1584 if (result)
1585 return result;
1587 code = TREE_CODE (*tp);
1589 #ifndef INLINER_FOR_JAVA
1590 /* Even if we didn't, FUNC may have decided that there was nothing
1591 interesting below this point in the tree. */
1592 if (!walk_subtrees)
1594 if (STATEMENT_CODE_P (code) || code == TREE_LIST
1595 || (*lang_hooks.tree_inlining.tree_chain_matters_p) (*tp))
1596 /* But we still need to check our siblings. */
1597 WALK_SUBTREE_TAIL (TREE_CHAIN (*tp));
1598 else
1599 return NULL_TREE;
1602 /* Handle common cases up front. */
1603 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code))
1604 || TREE_CODE_CLASS (code) == 'r'
1605 || TREE_CODE_CLASS (code) == 's')
1606 #else /* INLINER_FOR_JAVA */
1607 if (code != EXIT_BLOCK_EXPR
1608 && code != SAVE_EXPR
1609 && (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code))
1610 || TREE_CODE_CLASS (code) == 'r'
1611 || TREE_CODE_CLASS (code) == 's'))
1612 #endif /* INLINER_FOR_JAVA */
1614 int i, len;
1616 #ifndef INLINER_FOR_JAVA
1617 /* Set lineno here so we get the right instantiation context
1618 if we call instantiate_decl from inlinable_function_p. */
1619 if (STATEMENT_CODE_P (code) && !STMT_LINENO_FOR_FN_P (*tp))
1620 input_line = STMT_LINENO (*tp);
1621 #endif /* not INLINER_FOR_JAVA */
1623 /* Walk over all the sub-trees of this operand. */
1624 len = first_rtl_op (code);
1625 /* TARGET_EXPRs are peculiar: operands 1 and 3 can be the same.
1626 But, we only want to walk once. */
1627 if (code == TARGET_EXPR
1628 && TREE_OPERAND (*tp, 3) == TREE_OPERAND (*tp, 1))
1629 --len;
1630 /* Go through the subtrees. We need to do this in forward order so
1631 that the scope of a FOR_EXPR is handled properly. */
1632 for (i = 0; i < len; ++i)
1633 WALK_SUBTREE (TREE_OPERAND (*tp, i));
1635 #ifndef INLINER_FOR_JAVA
1636 /* For statements, we also walk the chain so that we cover the
1637 entire statement tree. */
1638 if (STATEMENT_CODE_P (code))
1640 if (code == DECL_STMT
1641 && DECL_STMT_DECL (*tp)
1642 && DECL_P (DECL_STMT_DECL (*tp)))
1644 /* Walk the DECL_INITIAL and DECL_SIZE. We don't want to walk
1645 into declarations that are just mentioned, rather than
1646 declared; they don't really belong to this part of the tree.
1647 And, we can see cycles: the initializer for a declaration can
1648 refer to the declaration itself. */
1649 WALK_SUBTREE (DECL_INITIAL (DECL_STMT_DECL (*tp)));
1650 WALK_SUBTREE (DECL_SIZE (DECL_STMT_DECL (*tp)));
1651 WALK_SUBTREE (DECL_SIZE_UNIT (DECL_STMT_DECL (*tp)));
1654 /* This can be tail-recursion optimized if we write it this way. */
1655 WALK_SUBTREE_TAIL (TREE_CHAIN (*tp));
1658 #endif /* not INLINER_FOR_JAVA */
1659 /* We didn't find what we were looking for. */
1660 return NULL_TREE;
1662 else if (TREE_CODE_CLASS (code) == 'd')
1664 WALK_SUBTREE_TAIL (TREE_TYPE (*tp));
1666 else if (TREE_CODE_CLASS (code) == 't')
1668 WALK_SUBTREE (TYPE_SIZE (*tp));
1669 WALK_SUBTREE (TYPE_SIZE_UNIT (*tp));
1670 /* Also examine various special fields, below. */
1673 result = (*lang_hooks.tree_inlining.walk_subtrees) (tp, &walk_subtrees, func,
1674 data, htab);
1675 if (result || ! walk_subtrees)
1676 return result;
1678 /* Not one of the easy cases. We must explicitly go through the
1679 children. */
1680 switch (code)
1682 case ERROR_MARK:
1683 case IDENTIFIER_NODE:
1684 case INTEGER_CST:
1685 case REAL_CST:
1686 case VECTOR_CST:
1687 case STRING_CST:
1688 case REAL_TYPE:
1689 case COMPLEX_TYPE:
1690 case VECTOR_TYPE:
1691 case VOID_TYPE:
1692 case BOOLEAN_TYPE:
1693 case UNION_TYPE:
1694 case ENUMERAL_TYPE:
1695 case BLOCK:
1696 case RECORD_TYPE:
1697 case CHAR_TYPE:
1698 /* None of these have subtrees other than those already walked
1699 above. */
1700 break;
1702 case POINTER_TYPE:
1703 case REFERENCE_TYPE:
1704 WALK_SUBTREE_TAIL (TREE_TYPE (*tp));
1705 break;
1707 case TREE_LIST:
1708 WALK_SUBTREE (TREE_VALUE (*tp));
1709 WALK_SUBTREE_TAIL (TREE_CHAIN (*tp));
1710 break;
1712 case TREE_VEC:
1714 int len = TREE_VEC_LENGTH (*tp);
1716 if (len == 0)
1717 break;
1719 /* Walk all elements but the first. */
1720 while (--len)
1721 WALK_SUBTREE (TREE_VEC_ELT (*tp, len));
1723 /* Now walk the first one as a tail call. */
1724 WALK_SUBTREE_TAIL (TREE_VEC_ELT (*tp, 0));
1727 case COMPLEX_CST:
1728 WALK_SUBTREE (TREE_REALPART (*tp));
1729 WALK_SUBTREE_TAIL (TREE_IMAGPART (*tp));
1731 case CONSTRUCTOR:
1732 WALK_SUBTREE_TAIL (CONSTRUCTOR_ELTS (*tp));
1734 case METHOD_TYPE:
1735 WALK_SUBTREE (TYPE_METHOD_BASETYPE (*tp));
1736 /* Fall through. */
1738 case FUNCTION_TYPE:
1739 WALK_SUBTREE (TREE_TYPE (*tp));
1741 tree arg = TYPE_ARG_TYPES (*tp);
1743 /* We never want to walk into default arguments. */
1744 for (; arg; arg = TREE_CHAIN (arg))
1745 WALK_SUBTREE (TREE_VALUE (arg));
1747 break;
1749 case ARRAY_TYPE:
1750 WALK_SUBTREE (TREE_TYPE (*tp));
1751 WALK_SUBTREE_TAIL (TYPE_DOMAIN (*tp));
1753 case INTEGER_TYPE:
1754 WALK_SUBTREE (TYPE_MIN_VALUE (*tp));
1755 WALK_SUBTREE_TAIL (TYPE_MAX_VALUE (*tp));
1757 case OFFSET_TYPE:
1758 WALK_SUBTREE (TREE_TYPE (*tp));
1759 WALK_SUBTREE_TAIL (TYPE_OFFSET_BASETYPE (*tp));
1761 #ifdef INLINER_FOR_JAVA
1762 case EXIT_BLOCK_EXPR:
1763 WALK_SUBTREE_TAIL (TREE_OPERAND (*tp, 1));
1765 case SAVE_EXPR:
1766 WALK_SUBTREE_TAIL (TREE_OPERAND (*tp, 0));
1767 #endif /* INLINER_FOR_JAVA */
1769 default:
1770 abort ();
1773 /* We didn't find what we were looking for. */
1774 return NULL_TREE;
1776 #undef WALK_SUBTREE
1777 #undef WALK_SUBTREE_TAIL
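/* Illustrative sketch of the callback protocol (hypothetical code, not part
   of this file): a walk_tree_fn receives the address of each node, may clear
   *WALK_SUBTREES to prune the walk, and aborts the traversal by returning
   non-NULL.  For instance, a caller could count CALL_EXPRs in a body with

       static tree
       count_calls_r (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED,
                      void *data)
       {
         if (TREE_CODE (*tp) == CALL_EXPR)
           ++*(int *) data;
         return NULL_TREE;
       }

       int count = 0;
       walk_tree_without_duplicates (&body, count_calls_r, &count);

   count_calls_r, count and body are names invented for this example.  */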
1780 /* Like walk_tree, but does not walk duplicate nodes more than
1781 once. */
1783 tree
1784 walk_tree_without_duplicates (tree *tp, walk_tree_fn func, void *data)
1786 tree result;
1787 htab_t htab;
1789 htab = htab_create (37, htab_hash_pointer, htab_eq_pointer, NULL);
1790 result = walk_tree (tp, func, data, htab);
1791 htab_delete (htab);
1792 return result;
1795 /* Passed to walk_tree. Copies the node pointed to, if appropriate. */
1797 tree
1798 copy_tree_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
1800 enum tree_code code = TREE_CODE (*tp);
1802 /* We make copies of most nodes. */
1803 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code))
1804 || TREE_CODE_CLASS (code) == 'r'
1805 || TREE_CODE_CLASS (code) == 'c'
1806 || TREE_CODE_CLASS (code) == 's'
1807 || code == TREE_LIST
1808 || code == TREE_VEC
1809 || (*lang_hooks.tree_inlining.tree_chain_matters_p) (*tp))
1811 /* Because the chain gets clobbered when we make a copy, we save it
1812 here. */
1813 tree chain = TREE_CHAIN (*tp);
1815 /* Copy the node. */
1816 *tp = copy_node (*tp);
1818 /* Now, restore the chain, if appropriate. That will cause
1819 walk_tree to walk into the chain as well. */
1820 if (code == PARM_DECL || code == TREE_LIST
1821 #ifndef INLINER_FOR_JAVA
1822 || (*lang_hooks.tree_inlining.tree_chain_matters_p) (*tp)
1823 || STATEMENT_CODE_P (code))
1824 TREE_CHAIN (*tp) = chain;
1826 /* For now, we don't update BLOCKs when we make copies. So, we
1827 have to nullify all scope-statements. */
1828 if (TREE_CODE (*tp) == SCOPE_STMT)
1829 SCOPE_STMT_BLOCK (*tp) = NULL_TREE;
1830 #else /* INLINER_FOR_JAVA */
1831 || (*lang_hooks.tree_inlining.tree_chain_matters_p) (*tp))
1832 TREE_CHAIN (*tp) = chain;
1833 #endif /* INLINER_FOR_JAVA */
1835 else if (TREE_CODE_CLASS (code) == 't' && !variably_modified_type_p (*tp))
1836 /* Types only need to be copied if they are variably modified. */
1837 *walk_subtrees = 0;
1839 return NULL_TREE;
1842 /* The SAVE_EXPR pointed to by TP is being copied. If ST contains
1843 information indicating to what new SAVE_EXPR this one should be
1844 mapped, use that one. Otherwise, create a new node and enter it in
1845 ST. FN is the function into which the copy will be placed. */
1847 void
1848 remap_save_expr (tree *tp, void *st_, tree fn, int *walk_subtrees)
1850 splay_tree st = (splay_tree) st_;
1851 splay_tree_node n;
1853 /* See if we already encountered this SAVE_EXPR. */
1854 n = splay_tree_lookup (st, (splay_tree_key) *tp);
1856 /* If we didn't already remap this SAVE_EXPR, do so now. */
1857 if (!n)
1859 tree t = copy_node (*tp);
1861 /* The SAVE_EXPR is now part of the function into which we
1862 are inlining this body. */
1863 SAVE_EXPR_CONTEXT (t) = fn;
1864 /* And we haven't evaluated it yet. */
1865 SAVE_EXPR_RTL (t) = NULL_RTX;
1866 /* Remember this SAVE_EXPR. */
1867 n = splay_tree_insert (st,
1868 (splay_tree_key) *tp,
1869 (splay_tree_value) t);
1870 /* Make sure we don't remap an already-remapped SAVE_EXPR. */
1871 splay_tree_insert (st, (splay_tree_key) t,
1872 (splay_tree_value) error_mark_node);
1874 else
1875 /* We've already walked into this SAVE_EXPR, so we needn't do it
1876 again. */
1877 *walk_subtrees = 0;
1879 /* Replace this SAVE_EXPR with the copy. */
1880 *tp = (tree) n->value;
1883 #ifdef INLINER_FOR_JAVA
1884 /* Add STMT to EXISTING if possible, otherwise create a new
1885 COMPOUND_EXPR and add STMT to it. */
1887 static tree
1888 add_stmt_to_compound (tree existing, tree type, tree stmt)
1890 if (!stmt)
1891 return existing;
1892 else if (existing)
1893 return build (COMPOUND_EXPR, type, existing, stmt);
1894 else
1895 return stmt;
1898 #endif /* INLINER_FOR_JAVA */