1 /* Tree inlining.
2 Copyright 2001, 2002, 2003, 2004, 2005 Free Software Foundation, Inc.
3 Contributed by Alexandre Oliva <aoliva@redhat.com>
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 2, or (at your option)
10 any later version.
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to
19 the Free Software Foundation, 59 Temple Place - Suite 330,
20 Boston, MA 02111-1307, USA. */
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tm.h"
26 #include "toplev.h"
27 #include "tree.h"
28 #include "tree-inline.h"
29 #include "rtl.h"
30 #include "expr.h"
31 #include "flags.h"
32 #include "params.h"
33 #include "input.h"
34 #include "insn-config.h"
35 #include "integrate.h"
36 #include "varray.h"
37 #include "hashtab.h"
38 #include "splay-tree.h"
39 #include "langhooks.h"
40 #include "cgraph.h"
41 #include "intl.h"
42 #include "tree-mudflap.h"
43 #include "tree-flow.h"
44 #include "function.h"
45 #include "diagnostic.h"
46 #include "debug.h"
48 /* I'm not real happy about this, but we need to handle gimple and
49 non-gimple trees. */
50 #include "tree-iterator.h"
51 #include "tree-gimple.h"
53 /* 0 if we should not perform inlining.
54 1 if we should expand function calls inline at the tree level.
55 2 if we should consider *all* functions to be inline
56 candidates. */
58 int flag_inline_trees = 0;
60 /* To Do:
62 o In order to make inlining-on-trees work, we pessimized
63 function-local static constants. In particular, they are now
64 always output, even when not addressed. Fix this by treating
65 function-local static constants just like global static
66 constants; the back-end already knows not to output them if they
67 are not needed.
69 o Provide heuristics to clamp inlining of recursive template
70 calls? */
72 /* Data required for function inlining. */
74 typedef struct inline_data
76 /* A stack of the functions we are inlining. For example, if we are
77 compiling `f', which calls `g', which calls `h', and we are
78 inlining the body of `h', the stack will contain, `h', followed
79 by `g', followed by `f'. The first few elements of the stack may
80 contain other functions that we know we should not recurse into,
81 even though they are not directly being inlined. */
82 varray_type fns;
83 /* The index of the first element of FNS that really represents an
84 inlined function. */
85 unsigned first_inlined_fn;
86 /* The label to jump to when a return statement is encountered. If
87 this value is NULL, then return statements will simply be
88 remapped as return statements, rather than as jumps. */
89 tree ret_label;
90 /* The VAR_DECL for the return value. */
91 tree retvar;
92 /* The map from local declarations in the inlined function to
93 equivalents in the function into which it is being inlined. */
94 splay_tree decl_map;
95 /* Nonzero if we are currently within the cleanup for a
96 TARGET_EXPR. */
97 int in_target_cleanup_p;
98 /* We use the same mechanism to build clones that we do to perform
99 inlining. However, there are a few places where we need to
100 distinguish between those two situations. This flag is true if
101 we are cloning, rather than inlining. */
102 bool cloning_p;
103 /* Similarly for saving function body. */
104 bool saving_p;
105 /* Hash table used to prevent walk_tree from visiting the same node
106 umpteen million times. */
107 htab_t tree_pruner;
108 /* Callgraph node of function we are inlining into. */
109 struct cgraph_node *node;
110 /* Callgraph node of currently inlined function. */
111 struct cgraph_node *current_node;
112 /* Statement iterator. We need this so we can keep the tree in
113 gimple form when we insert the inlined function. It is not
114 used when we are not dealing with gimple trees. */
115 tree_stmt_iterator tsi;
116 } inline_data;
118 /* Prototypes. */
120 static tree copy_body_r (tree *, int *, void *);
121 static tree copy_body (inline_data *);
122 static tree expand_call_inline (tree *, int *, void *);
123 static void expand_calls_inline (tree *, inline_data *);
124 static bool inlinable_function_p (tree);
125 static tree remap_decl (tree, inline_data *);
126 static tree remap_type (tree, inline_data *);
127 static tree initialize_inlined_parameters (inline_data *, tree,
128 tree, tree, tree);
129 static void remap_block (tree *, inline_data *);
130 static tree remap_decls (tree, inline_data *);
131 static void copy_bind_expr (tree *, int *, inline_data *);
132 static tree mark_local_for_remap_r (tree *, int *, void *);
133 static void unsave_expr_1 (tree);
134 static tree unsave_r (tree *, int *, void *);
135 static void declare_inline_vars (tree bind_expr, tree vars);
136 static void remap_save_expr (tree *, void *, int *);
138 /* Insert a tree->tree mapping for ID.  Although the name suggests
139 that the trees should be variables, it is used for more than that. */
141 static void
142 insert_decl_map (inline_data *id, tree key, tree value)
144 splay_tree_insert (id->decl_map, (splay_tree_key) key,
145 (splay_tree_value) value);
147 /* Always insert an identity map as well. If we see this same new
148 node again, we won't want to duplicate it a second time. */
149 if (key != value)
150 splay_tree_insert (id->decl_map, (splay_tree_key) value,
151 (splay_tree_value) value);
154 /* Remap DECL during the copying of the BLOCK tree for the function.
155 We are only called to remap local variables in the current function. */
157 static tree
158 remap_decl (tree decl, inline_data *id)
160 splay_tree_node n = splay_tree_lookup (id->decl_map, (splay_tree_key) decl);
161 tree fn = VARRAY_TOP_TREE (id->fns);
163 /* See if we have remapped this declaration. If we didn't already have an
164 equivalent for this declaration, create one now. */
165 if (!n)
167 /* Make a copy of the variable or label. */
168 tree t = copy_decl_for_inlining (decl, fn, VARRAY_TREE (id->fns, 0));
170 /* Remember it, so that if we encounter this local entity again
171 we can reuse this copy. Do this early because remap_type may
172 need this decl for TYPE_STUB_DECL. */
173 insert_decl_map (id, decl, t);
175 /* Remap types, if necessary. */
176 TREE_TYPE (t) = remap_type (TREE_TYPE (t), id);
177 if (TREE_CODE (t) == TYPE_DECL)
178 DECL_ORIGINAL_TYPE (t) = remap_type (DECL_ORIGINAL_TYPE (t), id);
179 else if (TREE_CODE (t) == PARM_DECL)
180 DECL_ARG_TYPE_AS_WRITTEN (t)
181 = remap_type (DECL_ARG_TYPE_AS_WRITTEN (t), id);
183 /* Remap sizes as necessary. */
184 walk_tree (&DECL_SIZE (t), copy_body_r, id, NULL);
185 walk_tree (&DECL_SIZE_UNIT (t), copy_body_r, id, NULL);
187 /* If fields, do likewise for offset and qualifier. */
188 if (TREE_CODE (t) == FIELD_DECL)
190 walk_tree (&DECL_FIELD_OFFSET (t), copy_body_r, id, NULL);
191 if (TREE_CODE (DECL_CONTEXT (t)) == QUAL_UNION_TYPE)
192 walk_tree (&DECL_QUALIFIER (t), copy_body_r, id, NULL);
195 #if 0
196 /* FIXME handle anon aggrs. */
197 if (! DECL_NAME (t) && TREE_TYPE (t)
198 && lang_hooks.tree_inlining.anon_aggr_type_p (TREE_TYPE (t)))
200 /* For a VAR_DECL of anonymous type, we must also copy the
201 member VAR_DECLS here and rechain the DECL_ANON_UNION_ELEMS. */
202 tree members = NULL;
203 tree src;
205 for (src = DECL_ANON_UNION_ELEMS (t); src;
206 src = TREE_CHAIN (src))
208 tree member = remap_decl (TREE_VALUE (src), id);
210 gcc_assert (!TREE_PURPOSE (src));
211 members = tree_cons (NULL, member, members);
213 DECL_ANON_UNION_ELEMS (t) = nreverse (members);
215 #endif
217 return t;
220 return unshare_expr ((tree) n->value);
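/* Remap TYPE in the context of the inlining described by ID.  Types that
   are not variably modified by the function being inlined are returned
   unchanged; otherwise a remapped copy is built, recorded in ID->DECL_MAP
   and returned.  */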
223 static tree
224 remap_type (tree type, inline_data *id)
226 splay_tree_node node;
227 tree new, t;
229 if (type == NULL)
230 return type;
232 /* See if we have remapped this type. */
233 node = splay_tree_lookup (id->decl_map, (splay_tree_key) type);
234 if (node)
235 return (tree) node->value;
237 /* The type only needs remapping if it's variably modified by a variable
238 in the function we are inlining. */
239 if (! variably_modified_type_p (type, VARRAY_TOP_TREE (id->fns)))
241 insert_decl_map (id, type, type);
242 return type;
245 /* We do need a copy.  Build and register it now.  If this is a pointer or
246 reference type, remap the designated type and make a new pointer or
247 reference type. */
248 if (TREE_CODE (type) == POINTER_TYPE)
250 new = build_pointer_type_for_mode (remap_type (TREE_TYPE (type), id),
251 TYPE_MODE (type),
252 TYPE_REF_CAN_ALIAS_ALL (type));
253 insert_decl_map (id, type, new);
254 return new;
256 else if (TREE_CODE (type) == REFERENCE_TYPE)
258 new = build_reference_type_for_mode (remap_type (TREE_TYPE (type), id),
259 TYPE_MODE (type),
260 TYPE_REF_CAN_ALIAS_ALL (type));
261 insert_decl_map (id, type, new);
262 return new;
264 else
265 new = copy_node (type);
267 insert_decl_map (id, type, new);
269 /* This is a new type, not a copy of an old type. Need to reassociate
270 variants. We can handle everything except the main variant lazily. */
271 t = TYPE_MAIN_VARIANT (type);
272 if (type != t)
274 t = remap_type (t, id);
275 TYPE_MAIN_VARIANT (new) = t;
276 TYPE_NEXT_VARIANT (new) = TYPE_MAIN_VARIANT (t);
277 TYPE_NEXT_VARIANT (t) = new;
279 else
281 TYPE_MAIN_VARIANT (new) = new;
282 TYPE_NEXT_VARIANT (new) = NULL;
285 if (TYPE_STUB_DECL (type))
286 TYPE_STUB_DECL (new) = remap_decl (TYPE_STUB_DECL (type), id);
288 /* Lazily create pointer and reference types. */
289 TYPE_POINTER_TO (new) = NULL;
290 TYPE_REFERENCE_TO (new) = NULL;
292 switch (TREE_CODE (new))
294 case INTEGER_TYPE:
295 case REAL_TYPE:
296 case ENUMERAL_TYPE:
297 case BOOLEAN_TYPE:
298 case CHAR_TYPE:
299 t = TYPE_MIN_VALUE (new);
300 if (t && TREE_CODE (t) != INTEGER_CST)
301 walk_tree (&TYPE_MIN_VALUE (new), copy_body_r, id, NULL);
303 t = TYPE_MAX_VALUE (new);
304 if (t && TREE_CODE (t) != INTEGER_CST)
305 walk_tree (&TYPE_MAX_VALUE (new), copy_body_r, id, NULL);
306 return new;
308 case FUNCTION_TYPE:
309 TREE_TYPE (new) = remap_type (TREE_TYPE (new), id);
310 walk_tree (&TYPE_ARG_TYPES (new), copy_body_r, id, NULL);
311 return new;
313 case ARRAY_TYPE:
314 TREE_TYPE (new) = remap_type (TREE_TYPE (new), id);
315 TYPE_DOMAIN (new) = remap_type (TYPE_DOMAIN (new), id);
316 break;
318 case RECORD_TYPE:
319 case UNION_TYPE:
320 case QUAL_UNION_TYPE:
321 walk_tree (&TYPE_FIELDS (new), copy_body_r, id, NULL);
322 break;
324 case OFFSET_TYPE:
325 default:
326 /* Shouldn't have been thought variable sized. */
327 gcc_unreachable ();
330 walk_tree (&TYPE_SIZE (new), copy_body_r, id, NULL);
331 walk_tree (&TYPE_SIZE_UNIT (new), copy_body_r, id, NULL);
333 return new;
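/* Remap the chain of declarations DECLS using ID and return the new chain.
   Declarations that were not remapped, or that were remapped to the return
   variable, are not re-declared here.  */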
336 static tree
337 remap_decls (tree decls, inline_data *id)
339 tree old_var;
340 tree new_decls = NULL_TREE;
342 /* Remap its variables. */
343 for (old_var = decls; old_var; old_var = TREE_CHAIN (old_var))
345 tree new_var;
347 /* Remap the variable. */
348 new_var = remap_decl (old_var, id);
350 /* If we didn't remap this variable, we can't mess with its
351 TREE_CHAIN.  If we remapped this variable to the return slot, it's
352 already declared somewhere else, so don't declare it here. */
353 if (!new_var || new_var == id->retvar)
355 else
357 gcc_assert (DECL_P (new_var));
358 TREE_CHAIN (new_var) = new_decls;
359 new_decls = new_var;
363 return nreverse (new_decls);
366 /* Copy the BLOCK to contain remapped versions of the variables
367 therein. And hook the new block into the block-tree. */
369 static void
370 remap_block (tree *block, inline_data *id)
372 tree old_block;
373 tree new_block;
374 tree fn;
376 /* Make the new block. */
377 old_block = *block;
378 new_block = make_node (BLOCK);
379 TREE_USED (new_block) = TREE_USED (old_block);
380 BLOCK_ABSTRACT_ORIGIN (new_block) = old_block;
381 *block = new_block;
383 /* Remap its variables. */
384 BLOCK_VARS (new_block) = remap_decls (BLOCK_VARS (old_block), id);
386 fn = VARRAY_TREE (id->fns, 0);
387 #if 1
388 /* FIXME! It shouldn't be so hard to manage blocks. Rebuilding them in
389 rest_of_compilation is a good start. */
390 if (id->cloning_p)
391 /* We're building a clone; DECL_INITIAL is still
392 error_mark_node, and current_binding_level is the parm
393 binding level. */
394 lang_hooks.decls.insert_block (new_block);
395 else
397 /* Attach this new block after the DECL_INITIAL block for the
398 function into which this block is being inlined. In
399 rest_of_compilation we will straighten out the BLOCK tree. */
400 tree *first_block;
401 if (DECL_INITIAL (fn))
402 first_block = &BLOCK_CHAIN (DECL_INITIAL (fn));
403 else
404 first_block = &DECL_INITIAL (fn);
405 BLOCK_CHAIN (new_block) = *first_block;
406 *first_block = new_block;
408 #endif
409 /* Remember the remapped block. */
410 insert_decl_map (id, old_block, new_block);
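/* Replace the STATEMENT_LIST at *TP with a freshly allocated list linking
   the same statements, so that the copy can be relinked independently of
   the original list.  */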
413 static void
414 copy_statement_list (tree *tp)
416 tree_stmt_iterator oi, ni;
417 tree new;
419 new = alloc_stmt_list ();
420 ni = tsi_start (new);
421 oi = tsi_start (*tp);
422 *tp = new;
424 for (; !tsi_end_p (oi); tsi_next (&oi))
425 tsi_link_after (&ni, tsi_stmt (oi), TSI_NEW_STMT);
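/* Copy the BIND_EXPR at *TP, remapping its BLOCK (if any) and its local
   variable declarations.  */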
428 static void
429 copy_bind_expr (tree *tp, int *walk_subtrees, inline_data *id)
431 tree block = BIND_EXPR_BLOCK (*tp);
432 /* Copy (and replace) the statement. */
433 copy_tree_r (tp, walk_subtrees, NULL);
434 if (block)
436 remap_block (&block, id);
437 BIND_EXPR_BLOCK (*tp) = block;
440 if (BIND_EXPR_VARS (*tp))
441 /* This will remap a lot of the same decls again, but this should be
442 harmless. */
443 BIND_EXPR_VARS (*tp) = remap_decls (BIND_EXPR_VARS (*tp), id);
446 /* Called from copy_body via walk_tree. DATA is really an `inline_data *'. */
448 static tree
449 copy_body_r (tree *tp, int *walk_subtrees, void *data)
451 inline_data *id = (inline_data *) data;
452 tree fn = VARRAY_TOP_TREE (id->fns);
454 #if 0
455 /* All automatic variables should have a DECL_CONTEXT indicating
456 what function they come from. */
457 if ((TREE_CODE (*tp) == VAR_DECL || TREE_CODE (*tp) == LABEL_DECL)
458 && DECL_NAMESPACE_SCOPE_P (*tp))
459 gcc_assert (DECL_EXTERNAL (*tp) || TREE_STATIC (*tp));
460 #endif
462 /* If this is a RETURN_EXPR, change it into a MODIFY_EXPR and a
463 GOTO_EXPR with the RET_LABEL as its target. */
464 if (TREE_CODE (*tp) == RETURN_EXPR && id->ret_label)
466 tree return_stmt = *tp;
467 tree goto_stmt;
469 /* Build the GOTO_EXPR. */
470 tree assignment = TREE_OPERAND (return_stmt, 0);
471 goto_stmt = build1 (GOTO_EXPR, void_type_node, id->ret_label);
472 TREE_USED (id->ret_label) = 1;
474 /* If we're returning something, just turn that into an
475 assignment into the equivalent of the original
476 RESULT_DECL. */
477 if (assignment)
479 /* Do not create a statement containing a naked RESULT_DECL. */
480 if (TREE_CODE (assignment) == RESULT_DECL)
481 gimplify_stmt (&assignment);
483 *tp = build (BIND_EXPR, void_type_node, NULL, NULL, NULL);
484 append_to_statement_list (assignment, &BIND_EXPR_BODY (*tp));
485 append_to_statement_list (goto_stmt, &BIND_EXPR_BODY (*tp));
487 /* If we're not returning anything just do the jump. */
488 else
489 *tp = goto_stmt;
491 /* Local variables and labels need to be replaced by equivalent
492 variables. We don't want to copy static variables; there's only
493 one of those, no matter how many times we inline the containing
494 function. Similarly for globals from an outer function. */
495 else if (lang_hooks.tree_inlining.auto_var_in_fn_p (*tp, fn))
497 tree new_decl;
499 /* Remap the declaration. */
500 new_decl = remap_decl (*tp, id);
501 gcc_assert (new_decl);
502 /* Replace this variable with the copy. */
503 STRIP_TYPE_NOPS (new_decl);
504 *tp = new_decl;
505 *walk_subtrees = 0;
507 else if (TREE_CODE (*tp) == STATEMENT_LIST)
508 copy_statement_list (tp);
509 else if (TREE_CODE (*tp) == SAVE_EXPR)
510 remap_save_expr (tp, id->decl_map, walk_subtrees);
511 else if (TREE_CODE (*tp) == BIND_EXPR)
512 copy_bind_expr (tp, walk_subtrees, id);
513 /* Types may need remapping as well. */
514 else if (TYPE_P (*tp))
515 *tp = remap_type (*tp, id);
517 /* If this is a constant, we have to copy the node iff the type will be
518 remapped. copy_tree_r will not copy a constant. */
519 else if (CONSTANT_CLASS_P (*tp))
521 tree new_type = remap_type (TREE_TYPE (*tp), id);
523 if (new_type == TREE_TYPE (*tp))
524 *walk_subtrees = 0;
526 else if (TREE_CODE (*tp) == INTEGER_CST)
527 *tp = build_int_cst_wide (new_type, TREE_INT_CST_LOW (*tp),
528 TREE_INT_CST_HIGH (*tp));
529 else
531 *tp = copy_node (*tp);
532 TREE_TYPE (*tp) = new_type;
536 /* Otherwise, just copy the node. Note that copy_tree_r already
537 knows not to copy VAR_DECLs, etc., so this is safe. */
538 else
540 tree old_node = *tp;
542 if (TREE_CODE (*tp) == MODIFY_EXPR
543 && TREE_OPERAND (*tp, 0) == TREE_OPERAND (*tp, 1)
544 && (lang_hooks.tree_inlining.auto_var_in_fn_p
545 (TREE_OPERAND (*tp, 0), fn)))
547 /* Some assignments VAR = VAR; don't generate any rtl code
548 and thus don't count as variable modification. Avoid
549 keeping bogosities like 0 = 0. */
550 tree decl = TREE_OPERAND (*tp, 0), value;
551 splay_tree_node n;
553 n = splay_tree_lookup (id->decl_map, (splay_tree_key) decl);
554 if (n)
556 value = (tree) n->value;
557 STRIP_TYPE_NOPS (value);
558 if (TREE_CONSTANT (value) || TREE_READONLY_DECL_P (value))
560 *tp = build_empty_stmt ();
561 return copy_body_r (tp, walk_subtrees, data);
565 else if (TREE_CODE (*tp) == INDIRECT_REF)
567 /* Get rid of *& from inline substitutions that can happen when a
568 pointer argument is an ADDR_EXPR. */
569 tree decl = TREE_OPERAND (*tp, 0);
570 splay_tree_node n;
572 n = splay_tree_lookup (id->decl_map, (splay_tree_key) decl);
573 if (n)
575 *tp = build_fold_indirect_ref ((tree)n->value);
576 *walk_subtrees = 0;
577 return NULL;
581 copy_tree_r (tp, walk_subtrees, NULL);
583 if (TREE_CODE (*tp) == CALL_EXPR && id->node && get_callee_fndecl (*tp))
585 if (id->saving_p)
587 struct cgraph_node *node;
588 struct cgraph_edge *edge;
590 for (node = id->node->next_clone; node; node = node->next_clone)
592 edge = cgraph_edge (node, old_node);
593 gcc_assert (edge);
594 edge->call_expr = *tp;
597 else
599 struct cgraph_edge *edge
600 = cgraph_edge (id->current_node, old_node);
602 if (edge)
603 cgraph_clone_edge (edge, id->node, *tp);
607 TREE_TYPE (*tp) = remap_type (TREE_TYPE (*tp), id);
609 /* The copied TARGET_EXPR has never been expanded, even if the
610 original node was expanded already. */
611 if (TREE_CODE (*tp) == TARGET_EXPR && TREE_OPERAND (*tp, 3))
613 TREE_OPERAND (*tp, 1) = TREE_OPERAND (*tp, 3);
614 TREE_OPERAND (*tp, 3) = NULL_TREE;
617 /* Variable substitution need not be simple; in particular, consider the
618 INDIRECT_REF substitution above.  Make sure that TREE_CONSTANT
619 and friends are up-to-date. */
620 else if (TREE_CODE (*tp) == ADDR_EXPR)
622 walk_tree (&TREE_OPERAND (*tp, 0), copy_body_r, id, NULL);
623 recompute_tree_invarant_for_addr_expr (*tp);
624 *walk_subtrees = 0;
628 /* Keep iterating. */
629 return NULL_TREE;
632 /* Make a copy of the body of FN so that it can be inserted inline in
633 another function. */
635 static tree
636 copy_body (inline_data *id)
638 tree body;
639 tree fndecl = VARRAY_TOP_TREE (id->fns);
641 if (fndecl == current_function_decl
642 && cfun->saved_tree)
643 body = cfun->saved_tree;
644 else
645 body = DECL_SAVED_TREE (fndecl);
646 walk_tree (&body, copy_body_r, id, NULL);
648 return body;
651 /* Return true if VALUE is an ADDR_EXPR of an automatic variable
652 defined in function FN, or of a data member thereof. */
654 static bool
655 self_inlining_addr_expr (tree value, tree fn)
657 tree var;
659 if (TREE_CODE (value) != ADDR_EXPR)
660 return false;
662 var = get_base_address (TREE_OPERAND (value, 0));
664 return var && lang_hooks.tree_inlining.auto_var_in_fn_p (var, fn);
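/* Set up the mapping for one PARM_DECL P of the function being inlined,
   given the corresponding argument VALUE.  Either map P directly to VALUE
   when substitution is safe, or create a VAR_DECL for it: the variable is
   chained onto *VARS, its initialization is appended to *INIT_STMTS, and
   *GIMPLIFY_INIT_STMTS_P is set if that initialization will later need to
   be gimplified.  FN is the function being inlined.  */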
667 static void
668 setup_one_parameter (inline_data *id, tree p, tree value, tree fn,
669 tree *init_stmts, tree *vars, bool *gimplify_init_stmts_p)
671 tree init_stmt;
672 tree var;
674 /* If the parameter is never assigned to, we may not need to
675 create a new variable here at all. Instead, we may be able
676 to just use the argument value. */
677 if (TREE_READONLY (p)
678 && !TREE_ADDRESSABLE (p)
679 && value && !TREE_SIDE_EFFECTS (value))
681 /* We can't risk substituting complex expressions. They
682 might contain variables that will be assigned to later.
683 Theoretically, we could check the expression to see if
684 all of the variables that determine its value are
685 read-only, but we don't bother. */
686 /* We may produce non-gimple trees by adding NOPs or introduce
687 invalid sharing when the operand is not really constant.
688 It is not a big deal to prohibit constant propagation here as
689 we will constant-propagate in the DOM1 pass anyway. */
690 if (is_gimple_min_invariant (value)
691 && lang_hooks.types_compatible_p (TREE_TYPE (value), TREE_TYPE (p))
692 /* We have to be very careful about ADDR_EXPR. Make sure
693 the base variable isn't a local variable of the inlined
694 function, e.g., when doing recursive inlining, direct or
695 mutually-recursive or whatever, which is why we don't
696 just test whether fn == current_function_decl. */
697 && ! self_inlining_addr_expr (value, fn))
699 insert_decl_map (id, p, value);
700 return;
704 /* Make an equivalent VAR_DECL. Note that we must NOT remap the type
705 here since the type of this decl must be visible to the calling
706 function. */
707 var = copy_decl_for_inlining (p, fn, VARRAY_TREE (id->fns, 0));
709 /* Register the VAR_DECL as the equivalent for the PARM_DECL;
710 that way, when the PARM_DECL is encountered, it will be
711 automatically replaced by the VAR_DECL. */
712 insert_decl_map (id, p, var);
714 /* Declare this new variable. */
715 TREE_CHAIN (var) = *vars;
716 *vars = var;
718 /* Make gimplifier happy about this variable. */
719 DECL_SEEN_IN_BIND_EXPR_P (var) = 1;
721 /* Even if P was TREE_READONLY, the new VAR should not be.
722 In the original code, we would have constructed a
723 temporary, and then the function body would have never
724 changed the value of P. However, now, we will be
725 constructing VAR directly. The constructor body may
726 change its value multiple times as it is being
727 constructed. Therefore, it must not be TREE_READONLY;
728 the back-end assumes that a TREE_READONLY variable is
729 assigned to only once. */
730 if (TYPE_NEEDS_CONSTRUCTING (TREE_TYPE (p)))
731 TREE_READONLY (var) = 0;
733 /* Initialize this VAR_DECL from the equivalent argument. Convert
734 the argument to the proper type in case it was promoted. */
735 if (value)
737 tree rhs = fold_convert (TREE_TYPE (var), value);
739 if (rhs == error_mark_node)
740 return;
742 /* We want to use MODIFY_EXPR, not INIT_EXPR here so that we
743 keep our trees in gimple form. */
744 init_stmt = build (MODIFY_EXPR, TREE_TYPE (var), var, rhs);
745 append_to_statement_list (init_stmt, init_stmts);
747 /* If we did not create a gimple value and we did not create a gimple
748 cast of a gimple value, then we will need to gimplify INIT_STMTS
749 at the end. Note that is_gimple_cast only checks the outer
750 tree code, not its operand.  Thus the explicit check that its
751 operand is a gimple value. */
752 if (!is_gimple_val (rhs)
753 && (!is_gimple_cast (rhs)
754 || !is_gimple_val (TREE_OPERAND (rhs, 0))))
755 *gimplify_init_stmts_p = true;
759 /* Generate code to initialize the parameters of the function at the
760 top of the stack in ID from the ARGS (presented as a TREE_LIST). */
762 static tree
763 initialize_inlined_parameters (inline_data *id, tree args, tree static_chain,
764 tree fn, tree bind_expr)
766 tree init_stmts = NULL_TREE;
767 tree parms;
768 tree a;
769 tree p;
770 tree vars = NULL_TREE;
771 bool gimplify_init_stmts_p = false;
772 int argnum = 0;
774 /* Figure out what the parameters are. */
775 parms = DECL_ARGUMENTS (fn);
776 if (fn == current_function_decl)
777 parms = cfun->saved_args;
779 /* Loop through the parameter declarations, replacing each with an
780 equivalent VAR_DECL, appropriately initialized. */
781 for (p = parms, a = args; p;
782 a = a ? TREE_CHAIN (a) : a, p = TREE_CHAIN (p))
784 tree value;
786 ++argnum;
788 /* Find the initializer. */
789 value = lang_hooks.tree_inlining.convert_parm_for_inlining
790 (p, a ? TREE_VALUE (a) : NULL_TREE, fn, argnum);
792 setup_one_parameter (id, p, value, fn, &init_stmts, &vars,
793 &gimplify_init_stmts_p);
796 /* Evaluate trailing arguments. */
797 for (; a; a = TREE_CHAIN (a))
799 tree value = TREE_VALUE (a);
800 append_to_statement_list (value, &init_stmts);
803 /* Initialize the static chain. */
804 p = DECL_STRUCT_FUNCTION (fn)->static_chain_decl;
805 if (fn == current_function_decl)
806 p = DECL_STRUCT_FUNCTION (fn)->saved_static_chain_decl;
807 if (p)
809 /* No static chain? Seems like a bug in tree-nested.c. */
810 gcc_assert (static_chain);
812 setup_one_parameter (id, p, static_chain, fn, &init_stmts, &vars,
813 &gimplify_init_stmts_p);
816 if (gimplify_init_stmts_p)
817 gimplify_body (&init_stmts, current_function_decl, false);
819 declare_inline_vars (bind_expr, vars);
820 return init_stmts;
823 /* Declare a return variable to replace the RESULT_DECL for the function we
824 are calling. RETURN_SLOT_ADDR, if non-null, was a fake parameter that
825 took the address of the result. MODIFY_DEST, if non-null, was the LHS of
826 the MODIFY_EXPR to which this call is the RHS.
828 The return value is a (possibly null) value that is the result of the
829 function as seen by the callee. *USE_P is a (possibly null) value that
830 holds the result as seen by the caller. */
832 static tree
833 declare_return_variable (inline_data *id, tree return_slot_addr,
834 tree modify_dest, tree *use_p)
836 tree callee = VARRAY_TOP_TREE (id->fns);
837 tree caller = VARRAY_TREE (id->fns, 0);
838 tree result = DECL_RESULT (callee);
839 tree callee_type = TREE_TYPE (result);
840 tree caller_type = TREE_TYPE (TREE_TYPE (callee));
841 tree var, use;
843 /* We don't need to do anything for functions that don't return
844 anything. */
845 if (!result || VOID_TYPE_P (callee_type))
847 *use_p = NULL_TREE;
848 return NULL_TREE;
851 /* If there was a return slot, then the return value is the
852 dereferenced address of that object. */
853 if (return_slot_addr)
855 /* The front end shouldn't have used both return_slot_addr and
856 a modify expression. */
857 gcc_assert (!modify_dest);
858 if (DECL_BY_REFERENCE (result))
859 var = return_slot_addr;
860 else
861 var = build_fold_indirect_ref (return_slot_addr);
862 use = NULL;
863 goto done;
866 /* All types requiring non-trivial constructors should have been handled. */
867 gcc_assert (!TREE_ADDRESSABLE (callee_type));
869 /* Attempt to avoid creating a new temporary variable. */
870 if (modify_dest)
872 bool use_it = false;
874 /* We can't use MODIFY_DEST if there's type promotion involved. */
875 if (!lang_hooks.types_compatible_p (caller_type, callee_type))
876 use_it = false;
878 /* ??? If we're assigning to a variable sized type, then we must
879 reuse the destination variable, because we've no good way to
880 create variable sized temporaries at this point. */
881 else if (TREE_CODE (TYPE_SIZE_UNIT (caller_type)) != INTEGER_CST)
882 use_it = true;
884 /* If the callee cannot possibly modify MODIFY_DEST, then we can
885 reuse it as the result of the call directly. Don't do this if
886 it would promote MODIFY_DEST to addressable. */
887 else if (!TREE_STATIC (modify_dest)
888 && !TREE_ADDRESSABLE (modify_dest)
889 && !TREE_ADDRESSABLE (result))
890 use_it = true;
892 if (use_it)
894 var = modify_dest;
895 use = NULL;
896 goto done;
900 gcc_assert (TREE_CODE (TYPE_SIZE_UNIT (callee_type)) == INTEGER_CST);
902 var = copy_decl_for_inlining (result, callee, caller);
903 DECL_SEEN_IN_BIND_EXPR_P (var) = 1;
904 DECL_STRUCT_FUNCTION (caller)->unexpanded_var_list
905 = tree_cons (NULL_TREE, var,
906 DECL_STRUCT_FUNCTION (caller)->unexpanded_var_list);
908 /* Do not have the rest of GCC warn about this variable as it should
909 not be visible to the user. */
910 TREE_NO_WARNING (var) = 1;
912 /* Build the use expr. If the return type of the function was
913 promoted, convert it back to the expected type. */
914 use = var;
915 if (!lang_hooks.types_compatible_p (TREE_TYPE (var), caller_type))
916 use = fold_convert (caller_type, var);
918 done:
919 /* Register the VAR_DECL as the equivalent for the RESULT_DECL; that
920 way, when the RESULT_DECL is encountered, it will be
921 automatically replaced by the VAR_DECL. */
922 insert_decl_map (id, result, var);
924 /* Remember this so we can ignore it in remap_decls. */
925 id->retvar = var;
927 *use_p = use;
928 return var;
931 /* Returns nonzero if a function can be inlined as a tree. */
933 bool
934 tree_inlinable_function_p (tree fn)
936 return inlinable_function_p (fn);
939 static const char *inline_forbidden_reason;
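/* Callback for walk_tree used by inline_forbidden_p.  FNP is the
   FUNCTION_DECL being considered for inlining.  If *NODEP prevents that
   function from being inlined, record a diagnostic format string in
   inline_forbidden_reason and return the offending node.  */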
941 static tree
942 inline_forbidden_p_1 (tree *nodep, int *walk_subtrees ATTRIBUTE_UNUSED,
943 void *fnp)
945 tree node = *nodep;
946 tree fn = (tree) fnp;
947 tree t;
949 switch (TREE_CODE (node))
951 case CALL_EXPR:
952 /* Refuse to inline an alloca call unless the user explicitly forced it,
953 as this may change the program's memory overhead drastically when the
954 function using alloca is called in a loop.  In the GCC sources present in
955 SPEC2000, inlining into schedule_block caused it to require 2GB of
956 RAM instead of 256MB. */
957 if (alloca_call_p (node)
958 && !lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn)))
960 inline_forbidden_reason
961 = N_("%Jfunction %qF can never be inlined because it uses "
962 "alloca (override using the always_inline attribute)");
963 return node;
965 t = get_callee_fndecl (node);
966 if (! t)
967 break;
969 /* We cannot inline functions that call setjmp. */
970 if (setjmp_call_p (t))
972 inline_forbidden_reason
973 = N_("%Jfunction %qF can never be inlined because it uses setjmp");
974 return node;
977 if (DECL_BUILT_IN_CLASS (t) == BUILT_IN_NORMAL)
978 switch (DECL_FUNCTION_CODE (t))
980 /* We cannot inline functions that take a variable number of
981 arguments. */
982 case BUILT_IN_VA_START:
983 case BUILT_IN_STDARG_START:
984 case BUILT_IN_NEXT_ARG:
985 case BUILT_IN_VA_END:
986 inline_forbidden_reason
987 = N_("%Jfunction %qF can never be inlined because it "
988 "uses variable argument lists");
989 return node;
991 case BUILT_IN_LONGJMP:
992 /* We can't inline functions that call __builtin_longjmp at
993 all. The non-local goto machinery really requires the
994 destination be in a different function. If we allow the
995 function calling __builtin_longjmp to be inlined into the
996 function calling __builtin_setjmp, Things will Go Awry. */
997 inline_forbidden_reason
998 = N_("%Jfunction %qF can never be inlined because "
999 "it uses setjmp-longjmp exception handling");
1000 return node;
1002 case BUILT_IN_NONLOCAL_GOTO:
1003 /* Similarly. */
1004 inline_forbidden_reason
1005 = N_("%Jfunction %qF can never be inlined because "
1006 "it uses non-local goto");
1007 return node;
1009 case BUILT_IN_RETURN:
1010 case BUILT_IN_APPLY_ARGS:
1011 /* If a __builtin_apply_args caller would be inlined,
1012 it would be saving arguments of the function it has
1013 been inlined into.  Similarly, __builtin_return would
1014 return from the function the call has been inlined into. */
1015 inline_forbidden_reason
1016 = N_("%Jfunction %qF can never be inlined because "
1017 "it uses __builtin_return or __builtin_apply_args");
1018 return node;
1020 default:
1021 break;
1023 break;
1025 case GOTO_EXPR:
1026 t = TREE_OPERAND (node, 0);
1028 /* We will not inline a function which uses computed goto. The
1029 addresses of its local labels, which may be tucked into
1030 global storage, are of course not constant across
1031 instantiations, which causes unexpected behavior. */
1032 if (TREE_CODE (t) != LABEL_DECL)
1034 inline_forbidden_reason
1035 = N_("%Jfunction %qF can never be inlined "
1036 "because it contains a computed goto");
1037 return node;
1039 break;
1041 case LABEL_EXPR:
1042 t = TREE_OPERAND (node, 0);
1043 if (DECL_NONLOCAL (t))
1045 /* We cannot inline a function that receives a non-local goto
1046 because we cannot remap the destination label used in the
1047 function that is performing the non-local goto. */
1048 inline_forbidden_reason
1049 = N_("%Jfunction %qF can never be inlined "
1050 "because it receives a non-local goto");
1051 return node;
1053 break;
1055 case RECORD_TYPE:
1056 case UNION_TYPE:
1057 /* We cannot inline a function of the form
1059 void F (int i) { struct S { int ar[i]; } s; }
1061 Attempting to do so produces a catch-22.
1062 If walk_tree examines the TYPE_FIELDS chain of RECORD_TYPE/
1063 UNION_TYPE nodes, then it goes into infinite recursion on a
1064 structure containing a pointer to its own type. If it doesn't,
1065 then the type node for S doesn't get adjusted properly when
1066 F is inlined.
1068 ??? This is likely no longer true, but it's too late in the 4.0
1069 cycle to try to find out. This should be checked for 4.1. */
1070 for (t = TYPE_FIELDS (node); t; t = TREE_CHAIN (t))
1071 if (variably_modified_type_p (TREE_TYPE (t), NULL))
1073 inline_forbidden_reason
1074 = N_("%Jfunction %qF can never be inlined "
1075 "because it uses variable sized variables");
1076 return node;
1079 default:
1080 break;
1083 return NULL_TREE;
1086 /* Return a subexpression of FNDECL that forbids inlining it, if any. */
1087 static tree
1088 inline_forbidden_p (tree fndecl)
1090 location_t saved_loc = input_location;
1091 tree ret = walk_tree_without_duplicates (&DECL_SAVED_TREE (fndecl),
1092 inline_forbidden_p_1, fndecl);
1094 input_location = saved_loc;
1095 return ret;
1098 /* Returns nonzero if FN is a function that does not have any
1099 fundamental inline blocking properties. */
1101 static bool
1102 inlinable_function_p (tree fn)
1104 bool inlinable = true;
1106 /* If we've already decided this function shouldn't be inlined,
1107 there's no need to check again. */
1108 if (DECL_UNINLINABLE (fn))
1109 return false;
1111 /* See if there is any language-specific reason it cannot be
1112 inlined. (It is important that this hook be called early because
1113 in C++ it may result in template instantiation.)
1114 If the function is not inlinable for language-specific reasons,
1115 it is left up to the langhook to explain why. */
1116 inlinable = !lang_hooks.tree_inlining.cannot_inline_tree_fn (&fn);
1118 /* If we don't have the function body available, we can't inline it.
1119 However, this should not be recorded since we also get here for
1120 forward declared inline functions. Therefore, return at once. */
1121 if (!DECL_SAVED_TREE (fn))
1122 return false;
1124 /* If we're not inlining at all, then we cannot inline this function. */
1125 else if (!flag_inline_trees)
1126 inlinable = false;
1128 /* Only try to inline functions if DECL_INLINE is set. This should be
1129 true for all functions declared `inline', and for all other functions
1130 as well with -finline-functions.
1132 Don't think of disregarding DECL_INLINE when flag_inline_trees == 2;
1133 it's the front-end that must set DECL_INLINE in this case, because
1134 dwarf2out loses if a function that does not have DECL_INLINE set is
1135 inlined anyway. That is why we have both DECL_INLINE and
1136 DECL_DECLARED_INLINE_P. */
1137 /* FIXME: When flag_inline_trees dies, the check for flag_unit_at_a_time
1138 here should be redundant. */
1139 else if (!DECL_INLINE (fn) && !flag_unit_at_a_time)
1140 inlinable = false;
1142 else if (inline_forbidden_p (fn))
1144 /* See if we should warn about uninlinable functions. Previously,
1145 some of these warnings would be issued while trying to expand
1146 the function inline, but that would cause multiple warnings
1147 about functions that would for example call alloca. But since
1148 this is a property of the function, just one warning is enough.
1149 As a bonus we can now give more details about the reason why a
1150 function is not inlinable.
1151 We only warn for functions declared `inline' by the user. */
1152 bool do_warning = (warn_inline
1153 && DECL_INLINE (fn)
1154 && DECL_DECLARED_INLINE_P (fn)
1155 && !DECL_IN_SYSTEM_HEADER (fn));
1157 if (lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn)))
1158 sorry (inline_forbidden_reason, fn, fn);
1159 else if (do_warning)
1160 warning (0, inline_forbidden_reason, fn, fn);
1162 inlinable = false;
1165 /* Squirrel away the result so that we don't have to check again. */
1166 DECL_UNINLINABLE (fn) = !inlinable;
1168 return inlinable;
1171 /* Estimate the cost of a memory move. Use machine dependent
1172 word size and take possible memcpy call into account. */
1174 static int
1175 estimate_move_cost (tree type)
1177 HOST_WIDE_INT size;
1179 size = int_size_in_bytes (type);
1181 if (size < 0 || size > MOVE_MAX_PIECES * MOVE_RATIO)
1182 /* Cost of a memcpy call, 3 arguments and the call. */
1183 return 4;
1184 else
1185 return ((size + MOVE_MAX_PIECES - 1) / MOVE_MAX_PIECES);
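/* For example, on a hypothetical target where MOVE_MAX_PIECES is 8 and
   MOVE_RATIO is 3, a 20-byte struct costs (20 + 7) / 8 == 3 piecewise moves,
   while a 64-byte struct exceeds MOVE_MAX_PIECES * MOVE_RATIO == 24 bytes
   and is charged the fixed memcpy-call cost of 4.  */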
1188 /* Used by estimate_num_insns. Estimate number of instructions seen
1189 by a given statement. */
1191 static tree
1192 estimate_num_insns_1 (tree *tp, int *walk_subtrees, void *data)
1194 int *count = data;
1195 tree x = *tp;
1197 if (IS_TYPE_OR_DECL_P (x))
1199 *walk_subtrees = 0;
1200 return NULL;
1202 /* Assume that constants and references count as nothing.  Their cost should
1203 be dominated by the operations among them, which we do count later,
1204 and they are a common target of CSE and similar optimizations. */
1205 else if (CONSTANT_CLASS_P (x) || REFERENCE_CLASS_P (x))
1206 return NULL;
1208 switch (TREE_CODE (x))
1210 /* Containers have no cost. */
1211 case TREE_LIST:
1212 case TREE_VEC:
1213 case BLOCK:
1214 case COMPONENT_REF:
1215 case BIT_FIELD_REF:
1216 case INDIRECT_REF:
1217 case ALIGN_INDIRECT_REF:
1218 case MISALIGNED_INDIRECT_REF:
1219 case ARRAY_REF:
1220 case ARRAY_RANGE_REF:
1221 case OBJ_TYPE_REF:
1222 case EXC_PTR_EXPR: /* ??? */
1223 case FILTER_EXPR: /* ??? */
1224 case COMPOUND_EXPR:
1225 case BIND_EXPR:
1226 case WITH_CLEANUP_EXPR:
1227 case NOP_EXPR:
1228 case VIEW_CONVERT_EXPR:
1229 case SAVE_EXPR:
1230 case ADDR_EXPR:
1231 case COMPLEX_EXPR:
1232 case RANGE_EXPR:
1233 case CASE_LABEL_EXPR:
1234 case SSA_NAME:
1235 case CATCH_EXPR:
1236 case EH_FILTER_EXPR:
1237 case STATEMENT_LIST:
1238 case ERROR_MARK:
1239 case NON_LVALUE_EXPR:
1240 case FDESC_EXPR:
1241 case VA_ARG_EXPR:
1242 case TRY_CATCH_EXPR:
1243 case TRY_FINALLY_EXPR:
1244 case LABEL_EXPR:
1245 case GOTO_EXPR:
1246 case RETURN_EXPR:
1247 case EXIT_EXPR:
1248 case LOOP_EXPR:
1249 case PHI_NODE:
1250 case WITH_SIZE_EXPR:
1251 break;
1253 /* We don't account constants for now. Assume that the cost is amortized
1254 by operations that do use them.  We may reconsider this decision once
1255 we are able to optimize the tree before estimating its size and break
1256 out static initializers. */
1257 case IDENTIFIER_NODE:
1258 case INTEGER_CST:
1259 case REAL_CST:
1260 case COMPLEX_CST:
1261 case VECTOR_CST:
1262 case STRING_CST:
1263 *walk_subtrees = 0;
1264 return NULL;
1266 /* Try to estimate the cost of assignments. We have three cases to
1267 deal with:
1268 1) Simple assignments to registers;
1269 2) Stores to things that must live in memory. This includes
1270 "normal" stores to scalars, but also assignments of large
1271 structures, or constructors of big arrays;
1272 3) TARGET_EXPRs.
1274 Let us look at the first two cases, assuming we have "a = b + C":
1275 <modify_expr <var_decl "a"> <plus_expr <var_decl "b"> <constant C>>
1276 If "a" is a GIMPLE register, the assignment to it is free on almost
1277 any target, because "a" usually ends up in a real register. Hence
1278 the only cost of this expression comes from the PLUS_EXPR, and we
1279 can ignore the MODIFY_EXPR.
1280 If "a" is not a GIMPLE register, the assignment to "a" will most
1281 likely be a real store, so the cost of the MODIFY_EXPR is the cost
1282 of moving something into "a", which we compute using the function
1283 estimate_move_cost.
1285 The third case deals with TARGET_EXPRs, for which the semantics are
1286 that a temporary is assigned, unless the TARGET_EXPR itself is being
1287 assigned to something else. In the latter case we do not need the
1288 temporary. E.g. in <modify_expr <var_decl "a"> <target_expr>>, the
1289 MODIFY_EXPR is free. */
1290 case INIT_EXPR:
1291 case MODIFY_EXPR:
1292 /* Is the right-hand side a TARGET_EXPR? */
1293 if (TREE_CODE (TREE_OPERAND (x, 1)) == TARGET_EXPR)
1294 break;
1295 /* ... fall through ... */
1297 case TARGET_EXPR:
1298 x = TREE_OPERAND (x, 0);
1299 /* Is this an assignment to a register? */
1300 if (is_gimple_reg (x))
1301 break;
1302 /* Otherwise it's a store, so fall through to compute the move cost. */
1304 case CONSTRUCTOR:
1305 *count += estimate_move_cost (TREE_TYPE (x));
1306 break;
1308 /* Assign cost of 1 to usual operations.
1309 ??? We may consider mapping RTL costs to this. */
1310 case COND_EXPR:
1311 case VEC_COND_EXPR:
1313 case PLUS_EXPR:
1314 case MINUS_EXPR:
1315 case MULT_EXPR:
1317 case FIX_TRUNC_EXPR:
1318 case FIX_CEIL_EXPR:
1319 case FIX_FLOOR_EXPR:
1320 case FIX_ROUND_EXPR:
1322 case NEGATE_EXPR:
1323 case FLOAT_EXPR:
1324 case MIN_EXPR:
1325 case MAX_EXPR:
1326 case ABS_EXPR:
1328 case LSHIFT_EXPR:
1329 case RSHIFT_EXPR:
1330 case LROTATE_EXPR:
1331 case RROTATE_EXPR:
1333 case BIT_IOR_EXPR:
1334 case BIT_XOR_EXPR:
1335 case BIT_AND_EXPR:
1336 case BIT_NOT_EXPR:
1338 case TRUTH_ANDIF_EXPR:
1339 case TRUTH_ORIF_EXPR:
1340 case TRUTH_AND_EXPR:
1341 case TRUTH_OR_EXPR:
1342 case TRUTH_XOR_EXPR:
1343 case TRUTH_NOT_EXPR:
1345 case LT_EXPR:
1346 case LE_EXPR:
1347 case GT_EXPR:
1348 case GE_EXPR:
1349 case EQ_EXPR:
1350 case NE_EXPR:
1351 case ORDERED_EXPR:
1352 case UNORDERED_EXPR:
1354 case UNLT_EXPR:
1355 case UNLE_EXPR:
1356 case UNGT_EXPR:
1357 case UNGE_EXPR:
1358 case UNEQ_EXPR:
1359 case LTGT_EXPR:
1361 case CONVERT_EXPR:
1363 case CONJ_EXPR:
1365 case PREDECREMENT_EXPR:
1366 case PREINCREMENT_EXPR:
1367 case POSTDECREMENT_EXPR:
1368 case POSTINCREMENT_EXPR:
1370 case SWITCH_EXPR:
1372 case ASM_EXPR:
1374 case REALIGN_LOAD_EXPR:
1376 case RESX_EXPR:
1377 *count += 1;
1378 break;
1380 /* A few special cases of expensive operations.  This is useful
1381 to avoid inlining functions that contain too many of these. */
1382 case TRUNC_DIV_EXPR:
1383 case CEIL_DIV_EXPR:
1384 case FLOOR_DIV_EXPR:
1385 case ROUND_DIV_EXPR:
1386 case EXACT_DIV_EXPR:
1387 case TRUNC_MOD_EXPR:
1388 case CEIL_MOD_EXPR:
1389 case FLOOR_MOD_EXPR:
1390 case ROUND_MOD_EXPR:
1391 case RDIV_EXPR:
1392 *count += 10;
1393 break;
1394 case CALL_EXPR:
1396 tree decl = get_callee_fndecl (x);
1397 tree arg;
1399 if (decl && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
1400 switch (DECL_FUNCTION_CODE (decl))
1402 case BUILT_IN_CONSTANT_P:
1403 *walk_subtrees = 0;
1404 return NULL_TREE;
1405 case BUILT_IN_EXPECT:
1406 return NULL_TREE;
1407 default:
1408 break;
1411 /* Our cost must be kept in sync with cgraph_estimate_size_after_inlining,
1412 which uses the function declaration to figure out the arguments. */
1413 if (!decl)
1415 for (arg = TREE_OPERAND (x, 1); arg; arg = TREE_CHAIN (arg))
1416 *count += estimate_move_cost (TREE_TYPE (TREE_VALUE (arg)));
1418 else
1420 for (arg = DECL_ARGUMENTS (decl); arg; arg = TREE_CHAIN (arg))
1421 *count += estimate_move_cost (TREE_TYPE (arg));
1424 *count += PARAM_VALUE (PARAM_INLINE_CALL_COST);
1425 break;
1427 default:
1428 gcc_unreachable ();
1430 return NULL;
1433 /* Estimate number of instructions that will be created by expanding EXPR. */
1435 int
1436 estimate_num_insns (tree expr)
1438 int num = 0;
1439 walk_tree_without_duplicates (&expr, estimate_num_insns_1, &num);
1440 return num;
1443 /* If *TP is a CALL_EXPR, replace it with its inline expansion. */
1445 static tree
1446 expand_call_inline (tree *tp, int *walk_subtrees, void *data)
1448 inline_data *id;
1449 tree t;
1450 tree expr;
1451 tree stmt;
1452 tree use_retvar;
1453 tree fn;
1454 tree arg_inits;
1455 tree *inlined_body;
1456 splay_tree st;
1457 tree args;
1458 tree return_slot_addr;
1459 tree modify_dest;
1460 location_t saved_location;
1461 struct cgraph_edge *edge;
1462 const char *reason;
1464 /* See what we've got. */
1465 id = (inline_data *) data;
1466 t = *tp;
1468 /* Set input_location here so we get the right instantiation context
1469 if we call instantiate_decl from inlinable_function_p. */
1470 saved_location = input_location;
1471 if (EXPR_HAS_LOCATION (t))
1472 input_location = EXPR_LOCATION (t);
1474 /* Recurse, but letting recursive invocations know that we are
1475 inside the body of a TARGET_EXPR. */
1476 if (TREE_CODE (*tp) == TARGET_EXPR)
1478 #if 0
1479 int i, len = TREE_CODE_LENGTH (TARGET_EXPR);
1481 /* We're walking our own subtrees. */
1482 *walk_subtrees = 0;
1484 /* Actually walk over them. This loop is the body of
1485 walk_trees, omitting the case where the TARGET_EXPR
1486 itself is handled. */
1487 for (i = 0; i < len; ++i)
1489 if (i == 2)
1490 ++id->in_target_cleanup_p;
1491 walk_tree (&TREE_OPERAND (*tp, i), expand_call_inline, data,
1492 id->tree_pruner);
1493 if (i == 2)
1494 --id->in_target_cleanup_p;
1497 goto egress;
1498 #endif
1501 if (TYPE_P (t))
1502 /* Because types were not copied in copy_body, CALL_EXPRs beneath
1503 them should not be expanded. This can happen if the type is a
1504 dynamic array type, for example. */
1505 *walk_subtrees = 0;
1507 /* From here on, we're only interested in CALL_EXPRs. */
1508 if (TREE_CODE (t) != CALL_EXPR)
1509 goto egress;
1511 /* First, see if we can figure out what function is being called.
1512 If we cannot, then there is no hope of inlining the function. */
1513 fn = get_callee_fndecl (t);
1514 if (!fn)
1515 goto egress;
1517 /* Turn forward declarations into real ones. */
1518 fn = cgraph_node (fn)->decl;
1520 /* If fn is a declaration of a function in a nested scope that was
1521 globally declared inline, we don't set its DECL_INITIAL.
1522 However, we can't blindly follow DECL_ABSTRACT_ORIGIN because the
1523 C++ front-end uses it for cdtors to refer to their internal
1524 declarations, that are not real functions. Fortunately those
1525 don't have trees to be saved, so we can tell by checking their
1526 DECL_SAVED_TREE. */
1527 if (! DECL_INITIAL (fn)
1528 && DECL_ABSTRACT_ORIGIN (fn)
1529 && DECL_SAVED_TREE (DECL_ABSTRACT_ORIGIN (fn)))
1530 fn = DECL_ABSTRACT_ORIGIN (fn);
1532 /* Objective-C and Fortran still call tree_rest_of_compilation directly.
1533 Kill this check once this is fixed. */
1534 if (!id->current_node->analyzed)
1535 goto egress;
1537 edge = cgraph_edge (id->current_node, t);
1539 /* Constant propagation on arguments done during previous inlining
1540 may create a new direct call.  Produce an edge for it. */
1541 if (!edge)
1543 struct cgraph_node *dest = cgraph_node (fn);
1545 /* We have a missing edge in the callgraph.  This can happen in the one case
1546 where previous inlining turned an indirect call into a direct call by
1547 constant propagating arguments.  In all other cases we hit a bug
1548 (incorrect node sharing is the most common reason for missing edges). */
1549 gcc_assert (dest->needed || !flag_unit_at_a_time);
1550 cgraph_create_edge (id->node, dest, t)->inline_failed
1551 = N_("originally indirect function call not considered for inlining");
1552 goto egress;
1555 /* Don't try to inline functions that are not well-suited to
1556 inlining. */
1557 if (!cgraph_inline_p (edge, &reason))
1559 if (lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn)))
1561 sorry ("%Jinlining failed in call to %qF: %s", fn, fn, reason);
1562 sorry ("called from here");
1564 else if (warn_inline && DECL_DECLARED_INLINE_P (fn)
1565 && !DECL_IN_SYSTEM_HEADER (fn)
1566 && strlen (reason)
1567 && !lookup_attribute ("noinline", DECL_ATTRIBUTES (fn)))
1569 warning (0, "%Jinlining failed in call to %qF: %s", fn, fn, reason);
1570 warning (0, "called from here");
1572 goto egress;
1575 #ifdef ENABLE_CHECKING
1576 if (edge->callee->decl != id->node->decl)
1577 verify_cgraph_node (edge->callee);
1578 #endif
1580 if (! lang_hooks.tree_inlining.start_inlining (fn))
1581 goto egress;
1583 /* Build a block containing code to initialize the arguments, the
1584 actual inline expansion of the body, and a label for the return
1585 statements within the function to jump to. The type of the
1586 statement expression is the return type of the function call. */
1587 stmt = NULL;
1588 expr = build (BIND_EXPR, void_type_node, NULL_TREE,
1589 stmt, make_node (BLOCK));
1590 BLOCK_ABSTRACT_ORIGIN (BIND_EXPR_BLOCK (expr)) = fn;
1592 /* Local declarations will be replaced by their equivalents in this
1593 map. */
1594 st = id->decl_map;
1595 id->decl_map = splay_tree_new (splay_tree_compare_pointers,
1596 NULL, NULL);
1598 /* Initialize the parameters. */
1599 args = TREE_OPERAND (t, 1);
1600 return_slot_addr = NULL_TREE;
1601 if (CALL_EXPR_HAS_RETURN_SLOT_ADDR (t))
1603 return_slot_addr = TREE_VALUE (args);
1604 args = TREE_CHAIN (args);
1605 TREE_TYPE (expr) = void_type_node;
1608 arg_inits = initialize_inlined_parameters (id, args, TREE_OPERAND (t, 2),
1609 fn, expr);
1610 if (arg_inits)
1612 /* Expand any inlined calls in the initializers. Do this before we
1613 push FN on the stack of functions we are inlining; we want to
1614 inline calls to FN that appear in the initializers for the
1615 parameters.
1617 Note we need to save and restore the saved tree statement iterator
1618 to avoid having it clobbered by expand_calls_inline. */
1619 tree_stmt_iterator save_tsi;
1621 save_tsi = id->tsi;
1622 expand_calls_inline (&arg_inits, id);
1623 id->tsi = save_tsi;
1625 /* And add them to the tree. */
1626 append_to_statement_list (arg_inits, &BIND_EXPR_BODY (expr));
1629 /* Record the function we are about to inline so that we can avoid
1630 recursing into it. */
1631 VARRAY_PUSH_TREE (id->fns, fn);
1633 /* Return statements in the function body will be replaced by jumps
1634 to the RET_LABEL. */
1635 id->ret_label = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);
1636 DECL_ARTIFICIAL (id->ret_label) = 1;
1637 DECL_IGNORED_P (id->ret_label) = 1;
1638 DECL_CONTEXT (id->ret_label) = VARRAY_TREE (id->fns, 0);
1639 insert_decl_map (id, id->ret_label, id->ret_label);
1641 gcc_assert (DECL_INITIAL (fn));
1642 gcc_assert (TREE_CODE (DECL_INITIAL (fn)) == BLOCK);
1644 /* Find the lhs to which the result of this call is assigned. */
1645 modify_dest = tsi_stmt (id->tsi);
1646 if (TREE_CODE (modify_dest) == MODIFY_EXPR)
1648 modify_dest = TREE_OPERAND (modify_dest, 0);
1650 /* The function which we are inlining might not return a value,
1651 in which case we should issue a warning that the function
1652 does not return a value. In that case the optimizers will
1653 see that the variable to which the value is assigned was not
1654 initialized. We do not want to issue a warning about that
1655 uninitialized variable. */
1656 if (DECL_P (modify_dest))
1657 TREE_NO_WARNING (modify_dest) = 1;
1659 else
1660 modify_dest = NULL;
1662 /* Declare the return variable for the function. */
1663 declare_return_variable (id, return_slot_addr,
1664 modify_dest, &use_retvar);
1666 /* After we've initialized the parameters, we insert the body of the
1667 function itself. */
1669 struct cgraph_node *old_node = id->current_node;
1670 tree copy;
1672 id->current_node = edge->callee;
1673 copy = copy_body (id);
1675 /* If the function uses a return slot, then it may legitimately
1676 fall through while still returning a value, so we have to skip
1677 the warning here. */
1678 if (warn_return_type
1679 && !TREE_NO_WARNING (fn)
1680 && !VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fn)))
1681 && return_slot_addr == NULL_TREE
1682 && block_may_fallthru (copy))
1684 warning (0, "control may reach end of non-void function %qD being inlined",
1685 fn);
1686 TREE_NO_WARNING (fn) = 1;
1689 append_to_statement_list (copy, &BIND_EXPR_BODY (expr));
1690 id->current_node = old_node;
1692 inlined_body = &BIND_EXPR_BODY (expr);
1694 /* After the body of the function comes the RET_LABEL. This must come
1695 before we evaluate the returned value below, because that evaluation
1696 may cause RTL to be generated. */
1697 if (TREE_USED (id->ret_label))
1699 tree label = build1 (LABEL_EXPR, void_type_node, id->ret_label);
1700 append_to_statement_list (label, &BIND_EXPR_BODY (expr));
1703 /* Clean up. */
1704 splay_tree_delete (id->decl_map);
1705 id->decl_map = st;
1707 /* Although, from the semantic viewpoint, the new expression has
1708 side-effects only if the old one did, it is not possible, from
1709 the technical viewpoint, to evaluate the body of a function
1710 multiple times without serious havoc. */
1711 TREE_SIDE_EFFECTS (expr) = 1;
1713 tsi_link_before (&id->tsi, expr, TSI_SAME_STMT);
1715 /* If the inlined function returns a result that we care about,
1716 then we're going to need to splice in a MODIFY_EXPR. Otherwise
1717 the call was a standalone statement and we can just replace it
1718 with the BIND_EXPR inline representation of the called function. */
1719 if (!use_retvar || !modify_dest)
1720 *tsi_stmt_ptr (id->tsi) = build_empty_stmt ();
1721 else
1722 *tp = use_retvar;
1724 /* When we gimplify a function call, we may clear TREE_SIDE_EFFECTS on
1725 the call if it is to a "const" function. Thus the copy of
1726 TREE_SIDE_EFFECTS from the CALL_EXPR to the BIND_EXPR above would
1727 result in TREE_SIDE_EFFECTS not being set for the inlined copy of a
1728 "const" function.
1730 Unfortunately, that is wrong as inlining the function can create/expose
1731 interesting side effects (such as setting of a return value).
1733 The easiest solution is to simply recalculate TREE_SIDE_EFFECTS for
1734 the toplevel expression. */
1735 recalculate_side_effects (expr);
1737 /* Output the inlining info for this abstract function, since it has been
1738 inlined. If we don't do this now, we can lose the information about the
1739 variables in the function when the blocks get blown away as soon as we
1740 remove the cgraph node. */
1741 (*debug_hooks->outlining_inline_function) (edge->callee->decl);
1743 /* Update callgraph if needed. */
1744 cgraph_remove_node (edge->callee);
1746 /* Recurse into the body of the just inlined function. */
1747 expand_calls_inline (inlined_body, id);
1748 VARRAY_POP (id->fns);
1750 /* Don't walk into subtrees. We've already handled them above. */
1751 *walk_subtrees = 0;
1753 lang_hooks.tree_inlining.end_inlining (fn);
1755 /* Keep iterating. */
1756 egress:
1757 input_location = saved_location;
1758 return NULL_TREE;
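/* Walk the statement pointed to by STMT_P, recursing into compound
   statements, and expand any CALL_EXPRs found there inline via
   expand_call_inline.  ID->TSI tracks the statement currently being
   processed.  */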
1761 static void
1762 expand_calls_inline (tree *stmt_p, inline_data *id)
1764 tree stmt = *stmt_p;
1765 enum tree_code code = TREE_CODE (stmt);
1766 int dummy;
1768 switch (code)
1770 case STATEMENT_LIST:
1772 tree_stmt_iterator i;
1773 tree new;
1775 for (i = tsi_start (stmt); !tsi_end_p (i); )
1777 id->tsi = i;
1778 expand_calls_inline (tsi_stmt_ptr (i), id);
1780 new = tsi_stmt (i);
1781 if (TREE_CODE (new) == STATEMENT_LIST)
1783 tsi_link_before (&i, new, TSI_SAME_STMT);
1784 tsi_delink (&i);
1786 else
1787 tsi_next (&i);
1790 break;
1792 case COND_EXPR:
1793 expand_calls_inline (&COND_EXPR_THEN (stmt), id);
1794 expand_calls_inline (&COND_EXPR_ELSE (stmt), id);
1795 break;
1797 case CATCH_EXPR:
1798 expand_calls_inline (&CATCH_BODY (stmt), id);
1799 break;
1801 case EH_FILTER_EXPR:
1802 expand_calls_inline (&EH_FILTER_FAILURE (stmt), id);
1803 break;
1805 case TRY_CATCH_EXPR:
1806 case TRY_FINALLY_EXPR:
1807 expand_calls_inline (&TREE_OPERAND (stmt, 0), id);
1808 expand_calls_inline (&TREE_OPERAND (stmt, 1), id);
1809 break;
1811 case BIND_EXPR:
1812 expand_calls_inline (&BIND_EXPR_BODY (stmt), id);
1813 break;
1815 case COMPOUND_EXPR:
1816 /* We're gimple. We should have gotten rid of all these. */
1817 gcc_unreachable ();
1819 case RETURN_EXPR:
1820 stmt_p = &TREE_OPERAND (stmt, 0);
1821 stmt = *stmt_p;
1822 if (!stmt || TREE_CODE (stmt) != MODIFY_EXPR)
1823 break;
1825 /* FALLTHRU */
1827 case MODIFY_EXPR:
1828 stmt_p = &TREE_OPERAND (stmt, 1);
1829 stmt = *stmt_p;
1830 if (TREE_CODE (stmt) == WITH_SIZE_EXPR)
1832 stmt_p = &TREE_OPERAND (stmt, 0);
1833 stmt = *stmt_p;
1835 if (TREE_CODE (stmt) != CALL_EXPR)
1836 break;
1838 /* FALLTHRU */
1840 case CALL_EXPR:
1841 expand_call_inline (stmt_p, &dummy, id);
1842 break;
1844 default:
1845 break;
1849 /* Expand calls to inline functions in the body of FN. */
1851 void
1852 optimize_inline_calls (tree fn)
1854 inline_data id;
1855 tree prev_fn;
1857 /* There is no point in performing inlining if errors have already
1858 occurred -- and we might crash if we try to inline invalid
1859 code. */
1860 if (errorcount || sorrycount)
1861 return;
1863 /* Clear out ID. */
1864 memset (&id, 0, sizeof (id));
1866 id.current_node = id.node = cgraph_node (fn);
1867 /* Don't allow recursion into FN. */
1868 VARRAY_TREE_INIT (id.fns, 32, "fns");
1869 VARRAY_PUSH_TREE (id.fns, fn);
1870 /* Or any functions that aren't finished yet. */
1871 prev_fn = NULL_TREE;
1872 if (current_function_decl)
1873 {
1874 VARRAY_PUSH_TREE (id.fns, current_function_decl);
1875 prev_fn = current_function_decl;
1876 }
1878 prev_fn = lang_hooks.tree_inlining.add_pending_fn_decls (&id.fns, prev_fn);
1880 /* Keep track of the low-water mark, i.e., the point where the first
1881 real inlining is represented in ID.FNS. */
1882 id.first_inlined_fn = VARRAY_ACTIVE_SIZE (id.fns);
1884 /* Replace all calls to inline functions with the bodies of those
1885 functions. */
1886 id.tree_pruner = htab_create (37, htab_hash_pointer, htab_eq_pointer, NULL);
1887 expand_calls_inline (&DECL_SAVED_TREE (fn), &id);
1889 /* Clean up. */
1890 htab_delete (id.tree_pruner);
1892 #ifdef ENABLE_CHECKING
1893 {
1894 struct cgraph_edge *e;
1896 verify_cgraph_node (id.node);
1898 /* Double check that we inlined everything we are supposed to inline. */
1899 for (e = id.node->callees; e; e = e->next_callee)
1900 gcc_assert (e->inline_failed);
1901 }
1902 #endif
1903 }
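/* A minimal usage sketch (assumptions, not from the original source):
   this entry point is meant to be run on a function whose body is still
   in trees, typically while that function is the one being compiled,
   e.g.

       optimize_inline_calls (fndecl);

   after which every call the inlining heuristics accepted has been
   replaced in DECL_SAVED_TREE (fndecl) by a copy of the callee's body,
   and the corresponding cgraph nodes have been removed.  */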
1905 /* FN is a function that has a complete body, and CLONE is a function whose
1906 body is to be set to a copy of FN, mapping argument declarations according
1907 to the ARG_MAP splay_tree. */
1909 void
1910 clone_body (tree clone, tree fn, void *arg_map)
1911 {
1912 inline_data id;
1914 /* Clone the body, as if we were making an inline call. But, remap the
1915 parameters in the callee to the parameters of caller. If there's an
1916 in-charge parameter, map it to an appropriate constant. */
1917 memset (&id, 0, sizeof (id));
1918 VARRAY_TREE_INIT (id.fns, 2, "fns");
1919 VARRAY_PUSH_TREE (id.fns, clone);
1920 VARRAY_PUSH_TREE (id.fns, fn);
1921 id.decl_map = (splay_tree)arg_map;
1923 /* Cloning is treated slightly differently from inlining. Set
1924 CLONING_P so that it's clear which operation we're performing. */
1925 id.cloning_p = true;
1927 /* Actually copy the body. */
1928 append_to_statement_list_force (copy_body (&id), &DECL_SAVED_TREE (clone));
1929 }
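/* A sketch of how a front end might drive this (assumptions, not from
   the original source): build a splay tree mapping FN's PARM_DECLs to
   CLONE's, then copy the body.

       splay_tree m = splay_tree_new (splay_tree_compare_pointers, NULL, NULL);
       tree p, q;
       for (p = DECL_ARGUMENTS (fn), q = DECL_ARGUMENTS (clone);
            p && q;
            p = TREE_CHAIN (p), q = TREE_CHAIN (q))
         splay_tree_insert (m, (splay_tree_key) p, (splay_tree_value) q);
       clone_body (clone, fn, m);

   Entries may also map a parameter to a constant, as the comment above
   notes for an in-charge parameter.  */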
1931 /* Make and return duplicate of body in FN. Put copies of DECL_ARGUMENTS
1932 in *arg_copy and of the static chain, if any, in *sc_copy. */
1934 tree
1935 save_body (tree fn, tree *arg_copy, tree *sc_copy)
1936 {
1937 inline_data id;
1938 tree body, *parg;
1940 memset (&id, 0, sizeof (id));
1941 VARRAY_TREE_INIT (id.fns, 1, "fns");
1942 VARRAY_PUSH_TREE (id.fns, fn);
1943 id.node = cgraph_node (fn);
1944 id.saving_p = true;
1945 id.decl_map = splay_tree_new (splay_tree_compare_pointers, NULL, NULL);
1946 *arg_copy = DECL_ARGUMENTS (fn);
1948 for (parg = arg_copy; *parg; parg = &TREE_CHAIN (*parg))
1949 {
1950 tree new = copy_node (*parg);
1952 lang_hooks.dup_lang_specific_decl (new);
1953 DECL_ABSTRACT_ORIGIN (new) = DECL_ORIGIN (*parg);
1954 insert_decl_map (&id, *parg, new);
1955 TREE_CHAIN (new) = TREE_CHAIN (*parg);
1956 *parg = new;
1957 }
1959 *sc_copy = DECL_STRUCT_FUNCTION (fn)->static_chain_decl;
1960 if (*sc_copy)
1961 {
1962 tree new = copy_node (*sc_copy);
1964 lang_hooks.dup_lang_specific_decl (new);
1965 DECL_ABSTRACT_ORIGIN (new) = DECL_ORIGIN (*sc_copy);
1966 insert_decl_map (&id, *sc_copy, new);
1967 TREE_CHAIN (new) = TREE_CHAIN (*sc_copy);
1968 *sc_copy = new;
1969 }
1971 insert_decl_map (&id, DECL_RESULT (fn), DECL_RESULT (fn));
1973 /* Actually copy the body. */
1974 body = copy_body (&id);
1976 /* Clean up. */
1977 splay_tree_delete (id.decl_map);
1978 return body;
1979 }
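/* A sketch of the caller's side (assumptions, not from the original
   source):

       tree arg_copy, sc_copy;
       tree saved = save_body (fndecl, &arg_copy, &sc_copy);

   SAVED is a copy of the body in which the original PARM_DECLs (and the
   static chain decl, if any) have been replaced by the fresh copies now
   chained in ARG_COPY and SC_COPY, while DECL_RESULT is deliberately
   shared.  A caller can stash these so a pristine body survives later
   inlining performed on FNDECL itself.  */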
1981 /* Passed to walk_tree. Copies the node pointed to, if appropriate. */
1983 tree
1984 copy_tree_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
1985 {
1986 enum tree_code code = TREE_CODE (*tp);
1988 /* We make copies of most nodes. */
1989 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code))
1990 || code == TREE_LIST
1991 || code == TREE_VEC
1992 || code == TYPE_DECL)
1993 {
1994 /* Because the chain gets clobbered when we make a copy, we save it
1995 here. */
1996 tree chain = TREE_CHAIN (*tp);
1997 tree new;
1999 /* Copy the node. */
2000 new = copy_node (*tp);
2002 /* Propagate mudflap marked-ness. */
2003 if (flag_mudflap && mf_marked_p (*tp))
2004 mf_mark (new);
2006 *tp = new;
2008 /* Now, restore the chain, if appropriate. That will cause
2009 walk_tree to walk into the chain as well. */
2010 if (code == PARM_DECL || code == TREE_LIST)
2011 TREE_CHAIN (*tp) = chain;
2013 /* For now, we don't update BLOCKs when we make copies. So, we
2014 have to nullify all BIND_EXPRs. */
2015 if (TREE_CODE (*tp) == BIND_EXPR)
2016 BIND_EXPR_BLOCK (*tp) = NULL_TREE;
2017 }
2019 else if (TREE_CODE_CLASS (code) == tcc_type)
2020 *walk_subtrees = 0;
2021 else if (TREE_CODE_CLASS (code) == tcc_declaration)
2022 *walk_subtrees = 0;
2023 else if (TREE_CODE_CLASS (code) == tcc_constant)
2024 *walk_subtrees = 0;
2025 else
2026 gcc_assert (code != STATEMENT_LIST);
2027 return NULL_TREE;
2028 }
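/* A minimal usage sketch (assumptions, not from the original source):
   together with walk_tree, copy_tree_r performs a deep copy of an
   expression while leaving decls, types and constants shared:

       tree copy = expr;
       walk_tree (&copy, copy_tree_r, NULL, NULL);

   Each expression node (and TREE_LIST/TREE_VEC) reached is replaced by
   a copy_node'd duplicate, and *walk_subtrees is cleared for types,
   declarations and constants so those nodes stay shared with the
   original tree.  */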
2030 /* The SAVE_EXPR pointed to by TP is being copied. If ST contains
2031 information indicating to what new SAVE_EXPR this one should be mapped,
2032 use that one. Otherwise, create a new node and enter it in ST. */
2034 static void
2035 remap_save_expr (tree *tp, void *st_, int *walk_subtrees)
2036 {
2037 splay_tree st = (splay_tree) st_;
2038 splay_tree_node n;
2039 tree t;
2041 /* See if we already encountered this SAVE_EXPR. */
2042 n = splay_tree_lookup (st, (splay_tree_key) *tp);
2044 /* If we didn't already remap this SAVE_EXPR, do so now. */
2045 if (!n)
2046 {
2047 t = copy_node (*tp);
2049 /* Remember this SAVE_EXPR. */
2050 splay_tree_insert (st, (splay_tree_key) *tp, (splay_tree_value) t);
2051 /* Make sure we don't remap an already-remapped SAVE_EXPR. */
2052 splay_tree_insert (st, (splay_tree_key) t, (splay_tree_value) t);
2053 }
2054 else
2055 {
2056 /* We've already walked into this SAVE_EXPR; don't do it again. */
2057 *walk_subtrees = 0;
2058 t = (tree) n->value;
2059 }
2061 /* Replace this SAVE_EXPR with the copy. */
2062 *tp = t;
2063 }
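/* An illustration (not from the original source) of the table contents.
   After the first visit of a SAVE_EXPR S, copied to S', ST holds both

       S  -> S'
       S' -> S'

   so that whether a later walk reaches the original or the copy, it is
   mapped to the same S' and its subtree is not duplicated again.  */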
2065 /* Called via walk_tree. If *TP points to a LABEL_EXPR for a local label,
2066 copies the declaration and enters it in the splay_tree in DATA (which is
2067 really an `inline_data *'). */
2069 static tree
2070 mark_local_for_remap_r (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED,
2071 void *data)
2072 {
2073 inline_data *id = (inline_data *) data;
2075 /* Don't walk into types. */
2076 if (TYPE_P (*tp))
2077 *walk_subtrees = 0;
2079 else if (TREE_CODE (*tp) == LABEL_EXPR)
2080 {
2081 tree decl = TREE_OPERAND (*tp, 0);
2083 /* Copy the decl and remember the copy. */
2084 insert_decl_map (id, decl,
2085 copy_decl_for_inlining (decl, DECL_CONTEXT (decl),
2086 DECL_CONTEXT (decl)));
2087 }
2089 return NULL_TREE;
2090 }
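/* For instance (illustrative, not from the original source), given

       { __label__ l;  l: ...;  goto l; }

   inside the expression being unsaved, the LABEL_DECL for "l" must not
   be shared between the original and the copy -- a label may be defined
   only once -- so a fresh decl is entered in the map here, and unsave_r
   later rewrites both the LABEL_EXPR and the GOTO_EXPR in the copy to
   use it.  */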
2092 /* Perform any modifications to EXPR required when it is unsaved. Does
2093 not recurse into EXPR's subtrees. */
2095 static void
2096 unsave_expr_1 (tree expr)
2097 {
2098 switch (TREE_CODE (expr))
2099 {
2100 case TARGET_EXPR:
2101 /* Don't mess with a TARGET_EXPR that hasn't been expanded.
2102 It's OK for this to happen if it was part of a subtree that
2103 isn't immediately expanded, such as operand 2 of another
2104 TARGET_EXPR. */
2105 if (TREE_OPERAND (expr, 1))
2106 break;
2108 TREE_OPERAND (expr, 1) = TREE_OPERAND (expr, 3);
2109 TREE_OPERAND (expr, 3) = NULL_TREE;
2110 break;
2112 default:
2113 break;
2114 }
2115 }
2117 /* Called via walk_tree when an expression is unsaved. Using the
2118 decl_map splay_tree in DATA (which is really an `inline_data *'),
2119 remaps all local declarations to appropriate replacements. */
2121 static tree
2122 unsave_r (tree *tp, int *walk_subtrees, void *data)
2123 {
2124 inline_data *id = (inline_data *) data;
2125 splay_tree st = id->decl_map;
2126 splay_tree_node n;
2128 /* Only a local declaration (variable or label). */
2129 if ((TREE_CODE (*tp) == VAR_DECL && !TREE_STATIC (*tp))
2130 || TREE_CODE (*tp) == LABEL_DECL)
2131 {
2132 /* Lookup the declaration. */
2133 n = splay_tree_lookup (st, (splay_tree_key) *tp);
2135 /* If it's there, remap it. */
2136 if (n)
2137 *tp = (tree) n->value;
2138 }
2140 else if (TREE_CODE (*tp) == STATEMENT_LIST)
2141 copy_statement_list (tp);
2142 else if (TREE_CODE (*tp) == BIND_EXPR)
2143 copy_bind_expr (tp, walk_subtrees, id);
2144 else if (TREE_CODE (*tp) == SAVE_EXPR)
2145 remap_save_expr (tp, st, walk_subtrees);
2146 else
2147 {
2148 copy_tree_r (tp, walk_subtrees, NULL);
2150 /* Do whatever unsaving is required. */
2151 unsave_expr_1 (*tp);
2152 }
2154 /* Keep iterating. */
2155 return NULL_TREE;
2156 }
2158 /* Copies everything in EXPR and replaces variables, labels
2159 and SAVE_EXPRs local to EXPR. */
2161 tree
2162 unsave_expr_now (tree expr)
2163 {
2164 inline_data id;
2166 /* There's nothing to do for NULL_TREE. */
2167 if (expr == 0)
2168 return expr;
2170 /* Set up ID. */
2171 memset (&id, 0, sizeof (id));
2172 VARRAY_TREE_INIT (id.fns, 1, "fns");
2173 VARRAY_PUSH_TREE (id.fns, current_function_decl);
2174 id.decl_map = splay_tree_new (splay_tree_compare_pointers, NULL, NULL);
2176 /* Walk the tree once to find local labels. */
2177 walk_tree_without_duplicates (&expr, mark_local_for_remap_r, &id);
2179 /* Walk the tree again, copying, remapping, and unsaving. */
2180 walk_tree (&expr, unsave_r, &id, NULL);
2182 /* Clean up. */
2183 splay_tree_delete (id.decl_map);
2185 return expr;
2186 }
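/* A minimal usage sketch (assumptions, not from the original source):

       tree copy = unsave_expr_now (expr);

   COPY is structurally identical to EXPR, but local variables, labels
   and SAVE_EXPRs have been replaced by fresh nodes, so the two trees
   can subsequently be expanded independently.  Non-local decls, types
   and constants remain shared.  */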
2188 /* Allow someone to determine if SEARCH is a child of TOP from gdb. */
2190 static tree
2191 debug_find_tree_1 (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED, void *data)
2192 {
2193 if (*tp == data)
2194 return (tree) data;
2195 else
2196 return NULL;
2197 }
2199 bool
2200 debug_find_tree (tree top, tree search)
2201 {
2202 return walk_tree_without_duplicates (&top, debug_find_tree_1, search) != 0;
2203 }
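/* Intended to be called by hand from the debugger (a usage sketch, not
   from the original source), e.g.

       (gdb) print debug_find_tree (top, search)

   which returns true if SEARCH occurs anywhere within TOP, including
   TOP itself.  */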
2205 /* Declare the variables created by the inliner. Add all the variables in
2206 VARS to BIND_EXPR. */
2208 static void
2209 declare_inline_vars (tree bind_expr, tree vars)
2210 {
2211 tree t;
2212 for (t = vars; t; t = TREE_CHAIN (t))
2213 DECL_SEEN_IN_BIND_EXPR_P (t) = 1;
2215 add_var_to_bind_expr (bind_expr, vars);