gcc/tree-inline.c
1 /* Tree inlining.
2 Copyright 2001, 2002, 2003, 2004, 2005 Free Software Foundation, Inc.
3 Contributed by Alexandre Oliva <aoliva@redhat.com>
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 2, or (at your option)
10 any later version.
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to
19 the Free Software Foundation, 59 Temple Place - Suite 330,
20 Boston, MA 02111-1307, USA. */
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tm.h"
26 #include "toplev.h"
27 #include "tree.h"
28 #include "tree-inline.h"
29 #include "rtl.h"
30 #include "expr.h"
31 #include "flags.h"
32 #include "params.h"
33 #include "input.h"
34 #include "insn-config.h"
35 #include "integrate.h"
36 #include "varray.h"
37 #include "hashtab.h"
38 #include "splay-tree.h"
39 #include "langhooks.h"
40 #include "cgraph.h"
41 #include "intl.h"
42 #include "tree-mudflap.h"
43 #include "tree-flow.h"
44 #include "function.h"
45 #include "diagnostic.h"
46 #include "debug.h"
48 /* I'm not real happy about this, but we need to handle gimple and
49 non-gimple trees. */
50 #include "tree-iterator.h"
51 #include "tree-gimple.h"
53 /* 0 if we should not perform inlining.
54 1 if we should expand function calls inline at the tree level.
55 2 if we should consider *all* functions to be inline
56 candidates. */
58 int flag_inline_trees = 0;
60 /* To Do:
62 o In order to make inlining-on-trees work, we pessimized
63 function-local static constants. In particular, they are now
64 always output, even when not addressed. Fix this by treating
65 function-local static constants just like global static
66 constants; the back-end already knows not to output them if they
67 are not needed.
69 o Provide heuristics to clamp inlining of recursive template
70 calls? */
72 /* Data required for function inlining. */
74 typedef struct inline_data
76 /* A stack of the functions we are inlining. For example, if we are
77 compiling `f', which calls `g', which calls `h', and we are
78 inlining the body of `h', the stack will contain, `h', followed
79 by `g', followed by `f'. The first few elements of the stack may
80 contain other functions that we know we should not recurse into,
81 even though they are not directly being inlined. */
82 varray_type fns;
83 /* The index of the first element of FNS that really represents an
84 inlined function. */
85 unsigned first_inlined_fn;
86 /* The label to jump to when a return statement is encountered. If
87 this value is NULL, then return statements will simply be
88 remapped as return statements, rather than as jumps. */
89 tree ret_label;
90 /* The VAR_DECL for the return value. */
91 tree retvar;
92 /* The map from local declarations in the inlined function to
93 equivalents in the function into which it is being inlined. */
94 splay_tree decl_map;
95 /* Nonzero if we are currently within the cleanup for a
96 TARGET_EXPR. */
97 int in_target_cleanup_p;
98 /* We use the same mechanism to build clones that we do to perform
99 inlining. However, there are a few places where we need to
100 distinguish between those two situations. This flag is true if
101 we are cloning, rather than inlining. */
102 bool cloning_p;
103 /* Similarly for saving function body. */
104 bool saving_p;
105 /* Hash table used to prevent walk_tree from visiting the same node
106 umpteen million times. */
107 htab_t tree_pruner;
108 /* Callgraph node of function we are inlining into. */
109 struct cgraph_node *node;
110 /* Callgraph node of currently inlined function. */
111 struct cgraph_node *current_node;
112 /* Statement iterator. We need this so we can keep the tree in
113 gimple form when we insert the inlined function. It is not
114 used when we are not dealing with gimple trees. */
115 tree_stmt_iterator tsi;
116 } inline_data;
118 /* Prototypes. */
120 static tree copy_body_r (tree *, int *, void *);
121 static tree copy_body (inline_data *);
122 static tree expand_call_inline (tree *, int *, void *);
123 static void expand_calls_inline (tree *, inline_data *);
124 static bool inlinable_function_p (tree);
125 static tree remap_decl (tree, inline_data *);
126 static tree remap_type (tree, inline_data *);
127 static tree initialize_inlined_parameters (inline_data *, tree,
128 tree, tree, tree);
129 static void remap_block (tree *, inline_data *);
130 static tree remap_decls (tree, inline_data *);
131 static void copy_bind_expr (tree *, int *, inline_data *);
132 static tree mark_local_for_remap_r (tree *, int *, void *);
133 static void unsave_expr_1 (tree);
134 static tree unsave_r (tree *, int *, void *);
135 static void declare_inline_vars (tree bind_expr, tree vars);
136 static void remap_save_expr (tree *, void *, int *);
138 /* Insert a tree->tree mapping for ID. Although the name suggests
139 that the trees should be variables, it is used for more than that. */
141 static void
142 insert_decl_map (inline_data *id, tree key, tree value)
144 splay_tree_insert (id->decl_map, (splay_tree_key) key,
145 (splay_tree_value) value);
147 /* Always insert an identity map as well. If we see this same new
148 node again, we won't want to duplicate it a second time. */
149 if (key != value)
150 splay_tree_insert (id->decl_map, (splay_tree_key) value,
151 (splay_tree_value) value);
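/* Illustrative sketch, not part of GCC: after inserting a mapping, both
   the key and the value resolve to the same copy, so walking the new
   node a second time does not duplicate it again.  The example function
   name is hypothetical; it assumes an ID whose decl_map exists.  */
#if 0
static void
example_insert_decl_map (inline_data *id, tree old_decl, tree new_decl)
{
  splay_tree_node n;

  insert_decl_map (id, old_decl, new_decl);

  n = splay_tree_lookup (id->decl_map, (splay_tree_key) old_decl);
  gcc_assert ((tree) n->value == new_decl);

  n = splay_tree_lookup (id->decl_map, (splay_tree_key) new_decl);
  gcc_assert ((tree) n->value == new_decl);
}
#endif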
154 /* Remap DECL during the copying of the BLOCK tree for the function.
155 We are only called to remap local variables in the current function. */
157 static tree
158 remap_decl (tree decl, inline_data *id)
160 splay_tree_node n = splay_tree_lookup (id->decl_map, (splay_tree_key) decl);
161 tree fn = VARRAY_TOP_TREE (id->fns);
163 /* See if we have remapped this declaration. If we didn't already have an
164 equivalent for this declaration, create one now. */
165 if (!n)
167 /* Make a copy of the variable or label. */
168 tree t = copy_decl_for_inlining (decl, fn, VARRAY_TREE (id->fns, 0));
170 /* Remember it, so that if we encounter this local entity again
171 we can reuse this copy. Do this early because remap_type may
172 need this decl for TYPE_STUB_DECL. */
173 insert_decl_map (id, decl, t);
175 /* Remap types, if necessary. */
176 TREE_TYPE (t) = remap_type (TREE_TYPE (t), id);
177 if (TREE_CODE (t) == TYPE_DECL)
178 DECL_ORIGINAL_TYPE (t) = remap_type (DECL_ORIGINAL_TYPE (t), id);
179 else if (TREE_CODE (t) == PARM_DECL)
180 DECL_ARG_TYPE_AS_WRITTEN (t)
181 = remap_type (DECL_ARG_TYPE_AS_WRITTEN (t), id);
183 /* Remap sizes as necessary. */
184 walk_tree (&DECL_SIZE (t), copy_body_r, id, NULL);
185 walk_tree (&DECL_SIZE_UNIT (t), copy_body_r, id, NULL);
187 /* If fields, do likewise for offset and qualifier. */
188 if (TREE_CODE (t) == FIELD_DECL)
190 walk_tree (&DECL_FIELD_OFFSET (t), copy_body_r, id, NULL);
191 if (TREE_CODE (DECL_CONTEXT (t)) == QUAL_UNION_TYPE)
192 walk_tree (&DECL_QUALIFIER (t), copy_body_r, id, NULL);
195 #if 0
196 /* FIXME handle anon aggrs. */
197 if (! DECL_NAME (t) && TREE_TYPE (t)
198 && lang_hooks.tree_inlining.anon_aggr_type_p (TREE_TYPE (t)))
200 /* For a VAR_DECL of anonymous type, we must also copy the
201 member VAR_DECLS here and rechain the DECL_ANON_UNION_ELEMS. */
202 tree members = NULL;
203 tree src;
205 for (src = DECL_ANON_UNION_ELEMS (t); src;
206 src = TREE_CHAIN (src))
208 tree member = remap_decl (TREE_VALUE (src), id);
210 gcc_assert (!TREE_PURPOSE (src));
211 members = tree_cons (NULL, member, members);
213 DECL_ANON_UNION_ELEMS (t) = nreverse (members);
215 #endif
217 return t;
220 return unshare_expr ((tree) n->value);
223 static tree
224 remap_type (tree type, inline_data *id)
226 splay_tree_node node;
227 tree new, t;
229 if (type == NULL)
230 return type;
232 /* See if we have remapped this type. */
233 node = splay_tree_lookup (id->decl_map, (splay_tree_key) type);
234 if (node)
235 return (tree) node->value;
237 /* The type only needs remapping if it's variably modified by a variable
238 in the function we are inlining. */
239 if (! variably_modified_type_p (type, VARRAY_TOP_TREE (id->fns)))
241 insert_decl_map (id, type, type);
242 return type;
245 /* We do need a copy. Build and register it now. If this is a pointer or
246 reference type, remap the designated type and make a new pointer or
247 reference type. */
248 if (TREE_CODE (type) == POINTER_TYPE)
250 new = build_pointer_type_for_mode (remap_type (TREE_TYPE (type), id),
251 TYPE_MODE (type),
252 TYPE_REF_CAN_ALIAS_ALL (type));
253 insert_decl_map (id, type, new);
254 return new;
256 else if (TREE_CODE (type) == REFERENCE_TYPE)
258 new = build_reference_type_for_mode (remap_type (TREE_TYPE (type), id),
259 TYPE_MODE (type),
260 TYPE_REF_CAN_ALIAS_ALL (type));
261 insert_decl_map (id, type, new);
262 return new;
264 else
265 new = copy_node (type);
267 insert_decl_map (id, type, new);
269 /* This is a new type, not a copy of an old type. Need to reassociate
270 variants. We can handle everything except the main variant lazily. */
271 t = TYPE_MAIN_VARIANT (type);
272 if (type != t)
274 t = remap_type (t, id);
275 TYPE_MAIN_VARIANT (new) = t;
276 TYPE_NEXT_VARIANT (new) = TYPE_MAIN_VARIANT (t);
277 TYPE_NEXT_VARIANT (t) = new;
279 else
281 TYPE_MAIN_VARIANT (new) = new;
282 TYPE_NEXT_VARIANT (new) = NULL;
285 if (TYPE_STUB_DECL (type))
286 TYPE_STUB_DECL (new) = remap_decl (TYPE_STUB_DECL (type), id);
288 /* Lazily create pointer and reference types. */
289 TYPE_POINTER_TO (new) = NULL;
290 TYPE_REFERENCE_TO (new) = NULL;
292 switch (TREE_CODE (new))
294 case INTEGER_TYPE:
295 case REAL_TYPE:
296 case ENUMERAL_TYPE:
297 case BOOLEAN_TYPE:
298 case CHAR_TYPE:
299 t = TYPE_MIN_VALUE (new);
300 if (t && TREE_CODE (t) != INTEGER_CST)
301 walk_tree (&TYPE_MIN_VALUE (new), copy_body_r, id, NULL);
303 t = TYPE_MAX_VALUE (new);
304 if (t && TREE_CODE (t) != INTEGER_CST)
305 walk_tree (&TYPE_MAX_VALUE (new), copy_body_r, id, NULL);
306 return new;
308 case FUNCTION_TYPE:
309 TREE_TYPE (new) = remap_type (TREE_TYPE (new), id);
310 walk_tree (&TYPE_ARG_TYPES (new), copy_body_r, id, NULL);
311 return new;
313 case ARRAY_TYPE:
314 TREE_TYPE (new) = remap_type (TREE_TYPE (new), id);
315 TYPE_DOMAIN (new) = remap_type (TYPE_DOMAIN (new), id);
316 break;
318 case RECORD_TYPE:
319 case UNION_TYPE:
320 case QUAL_UNION_TYPE:
321 walk_tree (&TYPE_FIELDS (new), copy_body_r, id, NULL);
322 break;
324 case OFFSET_TYPE:
325 default:
326 /* Shouldn't have been thought variable sized. */
327 gcc_unreachable ();
330 walk_tree (&TYPE_SIZE (new), copy_body_r, id, NULL);
331 walk_tree (&TYPE_SIZE_UNIT (new), copy_body_r, id, NULL);
333 return new;
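/* Illustrative sketch, not part of GCC: a type that is not variably
   modified by the function being inlined is simply mapped to itself, so
   remapping it repeatedly is cheap and idempotent.  The example function
   name is hypothetical.  */
#if 0
static void
example_remap_type (inline_data *id)
{
  tree first = remap_type (integer_type_node, id);
  tree second = remap_type (integer_type_node, id);

  /* integer_type_node is never variably modified, so both calls
     return the original node.  */
  gcc_assert (first == integer_type_node && second == first);
}
#endif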
336 static tree
337 remap_decls (tree decls, inline_data *id)
339 tree old_var;
340 tree new_decls = NULL_TREE;
342 /* Remap its variables. */
343 for (old_var = decls; old_var; old_var = TREE_CHAIN (old_var))
345 tree new_var;
347 /* Remap the variable. */
348 new_var = remap_decl (old_var, id);
350 /* If we didn't remap this variable, we can't mess with its
351 TREE_CHAIN. If we remapped this variable to the return slot, it's
352 already declared somewhere else, so don't declare it here. */
353 if (!new_var || new_var == id->retvar)
355 else
357 gcc_assert (DECL_P (new_var));
358 TREE_CHAIN (new_var) = new_decls;
359 new_decls = new_var;
363 return nreverse (new_decls);
366 /* Copy the BLOCK to contain remapped versions of the variables
367 therein. And hook the new block into the block-tree. */
369 static void
370 remap_block (tree *block, inline_data *id)
372 tree old_block;
373 tree new_block;
374 tree fn;
376 /* Make the new block. */
377 old_block = *block;
378 new_block = make_node (BLOCK);
379 TREE_USED (new_block) = TREE_USED (old_block);
380 BLOCK_ABSTRACT_ORIGIN (new_block) = old_block;
381 *block = new_block;
383 /* Remap its variables. */
384 BLOCK_VARS (new_block) = remap_decls (BLOCK_VARS (old_block), id);
386 fn = VARRAY_TREE (id->fns, 0);
387 #if 1
388 /* FIXME! It shouldn't be so hard to manage blocks. Rebuilding them in
389 rest_of_compilation is a good start. */
390 if (id->cloning_p)
391 /* We're building a clone; DECL_INITIAL is still
392 error_mark_node, and current_binding_level is the parm
393 binding level. */
394 lang_hooks.decls.insert_block (new_block);
395 else
397 /* Attach this new block after the DECL_INITIAL block for the
398 function into which this block is being inlined. In
399 rest_of_compilation we will straighten out the BLOCK tree. */
400 tree *first_block;
401 if (DECL_INITIAL (fn))
402 first_block = &BLOCK_CHAIN (DECL_INITIAL (fn));
403 else
404 first_block = &DECL_INITIAL (fn);
405 BLOCK_CHAIN (new_block) = *first_block;
406 *first_block = new_block;
408 #endif
409 /* Remember the remapped block. */
410 insert_decl_map (id, old_block, new_block);
413 static void
414 copy_statement_list (tree *tp)
416 tree_stmt_iterator oi, ni;
417 tree new;
419 new = alloc_stmt_list ();
420 ni = tsi_start (new);
421 oi = tsi_start (*tp);
422 *tp = new;
424 for (; !tsi_end_p (oi); tsi_next (&oi))
425 tsi_link_after (&ni, tsi_stmt (oi), TSI_NEW_STMT);
428 static void
429 copy_bind_expr (tree *tp, int *walk_subtrees, inline_data *id)
431 tree block = BIND_EXPR_BLOCK (*tp);
432 /* Copy (and replace) the statement. */
433 copy_tree_r (tp, walk_subtrees, NULL);
434 if (block)
436 remap_block (&block, id);
437 BIND_EXPR_BLOCK (*tp) = block;
440 if (BIND_EXPR_VARS (*tp))
441 /* This will remap a lot of the same decls again, but this should be
442 harmless. */
443 BIND_EXPR_VARS (*tp) = remap_decls (BIND_EXPR_VARS (*tp), id);
446 /* Called from copy_body via walk_tree. DATA is really an `inline_data *'. */
448 static tree
449 copy_body_r (tree *tp, int *walk_subtrees, void *data)
451 inline_data *id = (inline_data *) data;
452 tree fn = VARRAY_TOP_TREE (id->fns);
454 #if 0
455 /* All automatic variables should have a DECL_CONTEXT indicating
456 what function they come from. */
457 if ((TREE_CODE (*tp) == VAR_DECL || TREE_CODE (*tp) == LABEL_DECL)
458 && DECL_NAMESPACE_SCOPE_P (*tp))
459 gcc_assert (DECL_EXTERNAL (*tp) || TREE_STATIC (*tp));
460 #endif
462 /* If this is a RETURN_EXPR, change it into a MODIFY_EXPR and a
463 GOTO_EXPR with the RET_LABEL as its target. */
464 if (TREE_CODE (*tp) == RETURN_EXPR && id->ret_label)
466 tree return_stmt = *tp;
467 tree goto_stmt;
469 /* Build the GOTO_EXPR. */
470 tree assignment = TREE_OPERAND (return_stmt, 0);
471 goto_stmt = build1 (GOTO_EXPR, void_type_node, id->ret_label);
472 TREE_USED (id->ret_label) = 1;
474 /* If we're returning something, just turn that into an
475 assignment into the equivalent of the original
476 RESULT_DECL. */
477 if (assignment)
479 /* Do not create a statement containing a naked RESULT_DECL. */
480 if (TREE_CODE (assignment) == RESULT_DECL)
481 gimplify_stmt (&assignment);
483 *tp = build (BIND_EXPR, void_type_node, NULL, NULL, NULL);
484 append_to_statement_list (assignment, &BIND_EXPR_BODY (*tp));
485 append_to_statement_list (goto_stmt, &BIND_EXPR_BODY (*tp));
487 /* If we're not returning anything just do the jump. */
488 else
489 *tp = goto_stmt;
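/* Illustrative example, not part of GCC: at the source level this
   transformation turns a callee such as

       int callee (int x) { return x + 1; }

   into, inside the caller, roughly

       retvar = x' + 1; goto ret_label;   ...   ret_label:;

   where x', retvar and ret_label stand for the remapped parameter, the
   variable created by declare_return_variable and the LABEL_DECL
   created in expand_call_inline.  */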
491 /* Local variables and labels need to be replaced by equivalent
492 variables. We don't want to copy static variables; there's only
493 one of those, no matter how many times we inline the containing
494 function. Similarly for globals from an outer function. */
495 else if (lang_hooks.tree_inlining.auto_var_in_fn_p (*tp, fn))
497 tree new_decl;
499 /* Remap the declaration. */
500 new_decl = remap_decl (*tp, id);
501 gcc_assert (new_decl);
502 /* Replace this variable with the copy. */
503 STRIP_TYPE_NOPS (new_decl);
504 *tp = new_decl;
505 *walk_subtrees = 0;
507 else if (TREE_CODE (*tp) == STATEMENT_LIST)
508 copy_statement_list (tp);
509 else if (TREE_CODE (*tp) == SAVE_EXPR)
510 remap_save_expr (tp, id->decl_map, walk_subtrees);
511 else if (TREE_CODE (*tp) == BIND_EXPR)
512 copy_bind_expr (tp, walk_subtrees, id);
513 /* Types may need remapping as well. */
514 else if (TYPE_P (*tp))
515 *tp = remap_type (*tp, id);
517 /* If this is a constant, we have to copy the node iff the type will be
518 remapped. copy_tree_r will not copy a constant. */
519 else if (CONSTANT_CLASS_P (*tp))
521 tree new_type = remap_type (TREE_TYPE (*tp), id);
523 if (new_type == TREE_TYPE (*tp))
524 *walk_subtrees = 0;
526 else if (TREE_CODE (*tp) == INTEGER_CST)
527 *tp = build_int_cst_wide (new_type, TREE_INT_CST_LOW (*tp),
528 TREE_INT_CST_HIGH (*tp));
529 else
531 *tp = copy_node (*tp);
532 TREE_TYPE (*tp) = new_type;
536 /* Otherwise, just copy the node. Note that copy_tree_r already
537 knows not to copy VAR_DECLs, etc., so this is safe. */
538 else
540 tree old_node = *tp;
542 if (TREE_CODE (*tp) == MODIFY_EXPR
543 && TREE_OPERAND (*tp, 0) == TREE_OPERAND (*tp, 1)
544 && (lang_hooks.tree_inlining.auto_var_in_fn_p
545 (TREE_OPERAND (*tp, 0), fn)))
547 /* Some assignments VAR = VAR; don't generate any rtl code
548 and thus don't count as variable modification. Avoid
549 keeping bogosities like 0 = 0. */
550 tree decl = TREE_OPERAND (*tp, 0), value;
551 splay_tree_node n;
553 n = splay_tree_lookup (id->decl_map, (splay_tree_key) decl);
554 if (n)
556 value = (tree) n->value;
557 STRIP_TYPE_NOPS (value);
558 if (TREE_CONSTANT (value) || TREE_READONLY_DECL_P (value))
560 *tp = build_empty_stmt ();
561 return copy_body_r (tp, walk_subtrees, data);
565 else if (TREE_CODE (*tp) == INDIRECT_REF)
567 /* Get rid of *& from inline substitutions that can happen when a
568 pointer argument is an ADDR_EXPR. */
569 tree decl = TREE_OPERAND (*tp, 0);
570 splay_tree_node n;
572 n = splay_tree_lookup (id->decl_map, (splay_tree_key) decl);
573 if (n)
575 *tp = build_fold_indirect_ref ((tree)n->value);
576 *walk_subtrees = 0;
577 return NULL;
581 copy_tree_r (tp, walk_subtrees, NULL);
583 if (TREE_CODE (*tp) == CALL_EXPR && id->node && get_callee_fndecl (*tp))
585 if (id->saving_p)
587 struct cgraph_node *node;
588 struct cgraph_edge *edge;
590 for (node = id->node->next_clone; node; node = node->next_clone)
592 edge = cgraph_edge (node, old_node);
593 gcc_assert (edge);
594 edge->call_expr = *tp;
597 else
599 struct cgraph_edge *edge
600 = cgraph_edge (id->current_node, old_node);
602 if (edge)
603 cgraph_clone_edge (edge, id->node, *tp);
607 TREE_TYPE (*tp) = remap_type (TREE_TYPE (*tp), id);
609 /* The copied TARGET_EXPR has never been expanded, even if the
610 original node was expanded already. */
611 if (TREE_CODE (*tp) == TARGET_EXPR && TREE_OPERAND (*tp, 3))
613 TREE_OPERAND (*tp, 1) = TREE_OPERAND (*tp, 3);
614 TREE_OPERAND (*tp, 3) = NULL_TREE;
617 /* Variable substitution need not be simple; consider, in particular,
618 the INDIRECT_REF substitution above. Make sure that TREE_CONSTANT
619 and friends are up-to-date. */
620 else if (TREE_CODE (*tp) == ADDR_EXPR)
622 walk_tree (&TREE_OPERAND (*tp, 0), copy_body_r, id, NULL);
623 recompute_tree_invarant_for_addr_expr (*tp);
624 *walk_subtrees = 0;
628 /* Keep iterating. */
629 return NULL_TREE;
632 /* Make a copy of the body of FN so that it can be inserted inline in
633 another function. */
635 static tree
636 copy_body (inline_data *id)
638 tree body;
639 tree fndecl = VARRAY_TOP_TREE (id->fns);
641 if (fndecl == current_function_decl
642 && cfun->saved_tree)
643 body = cfun->saved_tree;
644 else
645 body = DECL_SAVED_TREE (fndecl);
646 walk_tree (&body, copy_body_r, id, NULL);
648 return body;
651 /* Return true if VALUE is an ADDR_EXPR of an automatic variable
652 defined in function FN, or of a data member thereof. */
654 static bool
655 self_inlining_addr_expr (tree value, tree fn)
657 tree var;
659 if (TREE_CODE (value) != ADDR_EXPR)
660 return false;
662 var = get_base_address (TREE_OPERAND (value, 0));
664 return var && lang_hooks.tree_inlining.auto_var_in_fn_p (var, fn);
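/* Illustrative example, not part of GCC: this check matters for
   recursive inlining.  If a function passes the address of one of its
   own locals to itself, e.g.

       int f (int *p) { int i; return p ? *p : f (&i); }

   then "&i" must not be substituted directly for the parameter in the
   inlined copy, because there it has to refer to the copy of "i", not
   to the outer instance.  */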
667 static void
668 setup_one_parameter (inline_data *id, tree p, tree value, tree fn,
669 tree *init_stmts, tree *vars, bool *gimplify_init_stmts_p)
671 tree init_stmt;
672 tree var;
674 /* If the parameter is never assigned to, we may not need to
675 create a new variable here at all. Instead, we may be able
676 to just use the argument value. */
677 if (TREE_READONLY (p)
678 && !TREE_ADDRESSABLE (p)
679 && value && !TREE_SIDE_EFFECTS (value))
681 /* We can't risk substituting complex expressions. They
682 might contain variables that will be assigned to later.
683 Theoretically, we could check the expression to see if
684 all of the variables that determine its value are
685 read-only, but we don't bother. */
686 /* We may produce non-gimple trees by adding NOPs or introduce
687 invalid sharing when the operand is not really constant.
688 It is not a big deal to prohibit constant propagation here, as
689 we will constant propagate in the DOM1 pass anyway. */
690 if (is_gimple_min_invariant (value)
691 && lang_hooks.types_compatible_p (TREE_TYPE (value), TREE_TYPE (p))
692 /* We have to be very careful about ADDR_EXPR. Make sure
693 the base variable isn't a local variable of the inlined
694 function, e.g., when doing recursive inlining, direct or
695 mutually-recursive or whatever, which is why we don't
696 just test whether fn == current_function_decl. */
697 && ! self_inlining_addr_expr (value, fn))
699 insert_decl_map (id, p, value);
700 return;
704 /* Make an equivalent VAR_DECL. Note that we must NOT remap the type
705 here since the type of this decl must be visible to the calling
706 function. */
707 var = copy_decl_for_inlining (p, fn, VARRAY_TREE (id->fns, 0));
709 /* Register the VAR_DECL as the equivalent for the PARM_DECL;
710 that way, when the PARM_DECL is encountered, it will be
711 automatically replaced by the VAR_DECL. */
712 insert_decl_map (id, p, var);
714 /* Declare this new variable. */
715 TREE_CHAIN (var) = *vars;
716 *vars = var;
718 /* Make gimplifier happy about this variable. */
719 DECL_SEEN_IN_BIND_EXPR_P (var) = 1;
721 /* Even if P was TREE_READONLY, the new VAR should not be.
722 In the original code, we would have constructed a
723 temporary, and then the function body would have never
724 changed the value of P. However, now, we will be
725 constructing VAR directly. The constructor body may
726 change its value multiple times as it is being
727 constructed. Therefore, it must not be TREE_READONLY;
728 the back-end assumes that a TREE_READONLY variable is
729 assigned to only once. */
730 if (TYPE_NEEDS_CONSTRUCTING (TREE_TYPE (p)))
731 TREE_READONLY (var) = 0;
733 /* Initialize this VAR_DECL from the equivalent argument. Convert
734 the argument to the proper type in case it was promoted. */
735 if (value)
737 tree rhs = fold_convert (TREE_TYPE (var), value);
739 if (rhs == error_mark_node)
740 return;
742 /* We want to use MODIFY_EXPR, not INIT_EXPR here so that we
743 keep our trees in gimple form. */
744 init_stmt = build (MODIFY_EXPR, TREE_TYPE (var), var, rhs);
745 append_to_statement_list (init_stmt, init_stmts);
747 /* If we did not create a gimple value and we did not create a gimple
748 cast of a gimple value, then we will need to gimplify INIT_STMTS
749 at the end. Note that is_gimple_cast only checks the outer
750 tree code, not its operand. Thus the explicit check that its
751 operand is a gimple value. */
752 if (!is_gimple_val (rhs)
753 && (!is_gimple_cast (rhs)
754 || !is_gimple_val (TREE_OPERAND (rhs, 0))))
755 *gimplify_init_stmts_p = true;
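/* Illustrative example, not part of GCC: for a read-only, non-addressable
   parameter and a constant argument, e.g.

       static inline int sq (const int n) { return n * n; }   ...   sq (4);

   no VAR_DECL is created at all; the mapping n -> 4 is recorded above
   and the inlined body simply becomes 4 * 4.  */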
759 /* Generate code to initialize the parameters of the function at the
760 top of the stack in ID from the ARGS (presented as a TREE_LIST). */
762 static tree
763 initialize_inlined_parameters (inline_data *id, tree args, tree static_chain,
764 tree fn, tree bind_expr)
766 tree init_stmts = NULL_TREE;
767 tree parms;
768 tree a;
769 tree p;
770 tree vars = NULL_TREE;
771 bool gimplify_init_stmts_p = false;
772 int argnum = 0;
774 /* Figure out what the parameters are. */
775 parms = DECL_ARGUMENTS (fn);
776 if (fn == current_function_decl)
777 parms = cfun->saved_args;
779 /* Loop through the parameter declarations, replacing each with an
780 equivalent VAR_DECL, appropriately initialized. */
781 for (p = parms, a = args; p;
782 a = a ? TREE_CHAIN (a) : a, p = TREE_CHAIN (p))
784 tree value;
786 ++argnum;
788 /* Find the initializer. */
789 value = lang_hooks.tree_inlining.convert_parm_for_inlining
790 (p, a ? TREE_VALUE (a) : NULL_TREE, fn, argnum);
792 setup_one_parameter (id, p, value, fn, &init_stmts, &vars,
793 &gimplify_init_stmts_p);
796 /* Evaluate trailing arguments. */
797 for (; a; a = TREE_CHAIN (a))
799 tree value = TREE_VALUE (a);
800 append_to_statement_list (value, &init_stmts);
803 /* Initialize the static chain. */
804 p = DECL_STRUCT_FUNCTION (fn)->static_chain_decl;
805 if (fn == current_function_decl)
806 p = DECL_STRUCT_FUNCTION (fn)->saved_static_chain_decl;
807 if (p)
809 /* No static chain? Seems like a bug in tree-nested.c. */
810 gcc_assert (static_chain);
812 setup_one_parameter (id, p, static_chain, fn, &init_stmts, &vars,
813 &gimplify_init_stmts_p);
816 if (gimplify_init_stmts_p)
817 gimplify_body (&init_stmts, current_function_decl, false);
819 declare_inline_vars (bind_expr, vars);
820 return init_stmts;
823 /* Declare a return variable to replace the RESULT_DECL for the function we
824 are calling. RETURN_SLOT_ADDR, if non-null, was a fake parameter that
825 took the address of the result. MODIFY_DEST, if non-null, was the LHS of
826 the MODIFY_EXPR to which this call is the RHS.
828 The return value is a (possibly null) value that is the result of the
829 function as seen by the callee. *USE_P is a (possibly null) value that
830 holds the result as seen by the caller. */
832 static tree
833 declare_return_variable (inline_data *id, tree return_slot_addr,
834 tree modify_dest, tree *use_p)
836 tree callee = VARRAY_TOP_TREE (id->fns);
837 tree caller = VARRAY_TREE (id->fns, 0);
838 tree result = DECL_RESULT (callee);
839 tree callee_type = TREE_TYPE (result);
840 tree caller_type = TREE_TYPE (TREE_TYPE (callee));
841 tree var, use;
843 /* We don't need to do anything for functions that don't return
844 anything. */
845 if (!result || VOID_TYPE_P (callee_type))
847 *use_p = NULL_TREE;
848 return NULL_TREE;
851 /* If there was a return slot, then the return value is the
852 dereferenced address of that object. */
853 if (return_slot_addr)
855 /* The front end shouldn't have used both return_slot_addr and
856 a modify expression. */
857 gcc_assert (!modify_dest);
858 if (DECL_BY_REFERENCE (result))
859 var = return_slot_addr;
860 else
861 var = build_fold_indirect_ref (return_slot_addr);
862 use = NULL;
863 goto done;
866 /* All types requiring non-trivial constructors should have been handled. */
867 gcc_assert (!TREE_ADDRESSABLE (callee_type));
869 /* Attempt to avoid creating a new temporary variable. */
870 if (modify_dest)
872 bool use_it = false;
874 /* We can't use MODIFY_DEST if there's type promotion involved. */
875 if (!lang_hooks.types_compatible_p (caller_type, callee_type))
876 use_it = false;
878 /* ??? If we're assigning to a variable sized type, then we must
879 reuse the destination variable, because we've no good way to
880 create variable sized temporaries at this point. */
881 else if (TREE_CODE (TYPE_SIZE_UNIT (caller_type)) != INTEGER_CST)
882 use_it = true;
884 /* If the callee cannot possibly modify MODIFY_DEST, then we can
885 reuse it as the result of the call directly. Don't do this if
886 it would promote MODIFY_DEST to addressable. */
887 else if (!TREE_STATIC (modify_dest)
888 && !TREE_ADDRESSABLE (modify_dest)
889 && !TREE_ADDRESSABLE (result))
890 use_it = true;
892 if (use_it)
894 var = modify_dest;
895 use = NULL;
896 goto done;
900 gcc_assert (TREE_CODE (TYPE_SIZE_UNIT (callee_type)) == INTEGER_CST);
902 var = copy_decl_for_inlining (result, callee, caller);
903 DECL_SEEN_IN_BIND_EXPR_P (var) = 1;
904 DECL_STRUCT_FUNCTION (caller)->unexpanded_var_list
905 = tree_cons (NULL_TREE, var,
906 DECL_STRUCT_FUNCTION (caller)->unexpanded_var_list);
908 /* Do not have the rest of GCC warn about this variable as it should
909 not be visible to the user. */
910 TREE_NO_WARNING (var) = 1;
912 /* Build the use expr. If the return type of the function was
913 promoted, convert it back to the expected type. */
914 use = var;
915 if (!lang_hooks.types_compatible_p (TREE_TYPE (var), caller_type))
916 use = fold_convert (caller_type, var);
918 done:
919 /* Register the VAR_DECL as the equivalent for the RESULT_DECL; that
920 way, when the RESULT_DECL is encountered, it will be
921 automatically replaced by the VAR_DECL. */
922 insert_decl_map (id, result, var);
924 /* Remember this so we can ignore it in remap_decls. */
925 id->retvar = var;
927 *use_p = use;
928 return var;
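/* Illustrative summary, not part of GCC: for a call such as

       s = callee ();

   the three possibilities above are (1) a return slot was passed, so the
   result is written through that address; (2) "s" itself can safely be
   reused as the return variable; or (3) a fresh, artificial VAR_DECL is
   created in the caller and "s" is later assigned from it.  */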
931 /* Returns nonzero if a function can be inlined as a tree. */
933 bool
934 tree_inlinable_function_p (tree fn)
936 return inlinable_function_p (fn);
939 static const char *inline_forbidden_reason;
941 static tree
942 inline_forbidden_p_1 (tree *nodep, int *walk_subtrees ATTRIBUTE_UNUSED,
943 void *fnp)
945 tree node = *nodep;
946 tree fn = (tree) fnp;
947 tree t;
949 switch (TREE_CODE (node))
951 case CALL_EXPR:
952 /* Refuse to inline an alloca call unless the user explicitly forced it,
953 as this may change the program's memory overhead drastically when the
954 function using alloca is called in a loop. In the GCC present in
955 SPEC2000, inlining into schedule_block caused it to require 2GB of
956 RAM instead of 256MB. */
957 if (alloca_call_p (node)
958 && !lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn)))
960 inline_forbidden_reason
961 = N_("%Jfunction %qF can never be inlined because it uses "
962 "alloca (override using the always_inline attribute)");
963 return node;
965 t = get_callee_fndecl (node);
966 if (! t)
967 break;
969 /* We cannot inline functions that call setjmp. */
970 if (setjmp_call_p (t))
972 inline_forbidden_reason
973 = N_("%Jfunction %qF can never be inlined because it uses setjmp");
974 return node;
977 if (DECL_BUILT_IN_CLASS (t) == BUILT_IN_NORMAL)
978 switch (DECL_FUNCTION_CODE (t))
980 /* We cannot inline functions that take a variable number of
981 arguments. */
982 case BUILT_IN_VA_START:
983 case BUILT_IN_STDARG_START:
984 case BUILT_IN_NEXT_ARG:
985 case BUILT_IN_VA_END:
986 inline_forbidden_reason
987 = N_("%Jfunction %qF can never be inlined because it "
988 "uses variable argument lists");
989 return node;
991 case BUILT_IN_LONGJMP:
992 /* We can't inline functions that call __builtin_longjmp at
993 all. The non-local goto machinery really requires the
994 destination be in a different function. If we allow the
995 function calling __builtin_longjmp to be inlined into the
996 function calling __builtin_setjmp, Things will Go Awry. */
997 inline_forbidden_reason
998 = N_("%Jfunction %qF can never be inlined because "
999 "it uses setjmp-longjmp exception handling");
1000 return node;
1002 case BUILT_IN_NONLOCAL_GOTO:
1003 /* Similarly. */
1004 inline_forbidden_reason
1005 = N_("%Jfunction %qF can never be inlined because "
1006 "it uses non-local goto");
1007 return node;
1009 case BUILT_IN_RETURN:
1010 case BUILT_IN_APPLY_ARGS:
1011 /* If a __builtin_apply_args caller would be inlined,
1012 it would be saving arguments of the function it has
1013 been inlined into. Similarly __builtin_return would
1014 return from the function the inline has been inlined into. */
1015 inline_forbidden_reason
1016 = N_("%Jfunction %qF can never be inlined because "
1017 "it uses __builtin_return or __builtin_apply_args");
1018 return node;
1020 default:
1021 break;
1023 break;
1025 case GOTO_EXPR:
1026 t = TREE_OPERAND (node, 0);
1028 /* We will not inline a function which uses computed goto. The
1029 addresses of its local labels, which may be tucked into
1030 global storage, are of course not constant across
1031 instantiations, which causes unexpected behavior. */
1032 if (TREE_CODE (t) != LABEL_DECL)
1034 inline_forbidden_reason
1035 = N_("%Jfunction %qF can never be inlined "
1036 "because it contains a computed goto");
1037 return node;
1039 break;
1041 case LABEL_EXPR:
1042 t = TREE_OPERAND (node, 0);
1043 if (DECL_NONLOCAL (t))
1045 /* We cannot inline a function that receives a non-local goto
1046 because we cannot remap the destination label used in the
1047 function that is performing the non-local goto. */
1048 inline_forbidden_reason
1049 = N_("%Jfunction %qF can never be inlined "
1050 "because it receives a non-local goto");
1051 return node;
1053 break;
1055 case RECORD_TYPE:
1056 case UNION_TYPE:
1057 /* We cannot inline a function of the form
1059 void F (int i) { struct S { int ar[i]; } s; }
1061 Attempting to do so produces a catch-22.
1062 If walk_tree examines the TYPE_FIELDS chain of RECORD_TYPE/
1063 UNION_TYPE nodes, then it goes into infinite recursion on a
1064 structure containing a pointer to its own type. If it doesn't,
1065 then the type node for S doesn't get adjusted properly when
1066 F is inlined.
1068 ??? This is likely no longer true, but it's too late in the 4.0
1069 cycle to try to find out. This should be checked for 4.1. */
1070 for (t = TYPE_FIELDS (node); t; t = TREE_CHAIN (t))
1071 if (variably_modified_type_p (TREE_TYPE (t), NULL))
1073 inline_forbidden_reason
1074 = N_("%Jfunction %qF can never be inlined "
1075 "because it uses variable sized variables");
1076 return node;
1079 default:
1080 break;
1083 return NULL_TREE;
1086 /* Return subexpression representing possible alloca call, if any. */
1087 static tree
1088 inline_forbidden_p (tree fndecl)
1090 location_t saved_loc = input_location;
1091 tree ret = walk_tree_without_duplicates (&DECL_SAVED_TREE (fndecl),
1092 inline_forbidden_p_1, fndecl);
1094 input_location = saved_loc;
1095 return ret;
1098 /* Returns nonzero if FN is a function that does not have any
1099 fundamental inline blocking properties. */
1101 static bool
1102 inlinable_function_p (tree fn)
1104 bool inlinable = true;
1106 /* If we've already decided this function shouldn't be inlined,
1107 there's no need to check again. */
1108 if (DECL_UNINLINABLE (fn))
1109 return false;
1111 /* See if there is any language-specific reason it cannot be
1112 inlined. (It is important that this hook be called early because
1113 in C++ it may result in template instantiation.)
1114 If the function is not inlinable for language-specific reasons,
1115 it is left up to the langhook to explain why. */
1116 inlinable = !lang_hooks.tree_inlining.cannot_inline_tree_fn (&fn);
1118 /* If we don't have the function body available, we can't inline it.
1119 However, this should not be recorded since we also get here for
1120 forward declared inline functions. Therefore, return at once. */
1121 if (!DECL_SAVED_TREE (fn))
1122 return false;
1124 /* If we're not inlining at all, then we cannot inline this function. */
1125 else if (!flag_inline_trees)
1126 inlinable = false;
1128 /* Only try to inline functions if DECL_INLINE is set. This should be
1129 true for all functions declared `inline', and for all other functions
1130 as well with -finline-functions.
1132 Don't think of disregarding DECL_INLINE when flag_inline_trees == 2;
1133 it's the front-end that must set DECL_INLINE in this case, because
1134 dwarf2out loses if a function that does not have DECL_INLINE set is
1135 inlined anyway. That is why we have both DECL_INLINE and
1136 DECL_DECLARED_INLINE_P. */
1137 /* FIXME: When flag_inline_trees dies, the check for flag_unit_at_a_time
1138 here should be redundant. */
1139 else if (!DECL_INLINE (fn) && !flag_unit_at_a_time)
1140 inlinable = false;
1142 else if (inline_forbidden_p (fn))
1144 /* See if we should warn about uninlinable functions. Previously,
1145 some of these warnings would be issued while trying to expand
1146 the function inline, but that would cause multiple warnings
1147 about functions that would for example call alloca. But since
1148 this is a property of the function, just one warning is enough.
1149 As a bonus we can now give more details about the reason why a
1150 function is not inlinable.
1151 We only warn for functions declared `inline' by the user. */
1152 bool do_warning = (warn_inline
1153 && DECL_INLINE (fn)
1154 && DECL_DECLARED_INLINE_P (fn)
1155 && !DECL_IN_SYSTEM_HEADER (fn));
1157 if (lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn)))
1158 sorry (inline_forbidden_reason, fn, fn);
1159 else if (do_warning)
1160 warning (0, inline_forbidden_reason, fn, fn);
1162 inlinable = false;
1165 /* Squirrel away the result so that we don't have to check again. */
1166 DECL_UNINLINABLE (fn) = !inlinable;
1168 return inlinable;
1171 /* Estimate the cost of a memory move. Use the machine-dependent
1172 word size and take a possible memcpy call into account. */
1174 static int
1175 estimate_move_cost (tree type)
1177 HOST_WIDE_INT size;
1179 size = int_size_in_bytes (type);
1181 if (size < 0 || size > MOVE_MAX_PIECES * MOVE_RATIO)
1182 /* Cost of a memcpy call, 3 arguments and the call. */
1183 return 4;
1184 else
1185 return ((size + MOVE_MAX_PIECES - 1) / MOVE_MAX_PIECES);
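/* Worked example (target dependent, for illustration only): on a target
   with MOVE_MAX_PIECES == 8 and MOVE_RATIO == 4, a 16-byte structure
   costs (16 + 7) / 8 == 2, while a 40-byte structure exceeds the
   8 * 4 == 32 byte threshold and costs 4, the assumed price of a
   memcpy call.  */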
1188 /* Used by estimate_num_insns. Estimate number of instructions seen
1189 by a given statement. */
1191 static tree
1192 estimate_num_insns_1 (tree *tp, int *walk_subtrees, void *data)
1194 int *count = data;
1195 tree x = *tp;
1197 if (IS_TYPE_OR_DECL_P (x))
1199 *walk_subtrees = 0;
1200 return NULL;
1202 /* Assume that constants and references count as nothing. Their cost
1203 should be dominated by the operations among them, which we do count
1204 later, and they are a common target of CSE and similar optimizations. */
1205 else if (CONSTANT_CLASS_P (x) || REFERENCE_CLASS_P (x))
1206 return NULL;
1208 switch (TREE_CODE (x))
1210 /* Containers have no cost. */
1211 case TREE_LIST:
1212 case TREE_VEC:
1213 case BLOCK:
1214 case COMPONENT_REF:
1215 case BIT_FIELD_REF:
1216 case INDIRECT_REF:
1217 case ALIGN_INDIRECT_REF:
1218 case MISALIGNED_INDIRECT_REF:
1219 case ARRAY_REF:
1220 case ARRAY_RANGE_REF:
1221 case OBJ_TYPE_REF:
1222 case EXC_PTR_EXPR: /* ??? */
1223 case FILTER_EXPR: /* ??? */
1224 case COMPOUND_EXPR:
1225 case BIND_EXPR:
1226 case WITH_CLEANUP_EXPR:
1227 case NOP_EXPR:
1228 case VIEW_CONVERT_EXPR:
1229 case SAVE_EXPR:
1230 case ADDR_EXPR:
1231 case COMPLEX_EXPR:
1232 case RANGE_EXPR:
1233 case CASE_LABEL_EXPR:
1234 case SSA_NAME:
1235 case CATCH_EXPR:
1236 case EH_FILTER_EXPR:
1237 case STATEMENT_LIST:
1238 case ERROR_MARK:
1239 case NON_LVALUE_EXPR:
1240 case FDESC_EXPR:
1241 case VA_ARG_EXPR:
1242 case TRY_CATCH_EXPR:
1243 case TRY_FINALLY_EXPR:
1244 case LABEL_EXPR:
1245 case GOTO_EXPR:
1246 case RETURN_EXPR:
1247 case EXIT_EXPR:
1248 case LOOP_EXPR:
1249 case PHI_NODE:
1250 case WITH_SIZE_EXPR:
1251 break;
1253 /* We don't account constants for now. Assume that the cost is amortized
1254 by operations that do use them. We may reconsider this decision once
1255 we are able to optimize the tree before estimating its size and break
1256 out static initializers. */
1257 case IDENTIFIER_NODE:
1258 case INTEGER_CST:
1259 case REAL_CST:
1260 case COMPLEX_CST:
1261 case VECTOR_CST:
1262 case STRING_CST:
1263 *walk_subtrees = 0;
1264 return NULL;
1266 /* Try to estimate the cost of assignments. We have three cases to
1267 deal with:
1268 1) Simple assignments to registers;
1269 2) Stores to things that must live in memory. This includes
1270 "normal" stores to scalars, but also assignments of large
1271 structures, or constructors of big arrays;
1272 3) TARGET_EXPRs.
1274 Let us look at the first two cases, assuming we have "a = b + C":
1275 <modify_expr <var_decl "a"> <plus_expr <var_decl "b"> <constant C>>
1276 If "a" is a GIMPLE register, the assignment to it is free on almost
1277 any target, because "a" usually ends up in a real register. Hence
1278 the only cost of this expression comes from the PLUS_EXPR, and we
1279 can ignore the MODIFY_EXPR.
1280 If "a" is not a GIMPLE register, the assignment to "a" will most
1281 likely be a real store, so the cost of the MODIFY_EXPR is the cost
1282 of moving something into "a", which we compute using the function
1283 estimate_move_cost.
1285 The third case deals with TARGET_EXPRs, for which the semantics are
1286 that a temporary is assigned, unless the TARGET_EXPR itself is being
1287 assigned to something else. In the latter case we do not need the
1288 temporary. E.g. in <modify_expr <var_decl "a"> <target_expr>>, the
1289 MODIFY_EXPR is free. */
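/* Illustrative example, not part of GCC: for "a = b + c" where "a" is a
   GIMPLE register, only the PLUS_EXPR contributes, so the statement
   counts as 1.  If instead "a" is a global or otherwise lives in memory,
   the MODIFY_EXPR additionally adds estimate_move_cost for the type
   of "a".  */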
1290 case INIT_EXPR:
1291 case MODIFY_EXPR:
1292 /* Is the right-hand side a TARGET_EXPR? */
1293 if (TREE_CODE (TREE_OPERAND (x, 1)) == TARGET_EXPR)
1294 break;
1295 /* ... fall through ... */
1297 case TARGET_EXPR:
1298 x = TREE_OPERAND (x, 0);
1299 /* Is this an assignment to a register? */
1300 if (is_gimple_reg (x))
1301 break;
1302 /* Otherwise it's a store, so fall through to compute the move cost. */
1304 case CONSTRUCTOR:
1305 *count += estimate_move_cost (TREE_TYPE (x));
1306 break;
1308 /* Assign cost of 1 to usual operations.
1309 ??? We may consider mapping RTL costs to this. */
1310 case COND_EXPR:
1312 case PLUS_EXPR:
1313 case MINUS_EXPR:
1314 case MULT_EXPR:
1316 case FIX_TRUNC_EXPR:
1317 case FIX_CEIL_EXPR:
1318 case FIX_FLOOR_EXPR:
1319 case FIX_ROUND_EXPR:
1321 case NEGATE_EXPR:
1322 case FLOAT_EXPR:
1323 case MIN_EXPR:
1324 case MAX_EXPR:
1325 case ABS_EXPR:
1327 case LSHIFT_EXPR:
1328 case RSHIFT_EXPR:
1329 case LROTATE_EXPR:
1330 case RROTATE_EXPR:
1332 case BIT_IOR_EXPR:
1333 case BIT_XOR_EXPR:
1334 case BIT_AND_EXPR:
1335 case BIT_NOT_EXPR:
1337 case TRUTH_ANDIF_EXPR:
1338 case TRUTH_ORIF_EXPR:
1339 case TRUTH_AND_EXPR:
1340 case TRUTH_OR_EXPR:
1341 case TRUTH_XOR_EXPR:
1342 case TRUTH_NOT_EXPR:
1344 case LT_EXPR:
1345 case LE_EXPR:
1346 case GT_EXPR:
1347 case GE_EXPR:
1348 case EQ_EXPR:
1349 case NE_EXPR:
1350 case ORDERED_EXPR:
1351 case UNORDERED_EXPR:
1353 case UNLT_EXPR:
1354 case UNLE_EXPR:
1355 case UNGT_EXPR:
1356 case UNGE_EXPR:
1357 case UNEQ_EXPR:
1358 case LTGT_EXPR:
1360 case CONVERT_EXPR:
1362 case CONJ_EXPR:
1364 case PREDECREMENT_EXPR:
1365 case PREINCREMENT_EXPR:
1366 case POSTDECREMENT_EXPR:
1367 case POSTINCREMENT_EXPR:
1369 case SWITCH_EXPR:
1371 case ASM_EXPR:
1373 case REALIGN_LOAD_EXPR:
1375 case RESX_EXPR:
1376 *count += 1;
1377 break;
1379 /* A few special cases of expensive operations. This is useful
1380 to avoid inlining functions having too many of these. */
1381 case TRUNC_DIV_EXPR:
1382 case CEIL_DIV_EXPR:
1383 case FLOOR_DIV_EXPR:
1384 case ROUND_DIV_EXPR:
1385 case EXACT_DIV_EXPR:
1386 case TRUNC_MOD_EXPR:
1387 case CEIL_MOD_EXPR:
1388 case FLOOR_MOD_EXPR:
1389 case ROUND_MOD_EXPR:
1390 case RDIV_EXPR:
1391 *count += 10;
1392 break;
1393 case CALL_EXPR:
1395 tree decl = get_callee_fndecl (x);
1396 tree arg;
1398 if (decl && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
1399 switch (DECL_FUNCTION_CODE (decl))
1401 case BUILT_IN_CONSTANT_P:
1402 *walk_subtrees = 0;
1403 return NULL_TREE;
1404 case BUILT_IN_EXPECT:
1405 return NULL_TREE;
1406 default:
1407 break;
1410 /* Our cost must be kept in sync with cgraph_estimate_size_after_inlining
1411 which uses the function declaration to figure out the arguments. */
1412 if (!decl)
1414 for (arg = TREE_OPERAND (x, 1); arg; arg = TREE_CHAIN (arg))
1415 *count += estimate_move_cost (TREE_TYPE (TREE_VALUE (arg)));
1417 else
1419 for (arg = DECL_ARGUMENTS (decl); arg; arg = TREE_CHAIN (arg))
1420 *count += estimate_move_cost (TREE_TYPE (arg));
1423 *count += PARAM_VALUE (PARAM_INLINE_CALL_COST);
1424 break;
1426 default:
1427 gcc_unreachable ();
1429 return NULL;
1432 /* Estimate number of instructions that will be created by expanding EXPR. */
1434 int
1435 estimate_num_insns (tree expr)
1437 int num = 0;
1438 walk_tree_without_duplicates (&expr, estimate_num_insns_1, &num);
1439 return num;
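/* Illustrative sketch, not part of GCC: callers such as the cgraph
   inlining heuristics feed whole function bodies through this routine
   and compare the result against size limits.  The function name and
   LIMIT are hypothetical.  */
#if 0
static bool
example_small_enough_p (tree fndecl, int limit)
{
  return estimate_num_insns (DECL_SAVED_TREE (fndecl)) <= limit;
}
#endif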
1442 /* If *TP is a CALL_EXPR, replace it with its inline expansion. */
1444 static tree
1445 expand_call_inline (tree *tp, int *walk_subtrees, void *data)
1447 inline_data *id;
1448 tree t;
1449 tree expr;
1450 tree stmt;
1451 tree use_retvar;
1452 tree fn;
1453 tree arg_inits;
1454 tree *inlined_body;
1455 splay_tree st;
1456 tree args;
1457 tree return_slot_addr;
1458 tree modify_dest;
1459 location_t saved_location;
1460 struct cgraph_edge *edge;
1461 const char *reason;
1463 /* See what we've got. */
1464 id = (inline_data *) data;
1465 t = *tp;
1467 /* Set input_location here so we get the right instantiation context
1468 if we call instantiate_decl from inlinable_function_p. */
1469 saved_location = input_location;
1470 if (EXPR_HAS_LOCATION (t))
1471 input_location = EXPR_LOCATION (t);
1473 /* Recurse, but letting recursive invocations know that we are
1474 inside the body of a TARGET_EXPR. */
1475 if (TREE_CODE (*tp) == TARGET_EXPR)
1477 #if 0
1478 int i, len = TREE_CODE_LENGTH (TARGET_EXPR);
1480 /* We're walking our own subtrees. */
1481 *walk_subtrees = 0;
1483 /* Actually walk over them. This loop is the body of
1484 walk_trees, omitting the case where the TARGET_EXPR
1485 itself is handled. */
1486 for (i = 0; i < len; ++i)
1488 if (i == 2)
1489 ++id->in_target_cleanup_p;
1490 walk_tree (&TREE_OPERAND (*tp, i), expand_call_inline, data,
1491 id->tree_pruner);
1492 if (i == 2)
1493 --id->in_target_cleanup_p;
1496 goto egress;
1497 #endif
1500 if (TYPE_P (t))
1501 /* Because types were not copied in copy_body, CALL_EXPRs beneath
1502 them should not be expanded. This can happen if the type is a
1503 dynamic array type, for example. */
1504 *walk_subtrees = 0;
1506 /* From here on, we're only interested in CALL_EXPRs. */
1507 if (TREE_CODE (t) != CALL_EXPR)
1508 goto egress;
1510 /* First, see if we can figure out what function is being called.
1511 If we cannot, then there is no hope of inlining the function. */
1512 fn = get_callee_fndecl (t);
1513 if (!fn)
1514 goto egress;
1516 /* Turn forward declarations into real ones. */
1517 fn = cgraph_node (fn)->decl;
1519 /* If fn is a declaration of a function in a nested scope that was
1520 globally declared inline, we don't set its DECL_INITIAL.
1521 However, we can't blindly follow DECL_ABSTRACT_ORIGIN because the
1522 C++ front-end uses it for cdtors to refer to their internal
1523 declarations, which are not real functions. Fortunately those
1524 don't have trees to be saved, so we can tell by checking their
1525 DECL_SAVED_TREE. */
1526 if (! DECL_INITIAL (fn)
1527 && DECL_ABSTRACT_ORIGIN (fn)
1528 && DECL_SAVED_TREE (DECL_ABSTRACT_ORIGIN (fn)))
1529 fn = DECL_ABSTRACT_ORIGIN (fn);
1531 /* Objective-C and Fortran still call tree_rest_of_compilation directly.
1532 Kill this check once this is fixed. */
1533 if (!id->current_node->analyzed)
1534 goto egress;
1536 edge = cgraph_edge (id->current_node, t);
1538 /* Constant propagation on arguments done during previous inlining
1539 may create a new direct call. Produce an edge for it. */
1540 if (!edge)
1542 struct cgraph_node *dest = cgraph_node (fn);
1544 /* We have a missing edge in the callgraph. This can happen in one case,
1545 where previous inlining turned an indirect call into a direct call by
1546 constant propagating arguments. In all other cases we hit a bug
1547 (incorrect node sharing is the most common reason for missing edges). */
1548 gcc_assert (dest->needed || !flag_unit_at_a_time);
1549 cgraph_create_edge (id->node, dest, t)->inline_failed
1550 = N_("originally indirect function call not considered for inlining");
1551 goto egress;
1554 /* Don't try to inline functions that are not well-suited to
1555 inlining. */
1556 if (!cgraph_inline_p (edge, &reason))
1558 if (lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn)))
1560 sorry ("%Jinlining failed in call to %qF: %s", fn, fn, reason);
1561 sorry ("called from here");
1563 else if (warn_inline && DECL_DECLARED_INLINE_P (fn)
1564 && !DECL_IN_SYSTEM_HEADER (fn)
1565 && strlen (reason)
1566 && !lookup_attribute ("noinline", DECL_ATTRIBUTES (fn)))
1568 warning (0, "%Jinlining failed in call to %qF: %s", fn, fn, reason);
1569 warning (0, "called from here");
1571 goto egress;
1574 #ifdef ENABLE_CHECKING
1575 if (edge->callee->decl != id->node->decl)
1576 verify_cgraph_node (edge->callee);
1577 #endif
1579 if (! lang_hooks.tree_inlining.start_inlining (fn))
1580 goto egress;
1582 /* Build a block containing code to initialize the arguments, the
1583 actual inline expansion of the body, and a label for the return
1584 statements within the function to jump to. The type of the
1585 statement expression is the return type of the function call. */
1586 stmt = NULL;
1587 expr = build (BIND_EXPR, void_type_node, NULL_TREE,
1588 stmt, make_node (BLOCK));
1589 BLOCK_ABSTRACT_ORIGIN (BIND_EXPR_BLOCK (expr)) = fn;
1591 /* Local declarations will be replaced by their equivalents in this
1592 map. */
1593 st = id->decl_map;
1594 id->decl_map = splay_tree_new (splay_tree_compare_pointers,
1595 NULL, NULL);
1597 /* Initialize the parameters. */
1598 args = TREE_OPERAND (t, 1);
1599 return_slot_addr = NULL_TREE;
1600 if (CALL_EXPR_HAS_RETURN_SLOT_ADDR (t))
1602 return_slot_addr = TREE_VALUE (args);
1603 args = TREE_CHAIN (args);
1604 TREE_TYPE (expr) = void_type_node;
1607 arg_inits = initialize_inlined_parameters (id, args, TREE_OPERAND (t, 2),
1608 fn, expr);
1609 if (arg_inits)
1611 /* Expand any inlined calls in the initializers. Do this before we
1612 push FN on the stack of functions we are inlining; we want to
1613 inline calls to FN that appear in the initializers for the
1614 parameters.
1616 Note we need to save and restore the saved tree statement iterator
1617 to avoid having it clobbered by expand_calls_inline. */
1618 tree_stmt_iterator save_tsi;
1620 save_tsi = id->tsi;
1621 expand_calls_inline (&arg_inits, id);
1622 id->tsi = save_tsi;
1624 /* And add them to the tree. */
1625 append_to_statement_list (arg_inits, &BIND_EXPR_BODY (expr));
1628 /* Record the function we are about to inline so that we can avoid
1629 recursing into it. */
1630 VARRAY_PUSH_TREE (id->fns, fn);
1632 /* Return statements in the function body will be replaced by jumps
1633 to the RET_LABEL. */
1634 id->ret_label = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);
1635 DECL_ARTIFICIAL (id->ret_label) = 1;
1636 DECL_IGNORED_P (id->ret_label) = 1;
1637 DECL_CONTEXT (id->ret_label) = VARRAY_TREE (id->fns, 0);
1638 insert_decl_map (id, id->ret_label, id->ret_label);
1640 gcc_assert (DECL_INITIAL (fn));
1641 gcc_assert (TREE_CODE (DECL_INITIAL (fn)) == BLOCK);
1643 /* Find the lhs to which the result of this call is assigned. */
1644 modify_dest = tsi_stmt (id->tsi);
1645 if (TREE_CODE (modify_dest) == MODIFY_EXPR)
1647 modify_dest = TREE_OPERAND (modify_dest, 0);
1649 /* The function which we are inlining might not return a value,
1650 in which case we should issue a warning that the function
1651 does not return a value. In that case the optimizers will
1652 see that the variable to which the value is assigned was not
1653 initialized. We do not want to issue a warning about that
1654 uninitialized variable. */
1655 if (DECL_P (modify_dest))
1656 TREE_NO_WARNING (modify_dest) = 1;
1658 else
1659 modify_dest = NULL;
1661 /* Declare the return variable for the function. */
1662 declare_return_variable (id, return_slot_addr,
1663 modify_dest, &use_retvar);
1665 /* After we've initialized the parameters, we insert the body of the
1666 function itself. */
1668 struct cgraph_node *old_node = id->current_node;
1669 tree copy;
1671 id->current_node = edge->callee;
1672 copy = copy_body (id);
1674 /* If the function uses a return slot, then it may legitimately
1675 fall through while still returning a value, so we have to skip
1676 the warning here. */
1677 if (warn_return_type
1678 && !TREE_NO_WARNING (fn)
1679 && !VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fn)))
1680 && return_slot_addr == NULL_TREE
1681 && block_may_fallthru (copy))
1683 warning (0, "control may reach end of non-void function %qD being inlined",
1684 fn);
1685 TREE_NO_WARNING (fn) = 1;
1688 append_to_statement_list (copy, &BIND_EXPR_BODY (expr));
1689 id->current_node = old_node;
1691 inlined_body = &BIND_EXPR_BODY (expr);
1693 /* After the body of the function comes the RET_LABEL. This must come
1694 before we evaluate the returned value below, because that evaluation
1695 may cause RTL to be generated. */
1696 if (TREE_USED (id->ret_label))
1698 tree label = build1 (LABEL_EXPR, void_type_node, id->ret_label);
1699 append_to_statement_list (label, &BIND_EXPR_BODY (expr));
1702 /* Clean up. */
1703 splay_tree_delete (id->decl_map);
1704 id->decl_map = st;
1706 /* Although, from the semantic viewpoint, the new expression has
1707 side-effects only if the old one did, it is not possible, from
1708 the technical viewpoint, to evaluate the body of a function
1709 multiple times without serious havoc. */
1710 TREE_SIDE_EFFECTS (expr) = 1;
1712 tsi_link_before (&id->tsi, expr, TSI_SAME_STMT);
1714 /* If the inlined function returns a result that we care about,
1715 then we're going to need to splice in a MODIFY_EXPR. Otherwise
1716 the call was a standalone statement and we can just replace it
1717 with the BIND_EXPR inline representation of the called function. */
1718 if (!use_retvar || !modify_dest)
1719 *tsi_stmt_ptr (id->tsi) = build_empty_stmt ();
1720 else
1721 *tp = use_retvar;
1723 /* When we gimplify a function call, we may clear TREE_SIDE_EFFECTS on
1724 the call if it is to a "const" function. Thus the copy of
1725 TREE_SIDE_EFFECTS from the CALL_EXPR to the BIND_EXPR above will
1726 result in TREE_SIDE_EFFECTS not being set for the inlined copy of a
1727 "const" function.
1729 Unfortunately, that is wrong as inlining the function can create/expose
1730 interesting side effects (such as setting of a return value).
1732 The easiest solution is to simply recalculate TREE_SIDE_EFFECTS for
1733 the toplevel expression. */
1734 recalculate_side_effects (expr);
1736 /* Output the inlining info for this abstract function, since it has been
1737 inlined. If we don't do this now, we can lose the information about the
1738 variables in the function when the blocks get blown away as soon as we
1739 remove the cgraph node. */
1740 (*debug_hooks->outlining_inline_function) (edge->callee->decl);
1742 /* Update callgraph if needed. */
1743 cgraph_remove_node (edge->callee);
1745 /* Recurse into the body of the just inlined function. */
1746 expand_calls_inline (inlined_body, id);
1747 VARRAY_POP (id->fns);
1749 /* Don't walk into subtrees. We've already handled them above. */
1750 *walk_subtrees = 0;
1752 lang_hooks.tree_inlining.end_inlining (fn);
1754 /* Keep iterating. */
1755 egress:
1756 input_location = saved_location;
1757 return NULL_TREE;
1760 static void
1761 expand_calls_inline (tree *stmt_p, inline_data *id)
1763 tree stmt = *stmt_p;
1764 enum tree_code code = TREE_CODE (stmt);
1765 int dummy;
1767 switch (code)
1769 case STATEMENT_LIST:
1771 tree_stmt_iterator i;
1772 tree new;
1774 for (i = tsi_start (stmt); !tsi_end_p (i); )
1776 id->tsi = i;
1777 expand_calls_inline (tsi_stmt_ptr (i), id);
1779 new = tsi_stmt (i);
1780 if (TREE_CODE (new) == STATEMENT_LIST)
1782 tsi_link_before (&i, new, TSI_SAME_STMT);
1783 tsi_delink (&i);
1785 else
1786 tsi_next (&i);
1789 break;
1791 case COND_EXPR:
1792 expand_calls_inline (&COND_EXPR_THEN (stmt), id);
1793 expand_calls_inline (&COND_EXPR_ELSE (stmt), id);
1794 break;
1796 case CATCH_EXPR:
1797 expand_calls_inline (&CATCH_BODY (stmt), id);
1798 break;
1800 case EH_FILTER_EXPR:
1801 expand_calls_inline (&EH_FILTER_FAILURE (stmt), id);
1802 break;
1804 case TRY_CATCH_EXPR:
1805 case TRY_FINALLY_EXPR:
1806 expand_calls_inline (&TREE_OPERAND (stmt, 0), id);
1807 expand_calls_inline (&TREE_OPERAND (stmt, 1), id);
1808 break;
1810 case BIND_EXPR:
1811 expand_calls_inline (&BIND_EXPR_BODY (stmt), id);
1812 break;
1814 case COMPOUND_EXPR:
1815 /* We're gimple. We should have gotten rid of all these. */
1816 gcc_unreachable ();
1818 case RETURN_EXPR:
1819 stmt_p = &TREE_OPERAND (stmt, 0);
1820 stmt = *stmt_p;
1821 if (!stmt || TREE_CODE (stmt) != MODIFY_EXPR)
1822 break;
1824 /* FALLTHRU */
1826 case MODIFY_EXPR:
1827 stmt_p = &TREE_OPERAND (stmt, 1);
1828 stmt = *stmt_p;
1829 if (TREE_CODE (stmt) == WITH_SIZE_EXPR)
1831 stmt_p = &TREE_OPERAND (stmt, 0);
1832 stmt = *stmt_p;
1834 if (TREE_CODE (stmt) != CALL_EXPR)
1835 break;
1837 /* FALLTHRU */
1839 case CALL_EXPR:
1840 expand_call_inline (stmt_p, &dummy, id);
1841 break;
1843 default:
1844 break;
1848 /* Expand calls to inline functions in the body of FN. */
1850 void
1851 optimize_inline_calls (tree fn)
1853 inline_data id;
1854 tree prev_fn;
1856 /* There is no point in performing inlining if errors have already
1857 occurred -- and we might crash if we try to inline invalid
1858 code. */
1859 if (errorcount || sorrycount)
1860 return;
1862 /* Clear out ID. */
1863 memset (&id, 0, sizeof (id));
1865 id.current_node = id.node = cgraph_node (fn);
1866 /* Don't allow recursion into FN. */
1867 VARRAY_TREE_INIT (id.fns, 32, "fns");
1868 VARRAY_PUSH_TREE (id.fns, fn);
1869 /* Or any functions that aren't finished yet. */
1870 prev_fn = NULL_TREE;
1871 if (current_function_decl)
1873 VARRAY_PUSH_TREE (id.fns, current_function_decl);
1874 prev_fn = current_function_decl;
1877 prev_fn = lang_hooks.tree_inlining.add_pending_fn_decls (&id.fns, prev_fn);
1879 /* Keep track of the low-water mark, i.e., the point where the first
1880 real inlining is represented in ID.FNS. */
1881 id.first_inlined_fn = VARRAY_ACTIVE_SIZE (id.fns);
1883 /* Replace all calls to inline functions with the bodies of those
1884 functions. */
1885 id.tree_pruner = htab_create (37, htab_hash_pointer, htab_eq_pointer, NULL);
1886 expand_calls_inline (&DECL_SAVED_TREE (fn), &id);
1888 /* Clean up. */
1889 htab_delete (id.tree_pruner);
1891 #ifdef ENABLE_CHECKING
1893 struct cgraph_edge *e;
1895 verify_cgraph_node (id.node);
1897 /* Double check that we inlined everything we are supposed to inline. */
1898 for (e = id.node->callees; e; e = e->next_callee)
1899 gcc_assert (e->inline_failed);
1901 #endif
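/* Usage sketch (hypothetical caller, for illustration only): a per-function
   pass would invoke this once the body is in GIMPLE form, e.g.

     if (flag_inline_trees)
       optimize_inline_calls (current_function_decl);

   In the compiler proper this is driven from the pass/cgraph machinery
   rather than called ad hoc.  */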
1904 /* FN is a function that has a complete body, and CLONE is a function whose
1905 body is to be set to a copy of FN, mapping argument declarations according
1906 to the ARG_MAP splay_tree. */
1908 void
1909 clone_body (tree clone, tree fn, void *arg_map)
1911 inline_data id;
1913 /* Clone the body, as if we were making an inline call. But, remap the
1914      parameters in the callee to the parameters of the caller.  If there's an
1915 in-charge parameter, map it to an appropriate constant. */
1916 memset (&id, 0, sizeof (id));
1917 VARRAY_TREE_INIT (id.fns, 2, "fns");
1918 VARRAY_PUSH_TREE (id.fns, clone);
1919 VARRAY_PUSH_TREE (id.fns, fn);
1920 id.decl_map = (splay_tree)arg_map;
1922 /* Cloning is treated slightly differently from inlining. Set
1923 CLONING_P so that it's clear which operation we're performing. */
1924 id.cloning_p = true;
1926 /* Actually copy the body. */
1927 append_to_statement_list_force (copy_body (&id), &DECL_SAVED_TREE (clone));
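/* Caller sketch (hypothetical, loosely modelled on how a front end builds
   its clones): map each parameter of FN to the corresponding parameter of
   CLONE, then copy the body.

     splay_tree arg_map
       = splay_tree_new (splay_tree_compare_pointers, NULL, NULL);
     tree fn_parm, clone_parm;

     for (fn_parm = DECL_ARGUMENTS (fn), clone_parm = DECL_ARGUMENTS (clone);
	  fn_parm;
	  fn_parm = TREE_CHAIN (fn_parm), clone_parm = TREE_CHAIN (clone_parm))
       splay_tree_insert (arg_map, (splay_tree_key) fn_parm,
			  (splay_tree_value) clone_parm);

     clone_body (clone, fn, arg_map);
     splay_tree_delete (arg_map);  */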
1930 /* Make and return duplicate of body in FN. Put copies of DECL_ARGUMENTS
1931 in *arg_copy and of the static chain, if any, in *sc_copy. */
1933 tree
1934 save_body (tree fn, tree *arg_copy, tree *sc_copy)
1936 inline_data id;
1937 tree body, *parg;
1939 memset (&id, 0, sizeof (id));
1940 VARRAY_TREE_INIT (id.fns, 1, "fns");
1941 VARRAY_PUSH_TREE (id.fns, fn);
1942 id.node = cgraph_node (fn);
1943 id.saving_p = true;
1944 id.decl_map = splay_tree_new (splay_tree_compare_pointers, NULL, NULL);
1945 *arg_copy = DECL_ARGUMENTS (fn);
1947 for (parg = arg_copy; *parg; parg = &TREE_CHAIN (*parg))
1949 tree new = copy_node (*parg);
1951 lang_hooks.dup_lang_specific_decl (new);
1952 DECL_ABSTRACT_ORIGIN (new) = DECL_ORIGIN (*parg);
1953 insert_decl_map (&id, *parg, new);
1954 TREE_CHAIN (new) = TREE_CHAIN (*parg);
1955 *parg = new;
1958 *sc_copy = DECL_STRUCT_FUNCTION (fn)->static_chain_decl;
1959 if (*sc_copy)
1961 tree new = copy_node (*sc_copy);
1963 lang_hooks.dup_lang_specific_decl (new);
1964 DECL_ABSTRACT_ORIGIN (new) = DECL_ORIGIN (*sc_copy);
1965 insert_decl_map (&id, *sc_copy, new);
1966 TREE_CHAIN (new) = TREE_CHAIN (*sc_copy);
1967 *sc_copy = new;
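  /* The result decl is deliberately mapped to itself, so the saved body
     keeps referring to FN's own RESULT_DECL rather than to a fresh copy.  */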
1970 insert_decl_map (&id, DECL_RESULT (fn), DECL_RESULT (fn));
1972 /* Actually copy the body. */
1973 body = copy_body (&id);
1975 /* Clean up. */
1976 splay_tree_delete (id.decl_map);
1977 return body;
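/* Usage sketch (hypothetical, simplified): the callgraph code uses this to
   stash a pristine copy of an inline candidate's body before it is
   optimized, roughly

     tree saved_args = NULL_TREE, saved_chain = NULL_TREE;
     tree saved_body = save_body (fndecl, &saved_args, &saved_chain);

   so that later inlining starts from the unoptimized body.  */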
1980 /* Passed to walk_tree. Copies the node pointed to, if appropriate. */
1982 tree
1983 copy_tree_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
1985 enum tree_code code = TREE_CODE (*tp);
1987 /* We make copies of most nodes. */
1988 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code))
1989 || code == TREE_LIST
1990 || code == TREE_VEC
1991 || code == TYPE_DECL)
1993 /* Because the chain gets clobbered when we make a copy, we save it
1994 here. */
1995 tree chain = TREE_CHAIN (*tp);
1996 tree new;
1998 /* Copy the node. */
1999 new = copy_node (*tp);
2001 /* Propagate mudflap marked-ness. */
2002 if (flag_mudflap && mf_marked_p (*tp))
2003 mf_mark (new);
2005 *tp = new;
2007 /* Now, restore the chain, if appropriate. That will cause
2008 walk_tree to walk into the chain as well. */
2009 if (code == PARM_DECL || code == TREE_LIST)
2010 TREE_CHAIN (*tp) = chain;
2012 /* For now, we don't update BLOCKs when we make copies. So, we
2013 have to nullify all BIND_EXPRs. */
2014 if (TREE_CODE (*tp) == BIND_EXPR)
2015 BIND_EXPR_BLOCK (*tp) = NULL_TREE;
2018 else if (TREE_CODE_CLASS (code) == tcc_type)
2019 *walk_subtrees = 0;
2020 else if (TREE_CODE_CLASS (code) == tcc_declaration)
2021 *walk_subtrees = 0;
2022 else if (TREE_CODE_CLASS (code) == tcc_constant)
2023 *walk_subtrees = 0;
2024 else
2025 gcc_assert (code != STATEMENT_LIST);
2026 return NULL_TREE;
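/* Typical use (sketch): copy_tree_r is driven by walk_tree to deep-copy an
   expression in place, e.g.

     tree copy = expr;
     walk_tree (&copy, copy_tree_r, NULL, NULL);

   after which COPY shares no expression nodes with EXPR; declarations
   (other than TYPE_DECLs), types and constants remain shared, since the
   code above neither copies them nor walks into them.  */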
2029 /* The SAVE_EXPR pointed to by TP is being copied. If ST contains
2030 information indicating to what new SAVE_EXPR this one should be mapped,
2031 use that one. Otherwise, create a new node and enter it in ST. */
2033 static void
2034 remap_save_expr (tree *tp, void *st_, int *walk_subtrees)
2036 splay_tree st = (splay_tree) st_;
2037 splay_tree_node n;
2038 tree t;
2040 /* See if we already encountered this SAVE_EXPR. */
2041 n = splay_tree_lookup (st, (splay_tree_key) *tp);
2043 /* If we didn't already remap this SAVE_EXPR, do so now. */
2044 if (!n)
2046 t = copy_node (*tp);
2048 /* Remember this SAVE_EXPR. */
2049 splay_tree_insert (st, (splay_tree_key) *tp, (splay_tree_value) t);
2050 /* Make sure we don't remap an already-remapped SAVE_EXPR. */
2051 splay_tree_insert (st, (splay_tree_key) t, (splay_tree_value) t);
2053 else
2055 /* We've already walked into this SAVE_EXPR; don't do it again. */
2056 *walk_subtrees = 0;
2057 t = (tree) n->value;
2060 /* Replace this SAVE_EXPR with the copy. */
2061 *tp = t;
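/* Note (illustrative): every occurrence of a given SAVE_EXPR must be
   replaced by the same new node -- e.g. both uses in a tree built from
   "SAVE_EXPR <n> * SAVE_EXPR <n>" -- which is why the first visit records
   the copy in ST and later visits simply reuse it.  */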
2064 /* Called via walk_tree.  If *TP points to a LABEL_EXPR for a local label,
2065 copies the declaration and enters it in the splay_tree in DATA (which is
2066 really an `inline_data *'). */
2068 static tree
2069 mark_local_for_remap_r (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED,
2070 void *data)
2072 inline_data *id = (inline_data *) data;
2074 /* Don't walk into types. */
2075 if (TYPE_P (*tp))
2076 *walk_subtrees = 0;
2078 else if (TREE_CODE (*tp) == LABEL_EXPR)
2080 tree decl = TREE_OPERAND (*tp, 0);
2082 /* Copy the decl and remember the copy. */
2083 insert_decl_map (id, decl,
2084 copy_decl_for_inlining (decl, DECL_CONTEXT (decl),
2085 DECL_CONTEXT (decl)));
2088 return NULL_TREE;
2091 /* Perform any modifications to EXPR required when it is unsaved. Does
2092 not recurse into EXPR's subtrees. */
2094 static void
2095 unsave_expr_1 (tree expr)
2097 switch (TREE_CODE (expr))
2099 case TARGET_EXPR:
2100 /* Don't mess with a TARGET_EXPR that hasn't been expanded.
2101 It's OK for this to happen if it was part of a subtree that
2102 isn't immediately expanded, such as operand 2 of another
2103 TARGET_EXPR. */
2104 if (TREE_OPERAND (expr, 1))
2105 break;
2107 TREE_OPERAND (expr, 1) = TREE_OPERAND (expr, 3);
2108 TREE_OPERAND (expr, 3) = NULL_TREE;
2109 break;
2111 default:
2112 break;
2116 /* Called via walk_tree when an expression is unsaved.  Using the
2117    DECL_MAP splay tree from DATA (which is really an `inline_data *'),
2118    remaps all local declarations to appropriate replacements.  */
2120 static tree
2121 unsave_r (tree *tp, int *walk_subtrees, void *data)
2123 inline_data *id = (inline_data *) data;
2124 splay_tree st = id->decl_map;
2125 splay_tree_node n;
2127 /* Only a local declaration (variable or label). */
2128 if ((TREE_CODE (*tp) == VAR_DECL && !TREE_STATIC (*tp))
2129 || TREE_CODE (*tp) == LABEL_DECL)
2131 /* Lookup the declaration. */
2132 n = splay_tree_lookup (st, (splay_tree_key) *tp);
2134 /* If it's there, remap it. */
2135 if (n)
2136 *tp = (tree) n->value;
2139 else if (TREE_CODE (*tp) == STATEMENT_LIST)
2140 copy_statement_list (tp);
2141 else if (TREE_CODE (*tp) == BIND_EXPR)
2142 copy_bind_expr (tp, walk_subtrees, id);
2143 else if (TREE_CODE (*tp) == SAVE_EXPR)
2144 remap_save_expr (tp, st, walk_subtrees);
2145 else
2147 copy_tree_r (tp, walk_subtrees, NULL);
2149 /* Do whatever unsaving is required. */
2150 unsave_expr_1 (*tp);
2153 /* Keep iterating. */
2154 return NULL_TREE;
2157 /* Copies everything in EXPR and replaces variables, labels
2158 and SAVE_EXPRs local to EXPR. */
2160 tree
2161 unsave_expr_now (tree expr)
2163 inline_data id;
2165 /* There's nothing to do for NULL_TREE. */
2166 if (expr == 0)
2167 return expr;
2169 /* Set up ID. */
2170 memset (&id, 0, sizeof (id));
2171 VARRAY_TREE_INIT (id.fns, 1, "fns");
2172 VARRAY_PUSH_TREE (id.fns, current_function_decl);
2173 id.decl_map = splay_tree_new (splay_tree_compare_pointers, NULL, NULL);
2175 /* Walk the tree once to find local labels. */
2176 walk_tree_without_duplicates (&expr, mark_local_for_remap_r, &id);
2178 /* Walk the tree again, copying, remapping, and unsaving. */
2179 walk_tree (&expr, unsave_r, &id, NULL);
2181 /* Clean up. */
2182 splay_tree_delete (id.decl_map);
2184 return expr;
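/* Usage sketch (hypothetical): a caller that needs to re-emit a saved
   expression takes an independent copy first, e.g.

     tree size2 = unsave_expr_now (saved_size);

   SIZE2 can then be modified or gimplified without disturbing SAVED_SIZE.  */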
2187 /* Allow someone to determine if SEARCH is a child of TOP from gdb. */
2189 static tree
2190 debug_find_tree_1 (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED, void *data)
2192 if (*tp == data)
2193 return (tree) data;
2194 else
2195 return NULL;
2198 bool
2199 debug_find_tree (tree top, tree search)
2201 return walk_tree_without_duplicates (&top, debug_find_tree_1, search) != 0;
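/* For example, from a debugging session one can do

     (gdb) call debug_find_tree (DECL_SAVED_TREE (fndecl), suspect)

   to check whether SUSPECT still occurs somewhere inside FNDECL's body
   (FNDECL and SUSPECT being whatever tree values are at hand in gdb).  */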
2204 /* Declare the variables created by the inliner. Add all the variables in
2205 VARS to BIND_EXPR. */
2207 static void
2208 declare_inline_vars (tree bind_expr, tree vars)
2210 tree t;
2211 for (t = vars; t; t = TREE_CHAIN (t))
2212 DECL_SEEN_IN_BIND_EXPR_P (t) = 1;
2214 add_var_to_bind_expr (bind_expr, vars);
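/* Illustrative call (hypothetical variable names): once the inliner has
   wrapped the callee's body in a new BIND_EXPR and created temporaries for
   it, they are announced with

     declare_inline_vars (bind_expr, new_vars);

   which marks each VAR_DECL as seen in a BIND_EXPR and chains the list onto
   the BIND_EXPR's variables via add_var_to_bind_expr.  */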